diff --git a/core/build.gradle b/core/build.gradle index 4025b5539c..dc2d9546cc 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -1,3 +1,5 @@ +import org.javacc.plugin.gradle.javacc.CompileJavaccTask + buildscript { dependencies { // Protobuf @@ -5,6 +7,10 @@ buildscript { } } +plugins { + id "org.javacc.javacc" version "$javacc_plugin_version" +} + group "org.polypheny" @@ -63,6 +69,8 @@ dependencies { api group: "io.activej", name: "activej-serializer", version: activej_serializer_version + + javacc group: "net.java.dev.javacc", name: "javacc", version: javacc_version // BSD 2-clause // GIS api group: "org.locationtech.jts", name: "jts-core", version: jts_version // Eclipse Public License 2.0 && Eclipse Distribution License 1.0 (BSD-3 Clause) api group: "org.locationtech.jts.io", name: "jts-io-common", version: jts_version // Eclipse Public License 2.0 && Eclipse Distribution License 1.0 (BSD-3 Clause) @@ -82,11 +90,18 @@ protobuf { } } +tasks.register('generateParser', CompileJavaccTask) { + getConventionMapping().map("classpath", { configurations.javacc }) + arguments = [static: "false"] + inputDirectory = file("src/main/codegen") + outputDirectory = file(project.buildDir.absolutePath + "/generated-sources/org/polypheny/db/algebra/polyalg/parser") +} + sourceSets { main { java { - srcDirs = ["src/main/java", "build/generated/source/proto/main/java"] // "build/generated/sources/annotationProcessor" + srcDirs = ["src/main/java", "build/generated/source/proto/main/java", files("${buildDir}/generated-sources").builtBy(generateParser)] // "build/generated/sources/annotationProcessor" } resources { srcDirs = ["src/main/resources"] @@ -116,6 +131,7 @@ tasks.register('generateVersionProperties', Copy) { compileJava { dependsOn("generateVersionProperties") + dependsOn("generateParser") dependsOn(":config:processResources") dependsOn(":information:processResources") dependsOn("generateProto") diff --git a/core/src/main/codegen/PolyAlgParser.jj b/core/src/main/codegen/PolyAlgParser.jj new file mode 100644 index 0000000000..c3e6490b1c --- /dev/null +++ b/core/src/main/codegen/PolyAlgParser.jj @@ -0,0 +1,697 @@ + +options { + STATIC = false; + IGNORE_CASE = true; + UNICODE_INPUT = true; +} + +PARSER_BEGIN(PolyAlgParserImpl) + +package org.polypheny.db.algebra.polyalg.parser; + +import java.io.StringReader; +import java.io.Reader; +import java.util.HashMap; +import java.util.Map; +import org.polypheny.db.languages.ParserFactory; +import org.polypheny.db.languages.ParserPos; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgLiteral; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgLiteral.LiteralType; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgAliasedArgument; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgNamedArgument; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgNode; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgDataType; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgOperator; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgNodeList; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgExpression; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgExpressionExtension; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgExpressionExtension.ExtensionType; +import org.polypheny.db.algebra.core.CorrelationId; +import org.polypheny.db.rex.RexLocalRef; +import org.polypheny.db.type.entity.PolyString; +import 
org.polypheny.db.type.entity.numerical.PolyInteger; +import org.polypheny.db.type.entity.numerical.PolyDouble; +import org.polypheny.db.type.entity.PolyValue; +import org.polypheny.db.type.entity.PolyList; +import org.polypheny.db.type.entity.document.PolyDocument; +import org.polypheny.db.type.entity.graph.PolyDictionary; +import org.polypheny.db.type.entity.graph.PolyNode; +import org.polypheny.db.type.entity.graph.PolyEdge; +import org.polypheny.db.type.entity.graph.PolyEdge.EdgeDirection; +import org.polypheny.db.type.entity.graph.PolyPath; +import org.polypheny.db.algebra.polyalg.PolyAlgUtils; +import org.polypheny.db.util.Quadruple; +import java.util.Arrays; +import java.util.List; +import java.util.ArrayList; + +/** + * Parses Poly Algebra. + */ +public class PolyAlgParserImpl extends PolyAlgAbstractParserImpl { + + /** + * ParserFactory implementation for creating parser. + */ + public static final ParserFactory FACTORY = new ParserFactory() { + public PolyAlgAbstractParserImpl getParser(Reader reader) { + final PolyAlgParserImpl parser = new PolyAlgParserImpl(reader); + return parser; + } + }; + + /** Main entry point. */ + public static void parse(String str) { + PolyAlgParserImpl parser = new PolyAlgParserImpl(new StringReader(str)); + + try { + parser.Input(); + System.out.println("Successfully parsed input!"); + } catch (Exception e) { + System.out.println("Could not parse input correctly:"); + System.out.println(e.getMessage()); + e.printStackTrace(); + } + } + + public PolyAlgParseException normalizeException(Throwable ex) { + try { + return convertException(ex); + } catch (ParseException e) { + throw new AssertionError(e); + } + } + + PolyAlgParseException convertException(Throwable ex) throws ParseException { + if (ex instanceof PolyAlgParseException) { + return (PolyAlgParseException) ex; + } + + ParserPos pos = null; + int[][] expectedTokenSequences = null; + String[] tokenImage = null; + if (ex instanceof ParseException) { + ParseException pex = (ParseException) ex; + expectedTokenSequences = pex.expectedTokenSequences; + tokenImage = pex.tokenImage; + if (pex.currentToken != null) { + final Token token = pex.currentToken.next; + pos = new ParserPos( + token.beginLine, + token.beginColumn, + token.endLine, + token.endColumn); + } + } else if (ex instanceof TokenMgrError) { + TokenMgrError tme = (TokenMgrError) ex; + expectedTokenSequences = null; + tokenImage = null; + // Example: + // Lexical error at line 3, column 24. Encountered "#" after "a". + final java.util.regex.Pattern pattern = java.util.regex.Pattern.compile( + "(?s)Lexical error at line ([0-9]+), column ([0-9]+).*"); + java.util.regex.Matcher matcher = pattern.matcher(ex.getMessage()); + if (matcher.matches()) { + int line = Integer.parseInt(matcher.group(1)); + int column = Integer.parseInt(matcher.group(2)); + pos = new ParserPos(line, column, line, column); + } + } + return new PolyAlgParseException(ex.getMessage(), pos, expectedTokenSequences, tokenImage, ex); + } + + + public PolyAlgNode parsePolyAlgEof() throws Exception { + return Input(); + } + +} + +PARSER_END(PolyAlgParserImpl) + +SKIP : +{ + " " +| "\t" +| "\n" +| "\r" +| "\f" +} + +TOKEN : +{ + +| +| +| +| +| +| +| "> | ="> | "> | +| +| +| +| +| +| +| +| +| +| +| // see AggregateCall +| +| +| +| +| +| +| +| | ) ( | | )*> + +| )+ > +| | ) > +| )+(".")?()* + | "."()+ + > +| > +| <#EXPONENT: ["e","E"] (["+","-"])? 
(["0"-"9"])+ > +| <#LETTER: ["_","a"-"z","A"-"Z","ö", "Ö", "ä", "Ä", "ü", "Ü", "à", "À", "ç","Ç", "á", "Á", "è", "È","í","Í", "î", "Î","ó","Ó","ò", "ô", "Ô", "Ò" , "í", "Í", "ë", "Ë", "â", "Â", "ï", "Ï", "é", "É", "ñ", "Ñ", "ß"] > +| <#DIGIT: ["0"-"9"]> +| <#IDENTIFIER_SYMBOL: ["#", "@", "$", "."]> +| +} + +JAVACODE protected ParserPos getPos() +{ + return new ParserPos( + token.beginLine, + token.beginColumn, + token.endLine, + token.endColumn); +} + +/** Root production. */ +PolyAlgNode Input() : +{ + PolyAlgNode n; +} +{ + n = Operator() + {return n;} +} + + +PolyAlgOperator Operator() : +{ + String opName; + List args = null; + List children = null; +} +{ + opName = OpName() [args = Arguments()] [ [children = ChildOperators()] ] + {return new PolyAlgOperator(opName, args, children, getPos());} +} + +List ChildOperators() : +{ + PolyAlgOperator o; + List children = new ArrayList<>(); +} +{ + o = Operator() {children.add(o);} ( o = Operator() {children.add(o);})* + {return children;} +} + +List Arguments() : +{ + PolyAlgNamedArgument n; + List args = new ArrayList<>(); +} +{ + n = NamedArgument() {args.add(n);} ( n = NamedArgument() {args.add(n);})* + {return args;} +} + +PolyAlgNamedArgument NamedArgument() : +{ + String name = null; + PolyAlgAliasedArgument arg; +} +{ + [LOOKAHEAD(2) name = ParamName() ] arg = AliasedArgument() + {return new PolyAlgNamedArgument(name, arg, getPos());} +} + + +PolyAlgAliasedArgument AliasedArgument() : +{ + PolyAlgNode arg; + String alias = null; +} +{ + arg = Argument() [ alias = AliasName() ] + {return new PolyAlgAliasedArgument(arg, alias, getPos()); } +} + +PolyAlgNode Argument() : +{ + PolyAlgNode n; +} +{ + ( + n = Expression() + | + n = ListArgument() + ) + {return n;} +} + +PolyAlgNode ListArgument() : // we return a PolyAlgNodeList instead of a List as an Argument() could also be a PolyAlgExpression +{ + List args = new ArrayList<>(); + PolyAlgNode n; +} +{ + [n = AliasedArgument() {args.add(n);} ( n = AliasedArgument() {args.add(n);})*] + {return new PolyAlgNodeList(args, getPos());} +} + +PolyAlgExpression Expression(): +{ + List literals = new ArrayList<>(); + List childExps = null; + Token t; + PolyAlgLiteral l; + PolyAlgDataType type = null; + PolyAlgExpression exp = null; + List extensions = new ArrayList<>(); + PolyAlgExpressionExtension extension; +} +{ + (l = Literal() {literals.add(l);})+ + [ [childExps = ChildExpressions()] {if (childExps == null) childExps = List.of();}] + [ type = DataType()] // TODO: handle OVER + (extension = ExpressionExtension() {extensions.add(extension);})* + {return new PolyAlgExpression(literals, childExps, type, extensions, getPos());} + | + [exp = Expression()] // optional outer parentheses + {return exp == null ? new PolyAlgExpression(List.of(), null, null, List.of(), getPos()) : exp;} +} + +List ChildExpressions() : // List of expressions separated by +{ + List exps = new ArrayList<>(); + PolyAlgExpression e; + PolyAlgDataType cast; +} +{ + e = Expression() {exps.add(e);} + [ cast = DataType() {e.setCast(cast);}] // cast type specification only makes sense if CAST(...) 
has 1 child expression + ( e = Expression() {exps.add(e);})* + {return exps;} +} + +PolyAlgExpressionExtension ExpressionExtension() : +{ + List literals = new ArrayList<>(); + PolyAlgLiteral l; + ExtensionType type; +} +{ + ( + l = Literal() {literals.add(l); type = ExtensionType.FILTER;} + | + {type = ExtensionType.APPROXIMATE;} + | + (l = Literal() {literals.add(l);})* {type = ExtensionType.OVER;} + ) + {return new PolyAlgExpressionExtension(literals, type); } + +} + + +PolyAlgLiteral Literal() : +{ + String num, docStr; + Token t = null; + LiteralType type = LiteralType.DEFAULT; + PolyValue polyValue; +} +{ + ( + t = { + if (t.image.startsWith(CorrelationId.CORREL_PREFIX)) { + type = LiteralType.CORRELATION_VAR; + } else if (t.image.startsWith(RexLocalRef.PREFIX)) { + type = LiteralType.LOCAL_REF; + } + } + | + LOOKAHEAD(2) num = Number() {return new PolyAlgLiteral(num, LiteralType.NUMBER, getPos());} + | + t = {type = LiteralType.BOOLEAN;} + | + t = {type = LiteralType.NULL;} | t = + | + t = {type = LiteralType.QUOTED;} + | + t = + | + t = | t = | t = | t = | t = | t = | t = | t = + | + t = {type = LiteralType.DIRECTION;} | t = {type = LiteralType.NULL_DIRECTION;} + | + t = {type = LiteralType.DYNAMIC_PARAM;} + | + docStr = DocumentStr() {return new PolyAlgLiteral(docStr, LiteralType.DEFAULT, getPos());} + | + polyValue = SpecialPolyValue() {return new PolyAlgLiteral(polyValue, LiteralType.POLY_VALUE, getPos());} + + ) + {return new PolyAlgLiteral(t.image, type, getPos());} + +} + +/** +* To be able to parse these PolyValues correctly, we need to prefix these PolyValues according to their type. +* Otherwise, we do not have enough context to e.g. decide if we have a PolyList or a ListArgument. +*/ +PolyValue SpecialPolyValue() : +{ + PolyValue value = null; + String str = null; +} +{ + ( + ( value = PolyNode()) + | + ( value = PolyPath()) + | + ( value = PolyEdgeWithEnds()) + | + ( str = ListStr() {value = PolyList.fromJson(str);} ) + | + ( str = DocumentStr() {value = PolyDocument.fromJson(str);}) + ) + + {return value;} +} + +PolyValue PolyNode() : +{ + Token t; + Map properties = new HashMap<>(); + PolyString name = null; + List labels = new ArrayList<>(); + Token key = null; + PolyValue value = null; +} +{ + + [t = {name = new PolyString( t.image );}] + + ( + t = {labels.add(new PolyString( t.image ));} + )* + + [[ + key = ( + t = {value = new PolyString(t.image);} + | + t = {value = new PolyInteger(Integer.parseInt(t.image));} + | + t = {value = new PolyDouble(Double.parseDouble(t.image));} + | + t = {value = new PolyString(t.image.substring(1, t.image.length() - 1));} + ) + {properties.put(new PolyString( key.image ), value);} + ( + key = ( + t = {value = new PolyString(t.image);} + | + t = {value = new PolyInteger(Integer.parseInt(t.image));} + | + t = {value = new PolyDouble(Double.parseDouble(t.image));} + | + t = {value = new PolyString(t.image.substring(1, t.image.length() - 1));} + ) + {properties.put(new PolyString( key.image ), value);} + )* + ]] + + {return new PolyNode(PolyDictionary.ofDict(properties), labels, name);} +} + +PolyValue PolyPath() : +{ + Token t; + PolyValue node = null; + Quadruple, PolyString, EdgeDirection> edge = null; + List nodes = new ArrayList<>(); + List, PolyString, EdgeDirection>> edgeArgs = new ArrayList<>(); +} +{ + node = PolyNode() {nodes.add((PolyNode) node);} + (LOOKAHEAD(2) + edge = PolyEdge() {edgeArgs.add(edge);} + node = PolyNode() {nodes.add((PolyNode) node);} + )+ + + {return PolyAlgUtils.buildPolyPath(nodes, edgeArgs);} +} + +// We 
cannot yet create a PolyEdge, since we do not know source and target node +Quadruple, PolyString, EdgeDirection> PolyEdge() : +{ + Token t; + Map properties = new HashMap<>(); + PolyString name = null; + List labels = new ArrayList<>(); + Token key = null; + PolyValue value = null; + EdgeDirection dir = EdgeDirection.NONE; +} +{ + + [ {dir = EdgeDirection.RIGHT_TO_LEFT;}] + [ + + [t = {name = new PolyString( t.image );}] + + ( + t = {labels.add(new PolyString( t.image ));} + )* + + [[ + key = ( + t = {value = new PolyString(t.image);} + | + t = {value = new PolyInteger(Integer.parseInt(t.image));} + | + t = {value = new PolyDouble(Double.parseDouble(t.image));} + | + t = {value = new PolyString(t.image.substring(1, t.image.length() - 1));} + ) + {properties.put(new PolyString( key.image ), value);} + ( + key = ( + t = {value = new PolyString(t.image);} + | + t = {value = new PolyInteger(Integer.parseInt(t.image));} + | + t = {value = new PolyDouble(Double.parseDouble(t.image));} + | + t = {value = new PolyString(t.image.substring(1, t.image.length() - 1));} + ) + {properties.put(new PolyString( key.image ), value);} + )* + ]] + + ] + [ {dir = EdgeDirection.LEFT_TO_RIGHT;}] + + {return new Quadruple<>(PolyDictionary.ofDict(properties), labels, name, dir);} +} + +PolyEdge PolyEdgeWithEnds() : +{ + Token t; + String source, target; + Quadruple, PolyString, EdgeDirection> edge = null; + +} +{ + source = UUID() + edge = PolyEdge() + target = UUID() + + {return new PolyEdge(edge.a, edge.b, new PolyString(source), new PolyString(target), edge.d, edge.c);} +} + +/** +* We do not parse serialized PolyLists and PolyDocuments ourselves (created with .toJson()). +* Instead we just return it as a string. They can then be parsed using for example PolyDocument.fromJson(str); +*/ +String ListStr() : +{ + StringBuilder sb = new StringBuilder(); + String str; +} +{ + {sb.append("[");} + ( + str = DocumentStr() { sb.append(str); } + | + str = ListStr() { sb.append(str); } + | + str = Number() { sb.append(str); } + | + { sb.append( token.image); } + | + { sb.append(token.image); } + | + { sb.append(token.image); } + | + { sb.append(token.image); } + )* + {sb.append("]");} + {return sb.toString();} +} + +String DocumentStr() : +{ + StringBuilder sb = new StringBuilder(); + String str; +} +{ + {sb.append("{");} + ( + str = DocumentStr() { sb.append(str); } + | + { sb.append( token.image); } + | + { sb.append(token.image); } + | + { sb.append(token.image); } + | + { sb.append(token.image); } + | + { sb.append(token.image); } + | + { sb.append(token.image); } + | + str = Number() { sb.append(str); } + )* + {sb.append("}");} + {return sb.toString();} +} + +PolyAlgDataType DataType() : +{ + String type; + Token t; + List args = new ArrayList<>(); + boolean nullable = true; + boolean isArray = false; +} +{ + t = {type = t.image;} [ + t = {args.add(Integer.parseInt(t.image));} ( t = {args.add(Integer.parseInt(t.image));})* + ] + [ {isArray = true;}] + [ | {nullable = false;} ] + {return new PolyAlgDataType(type, args, nullable, isArray, getPos());} +} + +String UUID() : +{ + StringBuilder sb = new StringBuilder(); +} +{ + ( {sb.append(token.image);} | {sb.append(token.image);} | {sb.append(token.image);})+ + {sb.append(token.image);} ( {sb.append(token.image);} | {sb.append(token.image);} | {sb.append(token.image);})+ + {sb.append(token.image);} ( {sb.append(token.image);} | {sb.append(token.image);} | {sb.append(token.image);})+ + {sb.append(token.image);} ( {sb.append(token.image);} | {sb.append(token.image);} | 
{sb.append(token.image);})+ + {sb.append(token.image);} ( {sb.append(token.image);} | {sb.append(token.image);} | {sb.append(token.image);})+ + {return sb.toString();} +} + +String OpName() : +{ + Token t; +} +{ + ( + t = + | + t = + ) + {return t.image;} +} + +String AliasName() : +{ + Token t; + String name; +} +{ + ( + name = OpName() + | + t = {name = t.image.substring(1, t.image.length()-1);} + ) + {return name;} +} + +String ParamName() : +{ + String name; +} +{ + name = OpName() + {return name;} +} + +String UnsignedNumber() : +{ + Token t; + String str; +} +{ + ( + t = + | + t = + | + t = + ) + { return t.image;} +} + +String Number() : +{ + String num; +} +{ + [] num = UnsignedNumber() {return num;} + | + num = UnsignedNumber() {return "-" + num;} +} diff --git a/core/src/main/java/org/polypheny/db/algebra/AbstractAlgNode.java b/core/src/main/java/org/polypheny/db/algebra/AbstractAlgNode.java index 9aa8f1e040..ac1ee2c973 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AbstractAlgNode.java +++ b/core/src/main/java/org/polypheny/db/algebra/AbstractAlgNode.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,6 +34,9 @@ package org.polypheny.db.algebra; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import java.io.PrintWriter; @@ -41,6 +44,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Optional; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import lombok.Getter; @@ -48,10 +52,21 @@ import lombok.experimental.SuperBuilder; import org.polypheny.db.algebra.constant.ExplainLevel; import org.polypheny.db.algebra.core.CorrelationId; +import org.polypheny.db.algebra.core.SetOp; +import org.polypheny.db.algebra.core.common.Transformer; import org.polypheny.db.algebra.externalize.AlgWriterImpl; +import org.polypheny.db.algebra.logical.relational.LogicalRelProject; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.metadata.Metadata; import org.polypheny.db.algebra.metadata.MetadataFactory; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration; +import org.polypheny.db.algebra.polyalg.PolyAlgMetadata; +import org.polypheny.db.algebra.polyalg.PolyAlgMetadata.GlobalStats; +import org.polypheny.db.algebra.polyalg.PolyAlgRegistry; +import org.polypheny.db.algebra.polyalg.PolyAlgUtils; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.Entity; import org.polypheny.db.plan.AlgCluster; @@ -307,8 +322,12 @@ public AlgNode accept( RexShuttle shuttle ) { @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { // by default, assume cost is proportional to number of rows - double tupleCount = mq.getTupleCount( this ); - return planner.getCostFactory().makeCost( tupleCount, tupleCount, 0 ); + Optional tupleCount = mq.getTupleCount( this ); + if ( tupleCount.isEmpty() ) { + return 
planner.getCostFactory().makeInfiniteCost(); + } + + return planner.getCostFactory().makeCost( tupleCount.get(), tupleCount.get(), 0 ); } @@ -346,6 +365,137 @@ public AlgWriter explainTerms( AlgWriter pw ) { } + @Override + public void buildPolyAlgebra( StringBuilder sb, String prefix ) { + final String INDENT = " "; + String nextPrefix = prefix == null ? null : prefix + INDENT; + PolyAlgDeclaration decl = getPolyAlgDeclaration(); + boolean makeUnique = makeFieldsUnique( decl ); + List inputFieldNames = makeUnique ? + PolyAlgUtils.uniquifiedInputFieldNames( this ) : + PolyAlgUtils.getInputFieldNamesList( this ); + sb.append( prefix == null ? "" : prefix ).append( decl.opName ); + if ( decl.hasParams() ) { + sb.append( bindArguments().toPolyAlgebra( this, inputFieldNames ) ); + } else { + sb.append( "[]" ); + } + + int size = getInputs().size(); + if ( size == 0 ) { + return; // skip parentheses for leaves + } + + sb.append( "(\n" ); + int inputIdx = 0; + for ( AlgNode child : getInputs() ) { + ListArg projections = makeUnique ? + PolyAlgUtils.getAuxProjections( child, inputFieldNames, inputIdx ) : + null; + inputIdx += child.getTupleType().getFieldCount(); + + if ( projections == null ) { + child.buildPolyAlgebra( sb, nextPrefix ); + } else { + if ( nextPrefix != null ) { + sb.append( nextPrefix ); + } + sb.append( PolyAlgRegistry.getDeclaration( LogicalRelProject.class ).opName ) + .append( projections.toPolyAlg( child, child.getTupleType().getFieldNames() ) ) + .append( "(\n" ); + child.buildPolyAlgebra( sb, nextPrefix == null ? null : nextPrefix + INDENT ); + sb.append( ")" ); + } + + size--; + if ( size > 0 ) { + sb.append( ", \n" ); + } + } + sb.append( ")" ); + } + + + @Override + public ObjectNode serializePolyAlgebra( ObjectMapper mapper, GlobalStats gs ) { + ObjectNode node = mapper.createObjectNode(); + + PolyAlgDeclaration decl = getPolyAlgDeclaration(); + boolean makeUnique = makeFieldsUnique( decl ); // set operations like UNION require duplicate field names + List inputFieldNames = makeUnique ? + PolyAlgUtils.uniquifiedInputFieldNames( this ) : + PolyAlgUtils.getInputFieldNamesList( this ); + + node.put( "opName", decl.opName ); + if ( decl.hasParams() ) { + node.set( "arguments", bindArguments().serialize( this, inputFieldNames, mapper ) ); + } else { + node.set( "arguments", mapper.createObjectNode() ); + } + node.set( "metadata", serializeMetadata( mapper, gs ) ); // set to null if gs is null + + ArrayNode inputs = mapper.createArrayNode(); + + int inputIdx = 0; + for ( AlgNode child : getInputs() ) { + ListArg projections = makeUnique ? 
+ PolyAlgUtils.getAuxProjections( child, inputFieldNames, inputIdx ) : + null; + inputIdx += child.getTupleType().getFieldCount(); + + if ( projections == null ) { + inputs.add( child.serializePolyAlgebra( mapper, gs ) ); + } else { + inputs.add( PolyAlgUtils.wrapInRename( child, projections, child, child.getTupleType().getFieldNames(), mapper, gs ) ); + } + } + node.set( "inputs", inputs ); + + return node; + } + + + private ObjectNode serializeMetadata( ObjectMapper mapper, GlobalStats gs ) { + if ( gs == null ) { + return null; + } + PolyAlgMetadata meta = new PolyAlgMetadata( mapper, gs ); + AlgMetadataQuery mq = this.getCluster().getMetadataQuery(); + + mq.getTupleCount( this ).ifPresent( aDouble -> meta.addCosts( mq.getNonCumulativeCost( this ), mq.getCumulativeCost( this ), aDouble ) ); + + return meta.serialize(); + } + + + private boolean makeFieldsUnique( PolyAlgDeclaration decl ) { + // set operations like UNION require duplicate field names + return decl.mightRequireAuxiliaryProject() && + !(this instanceof SetOp) && + !(this instanceof Transformer); + } + + + /** + * If a declaration should be shared by multiple implementations, + * this method must be redefined. + * Otherwise, this implementation should cover most cases. + * + * @return The declaration associated with the runtime class of the instance. + */ + @Override + public PolyAlgDeclaration getPolyAlgDeclaration() { + return PolyAlgRegistry.getDeclaration( getClass(), getModel(), getInputs().size() ); + } + + + @Override + public PolyAlgArgs bindArguments() { + // Any AlgNode registered in the PolyAlgRegistry should probably not use this generic implementation! + return new PolyAlgArgs( getPolyAlgDeclaration() ); + } + + @Override public AlgNode onRegister( AlgPlanner planner ) { List oldInputs = getInputs(); diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgDistributions.java b/core/src/main/java/org/polypheny/db/algebra/AlgDistributions.java index 7e243e89b5..32865e897d 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgDistributions.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgDistributions.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -39,6 +39,7 @@ import java.util.List; import java.util.Objects; import javax.annotation.Nonnull; +import org.polypheny.db.algebra.AlgDistribution.Type; import org.polypheny.db.plan.AlgMultipleTrait; import org.polypheny.db.plan.AlgPlanner; import org.polypheny.db.plan.AlgTrait; @@ -105,6 +106,19 @@ public static AlgDistribution range( Collection numbers ) { } + public static AlgDistribution getDistribution( Type type, Collection numbers ) { + return switch ( type ) { + case SINGLETON -> AlgDistributions.SINGLETON; + case HASH_DISTRIBUTED -> hash( numbers ); + case RANGE_DISTRIBUTED -> range( numbers ); + case RANDOM_DISTRIBUTED -> AlgDistributions.RANDOM_DISTRIBUTED; + case ROUND_ROBIN_DISTRIBUTED -> AlgDistributions.ROUND_ROBIN_DISTRIBUTED; + case BROADCAST_DISTRIBUTED -> AlgDistributions.BROADCAST_DISTRIBUTED; + case ANY -> AlgDistributions.ANY; + }; + } + + /** * Implementation of {@link AlgDistribution}. 
*/ diff --git a/core/src/main/java/org/polypheny/db/algebra/AlgNode.java b/core/src/main/java/org/polypheny/db/algebra/AlgNode.java index c384aeec25..e5410d553b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/AlgNode.java +++ b/core/src/main/java/org/polypheny/db/algebra/AlgNode.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,6 +34,8 @@ package org.polypheny.db.algebra; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; import java.util.List; import java.util.Objects; import java.util.Set; @@ -45,6 +47,9 @@ import org.polypheny.db.algebra.externalize.AlgWriterImpl; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.metadata.Metadata; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration; +import org.polypheny.db.algebra.polyalg.PolyAlgMetadata.GlobalStats; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.entity.Entity; import org.polypheny.db.catalog.logistic.DataModel; @@ -238,6 +243,62 @@ public interface AlgNode extends AlgOptNode, Cloneable { */ void explain( AlgWriter pw ); + /** + * Recursively constructs a string representation of the tree rooted at this AlgNode using the provided StringBuilder. + * The basic structure of PolyAlgebra is {@code OPERATOR[attributes](children)}. + * + * @param sb StringBuilder for building the representation + * @param prefix Prefix to be added in front of each operator (resulting in operators having a visual indentation) or null if not desired + */ + void buildPolyAlgebra( StringBuilder sb, String prefix ); + + default void buildPolyAlgebra( StringBuilder sb ) { + buildPolyAlgebra( sb, null ); + } + + default String buildPolyAlgebra( String prefix ) { + StringBuilder sb = new StringBuilder(); + buildPolyAlgebra( sb, prefix ); + return sb.toString(); + } + + /** + * Recursively constructs a JSON object structure that represents the tree rooted at this AlgNode using the provided ObjectMapper. + * + * @param mapper the ObjectMapper used for creating JsonNodes. + * @param gs the GlobalStats object containing the global maximums for stats or null if no metadata should be included + * @return a ObjectNode representing the AlgNode tree rooted at this node. + */ + ObjectNode serializePolyAlgebra( ObjectMapper mapper, GlobalStats gs ); + + /** + * Serialize this node without generating metadata. + * + * @param mapper the ObjectMapper used for creating JsonNodes. + * @return a ObjectNode representing the AlgNode tree rooted at this node. + */ + default ObjectNode serializePolyAlgebra( ObjectMapper mapper ) { + return serializePolyAlgebra( mapper, null ); + } + + /** + * Retrieves the PolyAlgDeclaration for this AlgNode implementation. + * This declaration is read only and can thus be a static object. + * + * @return PolyAlgDeclaration for this AlgNode implementation + */ + PolyAlgDeclaration getPolyAlgDeclaration(); + + /** + * Binds the arguments defining the state of this instance to the corresponding parameters. This is done for the purpose of creating the PolyAlgebra representation. + * The returned PolyAlgArgs contains every {@link org.polypheny.db.algebra.polyalg.arguments.PolyAlgArg} relevant for this AlgNode instance. 
+ * The corresponding {@link org.polypheny.db.algebra.polyalg.PolyAlgDeclaration} must be identical to {@code getPolyAlgDeclaration()}. + * Its parameters can be used to retrieve corresponding argument values. + * + * @return PolyAlgArgs that maps parameters of the declaration to {@link org.polypheny.db.algebra.polyalg.arguments.PolyAlgArg} that wraps the corresponding attribute + */ + PolyAlgArgs bindArguments(); + /** * Receives notification that this expression is about to be registered. The implementation of this method must at least register all child expressions. * diff --git a/core/src/main/java/org/polypheny/db/algebra/SingleAlg.java b/core/src/main/java/org/polypheny/db/algebra/SingleAlg.java index ab781d5149..35d0e6e2a8 100644 --- a/core/src/main/java/org/polypheny/db/algebra/SingleAlg.java +++ b/core/src/main/java/org/polypheny/db/algebra/SingleAlg.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -78,7 +78,7 @@ public List getInputs() { @Override public double estimateTupleCount( AlgMetadataQuery mq ) { // Not necessarily correct, but a better default than AbstractAlgNode's 1.0 - return mq.getTupleCount( input ); + return mq.getTupleCount( input ).orElse( Double.MAX_VALUE ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/convert/ConverterImpl.java b/core/src/main/java/org/polypheny/db/algebra/convert/ConverterImpl.java index 6f8a9f1e94..69ef4b30d2 100644 --- a/core/src/main/java/org/polypheny/db/algebra/convert/ConverterImpl.java +++ b/core/src/main/java/org/polypheny/db/algebra/convert/ConverterImpl.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,6 +34,7 @@ package org.polypheny.db.algebra.convert; +import java.util.Optional; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.SingleAlg; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; @@ -70,12 +71,13 @@ protected ConverterImpl( AlgCluster cluster, AlgTraitDef traitDef, AlgTraitSe @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { - Double dRows = mq.getTupleCount( getInput() ); - if ( dRows == null ) { - dRows = Double.MAX_VALUE; - } + Optional dRows = mq.getTupleCount( getInput() ); double dIo = 0; - return planner.getCostFactory().makeCost( dRows, dRows, dIo ); + if ( dRows.isEmpty() ) { + return planner.getCostFactory().makeCost( Double.MAX_VALUE, Double.MAX_VALUE, dIo ); + } + + return planner.getCostFactory().makeCost( dRows.get(), dRows.get(), dIo ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Aggregate.java b/core/src/main/java/org/polypheny/db/algebra/core/Aggregate.java index a5204a48d9..5ed75db90d 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Aggregate.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Aggregate.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
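The four new AlgNode methods above (buildPolyAlgebra, serializePolyAlgebra, getPolyAlgDeclaration, bindArguments) are the whole contract an operator implementation has to satisfy to take part in PolyAlgebra serialization. A minimal sketch of a hypothetical single-input operator wired into this contract, mirroring the Filter pattern further down in this patch; the class name and its condition field are illustrative only and not part of the change:

    import org.polypheny.db.algebra.AlgNode;
    import org.polypheny.db.algebra.SingleAlg;
    import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs;
    import org.polypheny.db.algebra.polyalg.arguments.RexArg;
    import org.polypheny.db.plan.AlgCluster;
    import org.polypheny.db.plan.AlgTraitSet;
    import org.polypheny.db.rex.RexNode;

    // Sketch only. Kept abstract because copy(), algCompareString() and the rest of the AlgNode boilerplate are omitted.
    public abstract class HypotheticalSelectionAlg extends SingleAlg {

        private final RexNode condition; // illustrative operator state

        protected HypotheticalSelectionAlg( AlgCluster cluster, AlgTraitSet traits, AlgNode input, RexNode condition ) {
            super( cluster, traits, input );
            this.condition = condition;
        }

        // getPolyAlgDeclaration() is inherited from AbstractAlgNode and resolves the declaration registered
        // for this class in PolyAlgRegistry; only the argument binding is operator specific.
        @Override
        public PolyAlgArgs bindArguments() {
            PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() );
            return args.put( "condition", new RexArg( condition ) );
        }
    }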
@@ -40,6 +40,7 @@ import java.util.HashSet; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import lombok.Getter; @@ -50,6 +51,12 @@ import org.polypheny.db.algebra.fun.AggFunction; import org.polypheny.db.algebra.logical.relational.LogicalRelAggregate; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.PolyAlgUtils; +import org.polypheny.db.algebra.polyalg.arguments.AggArg; +import org.polypheny.db.algebra.polyalg.arguments.FieldArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.algebra.rules.AggregateExpandDistinctAggregatesRule; import org.polypheny.db.algebra.rules.AggregateProjectPullUpConstantsRule; import org.polypheny.db.algebra.rules.AggregateReduceFunctionsRule; @@ -72,6 +79,7 @@ import org.polypheny.db.util.ImmutableBitSet; import org.polypheny.db.util.Litmus; import org.polypheny.db.util.Pair; +import org.polypheny.db.util.Triple; import org.polypheny.db.util.Util; @@ -274,7 +282,10 @@ public double estimateTupleCount( AlgMetadataQuery mq ) { @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { // REVIEW jvs: This is bogus, but no more bogus than what's currently in Join. - double rowCount = mq.getTupleCount( this ); + Optional rowCount = mq.getTupleCount( this ); + if ( rowCount.isEmpty() ) { + return planner.getCostFactory().makeInfiniteCost(); + } // Aggregates with more aggregate functions cost a bit more float multiplier = 1f + (float) aggCalls.size() * 0.125f; for ( AggregateCall aggCall : aggCalls ) { @@ -283,7 +294,7 @@ public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { multiplier += 0.0125f; } } - return planner.getCostFactory().makeCost( rowCount * multiplier, 0, 0 ); + return planner.getCostFactory().makeCost( rowCount.get() * multiplier, 0, 0 ); } @@ -374,6 +385,11 @@ private boolean typeMatchesInferred( final AggregateCall aggCall, final Litmus l AggCallBinding callBinding = aggCall.createBinding( this ); AlgDataType type = aggFunction.inferReturnType( callBinding ); AlgDataType expectedType = aggCall.type; + if ( type.isNullable() != expectedType.isNullable() ) { + // During PolyAlgebra parsing, the type might become non-nullable. We do not want to throw an error in this case. 
+ AlgDataTypeFactory factory = AlgDataTypeFactory.DEFAULT; + expectedType = factory.createTypeWithNullability( expectedType, type.isNullable() ); + } return AlgOptUtil.eq( "aggCall type", expectedType, "inferred type", type, litmus ); } @@ -414,6 +430,43 @@ public String algCompareString() { } + protected static Triple, List> extractArgs( PolyAlgArgs args ) { + ListArg group = args.getListArg( "group", FieldArg.class ); + ListArg aggs = args.getListArg( "aggs", AggArg.class ); + List> groups = PolyAlgUtils.getNestedListArgAsList( args.getListArg( "groups", ListArg.class ) ); + List groupSets = groups.stream().map( + g -> ImmutableBitSet.of( + g.stream().map( FieldArg::getField ).toList() + ) + ).toList(); + if ( groupSets.isEmpty() ) { + groupSets = null; + } + return Triple.of( ImmutableBitSet.of( group.map( FieldArg::getField ) ), groupSets, aggs.map( AggArg::getAgg ) ); + } + + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + + PolyAlgArg groupArg = new ListArg<>( groupSet.asList(), FieldArg::new, args.getDecl().canUnpackValues() ); + PolyAlgArg aggsArg = new ListArg<>( aggCalls, AggArg::new ); + + args.put( "group", groupArg ); + args.put( "aggs", aggsArg ); + if ( getGroupType() != Group.SIMPLE ) { + PolyAlgArg groupSetArg = new ListArg<>( + groupSets, + set -> new ListArg<>( set.asList(), FieldArg::new ) ); + + args.put( "groups", groupSetArg ); + } + + return args; + } + + /** * What kind of roll-up is it? */ diff --git a/core/src/main/java/org/polypheny/db/algebra/core/AggregateCall.java b/core/src/main/java/org/polypheny/db/algebra/core/AggregateCall.java index add0896e10..3171a5deae 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/AggregateCall.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/AggregateCall.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -215,6 +215,18 @@ public AggregateCall rename( String name ) { public String toString() { + return toString( null ); + } + + + /** + * If fieldNames is null, args are serialized to "$" + arg. + * Otherwise, the value in fieldNames at index arg is used. + * + * @param fieldNames list containing the field names referenced by argList. + * @return string representation of this instance. + */ + public String toString( List fieldNames ) { StringBuilder buf = new StringBuilder( aggFunction.toString() ); buf.append( "(" ); if ( distinct ) { @@ -225,8 +237,12 @@ public String toString() { if ( ++i > 0 ) { buf.append( ", " ); } - buf.append( "$" ); - buf.append( arg ); + if ( fieldNames == null ) { + buf.append( "$" ); + buf.append( arg ); + } else { + buf.append( fieldNames.get( arg ) ); + } } buf.append( ")" ); if ( !collation.equals( AlgCollations.EMPTY ) ) { diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Calc.java b/core/src/main/java/org/polypheny/db/algebra/core/Calc.java index b24a360bf2..ce47051074 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Calc.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Calc.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
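The effect of the new AggregateCall.toString( List<String> fieldNames ) overload above is easiest to see with a concrete call: passing null keeps the positional "$n" notation, passing the input field names resolves the references. The aggregate call and field names in this snippet are made up for illustration:

    import java.util.List;
    import org.polypheny.db.algebra.core.AggregateCall;

    class AggCallToStringDemo {

        // 'call' is assumed to be an aggregate call over argument index 0, e.g. SUM on the first input field.
        static void printBothForms( AggregateCall call ) {
            List<String> fieldNames = List.of( "price", "quantity" ); // hypothetical input field names
            System.out.println( call.toString() );             // positional form, e.g. "SUM($0)"
            System.out.println( call.toString( fieldNames ) ); // name-resolved form used for PolyAlgebra, e.g. "SUM(price)"
        }
    }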
@@ -42,14 +42,20 @@ import org.polypheny.db.algebra.logical.relational.LogicalCalc; import org.polypheny.db.algebra.metadata.AlgMdUtil; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgOptCost; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgPlanner; import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexLocalRef; import org.polypheny.db.rex.RexNode; import org.polypheny.db.rex.RexProgram; +import org.polypheny.db.rex.RexProgramBuilder; import org.polypheny.db.rex.RexShuttle; import org.polypheny.db.util.Litmus; @@ -120,8 +126,8 @@ public double estimateTupleCount( AlgMetadataQuery mq ) { @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { - double dRows = mq.getTupleCount( this ); - double dCpu = mq.getTupleCount( getInput() ) * program.getExprCount(); + double dRows = mq.getTupleCount( this ).orElse( Double.MAX_VALUE ); + double dCpu = mq.getTupleCount( getInput() ).orElse( Double.MAX_VALUE ) * program.getExprCount(); double dIo = 0; return planner.getCostFactory().makeCost( dRows, dCpu, dIo ); } @@ -164,4 +170,32 @@ public String algCompareString() { (program != null ? program.toString() : "") + "&"; } + + public static RexProgram getProgramFromArgs( PolyAlgArgs args, AlgNode input, RexBuilder b ) { + List exprs = args.getListArg( "exprs", RexArg.class ).map( RexArg::getNode ); + List projectsArg = args.getListArg( "projects", RexArg.class ).getArgs(); + RexNode condition = args.getArg( "condition", RexArg.class ).getNode(); + + RexProgramBuilder builder = new RexProgramBuilder( input.getTupleType(), b ); + exprs.forEach( builder::registerInput ); + projectsArg.forEach( p -> builder.addProject( p.getNode(), p.getAlias() ) ); + if ( condition != null ) { + builder.addCondition( condition ); + } + return builder.getProgram( false ); + } + + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + PolyAlgArg exprs = new ListArg<>( program.getExprList(), RexArg::new, args.getDecl().canUnpackValues() ); + PolyAlgArg projects = new ListArg<>( program.getProjectList(), RexArg::new, program.getOutputRowType().getFieldNames(), args.getDecl().canUnpackValues() ); + + args.put( "exprs", exprs ) + .put( "projects", projects ) + .put( "condition", new RexArg( program.getCondition() ) ); + return args; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Correlate.java b/core/src/main/java/org/polypheny/db/algebra/core/Correlate.java index 6710ad4081..067f646cd0 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Correlate.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Correlate.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
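Calc.getProgramFromArgs above is the parsing-side inverse of Calc.bindArguments: the serialized exprs, projects and condition are fed back into a RexProgramBuilder. The same rebuild step, written out with the pieces passed in directly instead of being unpacked from PolyAlgArgs (the method and parameter names here are illustrative):

    import java.util.List;
    import org.polypheny.db.algebra.AlgNode;
    import org.polypheny.db.rex.RexBuilder;
    import org.polypheny.db.rex.RexNode;
    import org.polypheny.db.rex.RexProgram;
    import org.polypheny.db.rex.RexProgramBuilder;

    class RexProgramRebuildSketch {

        // Rebuilds a RexProgram from already-parsed pieces; 'aliases' holds one output name per project expression.
        static RexProgram rebuildProgram( AlgNode input, RexBuilder rexBuilder, List<RexNode> exprs, List<RexNode> projects, List<String> aliases, RexNode condition ) {
            RexProgramBuilder builder = new RexProgramBuilder( input.getTupleType(), rexBuilder );
            exprs.forEach( builder::registerInput );
            for ( int i = 0; i < projects.size(); i++ ) {
                builder.addProject( projects.get( i ), aliases.get( i ) );
            }
            if ( condition != null ) { // the condition is optional, matching the nullable RexArg in the patch
                builder.addCondition( condition );
            }
            return builder.getProgram( false );
        }
    }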
@@ -36,6 +36,7 @@ import com.google.common.collect.ImmutableSet; import java.util.List; +import java.util.Optional; import lombok.Getter; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgWriter; @@ -158,7 +159,10 @@ public ImmutableSet getVariablesSet() { @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { - double rowCount = mq.getTupleCount( this ); + Optional rowCount = mq.getTupleCount( this ); + if ( rowCount.isEmpty() ) { + return planner.getCostFactory().makeInfiniteCost(); + } final double rightRowCount = right.estimateTupleCount( mq ); final double leftRowCount = left.estimateTupleCount( mq ); @@ -166,13 +170,16 @@ public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { return planner.getCostFactory().makeInfiniteCost(); } - Double restartCount = mq.getTupleCount( getLeft() ); + Optional restartCount = mq.getTupleCount( getLeft() ); + if ( restartCount.isEmpty() ) { + return planner.getCostFactory().makeInfiniteCost(); + } // RelMetadataQuery.getCumulativeCost(getRight()); does not work for // RelSubset, so we ask planner to cost-estimate right relation AlgOptCost rightCost = planner.getCost( getRight(), mq ); - AlgOptCost rescanCost = rightCost.multiplyBy( Math.max( 1.0, restartCount - 1 ) ); + AlgOptCost rescanCost = rightCost.multiplyBy( Math.max( 1.0, restartCount.get() - 1 ) ); - return planner.getCostFactory().makeCost( rowCount /* generate results */ + leftRowCount /* relScan left results */, 0, 0 ).plus( rescanCost ); + return planner.getCostFactory().makeCost( rowCount.get() /* generate results */ + leftRowCount /* relScan left results */, 0, 0 ).plus( rescanCost ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Exchange.java b/core/src/main/java/org/polypheny/db/algebra/core/Exchange.java index 0e00d85106..eef05c9e6a 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Exchange.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Exchange.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -36,6 +36,7 @@ import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.stream.Collectors; import org.polypheny.db.algebra.AlgDistribution; import org.polypheny.db.algebra.AlgDistributions; @@ -97,9 +98,12 @@ public AlgDistribution getDistribution() { @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { // Higher cost if rows are wider discourages pushing a project through an exchange. 
- double rowCount = mq.getTupleCount( this ); + Optional rowCount = mq.getTupleCount( this ); + if ( rowCount.isEmpty() ) { + return planner.getCostFactory().makeInfiniteCost(); + } double bytesPerRow = getTupleType().getFieldCount() * 4; - return planner.getCostFactory().makeCost( Util.nLogN( rowCount ) * bytesPerRow, rowCount, 0 ); + return planner.getCostFactory().makeCost( Util.nLogN( rowCount.get() ) * bytesPerRow, rowCount.get(), 0 ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Filter.java b/core/src/main/java/org/polypheny/db/algebra/core/Filter.java index a7ab6d639e..63b55b1f34 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Filter.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Filter.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -36,6 +36,7 @@ import com.google.common.collect.ImmutableList; import java.util.List; +import java.util.Optional; import lombok.Getter; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgWriter; @@ -43,6 +44,8 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelFilter; import org.polypheny.db.algebra.metadata.AlgMdUtil; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgOptCost; @@ -128,10 +131,13 @@ public boolean isValid( Litmus litmus, Context context ) { @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { - double dRows = mq.getTupleCount( this ); - double dCpu = mq.getTupleCount( getInput() ); + Optional dRows = mq.getTupleCount( this ); + Optional dCpu = mq.getTupleCount( getInput() ); double dIo = 0; - return planner.getCostFactory().makeCost( dRows, dCpu, dIo ); + if ( dRows.isEmpty() || dCpu.isEmpty() ) { + return planner.getCostFactory().makeInfiniteCost(); + } + return planner.getCostFactory().makeCost( dRows.get(), dCpu.get(), dIo ); } @@ -154,4 +160,11 @@ public String algCompareString() { (condition != null ? condition.hashCode() : "") + "&"; } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + return args.put( 0, new RexArg( condition ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Intersect.java b/core/src/main/java/org/polypheny/db/algebra/core/Intersect.java index f791227b82..350a350187 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Intersect.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Intersect.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
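The Optional handling in Exchange and Filter above is the same fallback that recurs in nearly every computeSelfCost touched by this patch: an unknown tuple count degrades to an infinite cost. If the pattern keeps spreading, it could be factored into a small shared helper along these lines; this is a sketch, not part of the patch:

    import java.util.Optional;
    import org.polypheny.db.plan.AlgOptCost;
    import org.polypheny.db.plan.AlgPlanner;

    class CostFallbackSketch {

        // Unknown cardinality maps to an infinite cost, otherwise rows and cpu are derived from the tuple count.
        static AlgOptCost costOrInfinite( AlgPlanner planner, Optional<Double> tupleCount, double cpuPerRow, double io ) {
            return tupleCount
                    .map( rows -> planner.getCostFactory().makeCost( rows, rows * cpuPerRow, io ) )
                    .orElseGet( () -> planner.getCostFactory().makeInfiniteCost() );
        }
    }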
@@ -39,6 +39,8 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; @@ -63,7 +65,7 @@ public double estimateTupleCount( AlgMetadataQuery mq ) { // REVIEW jvs: I just pulled this out of a hat. double dRows = Double.MAX_VALUE; for ( AlgNode input : inputs ) { - dRows = Math.min( dRows, mq.getTupleCount( input ) ); + dRows = Math.min( dRows, mq.getTupleCount( input ).orElse( Double.MAX_VALUE ) ); } dRows *= 0.25; return dRows; @@ -77,4 +79,11 @@ public String algCompareString() { all + "&"; } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + return args.put( "all", new BooleanArg( all ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Join.java b/core/src/main/java/org/polypheny/db/algebra/core/Join.java index fe65121e41..a20a7f3401 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Join.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Join.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -36,8 +36,10 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; +import java.util.HashSet; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.Set; import lombok.Getter; import org.polypheny.db.algebra.AlgNode; @@ -45,6 +47,12 @@ import org.polypheny.db.algebra.BiAlg; import org.polypheny.db.algebra.metadata.AlgMdUtil; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.algebra.polyalg.arguments.CorrelationArg; +import org.polypheny.db.algebra.polyalg.arguments.EnumArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.algebra.rules.JoinAddRedundantSemiJoinRule; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgCluster; @@ -56,6 +64,7 @@ import org.polypheny.db.rex.RexShuttle; import org.polypheny.db.type.PolyType; import org.polypheny.db.util.Litmus; +import org.polypheny.db.util.Triple; import org.polypheny.db.util.Util; import org.polypheny.db.util.ValidatorUtil; @@ -154,8 +163,11 @@ public boolean isValid( Litmus litmus, Context context ) { @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { // REVIEW jvs: Just for now... 
- double rowCount = mq.getTupleCount( this ); - return planner.getCostFactory().makeCost( rowCount, 0, 0 ); + Optional rowCount = mq.getTupleCount( this ); + if ( rowCount.isEmpty() ) { + return planner.getCostFactory().makeInfiniteCost(); + } + return planner.getCostFactory().makeCost( rowCount.get(), 0, 0 ); } @@ -247,4 +259,23 @@ public String algCompareString() { } + protected static Triple, JoinAlgType> extractArgs( PolyAlgArgs args ) { + RexArg condition = args.getArg( "condition", RexArg.class ); + EnumArg type = args.getEnumArg( "type", JoinAlgType.class ); + List variables = args.getListArg( "variables", CorrelationArg.class ).map( CorrelationArg::getCorrId ); + return Triple.of( condition.getNode(), new HashSet<>( variables ), type.getArg() ); + } + + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + + args.put( 0, new RexArg( condition ) ) + .put( "type", new EnumArg<>( joinType, ParamType.JOIN_TYPE_ENUM ) ) + .put( "variables", new ListArg<>( variablesSet.asList(), CorrelationArg::new ) ); + return args; + } + + } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Minus.java b/core/src/main/java/org/polypheny/db/algebra/core/Minus.java index 9b7ef85c34..edf74bfb06 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Minus.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Minus.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,8 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.metadata.AlgMdUtil; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; @@ -80,4 +82,11 @@ public String algCompareString() { all + "&"; } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + return args.put( "all", new BooleanArg( all ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/ModifyCollect.java b/core/src/main/java/org/polypheny/db/algebra/core/ModifyCollect.java index 8b92aafdd7..c5b657556b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/ModifyCollect.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/ModifyCollect.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
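Join.extractArgs above is the parsing-side counterpart of Join.bindArguments. The round trip of a single parameter can be seen in isolation using only calls that appear in this patch; building a suitable PolyAlgDeclaration is out of scope here, so 'decl' is assumed to describe a join with a JOIN_TYPE_ENUM parameter named "type":

    import org.polypheny.db.algebra.core.JoinAlgType;
    import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration;
    import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType;
    import org.polypheny.db.algebra.polyalg.arguments.EnumArg;
    import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs;

    class JoinTypeRoundTripSketch {

        // Binds the join type as an EnumArg and immediately reads it back, as the parser side would.
        static JoinAlgType roundTrip( PolyAlgDeclaration decl, JoinAlgType joinType ) {
            PolyAlgArgs args = new PolyAlgArgs( decl );
            args.put( "type", new EnumArg<>( joinType, ParamType.JOIN_TYPE_ENUM ) );
            return args.getEnumArg( "type", JoinAlgType.class ).getArg();
        }
    }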
@@ -22,6 +22,8 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; @@ -49,5 +51,12 @@ public String algCompareString() { all + "&"; } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + return args.put( "all", new BooleanArg( all ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Project.java b/core/src/main/java/org/polypheny/db/algebra/core/Project.java index bb88a0f6e4..42bda19cb2 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Project.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Project.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -38,6 +38,7 @@ import java.util.HashSet; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import org.apache.calcite.linq4j.Ord; @@ -46,6 +47,10 @@ import org.polypheny.db.algebra.SingleAlg; import org.polypheny.db.algebra.logical.relational.LogicalRelProject; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.plan.AlgCluster; @@ -176,10 +181,13 @@ public boolean isValid( Litmus litmus, Context context ) { @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { - double dRows = mq.getTupleCount( getInput() ); - double dCpu = dRows * exps.size(); + Optional dRows = mq.getTupleCount( getInput() ); + if ( dRows.isEmpty() ) { + return planner.getCostFactory().makeInfiniteCost(); + } + double dCpu = dRows.get() * exps.size(); double dIo = 0; - return planner.getCostFactory().makeCost( dRows, dCpu, dIo ); + return planner.getCostFactory().makeCost( dRows.get(), dCpu, dIo ); } @@ -322,5 +330,15 @@ public String algCompareString() { rowType.toString() + types + "&"; } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + PolyAlgArg projectsArg = new ListArg<>( exps, RexArg::new, rowType.getFieldNames(), args.getDecl().canUnpackValues() ); + + args.put( 0, projectsArg ); + return args; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/RelTableFunctionScan.java b/core/src/main/java/org/polypheny/db/algebra/core/RelTableFunctionScan.java index 3043575a1e..39f33500be 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/RelTableFunctionScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/RelTableFunctionScan.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the 
License. @@ -39,6 +39,7 @@ import java.lang.reflect.Type; import java.util.ArrayList; import java.util.List; +import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import org.apache.calcite.linq4j.Ord; @@ -58,7 +59,7 @@ /** * Relational expression that calls a table-valued function. - * + *
<p>
* The function returns a result set. * It can appear as a leaf in a query tree, or can be applied to relational inputs. * @@ -151,23 +152,16 @@ public double estimateTupleCount( AlgMetadataQuery mq ) { // Calculate result as the sum of the input row count estimates, assuming there are any, otherwise use the superclass default. So for a no-input UDX, behave like an AbstractAlgNode; // for a one-input UDX, behave like a SingleRel; for a multi-input UDX, behave like UNION ALL. // TODO jvs 10-Sep-2007: UDX-supplied costing metadata. - if ( inputs.size() == 0 ) { + if ( inputs.isEmpty() ) { return super.estimateTupleCount( mq ); } - double nRows = 0.0; - for ( AlgNode input : inputs ) { - Double d = mq.getTupleCount( input ); - if ( d != null ) { - nRows += d; - } - } - return nRows; + return inputs.stream().map( mq::getTupleCount ).filter( Optional::isPresent ).mapToDouble( Optional::get ).sum(); // todo maybe only use the sum if all are not infinite } /** * Returns function invocation expression. - * + *
* <p>
* Within this rexCall, instances of {@link RexIndexRef} refer to entire input {@link AlgNode}s rather than their fields. * * @return function invocation expression diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Sort.java b/core/src/main/java/org/polypheny/db/algebra/core/Sort.java index c1a017fa83..a7c246aee6 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Sort.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Sort.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -37,16 +37,23 @@ import com.google.common.collect.ImmutableList; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.stream.Collectors; import lombok.Getter; import org.apache.calcite.linq4j.Ord; import org.jetbrains.annotations.Nullable; import org.polypheny.db.algebra.AlgCollation; +import org.polypheny.db.algebra.AlgCollations; import org.polypheny.db.algebra.AlgFieldCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgWriter; import org.polypheny.db.algebra.SingleAlg; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.CollationArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgOptCost; @@ -55,6 +62,7 @@ import org.polypheny.db.rex.RexNode; import org.polypheny.db.rex.RexShuttle; import org.polypheny.db.schema.trait.ModelTraitDef; +import org.polypheny.db.util.Triple; import org.polypheny.db.util.Util; @@ -137,10 +145,13 @@ public final Sort copy( AlgTraitSet traitSet, List inputs ) { @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { // Higher cost if rows are wider discourages pushing a project through a sort. - final double rowCount = mq.getTupleCount( this ); + Optional rowCount = mq.getTupleCount( this ); + if ( rowCount.isEmpty() ) { + return planner.getCostFactory().makeInfiniteCost(); + } final double bytesPerRow = getTupleType().getFieldCount() * 4; - final double cpu = Util.nLogN( rowCount ) * bytesPerRow; - return planner.getCostFactory().makeCost( rowCount, cpu, 0 ); + final double cpu = Util.nLogN( rowCount.get() ) * bytesPerRow; + return planner.getCostFactory().makeCost( rowCount.get(), cpu, 0 ); } @@ -193,5 +204,29 @@ public String algCompareString() { (fetch != null ? 
fetch.toString() : "") + "&"; } + + protected static Triple extractArgs( PolyAlgArgs args ) { + ListArg collations = args.getListArg( "order", CollationArg.class ); + RexArg limit = args.getArg( "limit", RexArg.class ); + RexArg offset = args.getArg( "offset", RexArg.class ); + return Triple.of( AlgCollations.of( collations.map( CollationArg::getColl ) ), offset.getNode(), limit.getNode() ); + } + + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + + PolyAlgArg collArg = new ListArg<>( + collation.getFieldCollations(), + CollationArg::new, + args.getDecl().canUnpackValues() ); + + args.put( "order", collArg ) + .put( "limit", new RexArg( fetch ) ) + .put( "offset", new RexArg( offset ) ); + return args; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Union.java b/core/src/main/java/org/polypheny/db/algebra/core/Union.java index 0435328f5a..6e29aac557 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Union.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Union.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,8 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.metadata.AlgMdUtil; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; @@ -82,5 +84,12 @@ public String algCompareString() { all + "&"; } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + return args.put( "all", new BooleanArg( all ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Values.java b/core/src/main/java/org/polypheny/db/algebra/core/Values.java index 5c33128126..a1f4642220 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Values.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Values.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
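// --- Hedged sketch: the "order" argument bound by Sort.bindArguments() above is a list of CollationArg
// --- wrapping AlgFieldCollation; extractArgs(...) reads it back via AlgCollations.of(...). The example
// --- below shows that round trip for a collation on field 0 ascending and field 2 descending.
// --- The AlgFieldCollation constructor and Direction constants are assumptions from the Calcite-style API.
static AlgCollation exampleCollation() {
    List<AlgFieldCollation> fields = List.of(
            new AlgFieldCollation( 0, AlgFieldCollation.Direction.ASCENDING ),
            new AlgFieldCollation( 2, AlgFieldCollation.Direction.DESCENDING ) );
    ListArg<CollationArg> order = new ListArg<>( fields, CollationArg::new ); // what bindArguments() emits
    return AlgCollations.of( order.map( CollationArg::getColl ) );           // what extractArgs() reads back
}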
@@ -35,8 +35,10 @@ import com.google.common.collect.ImmutableList; +import java.util.ArrayList; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.function.Predicate; import java.util.stream.Collectors; import lombok.Getter; @@ -45,6 +47,11 @@ import org.polypheny.db.algebra.AlgWriter; import org.polypheny.db.algebra.constant.ExplainLevel; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.PolyAlgUtils; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; +import org.polypheny.db.algebra.polyalg.arguments.StringArg; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.plan.AlgCluster; @@ -54,8 +61,10 @@ import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexDigestIncludeType; import org.polypheny.db.rex.RexLiteral; +import org.polypheny.db.rex.RexUtil; import org.polypheny.db.type.PolyTypeUtil; import org.polypheny.db.util.Pair; +import org.polypheny.db.util.ValidatorUtil; /** @@ -136,12 +145,15 @@ protected AlgDataType deriveRowType() { @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { - double dRows = mq.getTupleCount( this ); + Optional dRows = mq.getTupleCount( this ); + if ( dRows.isEmpty() ) { + return planner.getCostFactory().makeInfiniteCost(); + } // Assume CPU is negligible since values are precomputed. double dCpu = 1; double dIo = 0; - return planner.getCostFactory().makeCost( dRows, dCpu, dIo ); + return planner.getCostFactory().makeCost( dRows.get(), dCpu, dIo ); } @@ -188,5 +200,32 @@ public void childrenAccept( AlgVisitor visitor ) { // empty on purpose } + + protected static Pair>> extractArgs( PolyAlgArgs args, AlgCluster cluster ) { + List names = args.getListArg( "names", StringArg.class ).map( StringArg::getArg ); + List> tuples = PolyAlgUtils.getNestedListArgAsList( + args.getListArg( "tuples", ListArg.class ), + r -> (RexLiteral) ((RexArg) r).getNode() ); + + AlgDataType rowType = RexUtil.createStructType( cluster.getTypeFactory(), tuples.get( 0 ), names, ValidatorUtil.F_SUGGESTER ); + return Pair.of( rowType, PolyAlgUtils.toImmutableNestedList( tuples ) ); + } + + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + + args.put( "names", new ListArg<>( rowType.getFieldNames(), StringArg::new ) ); + + List> tuplesArg = new ArrayList<>(); + for ( ImmutableList tuple : getTuples() ) { + tuplesArg.add( new ListArg<>( tuple, RexArg::new ) ); + } + args.put( "tuples", new ListArg<>( tuplesArg ) ); + + return args; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/Window.java b/core/src/main/java/org/polypheny/db/algebra/core/Window.java index d5455128aa..e79ebbc51f 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/Window.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/Window.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
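// --- Hedged sketch (illustrative only): rebuilding a logical VALUES operator from the "names" and
// --- "tuples" arguments bound by Values.bindArguments() above, mirroring the enumerable counterpart
// --- later in this diff. LogicalRelValues and its constructor signature are assumptions; the Pair
// --- layout (row type, nested literal tuples) comes from extractArgs(...) above.
public static AlgNode create( PolyAlgArgs args, List<AlgNode> children, AlgCluster cluster ) {
    Pair<AlgDataType, ImmutableList<ImmutableList<RexLiteral>>> extracted = extractArgs( args, cluster );
    return new LogicalRelValues( cluster, cluster.traitSet(), extracted.left, extracted.right );
}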
@@ -38,6 +38,7 @@ import java.util.AbstractList; import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.stream.Collectors; import org.apache.calcite.linq4j.Ord; import org.polypheny.db.algebra.AlgCollation; @@ -207,12 +208,15 @@ public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { // // TODO #1. Add memory cost. // TODO #2. MIN and MAX have higher CPU cost than SUM and COUNT. - final double rowsIn = mq.getTupleCount( getInput() ); + Optional rowsIn = mq.getTupleCount( getInput() ); + if ( rowsIn.isEmpty() ) { + return planner.getCostFactory().makeInfiniteCost(); + } int count = groups.size(); for ( Group group : groups ) { count += group.aggCalls.size(); } - return planner.getCostFactory().makeCost( rowsIn, rowsIn * count, 0 ); + return planner.getCostFactory().makeCost( rowsIn.get(), rowsIn.get() * count, 0 ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/common/Transformer.java b/core/src/main/java/org/polypheny/db/algebra/core/common/Transformer.java index 58315a9644..abc6003c6c 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/common/Transformer.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/common/Transformer.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -28,11 +28,21 @@ import org.polypheny.db.algebra.core.lpg.LpgScan; import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.logical.relational.LogicalRelProject; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.algebra.polyalg.arguments.EnumArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.StringArg; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.DocumentType; +import org.polypheny.db.algebra.type.GraphType; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.schema.trait.ModelTrait; import org.polypheny.db.schema.trait.ModelTraitDef; +import org.polypheny.db.util.Quadruple; @Getter @@ -62,7 +72,7 @@ public Transformer( AlgCluster cluster, List inputs, @Nullable List( inputs ); @@ -102,4 +112,36 @@ public String algCompareString() { + inputs.stream().map( AlgNode::algCompareString ).collect( Collectors.joining( "$" ) ) + "&"; } + + protected static Quadruple, ModelTrait, ModelTrait, AlgDataType> extractArgs( PolyAlgArgs args, List inputs ) { + List names = args.getListArg( "names", StringArg.class ).map( StringArg::getArg ); + ModelTrait outModelTrait = args.getEnumArg( "out", DataModel.class ).getArg().getModelTrait(); + + ModelTrait inModelTrait = inputs.get( 0 ).getTraitSet().getTrait( ModelTraitDef.INSTANCE ); + if ( inModelTrait == null ) { + inModelTrait = ModelTrait.RELATIONAL; + } + AlgDataType type = switch ( outModelTrait.dataModel() ) { + case DOCUMENT -> DocumentType.ofId(); + case GRAPH -> GraphType.of(); + case RELATIONAL -> switch ( inModelTrait.dataModel() ) { + case DOCUMENT -> DocumentType.ofCrossRelational(); + case GRAPH -> GraphType.ofRelational(); + case RELATIONAL -> throw new 
GenericRuntimeException( "Cannot transform from RELATIONAL to RELATIONAL." ); + }; + }; + return Quadruple.of( names, inModelTrait, outModelTrait, type ); + } + + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + args.put( "out", new EnumArg<>( outModelTrait.dataModel(), ParamType.DATAMODEL_ENUM ) ); + if ( names != null ) { + args.put( "names", new ListArg<>( names, StringArg::new ) ); + } + return args; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgAggregate.java b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgAggregate.java index 29af2eed80..303f5a5fd5 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgAggregate.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgAggregate.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,6 +25,7 @@ import org.polypheny.db.algebra.core.LaxAggregateCall; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.DocumentType; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; @@ -80,4 +81,18 @@ public NodeType getNodeType() { return NodeType.AGGREGATE; } + + public static AlgDataType deriveTupleType( AlgCluster cluster, AlgDataType inputType, List groups, List aggCalls ) { + final AlgDataTypeFactory.Builder builder = cluster.getTypeFactory().builder(); + for ( LaxAggregateCall aggCall : aggCalls ) { + builder.add( aggCall.name, null, aggCall.getType( cluster ) ); + } + List fields = inputType.getFields(); + for ( RexNameRef group : groups ) { + int idx = group.getIndex().orElseThrow(); + builder.add( fields.get( idx ) ); + } + return builder.build(); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgMatch.java b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgMatch.java index 191d090c44..131638b695 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgMatch.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgMatch.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
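// --- Hedged sketch: the "out" argument bound in Transformer.bindArguments() above is an EnumArg over
// --- DataModel, and ParamType.DATAMODEL_ENUM tells the parser which enum to expect. Reading it back
// --- and mapping it to a ModelTrait follows extractArgs(...) above; the helper itself is illustrative.
static ModelTrait outTraitFor( DataModel model ) {
    // bind the data model the way bindArguments() does...
    EnumArg<DataModel> out = new EnumArg<>( model, ParamType.DATAMODEL_ENUM );
    // ...and resolve it back to a trait the way extractArgs() does
    return out.getArg().getModelTrait();
}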
@@ -22,6 +22,10 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.SingleAlg; import org.polypheny.db.algebra.logical.lpg.LogicalLpgMatch; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; @@ -92,4 +96,17 @@ public AlgNode accept( RexShuttle shuttle ) { return new LogicalLpgMatch( getCluster(), traitSet, input, exps.stream().map( e -> (RexCall) e ).toList(), names ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + PolyAlgArg matchesArg = new ListArg<>( + matches, RexArg::new, + names.stream().map( PolyString::toString ).toList(), + args.getDecl().canUnpackValues() ); + + args.put( "matches", matchesArg ); + return args; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgValues.java b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgValues.java index 6dd21f4b4f..9df34c06ef 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/lpg/LpgValues.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,15 +17,32 @@ package org.polypheny.db.algebra.core.lpg; import com.google.common.collect.ImmutableList; +import java.util.ArrayList; import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import lombok.Getter; import org.polypheny.db.algebra.AbstractAlgNode; +import org.polypheny.db.algebra.polyalg.PolyAlgUtils; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexLiteral; +import org.polypheny.db.type.PolyType; +import org.polypheny.db.type.entity.PolyString; +import org.polypheny.db.type.entity.graph.PolyDictionary; import org.polypheny.db.type.entity.graph.PolyEdge; import org.polypheny.db.type.entity.graph.PolyNode; +import org.polypheny.db.type.entity.graph.PolyPath; +import org.polypheny.db.util.Pair; +import org.polypheny.db.util.Triple; @Getter @@ -63,4 +80,87 @@ public String algCompareString() { + values.hashCode() + "&"; } + + public static Triple, Collection, ImmutableList>> extractArgs( PolyAlgArgs args, AlgCluster cluster ) { + List nodes = args.getListArg( "nodes", RexArg.class ).map( n -> ((RexLiteral) n.getNode()).value.asNode() ); + List paths = args.getListArg( "edges", RexArg.class ).map( n -> ((RexLiteral) n.getNode()).value.asPath() ); + + Map nodeLookup = new HashMap<>(); // map node names to their ids + for ( PolyNode n : nodes ) { + nodeLookup.put( n.variableName, n.id ); + } + List edges = new ArrayList<>(); + for 
( PolyPath p : paths ) { + if ( p.getEdges().size() != 1 || p.getNodes().size() != 2 ) { + throw new GenericRuntimeException( "Only one edge per entry is allowed." ); + } + PolyEdge e = p.getEdges().get( 0 ); + PolyNode fakeSource = p.getNodes().get( 0 ); + PolyNode fakeTarget = p.getNodes().get( 1 ); + edges.add( new PolyEdge( e.properties, e.labels, + nodeLookup.get( fakeSource.variableName ), + nodeLookup.get( fakeTarget.variableName ), + e.direction, e.variableName ) ); + } + + List> values = PolyAlgUtils.getNestedListArgAsList( + args.getListArg( "values", ListArg.class ), + r -> (RexLiteral) ((RexArg) r).getNode() ); + return Triple.of( nodes, edges, PolyAlgUtils.toImmutableNestedList( values ) ); + } + + + public static AlgDataType deriveTupleType( AlgCluster cluster, Collection nodes, Collection edges, ImmutableList> values ) { + AlgDataTypeFactory.Builder builder = cluster.getTypeFactory().builder(); + AlgDataType nodeType = cluster.getTypeFactory().createPolyType( PolyType.NODE ); + for ( PolyNode node : nodes ) { + String name = node.variableName == null ? "null" : node.variableName.value; + builder.add( name, null, nodeType ); + } + return builder.build(); + } + + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + + // we wrap nodes and edges in RexLiterals which allows us to use RexArgs => we can leave parsing of nodes to the PolyAlgParser + RexBuilder b = getCluster().getRexBuilder(); + + Map nodeLookup = new HashMap<>(); + for ( PolyNode n : nodes ) { + nodeLookup.put( n.id, n ); + } + + List paths = new ArrayList<>(); + for ( PolyEdge e : edges ) { + PolyNode source = nodeLookup.get( e.left ); + PolyNode target = nodeLookup.get( e.right ); + + PolyNode fakeSource = source == null ? + new PolyNode( e.left, new PolyDictionary(), List.of(), e.left ) : + new PolyNode( source.id, new PolyDictionary(), List.of(), source.variableName ); + PolyNode fakeTarget = target == null ? + new PolyNode( e.left, new PolyDictionary(), List.of(), null ) : + new PolyNode( target.id, new PolyDictionary(), List.of(), target.variableName ); + + paths.add( PolyPath.create( + List.of( Pair.of( fakeSource.variableName, fakeSource ), Pair.of( fakeTarget.variableName, fakeTarget ) ), + List.of( Pair.of( e.variableName, e ) ) ) + ); + } + + args.put( "nodes", new ListArg<>( nodes, n -> new RexArg( b.makeLiteral( n ) ) ) ); + args.put( "edges", new ListArg<>( paths, p -> new RexArg( b.makeLiteral( p ) ) ) ); + + List> valuesArg = new ArrayList<>(); + for ( ImmutableList val : values ) { + valuesArg.add( new ListArg<>( val, RexArg::new ) ); + } + args.put( "values", new ListArg<>( valuesArg ) ); + + return args; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelModify.java b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelModify.java index 72dc53cd38..13ddc04d83 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelModify.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
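// --- Hedged sketch of the round trip described in the comment above: LpgValues wraps nodes in
// --- RexLiterals so the existing Rex parsing of the PolyAlgParser can be reused, and unwraps them
// --- again on the way back in. The helper is illustrative; the calls are taken from the code above.
static PolyNode roundTripNode( PolyNode node, RexBuilder builder ) {
    RexArg serialized = new RexArg( builder.makeLiteral( node ) );  // as in bindArguments()
    return ((RexLiteral) serialized.getNode()).value.asNode();      // as in extractArgs()
}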
@@ -27,9 +27,19 @@ import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.EntityArg; +import org.polypheny.db.algebra.polyalg.arguments.EnumArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; +import org.polypheny.db.algebra.polyalg.arguments.StringArg; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.Entity; +import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgOptCost; import org.polypheny.db.plan.AlgOptUtil; @@ -38,6 +48,7 @@ import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.trait.ModelTrait; import org.polypheny.db.type.PolyTypeUtil; +import org.polypheny.db.util.Quadruple; /** @@ -176,8 +187,7 @@ public AlgWriter explainTerms( AlgWriter pw ) { @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { // REVIEW jvs: Just for now... - double rowCount = mq.getTupleCount( this ); - return planner.getCostFactory().makeCost( rowCount, 0, 0 ); + return mq.getTupleCount( this ).map( count -> planner.getCostFactory().makeCost( count, 0, 0 ) ).orElse( planner.getCostFactory().makeInfiniteCost() ); } @@ -193,5 +203,34 @@ public String algCompareString() { } + protected static Quadruple, List, Boolean> extractArgs( PolyAlgArgs args ) { + EnumArg op = args.getEnumArg( "operation", Operation.class ); + List updateColumns = args.getListArg( "targets", StringArg.class ).map( StringArg::getArg ); + List sourceExpressions = args.getListArg( "sources", RexArg.class ).map( RexArg::getNode ); + BooleanArg flattened = args.getArg( "flattened", BooleanArg.class ); + + updateColumns = updateColumns.isEmpty() ? null : updateColumns; + sourceExpressions = sourceExpressions.isEmpty() ? 
null : sourceExpressions; + return Quadruple.of( op.getArg(), updateColumns, sourceExpressions, flattened.toBool() ); + } + + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + + if ( getUpdateColumns() != null ) { + args.put( "targets", new ListArg<>( getUpdateColumns(), StringArg::new ) ); + } + if ( getSourceExpressions() != null ) { + args.put( "sources", new ListArg<>( getSourceExpressions(), RexArg::new ) ); + } + + return args.put( "table", new EntityArg( entity, Catalog.snapshot(), DataModel.RELATIONAL ) ) + .put( "operation", new EnumArg<>( getOperation(), ParamType.MODIFY_OP_ENUM ) ) + .put( "flattened", new BooleanArg( isFlattened() ) ); + } + + } diff --git a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelScan.java b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelScan.java index 870d61ce85..5786972dfc 100644 --- a/core/src/main/java/org/polypheny/db/algebra/core/relational/RelScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/core/relational/RelScan.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -29,9 +29,12 @@ import org.polypheny.db.algebra.core.Project; import org.polypheny.db.algebra.core.common.Scan; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.EntityArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.DocumentType; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.Entity; import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.plan.AlgCluster; @@ -163,4 +166,12 @@ public boolean isCrossModel() { return entity.dataModel != DataModel.RELATIONAL; } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + return args.put( "entity", new EntityArg( entity, Catalog.snapshot(), DataModel.RELATIONAL ) ); + } + + } diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableAggregate.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableAggregate.java index 037d8e84dc..20bf9d233d 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableAggregate.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableAggregate.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
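// --- Hedged sketch: optional list parameters follow the convention used in RelModify above, i.e.
// --- UPDATE targets and source expressions are only bound when present, and an empty parsed list is
// --- normalized back to null. The standalone helper is illustrative only.
static List<String> targetsOrNull( PolyAlgArgs args ) {
    List<String> targets = args.getListArg( "targets", StringArg.class ).map( StringArg::getArg );
    return targets.isEmpty() ? null : targets;
}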
@@ -58,6 +58,7 @@ import org.polypheny.db.algebra.enumerable.impl.AggAddContextImpl; import org.polypheny.db.algebra.enumerable.impl.AggResultContextImpl; import org.polypheny.db.algebra.fun.AggFunction; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.config.RuntimeConfig; @@ -69,6 +70,7 @@ import org.polypheny.db.util.BuiltInMethod; import org.polypheny.db.util.ImmutableBitSet; import org.polypheny.db.util.Pair; +import org.polypheny.db.util.Triple; import org.polypheny.db.util.Util; @@ -95,6 +97,16 @@ public EnumerableAggregate( AlgCluster cluster, AlgTraitSet traitSet, AlgNode ch } + public static EnumerableAggregate create( PolyAlgArgs args, List children, AlgCluster cluster ) { + Triple, List> extracted = extractArgs( args ); + try { + return new EnumerableAggregate( cluster, cluster.traitSet(), children.get( 0 ), false, extracted.left, extracted.middle, extracted.right ); + } catch ( InvalidAlgException e ) { + throw new RuntimeException( e ); + } + } + + @Override public EnumerableAggregate copy( AlgTraitSet traitSet, AlgNode input, boolean indicator, ImmutableBitSet groupSet, List groupSets, List aggCalls ) { try { diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableCalc.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableCalc.java index 9ef0e4a9d6..f5ab57e3d2 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableCalc.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableCalc.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -57,6 +57,7 @@ import org.polypheny.db.algebra.metadata.AlgMdCollation; import org.polypheny.db.algebra.metadata.AlgMdDistribution; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgOptCost; import org.polypheny.db.plan.AlgOptPredicateList; @@ -88,6 +89,12 @@ public EnumerableCalc( AlgCluster cluster, AlgTraitSet traitSet, AlgNode input, } + public static EnumerableCalc create( PolyAlgArgs args, List children, AlgCluster cluster ) { + RexProgram p = getProgramFromArgs( args, children.get( 0 ), cluster.getRexBuilder() ); + return new EnumerableCalc( cluster, children.get( 0 ).getTraitSet(), children.get( 0 ), p ); + } + + @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { return super.computeSelfCost( planner, mq ).multiplyBy( 10 ); diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableInterpreter.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableInterpreter.java index 293bb79368..a662bc1d02 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableInterpreter.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableInterpreter.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -42,6 +42,8 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.SingleAlg; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.DoubleArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.interpreter.Interpreter; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgOptCost; @@ -89,6 +91,11 @@ public static EnumerableInterpreter create( AlgNode input, double factor ) { } + public static EnumerableInterpreter create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return create( children.get( 0 ), args.getArg( "factor", DoubleArg.class ).getArg() ); + } + + @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { return super.computeSelfCost( planner, mq ).multiplyBy( factor ); @@ -123,5 +130,13 @@ public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { return implementor.result( physType, builder.toBlock() ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + args.put( "factor", new DoubleArg( factor ) ); + return args; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableIntersect.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableIntersect.java index 1bba0edb0a..c0800793ed 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableIntersect.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableIntersect.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,8 @@ import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Intersect; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.util.BuiltInMethod; @@ -57,6 +59,11 @@ public EnumerableIntersect( AlgCluster cluster, AlgTraitSet traitSet, List children, AlgCluster cluster ) { + return new EnumerableIntersect( cluster, cluster.traitSet(), children, args.getArg( "all", BooleanArg.class ).toBool() ); + } + + @Override public EnumerableIntersect copy( AlgTraitSet traitSet, List inputs, boolean all ) { return new EnumerableIntersect( getCluster(), traitSet, inputs, all ); diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableJoin.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableJoin.java index d6911d088e..2c7464886a 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableJoin.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableJoin.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
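// --- Hedged sketch: the static factories added throughout this diff all share the shape
// --- create( PolyAlgArgs, List<AlgNode>, AlgCluster ), so they could sit behind a common functional
// --- interface, e.g. in a registry keyed by operator name. The interface, map, and the operator names
// --- below are placeholders for illustration; how Polypheny actually wires declarations to factories
// --- is not shown in this diff.
@FunctionalInterface
interface PolyAlgFactory {
    AlgNode create( PolyAlgArgs args, List<AlgNode> children, AlgCluster cluster );
}

static final Map<String, PolyAlgFactory> FACTORIES = Map.of(
        "E_INTERSECT", EnumerableIntersect::create,
        "E_MINUS", EnumerableMinus::create );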
@@ -35,7 +35,9 @@ import com.google.common.collect.ImmutableList; +import java.util.List; import java.util.Objects; +import java.util.Optional; import java.util.Set; import org.apache.calcite.linq4j.tree.BlockBuilder; import org.apache.calcite.linq4j.tree.Expression; @@ -50,6 +52,9 @@ import org.polypheny.db.algebra.core.JoinInfo; import org.polypheny.db.algebra.metadata.AlgMdCollation; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.IntArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgOptCost; import org.polypheny.db.plan.AlgPlanner; @@ -57,6 +62,7 @@ import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.trait.ModelTrait; import org.polypheny.db.util.BuiltInMethod; +import org.polypheny.db.util.Triple; import org.polypheny.db.util.Util; @@ -88,6 +94,18 @@ public static EnumerableJoin create( AlgNode left, AlgNode right, RexNode condit } + public static EnumerableJoin create( PolyAlgArgs args, List children, AlgCluster cluster ) { + Triple, JoinAlgType> extracted = extractArgs( args ); + ImmutableList leftKeys = ImmutableList.copyOf( args.getListArg( "leftKeys", IntArg.class ).map( IntArg::getArg ) ); + ImmutableList rightKeys = ImmutableList.copyOf( args.getListArg( "rightKeys", IntArg.class ).map( IntArg::getArg ) ); + try { + return create( children.get( 0 ), children.get( 1 ), extracted.left, leftKeys, rightKeys, extracted.middle, extracted.right ); + } catch ( InvalidAlgException e ) { + throw new RuntimeException( e ); + } + } + + @Override public EnumerableJoin copy( AlgTraitSet traitSet, RexNode condition, AlgNode left, AlgNode right, JoinAlgType joinType, boolean semiJoinDone ) { final JoinInfo joinInfo = JoinInfo.of( left, right, condition ); @@ -103,8 +121,11 @@ public EnumerableJoin copy( AlgTraitSet traitSet, RexNode condition, AlgNode lef @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { - double rowCount = mq.getTupleCount( this ); - + Optional count = mq.getTupleCount( this ); + if ( count.isEmpty() ) { + return planner.getCostFactory().makeInfiniteCost(); + } + double rowCount = count.orElse( null ); // Joins can be flipped, and for many algorithms, both versions are viable and have the same cost. // To make the results stable between versions of the planner, make one of the versions slightly more expensive. 
if ( Objects.requireNonNull( joinType ) == JoinAlgType.RIGHT ) { @@ -182,5 +203,13 @@ public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { .toBlock() ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = super.bindArguments(); + return args.put( "leftKeys", new ListArg<>( leftKeys, IntArg::new ) ) + .put( "rightKeys", new ListArg<>( rightKeys, IntArg::new ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableLimit.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableLimit.java index ac5bfe0d2b..3e12487b64 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableLimit.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableLimit.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -47,6 +47,8 @@ import org.polypheny.db.algebra.metadata.AlgMdCollation; import org.polypheny.db.algebra.metadata.AlgMdDistribution; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexDynamicParam; @@ -92,6 +94,13 @@ public static EnumerableLimit create( final AlgNode input, RexNode offset, RexNo } + public static EnumerableLimit create( PolyAlgArgs args, List children, AlgCluster cluster ) { + RexArg limit = args.getArg( "limit", RexArg.class ); + RexArg offset = args.getArg( "offset", RexArg.class ); + return create( children.get( 0 ), offset.getNode(), limit.getNode() ); + } + + @Override public EnumerableLimit copy( AlgTraitSet traitSet, List newInputs ) { return new EnumerableLimit( getCluster(), traitSet, sole( newInputs ), offset, fetch ); @@ -149,5 +158,14 @@ private static Expression getExpression( RexNode offset ) { } } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + + return args.put( "limit", new RexArg( fetch ) ) + .put( "offset", new RexArg( offset ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableMergeJoin.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableMergeJoin.java index 863c4b44be..7c17690b88 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableMergeJoin.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableMergeJoin.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -35,10 +35,10 @@ import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSet; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.List; +import java.util.Optional; import java.util.Set; import org.apache.calcite.linq4j.tree.BlockBuilder; import org.apache.calcite.linq4j.tree.Expression; @@ -58,15 +58,18 @@ import org.polypheny.db.algebra.core.JoinInfo; import org.polypheny.db.algebra.metadata.AlgMdCollation; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.IntArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgOptCost; import org.polypheny.db.plan.AlgPlanner; import org.polypheny.db.plan.AlgTraitSet; -import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; import org.polypheny.db.util.BuiltInMethod; import org.polypheny.db.util.Pair; +import org.polypheny.db.util.Triple; /** @@ -81,7 +84,7 @@ public class EnumerableMergeJoin extends EquiJoin implements EnumerableAlg { } - public static EnumerableMergeJoin create( AlgNode left, AlgNode right, RexLiteral condition, ImmutableList leftKeys, ImmutableList rightKeys, JoinAlgType joinType ) throws InvalidAlgException { + public static EnumerableMergeJoin create( AlgNode left, AlgNode right, RexNode condition, ImmutableList leftKeys, ImmutableList rightKeys, Set variablesSet, JoinAlgType joinType ) throws InvalidAlgException { final AlgCluster cluster = right.getCluster(); AlgTraitSet traitSet = cluster.traitSet(); if ( traitSet.isEnabled( AlgCollationTraitDef.INSTANCE ) ) { @@ -89,7 +92,19 @@ public static EnumerableMergeJoin create( AlgNode left, AlgNode right, RexLitera final List collations = AlgMdCollation.mergeJoin( mq, left, right, leftKeys, rightKeys ); traitSet = traitSet.replace( collations ); } - return new EnumerableMergeJoin( cluster, traitSet, left, right, condition, leftKeys, rightKeys, ImmutableSet.of(), joinType ); + return new EnumerableMergeJoin( cluster, traitSet, left, right, condition, leftKeys, rightKeys, variablesSet, joinType ); + } + + + public static EnumerableMergeJoin create( PolyAlgArgs args, List children, AlgCluster cluster ) { + Triple, JoinAlgType> extracted = extractArgs( args ); + ImmutableList leftKeys = ImmutableList.copyOf( args.getListArg( "leftKeys", IntArg.class ).map( IntArg::getArg ) ); + ImmutableList rightKeys = ImmutableList.copyOf( args.getListArg( "rightKeys", IntArg.class ).map( IntArg::getArg ) ); + try { + return create( children.get( 0 ), children.get( 1 ), extracted.left, leftKeys, rightKeys, extracted.middle, extracted.right ); + } catch ( InvalidAlgException e ) { + throw new RuntimeException( e ); + } } @@ -111,8 +126,11 @@ public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { // We assume that the inputs are sorted. The price of sorting them has already been paid. The cost of the join is therefore proportional to the input and output size. 
final double rightRowCount = right.estimateTupleCount( mq ); final double leftRowCount = left.estimateTupleCount( mq ); - final double rowCount = mq.getTupleCount( this ); - final double d = leftRowCount + rightRowCount + rowCount; + Optional rowCount = mq.getTupleCount( this ); + if ( rowCount.isEmpty() ) { + return planner.getCostFactory().makeInfiniteCost(); + } + final double d = leftRowCount + rightRowCount + rowCount.get(); return planner.getCostFactory().makeCost( d, 0, 0 ); } @@ -153,5 +171,13 @@ public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { Expressions.constant( joinType.generatesNullsOnRight() ) ) ) ).toBlock() ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = super.bindArguments(); + return args.put( "leftKeys", new ListArg<>( leftKeys, IntArg::new ) ) + .put( "rightKeys", new ListArg<>( rightKeys, IntArg::new ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableMinus.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableMinus.java index 1ecb4e051a..84f027b3bb 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableMinus.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableMinus.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,8 @@ import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Minus; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.util.BuiltInMethod; @@ -57,6 +59,11 @@ public EnumerableMinus( AlgCluster cluster, AlgTraitSet traitSet, List } + public static EnumerableMinus create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return new EnumerableMinus( cluster, cluster.traitSet(), children, args.getArg( "all", BooleanArg.class ).toBool() ); + } + + @Override public EnumerableMinus copy( AlgTraitSet traitSet, List inputs, boolean all ) { return new EnumerableMinus( getCluster(), traitSet, inputs, all ); diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableProject.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableProject.java index 4e04303292..44e2022381 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableProject.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableProject.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
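// --- Hedged sketch (illustrative, not from this diff): with the EnumerableMergeJoin.create(...)
// --- signature change above, call sites now pass the correlation variables explicitly. Passing
// --- ImmutableSet.of() reproduces the value that create(...) previously hard-coded internally.
static EnumerableMergeJoin mergeJoinWithoutVariables(
        AlgNode left, AlgNode right, RexNode condition,
        ImmutableList<Integer> leftKeys, ImmutableList<Integer> rightKeys, JoinAlgType joinType ) throws InvalidAlgException {
    return EnumerableMergeJoin.create( left, right, condition, leftKeys, rightKeys, ImmutableSet.of(), joinType );
}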
@@ -40,6 +40,9 @@ import org.polypheny.db.algebra.core.Project; import org.polypheny.db.algebra.metadata.AlgMdCollation; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgOptCost; @@ -91,6 +94,12 @@ static AlgNode create( AlgNode child, List projects, List children, AlgCluster cluster ) { + ListArg projects = args.getListArg( 0, RexArg.class ); + return create( children.get( 0 ), projects.map( RexArg::getNode ), projects.map( RexArg::getAlias ) ); + } + + @Override public EnumerableProject copy( AlgTraitSet traitSet, AlgNode input, List projects, AlgDataType rowType ) { return new EnumerableProject( getCluster(), traitSet, input, projects, rowType ); diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableSemiJoin.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableSemiJoin.java index 60e3089810..766015a4d2 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableSemiJoin.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableSemiJoin.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -35,6 +35,8 @@ import com.google.common.collect.ImmutableList; +import java.util.List; +import java.util.Optional; import org.apache.calcite.linq4j.tree.BlockBuilder; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; @@ -46,6 +48,10 @@ import org.polypheny.db.algebra.core.SemiJoin; import org.polypheny.db.algebra.metadata.AlgMdCollation; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.IntArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgOptCost; import org.polypheny.db.plan.AlgPlanner; @@ -86,6 +92,15 @@ public static EnumerableSemiJoin create( AlgNode left, AlgNode right, RexNode co } + public static EnumerableSemiJoin create( PolyAlgArgs args, List children, AlgCluster cluster ) { + RexArg condition = args.getArg( "condition", RexArg.class ); + ImmutableList leftKeys = ImmutableList.copyOf( args.getListArg( "leftKeys", IntArg.class ).map( IntArg::getArg ) ); + ImmutableList rightKeys = ImmutableList.copyOf( args.getListArg( "rightKeys", IntArg.class ).map( IntArg::getArg ) ); + + return create( children.get( 0 ), children.get( 1 ), condition.getNode(), leftKeys, rightKeys ); + } + + @Override public SemiJoin copy( AlgTraitSet traitSet, RexNode condition, AlgNode left, AlgNode right, JoinAlgType joinType, boolean semiJoinDone ) { assert joinType == JoinAlgType.INNER; @@ -102,7 +117,11 @@ public SemiJoin copy( AlgTraitSet traitSet, RexNode condition, AlgNode left, Alg @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { - double rowCount = mq.getTupleCount( this ); + Optional count = mq.getTupleCount( this ); + if ( count.isEmpty() ) { + return 
planner.getCostFactory().makeInfiniteCost(); + } + double rowCount = count.get(); // Right-hand input is the "build", and hopefully small, input. final double rightRowCount = right.estimateTupleCount( mq ); @@ -142,5 +161,14 @@ public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { .toBlock() ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + return args.put( 0, new RexArg( condition ) ) + .put( "leftKeys", new ListArg<>( leftKeys, IntArg::new ) ) + .put( "rightKeys", new ListArg<>( rightKeys, IntArg::new ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableSort.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableSort.java index cffa6fb672..218235bc46 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableSort.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableSort.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -35,6 +35,7 @@ import com.google.common.collect.ImmutableList; +import java.util.List; import org.apache.calcite.linq4j.tree.BlockBuilder; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; @@ -42,6 +43,7 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgOptCost; import org.polypheny.db.plan.AlgPlanner; @@ -49,6 +51,7 @@ import org.polypheny.db.rex.RexNode; import org.polypheny.db.util.BuiltInMethod; import org.polypheny.db.util.Pair; +import org.polypheny.db.util.Triple; /** @@ -78,6 +81,12 @@ public static EnumerableSort create( AlgNode child, AlgCollation collation, RexN } + public static EnumerableSort create( PolyAlgArgs args, List children, AlgCluster cluster ) { + Triple extracted = extractArgs( args ); + return create( children.get( 0 ), extracted.left, extracted.middle, extracted.right ); + } + + @Override public EnumerableSort copy( AlgTraitSet traitSet, AlgNode newInput, AlgCollation newCollation, ImmutableList nodes, RexNode offset, RexNode fetch ) { return new EnumerableSort( getCluster(), traitSet, newInput, newCollation, offset, fetch ); diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableThetaJoin.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableThetaJoin.java index b866e72c0c..123739cacf 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableThetaJoin.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableThetaJoin.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -35,6 +35,7 @@ import com.google.common.collect.ImmutableList; +import java.util.Optional; import java.util.Set; import org.apache.calcite.linq4j.tree.BlockBuilder; import org.apache.calcite.linq4j.tree.Expression; @@ -96,7 +97,12 @@ public static EnumerableThetaJoin create( AlgNode left, AlgNode right, RexNode c @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { - double rowCount = mq.getTupleCount( this ); + Optional count = mq.getTupleCount( this ); + if ( count.isEmpty() ) { + return planner.getCostFactory().makeInfiniteCost(); + } + + double rowCount = count.get(); // Joins can be flipped, and for many algorithms, both versions are viable and have the same cost. To make the results stable between versions of the planner, // make one of the versions slightly more expensive. diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableTransformer.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableTransformer.java index 33bac7c78f..5b805f65b5 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableTransformer.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableTransformer.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -40,11 +40,14 @@ import org.apache.calcite.linq4j.tree.Types.ArrayType; import org.apache.calcite.linq4j.tree.UnaryExpression; import org.polypheny.db.adapter.java.JavaTypeFactory; +import org.polypheny.db.algebra.AlgCollations; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgWriter; import org.polypheny.db.algebra.core.common.Transformer; import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.DocumentType; @@ -62,6 +65,7 @@ import org.polypheny.db.type.entity.document.PolyDocument; import org.polypheny.db.util.BuiltInMethod; import org.polypheny.db.util.Pair; +import org.polypheny.db.util.Quadruple; @Getter @@ -79,6 +83,19 @@ public EnumerableTransformer( AlgCluster cluster, List inputs, List inputs, List names, ModelTrait inTraitSet, ModelTrait outTraitSet, AlgDataType rowType, boolean isCrossModel ) { + return new EnumerableTransformer( cluster, inputs, names, inputs.get( 0 ).getTraitSet().replace( AlgCollations.EMPTY ), inTraitSet, outTraitSet, rowType, isCrossModel ); + } + + + public static EnumerableTransformer create( PolyAlgArgs args, List children, AlgCluster cluster ) { + boolean isCrossModel = args.getArg( "isCrossModel", BooleanArg.class ).toBool(); // TODO: remove parameter and instead infer isCrossModel from children + Quadruple, ModelTrait, ModelTrait, AlgDataType> extracted = extractArgs( args, children ); + List names = extracted.a.isEmpty() ? 
null : extracted.a; + return create( cluster, children, names, extracted.b, extracted.c, extracted.d, isCrossModel ); + } + + @Override public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { if ( outModelTrait == ModelTrait.DOCUMENT ) { @@ -515,4 +532,12 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { return new EnumerableTransformer( inputs.get( 0 ).getCluster(), inputs, names, traitSet, inModelTrait, outModelTrait, rowType, isCrossModel ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = super.bindArguments(); + args.put( "isCrossModel", new BooleanArg( isCrossModel ) ); + return args; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableUnion.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableUnion.java index 251b924d4e..e9bb861de9 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableUnion.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableUnion.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,8 @@ import org.apache.calcite.linq4j.tree.Expressions; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Union; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.util.BuiltInMethod; @@ -56,6 +58,11 @@ public EnumerableUnion( AlgCluster cluster, AlgTraitSet traitSet, List } + public static EnumerableUnion create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return new EnumerableUnion( cluster, cluster.traitSet(), children, args.getArg( "all", BooleanArg.class ).toBool() ); + } + + @Override public EnumerableUnion copy( AlgTraitSet traitSet, List inputs, boolean all ) { return new EnumerableUnion( getCluster(), traitSet, inputs, all ); diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableValues.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableValues.java index 0104e98e51..4644490583 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/EnumerableValues.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
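// --- Hedged sketch of the TODO above: instead of serializing "isCrossModel", it could be inferred by
// --- comparing the children's model traits with the output trait. The helper below is illustrative
// --- only; the trait lookup mirrors Transformer.extractArgs(...) earlier in this diff.
static boolean inferCrossModel( List<AlgNode> children, ModelTrait outTrait ) {
    return children.stream()
            .map( child -> child.getTraitSet().getTrait( ModelTraitDef.INSTANCE ) )
            .anyMatch( trait -> trait != null && trait != outTrait );
}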
@@ -50,6 +50,7 @@ import org.polypheny.db.algebra.metadata.AlgMdCollation; import org.polypheny.db.algebra.metadata.AlgMdDistribution; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.plan.AlgCluster; @@ -86,6 +87,12 @@ public static EnumerableValues create( AlgCluster cluster, final AlgDataType row } + public static EnumerableValues create( PolyAlgArgs args, List children, AlgCluster cluster ) { + Pair>> extracted = extractArgs( args, cluster ); + return create( cluster, extracted.left, extracted.right ); + } + + @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { assert inputs.isEmpty(); diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/RexImpTable.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/RexImpTable.java index 9e5ec103da..1c40720801 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/RexImpTable.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/RexImpTable.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -476,7 +476,6 @@ private void defineCypherMethods() { defineMethod( OperatorRegistry.get( cypher, OperatorName.CYPHER_SET_LABELS ), BuiltInMethod.CYPHER_SET_LABELS.method, NullPolicy.NONE ); defineMethod( OperatorRegistry.get( cypher, OperatorName.CYPHER_REMOVE_LABELS ), BuiltInMethod.CYPHER_REMOVE_LABELS.method, NullPolicy.NONE ); defineMethod( OperatorRegistry.get( cypher, OperatorName.CYPHER_REMOVE_PROPERTY ), BuiltInMethod.CYPHER_REMOVE_PROPERTY.method, NullPolicy.NONE ); - defineMethod( OperatorRegistry.get( cypher, OperatorName.CYPHER_GRAPH_ONLY_LABEL ), BuiltInMethod.X_MODEL_GRAPH_ONLY_LABEL.method, NullPolicy.NONE ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/common/EnumerableCollect.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/common/EnumerableCollect.java index 7bc3e38891..372907a059 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/common/EnumerableCollect.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/common/EnumerableCollect.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
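// Illustrative sketch, not part of this patch: what the new method pair is for. Each
// operator gains a static create( PolyAlgArgs, List<AlgNode>, AlgCluster ) factory that the
// PolyAlg parser calls when rebuilding a plan, plus a bindArguments() override that emits
// the same named arguments when a plan is serialized. Round trip for EnumerableUnion,
// assuming its "all" flag is bound by bindArguments() (e.g. in the Union base class):
static EnumerableUnion roundTrip( EnumerableUnion union ) {
    PolyAlgArgs args = union.bindArguments();
    return EnumerableUnion.create( args, union.getInputs(), union.getCluster() );
}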
@@ -17,6 +17,7 @@ package org.polypheny.db.algebra.enumerable.common; +import java.util.List; import org.apache.calcite.linq4j.tree.BlockBuilder; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; @@ -28,6 +29,8 @@ import org.polypheny.db.algebra.enumerable.JavaTupleFormat; import org.polypheny.db.algebra.enumerable.PhysType; import org.polypheny.db.algebra.enumerable.PhysTypeImpl; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.StringArg; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.util.BuiltInMethod; @@ -45,6 +48,12 @@ public EnumerableCollect( AlgCluster cluster, AlgTraitSet traitSet, AlgNode chil } + public static EnumerableCollect create( PolyAlgArgs args, List children, AlgCluster cluster ) { + String fieldName = args.getArg( "field", StringArg.class ).getArg(); + return new EnumerableCollect( cluster, children.get( 0 ).getTraitSet(), children.get( 0 ), fieldName ); + } + + @Override public EnumerableCollect copy( AlgTraitSet traitSet, AlgNode newInput ) { return new EnumerableCollect( getCluster(), traitSet, newInput, fieldName ); @@ -65,5 +74,12 @@ public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { return implementor.result( physType, builder.toBlock() ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + return args.put( "field", new StringArg( fieldName ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/common/EnumerableContextSwitcher.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/common/EnumerableContextSwitcher.java index 290f314a57..9ca5c36417 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/common/EnumerableContextSwitcher.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/common/EnumerableContextSwitcher.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
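// Illustrative template with a hypothetical operator (MyAlg is not part of this patch),
// summarizing the convention applied file by file in this diff: create(...) reads every
// declared parameter out of PolyAlgArgs, bindArguments() writes the same parameters back.
public static MyAlg create( PolyAlgArgs args, List<AlgNode> children, AlgCluster cluster ) {
    String field = args.getArg( "field", StringArg.class ).getArg();
    boolean all = args.getArg( "all", BooleanArg.class ).toBool();
    return new MyAlg( cluster, children.get( 0 ).getTraitSet(), children.get( 0 ), field, all );
}

@Override
public PolyAlgArgs bindArguments() {
    return new PolyAlgArgs( getPolyAlgDeclaration() )
            .put( "field", new StringArg( field ) )
            .put( "all", new BooleanArg( all ) );
}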
@@ -29,6 +29,8 @@ import org.polypheny.db.algebra.enumerable.EnumUtils; import org.polypheny.db.algebra.enumerable.EnumerableAlg; import org.polypheny.db.algebra.enumerable.EnumerableAlgImplementor; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.util.BuiltInMethod; @@ -45,6 +47,11 @@ public EnumerableContextSwitcher( AlgNode input ) { } + public static EnumerableContextSwitcher create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return new EnumerableContextSwitcher( children.get( 0 ) ); + } + + @Override public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { final BlockBuilder builder = new BlockBuilder(); diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/common/EnumerableModifyCollect.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/common/EnumerableModifyCollect.java index bfe456274c..7c283a45a7 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/common/EnumerableModifyCollect.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/common/EnumerableModifyCollect.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -30,6 +30,8 @@ import org.polypheny.db.algebra.enumerable.JavaTupleFormat; import org.polypheny.db.algebra.enumerable.PhysType; import org.polypheny.db.algebra.enumerable.PhysTypeImpl; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.util.BuiltInMethod; @@ -45,6 +47,11 @@ public EnumerableModifyCollect( AlgCluster cluster, AlgTraitSet traitSet, List children, AlgCluster cluster ) { + return new EnumerableModifyCollect( cluster, children.get( 0 ).getTraitSet(), children, args.getArg( "all", BooleanArg.class ).toBool() ); + } + + @Override public EnumerableModifyCollect copy( AlgTraitSet traitSet, List inputs, boolean all ) { return new EnumerableModifyCollect( getCluster(), traitSet, inputs, all ); diff --git a/core/src/main/java/org/polypheny/db/algebra/enumerable/lpg/EnumerableLpgMatch.java b/core/src/main/java/org/polypheny/db/algebra/enumerable/lpg/EnumerableLpgMatch.java index 9799c48f01..e3e9ae2635 100644 --- a/core/src/main/java/org/polypheny/db/algebra/enumerable/lpg/EnumerableLpgMatch.java +++ b/core/src/main/java/org/polypheny/db/algebra/enumerable/lpg/EnumerableLpgMatch.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -35,6 +35,9 @@ import org.polypheny.db.algebra.enumerable.EnumerableAlg; import org.polypheny.db.algebra.enumerable.EnumerableAlgImplementor; import org.polypheny.db.algebra.enumerable.PhysTypeImpl; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; @@ -65,6 +68,17 @@ protected EnumerableLpgMatch( AlgCluster cluster, AlgTraitSet traits, AlgNode in } + public static EnumerableLpgMatch create( AlgNode input, List matches, List names ) { + return new EnumerableLpgMatch( input.getCluster(), input.getTraitSet(), input, matches, names ); + } + + + public static EnumerableLpgMatch create( PolyAlgArgs args, List children, AlgCluster cluster ) { + ListArg matchesArg = args.getListArg( "matches", RexArg.class ); + return create( children.get( 0 ), matchesArg.map( r -> (RexCall) r.getNode() ), matchesArg.map( r -> PolyString.of( r.getAlias() ) ) ); + } + + @Override public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { Result res = implementor.visitChild( this, 0, (EnumerableAlg) input, pref ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalBatchIterator.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalBatchIterator.java index f3df5a02ba..f67720a17a 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalBatchIterator.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalBatchIterator.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -19,6 +19,7 @@ import java.util.List; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.common.BatchIterator; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.transaction.Statement; @@ -47,6 +48,11 @@ public static LogicalBatchIterator create( AlgNode input ) { } + public static LogicalBatchIterator create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return create( children.get( 0 ) ); + } + + @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { return new LogicalBatchIterator( @@ -55,4 +61,10 @@ public AlgNode copy( AlgTraitSet traitSet, List inputs ) { inputs.get( 0 ) ); } + + @Override + public PolyAlgArgs bindArguments() { + return new PolyAlgArgs( getPolyAlgDeclaration() ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalTransformer.java b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalTransformer.java index 75975569e3..f359b5449c 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalTransformer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/common/LogicalTransformer.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
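// Illustrative sketch, not part of this patch: "matches" above is an aliased list argument,
// so a single parameter round-trips both the RexCalls and their variable names. Reading
// unpacks values and aliases separately; writing would attach the names as aliases using the
// four-argument ListArg constructor that also appears for LogicalLpgProject later in this diff.
ListArg<RexArg> matchesArg = args.getListArg( "matches", RexArg.class );
List<RexCall> matches = matchesArg.map( r -> (RexCall) r.getNode() );
List<PolyString> names = matchesArg.map( r -> PolyString.of( r.getAlias() ) );

PolyAlgArgs out = new PolyAlgArgs( getPolyAlgDeclaration() );
out.put( "matches", new ListArg<>( matches, RexArg::new,
        names.stream().map( PolyString::toString ).toList(), out.getDecl().canUnpackValues() ) );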
@@ -23,10 +23,12 @@ import org.polypheny.db.algebra.AlgWriter; import org.polypheny.db.algebra.core.common.Transformer; import org.polypheny.db.algebra.enumerable.EnumerableConvention; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.schema.trait.ModelTrait; +import org.polypheny.db.util.Quadruple; @Setter @@ -59,6 +61,12 @@ public static LogicalTransformer create( AlgCluster cluster, List input } + public static LogicalTransformer create( PolyAlgArgs args, List children, AlgCluster cluster ) { + Quadruple, ModelTrait, ModelTrait, AlgDataType> extracted = extractArgs( args, children ); + return create( cluster, children, extracted.a, extracted.b, extracted.c, extracted.d, true ); + } + + @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { return new LogicalTransformer( getCluster(), traitSet, inputs, names, inModelTrait, outModelTrait, rowType, isCrossModel ); @@ -76,5 +84,4 @@ public AlgWriter explainTerms( AlgWriter pw ) { return writer; } - } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentAggregate.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentAggregate.java index 62539459b8..c2252569e9 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentAggregate.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentAggregate.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -22,6 +22,10 @@ import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.LaxAggregateCall; import org.polypheny.db.algebra.core.document.DocumentAggregate; +import org.polypheny.db.algebra.polyalg.arguments.LaxAggArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; @@ -46,6 +50,14 @@ public static LogicalDocumentAggregate create( final AlgNode input, @Nullable Re } + public static LogicalDocumentAggregate create( PolyAlgArgs args, List children, AlgCluster cluster ) { + RexArg group = args.getArg( "group", RexArg.class ); + ListArg aggs = args.getListArg( "aggs", LaxAggArg.class ); + + return create( children.get( 0 ), (RexNameRef) group.getNode(), aggs.map( LaxAggArg::getAgg ) ); + } + + private static LogicalDocumentAggregate create_( final AlgNode input, @Nullable RexNameRef group, List aggCalls ) { final AlgCluster cluster = input.getCluster(); final AlgTraitSet traitSet = cluster.traitSetOf( Convention.NONE ); @@ -64,4 +76,16 @@ public AlgNode accept( AlgShuttle shuttle ) { return shuttle.visit( this ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + + if ( getGroup().isPresent() ) { + args.put( "group", new RexArg( getGroup().get() ) ); + } + args.put( "aggs", new ListArg<>( aggCalls, LaxAggArg::new ) ); + return args; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentFilter.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentFilter.java index 0328045dc3..425ea8cedf 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentFilter.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentFilter.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
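// Illustrative sketch, not part of this patch: optional parameters such as "group" in
// LogicalDocumentAggregate above are only bound when present. The assumption here is that
// an omitted named argument is read back as a RexArg whose getNode() is null, which is why
// the factory can pass the cast result straight to the @Nullable group parameter.
if ( getGroup().isPresent() ) {
    args.put( "group", new RexArg( getGroup().get() ) );                            // write side
}
RexNameRef group = (RexNameRef) args.getArg( "group", RexArg.class ).getNode();     // read side, null if omitted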
@@ -22,6 +22,8 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.document.DocumentFilter; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexNode; @@ -43,6 +45,12 @@ public static LogicalDocumentFilter create( AlgNode node, RexNode condition ) { } + public static LogicalDocumentFilter create( PolyAlgArgs args, List children, AlgCluster cluster ) { + RexArg condition = args.getArg( "condition", RexArg.class ); + return create( children.get( 0 ), condition.getNode() ); + } + + @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { return copy( traitSet, inputs.get( 0 ), condition ); @@ -68,4 +76,12 @@ public String toString() { '}'; } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + args.put( "condition", new RexArg( condition ) ); + return args; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java index a169cd58b7..48f60b9848 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentModify.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -23,8 +23,18 @@ import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.document.DocumentModify; import org.polypheny.db.algebra.core.relational.RelationalTransformable; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.algebra.polyalg.arguments.EntityArg; +import org.polypheny.db.algebra.polyalg.arguments.EnumArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; +import org.polypheny.db.algebra.polyalg.arguments.StringArg; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.Entity; +import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.catalog.snapshot.Snapshot; +import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexNode; @@ -44,6 +54,19 @@ public static LogicalDocumentModify create( Entity entity, AlgNode input, Operat } + public static LogicalDocumentModify create( PolyAlgArgs args, List children, AlgCluster cluster ) { + EntityArg entity = args.getArg( "entity", EntityArg.class ); + EnumArg op = args.getEnumArg( "operation", Operation.class ); + Map updates = args.getListArg( "updates", RexArg.class ) + .toStringKeyedMap( RexArg::getAlias, RexArg::getNode ); + List removes = args.getListArg( "removes", StringArg.class ).map( StringArg::getArg ); + Map renames = args.getListArg( "renames", StringArg.class ) + .toStringKeyedMap( StringArg::getAlias, StringArg::getArg ); + + return create( entity.getEntity(), children.get( 0 ), op.getArg(), updates, removes, renames ); + } + + @Override public AlgNode copy( AlgTraitSet traitSet, List inputs 
) { return new LogicalDocumentModify( traitSet, entity, inputs.get( 0 ), operation, updates, removes, renames ); @@ -61,4 +84,17 @@ public AlgNode accept( AlgShuttle shuttle ) { return shuttle.visit( this ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + + args.put( "entity", new EntityArg( entity, Catalog.snapshot(), DataModel.DOCUMENT ) ) + .put( "operation", new EnumArg<>( getOperation(), ParamType.MODIFY_OP_ENUM ) ) + .put( "updates", new ListArg<>( updates, RexArg::new ) ) + .put( "removes", new ListArg<>( removes, StringArg::new ) ) + .put( "renames", new ListArg<>( renames, StringArg::new ) ); + return args; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentProject.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentProject.java index 055148c244..478e1c8b3d 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentProject.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentProject.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -22,6 +22,10 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.document.DocumentProject; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; +import org.polypheny.db.algebra.polyalg.arguments.StringArg; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexNode; @@ -43,8 +47,23 @@ public static LogicalDocumentProject create( AlgNode node, Map } + public static LogicalDocumentProject create( AlgNode node, List includes, List includesName, List excludes ) { + return create( node, Pair.zip( includesName, includes ).stream().collect( Collectors.toMap( e -> e.left, e -> e.right ) ), excludes ); + } + + public static LogicalDocumentProject create( AlgNode node, List includes, List includesName ) { - return create( node, Pair.zip( includesName, includes ).stream().collect( Collectors.toMap( e -> e.left, e -> e.right ) ), List.of() ); + return create( node, includes, includesName, List.of() ); + } + + + public static LogicalDocumentProject create( PolyAlgArgs args, List children, AlgCluster cluster ) { + ListArg includes = args.getListArg( "includes", RexArg.class ); + ListArg excludes = args.getListArg( "excludes", StringArg.class ); + return create( children.get( 0 ), + includes.map( RexArg::getNode ), + includes.map( RexArg::getAlias ), + excludes.map( StringArg::getArg ) ); } @@ -59,4 +78,13 @@ public AlgNode accept( AlgShuttle shuttle ) { return shuttle.visit( this ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + + return args.put( "includes", new ListArg<>( includes, RexArg::new ) ) + .put( "excludes", new ListArg<>( excludes, StringArg::new ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java index 55d2adefd4..2de4edd59f 100644 --- 
a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentScan.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -23,7 +23,11 @@ import org.polypheny.db.algebra.core.document.DocumentScan; import org.polypheny.db.algebra.core.relational.RelationalTransformable; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; +import org.polypheny.db.algebra.polyalg.arguments.EntityArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.Entity; +import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgOptRule; @@ -37,7 +41,7 @@ public class LogicalDocumentScan extends DocumentScan implements Relatio * Subclass of {@link DocumentScan} not targeted at any particular engine or calling convention. */ public LogicalDocumentScan( AlgCluster cluster, AlgTraitSet traitSet, Entity document ) { - super( cluster, traitSet.replace( ModelTrait.DOCUMENT ), document ); + super( cluster, traitSet.plus( ModelTrait.DOCUMENT ), document ); } @@ -46,6 +50,11 @@ public static AlgNode create( AlgCluster cluster, Entity collection ) { } + public static AlgNode create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return create( cluster, args.getArg( 0, EntityArg.class ).getEntity() ); + } + + @Override public List getRelationalEquivalent( List values, List entities, Snapshot snapshot ) { return List.of( AlgOptRule.convert( LogicalRelScan.create( getCluster(), entities.get( 0 ) ), ModelTrait.RELATIONAL ) ); @@ -57,4 +66,11 @@ public AlgNode accept( AlgShuttle shuttle ) { return shuttle.visit( this ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + return args.put( 0, new EntityArg( entity, Catalog.snapshot(), DataModel.DOCUMENT ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentSort.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentSort.java index 3c6e5075d4..be1c1229ef 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentSort.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentSort.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
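// Illustrative sketch, not part of this patch: the map-valued parameters of
// LogicalDocumentModify a few hunks above ("updates", "renames") are serialized as aliased
// lists, the map key riding along as each entry's alias, and are read back symmetrically
// with toStringKeyedMap (assuming ListArg's map-based constructor aliases entries by key).
Map<String, RexNode> updates = args.getListArg( "updates", RexArg.class )
        .toStringKeyedMap( RexArg::getAlias, RexArg::getNode );    // alias -> key, node -> value
args.put( "updates", new ListArg<>( updates, RexArg::new ) );      // key -> alias on the way out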
@@ -19,10 +19,16 @@ import java.util.List; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgCollationTraitDef; +import org.polypheny.db.algebra.AlgCollations; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.document.DocumentAlg; import org.polypheny.db.algebra.core.document.DocumentSort; +import org.polypheny.db.algebra.polyalg.arguments.CollationArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; @@ -47,6 +53,18 @@ public static AlgNode create( AlgNode node, AlgCollation collation, List children, AlgCluster cluster ) { + ListArg collations = args.getListArg( "order", CollationArg.class ); + ListArg targets = args.getListArg( "targets", RexArg.class ); + RexArg limit = args.getArg( "limit", RexArg.class ); + RexArg offset = args.getArg( "offset", RexArg.class ); + return create( children.get( 0 ), + AlgCollations.of( collations.map( CollationArg::getColl ) ), + targets.map( RexArg::getNode ), + offset.getNode(), limit.getNode() ); + } + + @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { return new LogicalDocumentSort( inputs.get( 0 ).getCluster(), traitSet, inputs.get( 0 ), collation, fieldExps, offset, fetch ); @@ -64,4 +82,18 @@ public AlgNode accept( AlgShuttle shuttle ) { return shuttle.visit( this ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + + PolyAlgArg collArg = new ListArg<>( collation.getFieldCollations(), CollationArg::new ); + PolyAlgArg targetsArg = new ListArg<>( fieldExps, RexArg::new ); + + return args.put( "order", collArg ) + .put( "targets", targetsArg ) + .put( "limit", new RexArg( fetch ) ) + .put( "offset", new RexArg( offset ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentUnwind.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentUnwind.java index 3f4efb99a2..27dce691c3 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentUnwind.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentUnwind.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -19,6 +19,8 @@ import java.util.List; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.document.DocumentUnwind; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.StringArg; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; @@ -34,9 +36,21 @@ public static LogicalDocumentUnwind create( String path, AlgNode node ) { } + public static LogicalDocumentUnwind create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return create( args.getArg( "path", StringArg.class ).getArg(), children.get( 0 ) ); + } + + @Override public LogicalDocumentUnwind copy( AlgTraitSet traitSet, List inputs ) { return new LogicalDocumentUnwind( inputs.get( 0 ).getCluster(), traitSet, path, inputs.get( 0 ) ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + return args.put( "path", new StringArg( path ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentValues.java b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentValues.java index efe1dab90b..304ab2bbc9 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/document/LogicalDocumentValues.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,17 +21,21 @@ import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.document.DocumentValues; import org.polypheny.db.algebra.core.relational.RelationalTransformable; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; +import org.polypheny.db.algebra.polyalg.arguments.StringArg; import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.rex.RexDynamicParam; +import org.polypheny.db.schema.trait.ModelTrait; import org.polypheny.db.type.entity.document.PolyDocument; public class LogicalDocumentValues extends DocumentValues implements RelationalTransformable { - /** * Java representation of multiple documents, which can be retrieved in the original BSON format form * or in the substantiated relational form, where the documents are bundled into a BSON string @@ -75,6 +79,16 @@ public static AlgNode create( AlgCluster cluster, List documents ) } + public static AlgNode create( PolyAlgArgs args, List children, AlgCluster cluster ) { + ListArg documents = args.getListArg( "docs", StringArg.class ); + ListArg dynamic = args.getListArg( "dynamic", RexArg.class ); + final AlgTraitSet traitSet = cluster.traitSetOf( ModelTrait.DOCUMENT ).plus( Convention.NONE ); + return new LogicalDocumentValues( cluster, traitSet, + documents.map( s -> PolyDocument.fromJson( s.getArg() ).asDocument() ), + dynamic.map( r -> (RexDynamicParam) r.getNode() ) ); + } + + public static LogicalDocumentValues createOneTuple( AlgCluster cluster ) { return new LogicalDocumentValues( cluster, cluster.traitSet(), List.of( new PolyDocument() ) ); } @@ -105,4 +119,12 @@ public AlgNode accept( 
AlgShuttle shuttle ) { return shuttle.visit( this ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + return args.put( "docs", new ListArg<>( documents, d -> new StringArg( d.toJson() ) ) ) + .put( "dynamic", new ListArg<>( dynamicDocuments, RexArg::new ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgAggregate.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgAggregate.java index d0eb9f7b6d..cb54b1e6c7 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgAggregate.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgAggregate.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -22,6 +22,10 @@ import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.LaxAggregateCall; import org.polypheny.db.algebra.core.lpg.LpgAggregate; +import org.polypheny.db.algebra.polyalg.arguments.LaxAggArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; @@ -36,6 +40,22 @@ public LogicalLpgAggregate( AlgCluster cluster, AlgTraitSet traits, AlgNode chil } + public static LogicalLpgAggregate create( final AlgNode input, @NotNull List groups, List aggCalls ) { + AlgCluster cluster = input.getCluster(); + AlgTraitSet traitSet = input.getTraitSet(); + AlgDataType type = deriveTupleType( cluster, input.getTupleType(), groups, aggCalls ); + return new LogicalLpgAggregate( cluster, traitSet, input, groups, aggCalls, type ); + } + + + public static LogicalLpgAggregate create( PolyAlgArgs args, List children, AlgCluster cluster ) { + ListArg groups = args.getListArg( "groups", RexArg.class ); + ListArg aggs = args.getListArg( "aggs", LaxAggArg.class ); + + return create( children.get( 0 ), groups.map( r -> (RexNameRef) r.getNode() ), aggs.map( LaxAggArg::getAgg ) ); + } + + @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { return new LogicalLpgAggregate( inputs.get( 0 ).getCluster(), traitSet, inputs.get( 0 ), groups, aggCalls, rowType ); @@ -47,4 +67,14 @@ public AlgNode accept( AlgShuttle shuttle ) { return shuttle.visit( this ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + + args.put( "groups", new ListArg<>( groups, RexArg::new ) ); + args.put( "aggs", new ListArg<>( aggCalls, LaxAggArg::new ) ); + return args; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgFilter.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgFilter.java index 55da05bfe5..6ebcec7552 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgFilter.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgFilter.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -20,6 +20,8 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.lpg.LpgFilter; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexNode; @@ -36,6 +38,18 @@ public LogicalLpgFilter( AlgCluster cluster, AlgTraitSet traits, AlgNode input, } + public static LogicalLpgFilter create( AlgNode input, RexNode condition ) { + // TODO: modify traitset + return new LogicalLpgFilter( input.getCluster(), input.getTraitSet(), input, condition ); + } + + + public static LogicalLpgFilter create( PolyAlgArgs args, List children, AlgCluster cluster ) { + RexArg condition = args.getArg( "condition", RexArg.class ); + return create( children.get( 0 ), condition.getNode() ); + } + + @Override protected AlgNode copy( AlgTraitSet traitSet, AlgNode input, RexNode condition ) { return new LogicalLpgFilter( getCluster(), traitSet, input, condition ); @@ -53,4 +67,12 @@ public AlgNode accept( AlgShuttle shuttle ) { return shuttle.visit( this ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + args.put( "condition", new RexArg( getCondition() ) ); + return args; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgMatch.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgMatch.java index 87b7ed7d8e..a08615a73d 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgMatch.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgMatch.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -20,6 +20,9 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.lpg.LpgMatch; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.rex.RexCall; @@ -37,6 +40,19 @@ public LogicalLpgMatch( AlgCluster cluster, AlgTraitSet traits, AlgNode input, L } + public static LogicalLpgMatch create( AlgNode input, List matches, List names ) { + return new LogicalLpgMatch( input.getCluster(), input.getTraitSet(), input, matches, names ); + } + + + public static LogicalLpgMatch create( PolyAlgArgs args, List children, AlgCluster cluster ) { + ListArg matchesArg = args.getListArg( "matches", RexArg.class ); + List matches = matchesArg.map( r -> (RexCall) r.getNode() ); + List names = matchesArg.map( r -> PolyString.of( r.getAlias() ) ); + return create( children.get( 0 ), matches, names ); + } + + @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { return new LogicalLpgMatch( inputs.get( 0 ).getCluster(), traitSet, inputs.get( 0 ), matches, names ); @@ -48,4 +64,5 @@ public AlgNode accept( AlgShuttle shuttle ) { return shuttle.visit( this ); } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java index 12b46b6cac..3aea3f062b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgModify.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -24,11 +24,21 @@ import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.core.lpg.LpgModify; import org.polypheny.db.algebra.core.relational.RelationalTransformable; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.algebra.polyalg.arguments.EntityArg; +import org.polypheny.db.algebra.polyalg.arguments.EnumArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; +import org.polypheny.db.algebra.polyalg.arguments.StringArg; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.Entity; +import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.plan.Convention; import org.polypheny.db.rex.RexNode; import org.polypheny.db.type.entity.PolyString; @@ -44,6 +54,22 @@ public LogicalLpgModify( AlgCluster cluster, AlgTraitSet traits, Entity entity, } + public static LogicalLpgModify create( AlgNode input, Entity entity, Operation operation, List ids, List operations ) { + final AlgCluster cluster = input.getCluster(); + final AlgTraitSet traitSet = cluster.traitSetOf( Convention.NONE ); + return new LogicalLpgModify( cluster, traitSet, entity, input, operation, ids, operations ); + } + + + public static LogicalLpgModify create( PolyAlgArgs args, List children, AlgCluster cluster ) { + EntityArg entity = args.getArg( "entity", EntityArg.class ); + EnumArg op = args.getEnumArg( "operation", Operation.class ); + List ids = args.getListArg( "ids", StringArg.class ).map( s -> PolyString.of( s.getArg() ) ); + List operations = args.getListArg( "updates", RexArg.class ).map( RexArg::getNode ); + return create( children.get( 0 ), entity.getEntity(), op.getArg(), ids, operations ); + } + + @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { return new LogicalLpgModify( inputs.get( 0 ).getCluster(), traitSet, entity, inputs.get( 0 ), operation, ids, operations ); @@ -87,4 +113,21 @@ public AlgNode accept( AlgShuttle shuttle ) { return shuttle.visit( this ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + + args.put( "entity", new EntityArg( entity, Catalog.snapshot(), DataModel.GRAPH ) ) + .put( "operation", new EnumArg<>( getOperation(), ParamType.MODIFY_OP_ENUM ) ); + if ( ids != null ) { + args.put( "ids", new ListArg<>( ids, s -> new StringArg( s.value ) ) ); + } + if ( operations != null ) { + args.put( "updates", new ListArg<>( operations, RexArg::new ) ); + } + + return args; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgProject.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgProject.java index ec03048ae7..21169b2cf2 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgProject.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgProject.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
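// Illustrative sketch, not part of this patch: enum parameters such as "operation" in
// LogicalLpgModify above are wrapped in EnumArg together with the ParamType that tells the
// PolyAlg parser which enum to resolve the token against, and are read back via getEnumArg.
// Operation.UPDATE is assumed here purely as an example value.
args.put( "operation", new EnumArg<>( Operation.UPDATE, ParamType.MODIFY_OP_ENUM ) );
Operation op = args.getEnumArg( "operation", Operation.class ).getArg();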
@@ -22,6 +22,10 @@ import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.core.lpg.LpgProject; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; @@ -43,6 +47,17 @@ public LogicalLpgProject( AlgCluster cluster, AlgTraitSet traits, AlgNode input, } + public static LogicalLpgProject create( AlgNode input, List projects, List names ) { + return new LogicalLpgProject( input.getCluster(), input.getTraitSet(), input, projects, names ); + } + + + public static LogicalLpgProject create( PolyAlgArgs args, List children, AlgCluster cluster ) { + ListArg projects = args.getListArg( "projects", RexArg.class ); + return create( children.get( 0 ), projects.map( RexArg::getNode ), projects.map( r -> PolyString.of( r.getAlias() ) ) ); + } + + public boolean isStar() { if ( !projects.stream().allMatch( p -> p.isA( Kind.INPUT_REF ) ) ) { return false; @@ -68,4 +83,16 @@ public AlgNode accept( AlgShuttle shuttle ) { return shuttle.visit( this ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + PolyAlgArg projectsArg = new ListArg<>( projects, RexArg::new, + names.stream().map( PolyString::toString ).toList(), + args.getDecl().canUnpackValues() ); + + args.put( "projects", projectsArg ); + return args; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java index 79eb4ef070..5f6608542e 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgScan.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -26,8 +26,12 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelJoin; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.operators.OperatorName; +import org.polypheny.db.algebra.polyalg.arguments.EntityArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.Entity; +import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.plan.AlgCluster; @@ -43,11 +47,21 @@ public class LogicalLpgScan extends LpgScan implements RelationalTransfo * Subclass of {@link LpgScan} not targeted at any particular engine or calling convention. 
*/ public LogicalLpgScan( AlgCluster cluster, AlgTraitSet traitSet, Entity graph, AlgDataType rowType ) { - super( cluster, traitSet.replace( ModelTrait.GRAPH ), graph ); + super( cluster, traitSet.plus( ModelTrait.GRAPH ), graph ); this.rowType = rowType; } + public static LogicalLpgScan create( AlgCluster cluster, final Entity entity ) { + return new LogicalLpgScan( cluster, cluster.traitSet(), entity, entity.getTupleType() ); + } + + + public static LogicalLpgScan create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return create( cluster, args.getArg( 0, EntityArg.class ).getEntity() ); + } + + @Override public List getRelationalEquivalent( List inputs, List entities, Snapshot snapshot ) { assert !entities.isEmpty(); @@ -93,4 +107,13 @@ public AlgNode accept( AlgShuttle shuttle ) { return shuttle.visit( this ); } + + @Override + public PolyAlgArgs bindArguments() { + + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + return args.put( 0, + new EntityArg( entity, Catalog.snapshot(), DataModel.GRAPH ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgSort.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgSort.java index 13e231714d..bbacb6695a 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgSort.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgSort.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -18,14 +18,23 @@ import com.google.common.collect.ImmutableList; import java.math.BigDecimal; +import java.util.List; import lombok.Getter; import org.polypheny.db.algebra.AlgCollation; +import org.polypheny.db.algebra.AlgCollationTraitDef; +import org.polypheny.db.algebra.AlgCollations; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.lpg.LpgSort; +import org.polypheny.db.algebra.polyalg.arguments.CollationArg; +import org.polypheny.db.algebra.polyalg.arguments.IntArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.plan.Convention; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; @@ -43,6 +52,22 @@ public LogicalLpgSort( AlgCluster cluster, AlgTraitSet traitSet, AlgCollation co } + public static LogicalLpgSort create( AlgCollation collation, AlgNode input, Integer skip, Integer limit ) { + // TODO: traitset correctly modified? 
+ collation = AlgCollationTraitDef.INSTANCE.canonize( collation ); + AlgTraitSet traitSet = input.getTraitSet().replace( Convention.NONE ).replace( collation ); + return new LogicalLpgSort( input.getCluster(), traitSet, collation, input, skip, limit ); + } + + + public static LogicalLpgSort create( PolyAlgArgs args, List children, AlgCluster cluster ) { + ListArg collations = args.getListArg( "order", CollationArg.class ); + IntArg limit = args.getArg( "limit", IntArg.class ); + IntArg skip = args.getArg( "skip", IntArg.class ); + return create( AlgCollations.of( collations.map( CollationArg::getColl ) ), children.get( 0 ), skip.getArg(), limit.getArg() ); + } + + @Override public String algCompareString() { return getClass().getSimpleName() + "$" @@ -77,4 +102,26 @@ public AlgNode accept( AlgShuttle shuttle ) { return shuttle.visit( this ); } + + @Override + public PolyAlgArgs bindArguments() { + // We cannot use super.bindArguments() since the type for limit and skip (offset differ). + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + + PolyAlgArg collArg = new ListArg<>( + collation.getFieldCollations(), + CollationArg::new ); + + args.put( "order", collArg ); + + if ( fetch != null ) { + args.put( "limit", new IntArg( Integer.parseInt( fetch.toString() ) ) ); + } + if ( offset != null ) { + args.put( "skip", new IntArg( Integer.parseInt( offset.toString() ) ) ); + } + + return args; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgTransformer.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgTransformer.java index f807a7902e..3165bfef91 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgTransformer.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgTransformer.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -17,11 +17,17 @@ package org.polypheny.db.algebra.logical.lpg; import java.util.List; +import org.polypheny.db.algebra.AlgCollations; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.common.Modify.Operation; import org.polypheny.db.algebra.core.lpg.LpgTransformer; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.algebra.polyalg.arguments.EnumArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.GraphType; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.type.PolyType; @@ -37,6 +43,20 @@ public LogicalLpgTransformer( AlgCluster cluster, AlgTraitSet traitSet, List inputs, List operationOrder, Operation operation ) { + AlgTraitSet traitSet = inputs.get( 0 ).getTraitSet().replace( AlgCollations.EMPTY ); + AlgDataType type = GraphType.of(); + return new LogicalLpgTransformer( inputs.get( 0 ).getCluster(), traitSet, inputs, type, operationOrder, operation ); + } + + + public static LogicalLpgTransformer create( PolyAlgArgs args, List children, AlgCluster cluster ) { + EnumArg op = args.getEnumArg( "operation", Operation.class ); + List order = args.getListArg( "order", EnumArg.class ).map( e -> (PolyType) e.getArg() ); + return create( children, order, op.getArg() ); + } + + @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { return new LogicalLpgTransformer( inputs.get( 0 ).getCluster(), traitSet, inputs, rowType, operationOrder, operation ); @@ -48,4 +68,13 @@ public AlgNode accept( AlgShuttle shuttle ) { return shuttle.visit( this ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + + return args.put( "operation", new EnumArg<>( operation, ParamType.POLY_TYPE_ENUM ) ) + .put( "order", new ListArg<>( operationOrder, o -> new EnumArg( o, ParamType.POLY_TYPE_ENUM ) ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgUnion.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgUnion.java index be1667314e..5d9628d162 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgUnion.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgUnion.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -21,8 +21,11 @@ import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.SetOp; import org.polypheny.db.algebra.core.Union; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.plan.Convention; public class LogicalLpgUnion extends Union { @@ -35,6 +38,18 @@ protected LogicalLpgUnion( AlgCluster cluster, AlgTraitSet traits, List } + public static LogicalLpgUnion create( List inputs, boolean all ) { + final AlgCluster cluster = inputs.get( 0 ).getCluster(); + final AlgTraitSet traitSet = cluster.traitSetOf( Convention.NONE ); + return new LogicalLpgUnion( cluster, traitSet, inputs, all ); + } + + + public static LogicalLpgUnion create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return create( children, args.getArg( "all", BooleanArg.class ).toBool() ); + } + + @Override public SetOp copy( AlgTraitSet traitSet, List inputs, boolean all ) { return new LogicalLpgUnion( inputs.get( 0 ).getCluster(), traitSet, inputs, all ); @@ -46,4 +61,5 @@ public AlgNode accept( AlgShuttle shuttle ) { return shuttle.visit( this ); } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgUnwind.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgUnwind.java index 4c45753b03..3371d5ef8d 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgUnwind.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgUnwind.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -22,6 +22,9 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.lpg.LpgUnwind; +import org.polypheny.db.algebra.polyalg.arguments.IntArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.StringArg; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; @@ -38,6 +41,18 @@ public LogicalLpgUnwind( AlgCluster cluster, AlgTraitSet traits, AlgNode input, } + public static LogicalLpgUnwind create( AlgNode input, int index, String alias ) { + return new LogicalLpgUnwind( input.getCluster(), input.getTraitSet(), input, index, alias ); + } + + + public static LogicalLpgUnwind create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return create( children.get( 0 ), + args.getArg( "index", IntArg.class ).getArg(), + args.getArg( "alias", StringArg.class ).getArg() ); + } + + @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { return new LogicalLpgUnwind( inputs.get( 0 ).getCluster(), traitSet, inputs.get( 0 ), index, alias ); @@ -49,4 +64,12 @@ public AlgNode accept( AlgShuttle shuttle ) { return shuttle.visit( this ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + return args.put( "index", new IntArg( index ) ) + .put( "alias", new StringArg( alias ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java index cd35b8d181..2244d4cc7a 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/lpg/LogicalLpgValues.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -30,6 +30,7 @@ import org.polypheny.db.algebra.core.lpg.LpgValues; import org.polypheny.db.algebra.core.relational.RelationalTransformable; import org.polypheny.db.algebra.logical.relational.LogicalRelValues; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; @@ -50,6 +51,7 @@ import org.polypheny.db.type.entity.graph.PolyNode; import org.polypheny.db.util.Collation; import org.polypheny.db.util.Pair; +import org.polypheny.db.util.Triple; @Getter @@ -116,6 +118,13 @@ public static LogicalLpgValues create( } + public static LogicalLpgValues create( PolyAlgArgs args, List children, AlgCluster cluster ) { + Triple, Collection, ImmutableList>> extracted = extractArgs( args, cluster ); + AlgDataType type = deriveTupleType( cluster, extracted.left, extracted.middle, extracted.right ); + return new LogicalLpgValues( cluster, cluster.traitSet(), extracted.left, extracted.middle, extracted.right, type ); + } + + @Override public List getRelationalEquivalent( List values, List entities, Snapshot snapshot ) { AlgTraitSet out = traitSet.replace( ModelTrait.RELATIONAL ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalCalc.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalCalc.java index 784c6c0eaf..9efbcbeb5d 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalCalc.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalCalc.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -34,6 +34,7 @@ package org.polypheny.db.algebra.logical.relational; +import java.util.List; import java.util.Set; import org.polypheny.db.algebra.AlgCollationTraitDef; import org.polypheny.db.algebra.AlgDistributionTraitDef; @@ -44,6 +45,7 @@ import org.polypheny.db.algebra.metadata.AlgMdCollation; import org.polypheny.db.algebra.metadata.AlgMdDistribution; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.algebra.rules.FilterCalcMergeRule; import org.polypheny.db.algebra.rules.FilterToCalcRule; import org.polypheny.db.algebra.rules.ProjectToCalcRule; @@ -94,6 +96,11 @@ public static LogicalCalc create( final AlgNode input, final RexProgram program } + public static LogicalCalc create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return create( children.get( 0 ), getProgramFromArgs( args, children.get( 0 ), cluster.getRexBuilder() ) ); + } + + @Override public LogicalCalc copy( AlgTraitSet traitSet, AlgNode child, RexProgram program ) { return new LogicalCalc( getCluster(), traitSet, child, program ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalModifyCollect.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalModifyCollect.java index 4c922b1d64..687345f6a0 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalModifyCollect.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalModifyCollect.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -20,6 +20,8 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.ModifyCollect; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; @@ -50,6 +52,11 @@ public static LogicalModifyCollect create( List inputs, boolean all ) { } + public static LogicalModifyCollect create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return create( children, args.getArg( "all", BooleanArg.class ).toBool() ); + } + + @Override public LogicalModifyCollect copy( AlgTraitSet traitSet, List inputs, boolean all ) { assert traitSet.containsIfApplicable( Convention.NONE ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelAggregate.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelAggregate.java index 9637281b81..3067e5a24d 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelAggregate.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelAggregate.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -40,12 +40,14 @@ import org.polypheny.db.algebra.core.Aggregate; import org.polypheny.db.algebra.core.AggregateCall; import org.polypheny.db.algebra.core.relational.RelAlg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.algebra.rules.AggregateProjectPullUpConstantsRule; import org.polypheny.db.algebra.rules.AggregateReduceFunctionsRule; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.util.ImmutableBitSet; +import org.polypheny.db.util.Triple; /** @@ -98,6 +100,12 @@ private static LogicalRelAggregate create_( final AlgNode input, boolean indicat } + public static LogicalRelAggregate create( PolyAlgArgs args, List children, AlgCluster cluster ) { + Triple, List> extracted = extractArgs( args ); + return create( children.get( 0 ), extracted.left, extracted.middle, extracted.right ); + } + + @Override public LogicalRelAggregate copy( AlgTraitSet traitSet, AlgNode input, boolean indicator, ImmutableBitSet groupSet, List groupSets, List aggCalls ) { assert traitSet.containsIfApplicable( Convention.NONE ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelCorrelate.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelCorrelate.java index 58af886087..81213d6210 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelCorrelate.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelCorrelate.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -34,6 +34,7 @@ package org.polypheny.db.algebra.logical.relational; +import java.util.List; import org.polypheny.db.algebra.AlgInput; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; @@ -42,6 +43,13 @@ import org.polypheny.db.algebra.core.CorrelationId; import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.relational.RelAlg; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.algebra.polyalg.arguments.CorrelationArg; +import org.polypheny.db.algebra.polyalg.arguments.EnumArg; +import org.polypheny.db.algebra.polyalg.arguments.FieldArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.config.RuntimeConfig; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; @@ -114,6 +122,15 @@ public static LogicalRelCorrelate create( } + public static LogicalRelCorrelate create( PolyAlgArgs args, List children, AlgCluster cluster ) { + CorrelationArg id = args.getArg( "id", CorrelationArg.class ); + ListArg columns = args.getListArg( "columns", FieldArg.class ); + EnumArg type = args.getEnumArg( "joinType", SemiJoinType.class ); + + return create( children.get( 0 ), children.get( 1 ), id.getCorrId(), ImmutableBitSet.of( columns.map( FieldArg::getField ) ), type.getArg() ); + } + + @Override public LogicalRelCorrelate copy( AlgTraitSet traitSet, @@ -132,4 +149,16 @@ public AlgNode accept( AlgShuttle shuttle ) { return shuttle.visit( this ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + PolyAlgArg columns = new ListArg<>( requiredColumns.asList(), FieldArg::new ); + + args.put( "id", new CorrelationArg( correlationId ) ) + .put( "columns", columns ) + .put( "joinType", new EnumArg<>( getJoinType(), ParamType.SEMI_JOIN_TYPE_ENUM ) ); + return args; + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelExchange.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelExchange.java index d6e6176d93..cf9c410e3d 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelExchange.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelExchange.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -34,12 +34,19 @@ package org.polypheny.db.algebra.logical.relational; +import java.util.List; import org.polypheny.db.algebra.AlgDistribution; import org.polypheny.db.algebra.AlgDistributionTraitDef; +import org.polypheny.db.algebra.AlgDistributions; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.Exchange; import org.polypheny.db.algebra.core.relational.RelAlg; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.algebra.polyalg.arguments.EnumArg; +import org.polypheny.db.algebra.polyalg.arguments.IntArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; @@ -70,6 +77,13 @@ public static LogicalRelExchange create( AlgNode input, AlgDistribution distribu } + public static LogicalRelExchange create( PolyAlgArgs args, List children, AlgCluster cluster ) { + AlgDistribution.Type type = args.getEnumArg( "distributionType", AlgDistribution.Type.class ).getArg(); + List numbers = args.getListArg( "numbers", IntArg.class ).map( IntArg::getArg ); + return create( children.get( 0 ), AlgDistributions.getDistribution( type, numbers ) ); + } + + @Override public Exchange copy( AlgTraitSet traitSet, AlgNode newInput, AlgDistribution newDistribution ) { return new LogicalRelExchange( getCluster(), traitSet, newInput, newDistribution ); @@ -81,5 +95,13 @@ public AlgNode accept( AlgShuttle shuttle ) { return shuttle.visit( this ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + return args.put( "distributionType", new EnumArg<>( distribution.getType(), ParamType.DISTRIBUTION_TYPE_ENUM ) ) + .put( "numbers", new ListArg<>( distribution.getKeys(), IntArg::new ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelFilter.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelFilter.java index 3d6b1e821e..2cd65672ec 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelFilter.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelFilter.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
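LogicalRelExchange illustrates how composite state is flattened into primitive arguments: bindArguments() stores the distribution as an enum plus a list of key indices, and create() reassembles it via AlgDistributions. A minimal sketch of just that mapping, taken from the hunk above; the wrapper method is hypothetical and List<Integer> is an assumed type parameter.

import java.util.List;
import org.polypheny.db.algebra.AlgDistribution;
import org.polypheny.db.algebra.AlgDistributions;
import org.polypheny.db.algebra.polyalg.arguments.IntArg;
import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs;

final class DistributionArgSketch {

    // Hypothetical helper isolating the mapping used by LogicalRelExchange#create.
    static AlgDistribution fromArgs( PolyAlgArgs args ) {
        AlgDistribution.Type type = args.getEnumArg( "distributionType", AlgDistribution.Type.class ).getArg();
        List<Integer> keys = args.getListArg( "numbers", IntArg.class ).map( IntArg::getArg );
        return AlgDistributions.getDistribution( type, keys );
    }

}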
@@ -35,6 +35,7 @@ import com.google.common.collect.ImmutableSet; +import java.util.List; import java.util.Objects; import lombok.Getter; import org.polypheny.db.algebra.AlgCollationTraitDef; @@ -48,6 +49,10 @@ import org.polypheny.db.algebra.metadata.AlgMdCollation; import org.polypheny.db.algebra.metadata.AlgMdDistribution; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.CorrelationArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; @@ -102,6 +107,19 @@ public static LogicalRelFilter create( final AlgNode input, RexNode condition, I } + public static LogicalRelFilter create( PolyAlgArgs args, List children, AlgCluster cluster ) { + RexArg condition = args.getArg( "condition", RexArg.class ); + List variables = args.getListArg( "variables", CorrelationArg.class ).map( CorrelationArg::getCorrId ); + return create( children.get( 0 ), condition.getNode(), ImmutableSet.copyOf( variables ) ); + } + + + @Override + public ImmutableSet getVariablesSet() { + return variablesSet; + } + + @Override public LogicalRelFilter copy( AlgTraitSet traitSet, AlgNode input, RexNode condition ) { assert traitSet.containsIfApplicable( Convention.NONE ); @@ -120,4 +138,12 @@ public AlgWriter explainTerms( AlgWriter pw ) { return super.explainTerms( pw ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = super.bindArguments(); + return args.put( "variables", new ListArg<>( variablesSet.asList(), CorrelationArg::new ) ); + } + } + diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelIntersect.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelIntersect.java index 08eebcc0a8..2761d1f1eb 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelIntersect.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelIntersect.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -39,6 +39,8 @@ import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.Intersect; import org.polypheny.db.algebra.core.relational.RelAlg; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; @@ -69,6 +71,11 @@ public static LogicalRelIntersect create( List inputs, boolean all ) { } + public static LogicalRelIntersect create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return create( children, args.getArg( "all", BooleanArg.class ).toBool() ); + } + + @Override public LogicalRelIntersect copy( AlgTraitSet traitSet, List inputs, boolean all ) { return new LogicalRelIntersect( getCluster(), traitSet, inputs, all ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelJoin.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelJoin.java index 7982b873e7..602c085068 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelJoin.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelJoin.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,6 +34,7 @@ package org.polypheny.db.algebra.logical.relational; +import java.util.List; import java.util.Set; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; @@ -42,11 +43,14 @@ import org.polypheny.db.algebra.core.Join; import org.polypheny.db.algebra.core.JoinAlgType; import org.polypheny.db.algebra.core.relational.RelAlg; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.trait.ModelTrait; +import org.polypheny.db.util.Triple; /** @@ -116,6 +120,13 @@ public static LogicalRelJoin create( AlgNode left, AlgNode right, RexNode condit } + public static LogicalRelJoin create( PolyAlgArgs args, List children, AlgCluster cluster ) { + Triple, JoinAlgType> extracted = extractArgs( args ); + BooleanArg semiJoinDone = args.getArg( "semiJoinDone", BooleanArg.class ); + return create( children.get( 0 ), children.get( 1 ), extracted.left, extracted.middle, extracted.right, semiJoinDone.toBool() ); + } + + @Override public LogicalRelJoin copy( AlgTraitSet traitSet, RexNode conditionExpr, AlgNode left, AlgNode right, JoinAlgType joinType, boolean semiJoinDone ) { assert traitSet.containsIfApplicable( Convention.NONE ); @@ -142,4 +153,11 @@ public boolean isSemiJoinDone() { } + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = super.bindArguments(); + return args.put( "semiJoinDone", new BooleanArg( semiJoinDone ) ); + } + } + diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelMinus.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelMinus.java index fe4de0f77f..94aa0a854e 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelMinus.java +++ 
b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelMinus.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -40,6 +40,8 @@ import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.Minus; import org.polypheny.db.algebra.core.relational.RelAlg; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; @@ -78,6 +80,11 @@ public static LogicalRelMinus create( List inputs, boolean all ) { } + public static LogicalRelMinus create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return create( children, args.getArg( "all", BooleanArg.class ).toBool() ); + } + + @Override public LogicalRelMinus copy( AlgTraitSet traitSet, List inputs, boolean all ) { assert traitSet.containsIfApplicable( Convention.NONE ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelModify.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelModify.java index 8e67ce1c5e..6cf12443f7 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelModify.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelModify.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -20,12 +20,15 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.algebra.polyalg.arguments.EntityArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.catalog.entity.Entity; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.rex.RexNode; import org.polypheny.db.schema.trait.ModelTrait; +import org.polypheny.db.util.Quadruple; /** @@ -79,6 +82,13 @@ public static LogicalRelModify create( } + public static LogicalRelModify create( PolyAlgArgs args, List children, AlgCluster cluster ) { + EntityArg entity = args.getArg( "table", EntityArg.class ); + Quadruple, List, Boolean> extracted = extractArgs( args ); + return create( entity.getEntity(), children.get( 0 ), extracted.a, extracted.b, extracted.c, extracted.d ); + } + + @Override public LogicalRelModify copy( AlgTraitSet traitSet, List inputs ) { assert traitSet.containsIfApplicable( Convention.NONE ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelProject.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelProject.java index 7e08007f8b..aecff92289 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelProject.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelProject.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -43,6 +43,9 @@ import org.polypheny.db.algebra.core.relational.RelAlg; import org.polypheny.db.algebra.metadata.AlgMdCollation; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; @@ -100,6 +103,12 @@ public static LogicalRelProject create( final AlgNode input, final List children, AlgCluster cluster ) { + ListArg projects = args.getListArg( 0, RexArg.class ); + return create( children.get( 0 ), projects.map( RexArg::getNode ), projects.map( RexArg::getAlias ) ); + } + + public static LogicalRelProject identity( final AlgNode input ) { return create( input, diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java index ad2897ec90..0d149329fb 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelScan.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -41,6 +41,8 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.relational.RelScan; +import org.polypheny.db.algebra.polyalg.arguments.EntityArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.catalog.entity.Entity; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; @@ -127,5 +129,9 @@ public static LogicalRelScan create( AlgCluster cluster, final Entity entity ) { } + public static LogicalRelScan create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return create( cluster, args.getArg( 0, EntityArg.class ).getEntity() ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelSort.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelSort.java index cbc6be8842..8a864c17bc 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelSort.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelSort.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -42,10 +42,12 @@ import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.relational.RelAlg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; import org.polypheny.db.rex.RexNode; +import org.polypheny.db.util.Triple; /** @@ -82,6 +84,12 @@ public static AlgNode create( AlgNode input, List fieldExps, AlgCollati } + public static LogicalRelSort create( PolyAlgArgs args, List children, AlgCluster cluster ) { + Triple extracted = extractArgs( args ); + return create( children.get( 0 ), extracted.left, extracted.middle, extracted.right ); + } + + @Override public Sort copy( AlgTraitSet traitSet, AlgNode newInput, AlgCollation newCollation, ImmutableList fieldExps, RexNode offset, RexNode fetch ) { return new LogicalRelSort( getCluster(), traitSet, newInput, newCollation, fieldExps, offset, fetch ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelUnion.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelUnion.java index a488be758e..2bd0c928d5 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelUnion.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelUnion.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
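Arguments can be resolved by name or by position: most factories above look up the declared name (for example "all" or "condition"), while LogicalRelProject and LogicalRelScan read their single argument at index 0. A minimal sketch contrasting the two access styles; the helper class is hypothetical and the positional index is assumed to refer to the first declared parameter.

import org.polypheny.db.algebra.polyalg.arguments.BooleanArg;
import org.polypheny.db.algebra.polyalg.arguments.EntityArg;
import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs;
import org.polypheny.db.catalog.entity.Entity;

final class ArgAccessSketch {

    // Named lookup, as used by e.g. LogicalRelUnion#create.
    static boolean readAll( PolyAlgArgs args ) {
        return args.getArg( "all", BooleanArg.class ).toBool();
    }

    // Positional lookup, as used by LogicalRelScan#create.
    static Entity readEntity( PolyAlgArgs args ) {
        return args.getArg( 0, EntityArg.class ).getEntity();
    }

}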
@@ -40,6 +40,8 @@ import org.polypheny.db.algebra.AlgShuttle; import org.polypheny.db.algebra.core.Union; import org.polypheny.db.algebra.core.relational.RelAlg; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; @@ -78,6 +80,11 @@ public static LogicalRelUnion create( List inputs, boolean all ) { } + public static LogicalRelUnion create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return create( children, args.getArg( "all", BooleanArg.class ).toBool() ); + } + + @Override public LogicalRelUnion copy( AlgTraitSet traitSet, List inputs, boolean all ) { assert traitSet.containsIfApplicable( Convention.NONE ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelValues.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelValues.java index 5271331632..4fe8191bfb 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelValues.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelValues.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -44,6 +44,7 @@ import org.polypheny.db.algebra.core.relational.RelAlg; import org.polypheny.db.algebra.metadata.AlgMdCollation; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; @@ -51,6 +52,7 @@ import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.schema.trait.ModelTrait; import org.polypheny.db.type.PolyType; +import org.polypheny.db.util.Pair; /** @@ -83,6 +85,12 @@ public static LogicalRelValues create( AlgCluster cluster, final AlgDataType row } + public static LogicalRelValues create( PolyAlgArgs args, List children, AlgCluster cluster ) { + Pair>> extracted = extractArgs( args, cluster ); + return create( cluster, extracted.left, extracted.right ); + } + + @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { assert traitSet.containsIfApplicable( Convention.NONE ); diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java index b2f15c3dbb..552cf43294 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalRelViewScan.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -25,6 +25,8 @@ import org.polypheny.db.algebra.AlgCollationTraitDef; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.relational.RelScan; +import org.polypheny.db.algebra.polyalg.arguments.EntityArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.Entity; @@ -70,6 +72,11 @@ public static AlgNode create( AlgCluster cluster, final Entity entity ) { } + public static AlgNode create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return create( cluster, args.getArg( "entity", EntityArg.class ).getEntity() ); + } + + @Override public boolean containsView() { return true; diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalSortExchange.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalSortExchange.java index c9096fbee0..3de203081b 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalSortExchange.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalSortExchange.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,13 +34,22 @@ package org.polypheny.db.algebra.logical.relational; +import java.util.List; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgCollationTraitDef; +import org.polypheny.db.algebra.AlgCollations; import org.polypheny.db.algebra.AlgDistribution; import org.polypheny.db.algebra.AlgDistributionTraitDef; +import org.polypheny.db.algebra.AlgDistributions; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.SortExchange; import org.polypheny.db.algebra.core.relational.RelAlg; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.algebra.polyalg.arguments.CollationArg; +import org.polypheny.db.algebra.polyalg.arguments.EnumArg; +import org.polypheny.db.algebra.polyalg.arguments.IntArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.plan.Convention; @@ -72,9 +81,29 @@ public static LogicalSortExchange create( AlgNode input, AlgDistribution distrib } + public static LogicalSortExchange create( PolyAlgArgs args, List children, AlgCluster cluster ) { + ListArg collations = args.getListArg( "order", CollationArg.class ); + AlgDistribution.Type type = args.getEnumArg( "distributionType", AlgDistribution.Type.class ).getArg(); + List numbers = args.getListArg( "numbers", IntArg.class ).map( IntArg::getArg ); + return create( children.get( 0 ), + AlgDistributions.getDistribution( type, numbers ), + AlgCollations.of( collations.map( CollationArg::getColl ) ) ); + } + + @Override public SortExchange copy( AlgTraitSet traitSet, AlgNode newInput, AlgDistribution newDistribution, AlgCollation newCollation ) { return new LogicalSortExchange( this.getCluster(), traitSet, newInput, newDistribution, newCollation ); } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + + return args.put( "order", new ListArg<>( collation.getFieldCollations(), 
CollationArg::new ) ) + .put( "distributionType", new EnumArg<>( distribution.getType(), ParamType.DISTRIBUTION_TYPE_ENUM ) ) + .put( "numbers", new ListArg<>( distribution.getKeys(), IntArg::new ) ); + } + } diff --git a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalWindow.java b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalWindow.java index deee50587f..fbb45a737f 100644 --- a/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalWindow.java +++ b/core/src/main/java/org/polypheny/db/algebra/logical/relational/LogicalWindow.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -46,10 +46,15 @@ import java.util.Map; import java.util.Objects; import org.apache.calcite.linq4j.Ord; +import org.apache.commons.lang3.NotImplementedException; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Window; import org.polypheny.db.algebra.core.relational.RelAlg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; +import org.polypheny.db.algebra.polyalg.arguments.WindowGroupArg; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; @@ -276,6 +281,13 @@ public RexNode visitLocalRef( RexLocalRef localRef ) { } + public static LogicalWindow create( PolyAlgArgs args, List children, AlgCluster cluster ) { + List constants = args.getListArg( "constants", RexArg.class ).map( RexArg::getNode ); + List groups = args.getListArg( "groups", WindowGroupArg.class ).map( WindowGroupArg::getGroup ); + throw new NotImplementedException( "Creation of LogicalWindow from PolyAlgArgs is not yet supported" ); + } + + private static List toInputRefs( final List operands ) { return new AbstractList() { @Override @@ -298,6 +310,14 @@ public RexNode get( int index ) { } + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + return args.put( "constants", new ListArg<>( constants, RexArg::new ) ) + .put( "groups", new ListArg<>( groups, WindowGroupArg::new ) ); + } + + /** * Group specification. All windowed aggregates over the same window (regardless of how it is specified, in terms of a named window or specified attribute by attribute) * will end up with the same window key. diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistinctRowCount.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistinctRowCount.java index adcb1b066d..8b3c356986 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistinctRowCount.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdDistinctRowCount.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -80,16 +80,18 @@ public MetadataDef getDef() { * * @see AlgMetadataQuery#getDistinctRowCount(AlgNode, ImmutableBitSet, RexNode) */ + @SuppressWarnings("unused")//used by codegen public Double getDistinctRowCount( AlgNode alg, AlgMetadataQuery mq, ImmutableBitSet groupKey, RexNode predicate ) { // REVIEW zfong: Broadbase code does not take into consideration selectivity of predicates passed in. Also, they assume the rows are unique even if the table is not boolean uniq = AlgMdUtil.areColumnsDefinitelyUnique( mq, alg, groupKey ); if ( uniq ) { - return NumberUtil.multiply( mq.getTupleCount( alg ), mq.getSelectivity( alg, predicate ) ); + return NumberUtil.multiply( mq.getTupleCount( alg ).orElse( Double.MAX_VALUE ), mq.getSelectivity( alg, predicate ) ); } return null; } + @SuppressWarnings("unused")//used by codegen public Double getDistinctRowCount( Union alg, AlgMetadataQuery mq, ImmutableBitSet groupKey, RexNode predicate ) { double rowCount = 0.0; int[] adjustments = new int[alg.getTupleType().getFieldCount()]; @@ -258,7 +260,7 @@ public Double getDistinctRowCount( Project alg, AlgMetadataQuery mq, ImmutableBi distinctRowCount *= subRowCount; } - return AlgMdUtil.numDistinctVals( distinctRowCount, mq.getTupleCount( alg ) ); + return AlgMdUtil.numDistinctVals( distinctRowCount, mq.getTupleCount( alg ).orElse( Double.MAX_VALUE ) ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdPercentageOriginalRows.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdPercentageOriginalRows.java index 991819b501..8ef9f7a45f 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdPercentageOriginalRows.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdPercentageOriginalRows.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -70,12 +70,14 @@ public MetadataDef getDef() { } + @SuppressWarnings("unused") // used by codegen public Double getPercentageOriginalRows( Aggregate alg, AlgMetadataQuery mq ) { // REVIEW jvs: The assumption here seems to be that aggregation does not apply any filtering, so it does not modify the percentage. That's very much oversimplified. return mq.getPercentageOriginalRows( alg.getInput() ); } + @SuppressWarnings("unused") // used by codegen public Double getPercentageOriginalRows( Union alg, AlgMetadataQuery mq ) { double numerator = 0.0; double denominator = 0.0; @@ -87,7 +89,7 @@ public Double getPercentageOriginalRows( Union alg, AlgMetadataQuery mq ) { // case where a huge table has been completely filtered away. for ( AlgNode input : alg.getInputs() ) { - double rowCount = mq.getTupleCount( input ); + double rowCount = mq.getTupleCount( input ).orElse( Double.MAX_VALUE ); double percentage = mq.getPercentageOriginalRows( input ); if ( percentage != 0.0 ) { denominator += rowCount / percentage; @@ -99,6 +101,7 @@ public Double getPercentageOriginalRows( Union alg, AlgMetadataQuery mq ) { } + @SuppressWarnings("unused") // used by codegen public Double getPercentageOriginalRows( Join alg, AlgMetadataQuery mq ) { // Assume any single-table filter conditions have already been pushed down. 
@@ -132,8 +135,7 @@ public Double getPercentageOriginalRows( AlgNode alg, AlgMetadataQuery mq ) { } // Compute product of percentage filtering from this alg (assuming any filtering is the effect of single-table filters) with the percentage filtering performed by the child. - Double algPercentage = - quotientForPercentage( mq.getTupleCount( alg ), mq.getTupleCount( child ) ); + Double algPercentage = quotientForPercentage( mq.getTupleCount( alg ).orElse( Double.MAX_VALUE ), mq.getTupleCount( child ).orElse( Double.MAX_VALUE ) ); if ( algPercentage == null ) { return null; } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdPopulationSize.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdPopulationSize.java index 149b382c8b..bfc8273871 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdPopulationSize.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdPopulationSize.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -122,6 +122,7 @@ public Double getPopulationSize( Values alg, AlgMetadataQuery mq, ImmutableBitSe } + @SuppressWarnings("unused") // used by codegen public Double getPopulationSize( Project alg, AlgMetadataQuery mq, ImmutableBitSet groupKey ) { ImmutableBitSet.Builder baseCols = ImmutableBitSet.builder(); ImmutableBitSet.Builder projCols = ImmutableBitSet.builder(); @@ -147,7 +148,7 @@ public Double getPopulationSize( Project alg, AlgMetadataQuery mq, ImmutableBitS } // REVIEW zfong: Broadbase did not have the call to numDistinctVals. This is needed; otherwise, population can be larger than the number of rows in the AlgNode. - return AlgMdUtil.numDistinctVals( population, mq.getTupleCount( alg ) ); + return AlgMdUtil.numDistinctVals( population, mq.getTupleCount( alg ).orElse( Double.MAX_VALUE ) ); } @@ -156,13 +157,14 @@ public Double getPopulationSize( Project alg, AlgMetadataQuery mq, ImmutableBitS * * @see AlgMetadataQuery#getPopulationSize(AlgNode, ImmutableBitSet) */ + @SuppressWarnings("unused") //used by codegen public Double getPopulationSize( AlgNode alg, AlgMetadataQuery mq, ImmutableBitSet groupKey ) { // if the keys are unique, return the row count; otherwise, we have no further information on which to return any legitimate value // REVIEW zfong: Broadbase code returns the product of each unique key, which would result in the population being larger than the total rows in the relnode boolean uniq = AlgMdUtil.areColumnsDefinitelyUnique( mq, alg, groupKey ); if ( uniq ) { - return mq.getTupleCount( alg ); + return mq.getTupleCount( alg ).orElse( Double.MAX_VALUE ); } return null; diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdSelectivity.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdSelectivity.java index eafd7c1146..1af64e2154 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdSelectivity.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdSelectivity.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -36,6 +36,7 @@ import java.util.ArrayList; import java.util.List; +import java.util.Optional; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.Aggregate; import org.polypheny.db.algebra.core.Filter; @@ -72,7 +73,7 @@ public MetadataDef getDef() { public Double getSelectivity( Union alg, AlgMetadataQuery mq, RexNode predicate ) { - if ( (alg.getInputs().size() == 0) || (predicate == null) ) { + if ( (alg.getInputs().isEmpty()) || (predicate == null) ) { return 1.0; } @@ -81,8 +82,8 @@ public Double getSelectivity( Union alg, AlgMetadataQuery mq, RexNode predicate int[] adjustments = new int[alg.getTupleType().getFieldCount()]; RexBuilder rexBuilder = alg.getCluster().getRexBuilder(); for ( AlgNode input : alg.getInputs() ) { - Double nRows = mq.getTupleCount( input ); - if ( nRows == null ) { + Optional nRows = mq.getTupleCount( input ); + if ( nRows.isEmpty() ) { return null; } @@ -90,8 +91,8 @@ public Double getSelectivity( Union alg, AlgMetadataQuery mq, RexNode predicate RexNode modifiedPred = predicate.accept( new AlgOptUtil.RexInputConverter( rexBuilder, null, input.getTupleType().getFields(), adjustments ) ); double sel = mq.getSelectivity( input, modifiedPred ); - sumRows += nRows; - sumSelectedRows += nRows * sel; + sumRows += nRows.get(); + sumSelectedRows += nRows.get() * sel; } if ( sumRows < 1.0 ) { diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdTupleCount.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdTupleCount.java index 4564d40f00..ddc20d4ff4 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdTupleCount.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdTupleCount.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -34,6 +34,7 @@ package org.polypheny.db.algebra.metadata; +import java.util.Optional; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.SingleAlg; @@ -87,44 +88,30 @@ public Double getTupleCount( AlgNode alg, AlgMetadataQuery mq ) { public Double getTupleCount( AlgSubset subset, AlgMetadataQuery mq ) { - return mq.getTupleCount( Util.first( subset.getBest(), subset.getOriginal() ) ); + return mq.getTupleCount( Util.first( subset.getBest(), subset.getOriginal() ) ).orElse( Double.MAX_VALUE ); } public Double getTupleCount( Union alg, AlgMetadataQuery mq ) { double rowCount = 0.0; for ( AlgNode input : alg.getInputs() ) { - Double partialRowCount = mq.getTupleCount( input ); - if ( partialRowCount == null ) { + Optional partialRowCount = mq.getTupleCount( input ); + if ( partialRowCount.isEmpty() ) { return null; } - rowCount += partialRowCount; + rowCount += partialRowCount.get(); } return rowCount; } public Double getTupleCount( Intersect alg, AlgMetadataQuery mq ) { - Double rowCount = null; - for ( AlgNode input : alg.getInputs() ) { - Double partialRowCount = mq.getTupleCount( input ); - if ( rowCount == null || partialRowCount != null && partialRowCount < rowCount ) { - rowCount = partialRowCount; - } - } - return rowCount; + return alg.getInputs().stream().map( mq::getTupleCount ).filter( Optional::isPresent ).map( Optional::get ).reduce( null, ( a, b ) -> a != null ? 
Double.min( a, b ) : b ); } public Double getTupleCount( Minus alg, AlgMetadataQuery mq ) { - Double rowCount = null; - for ( AlgNode input : alg.getInputs() ) { - Double partialRowCount = mq.getTupleCount( input ); - if ( rowCount == null || partialRowCount != null && partialRowCount < rowCount ) { - rowCount = partialRowCount; - } - } - return rowCount; + return alg.getInputs().stream().map( mq::getTupleCount ).filter( Optional::isPresent ).map( Optional::get ).reduce( null, ( a, b ) -> a != null ? Double.min( a, b ) : b ); } @@ -139,15 +126,16 @@ public Double getTupleCount( Calc alg, AlgMetadataQuery mq ) { public Double getTupleCount( Project alg, AlgMetadataQuery mq ) { - return mq.getTupleCount( alg.getInput() ); + return mq.getTupleCount( alg.getInput() ).orElse( Double.MAX_VALUE ); } public Double getTupleCount( Sort alg, AlgMetadataQuery mq ) { - Double rowCount = mq.getTupleCount( alg.getInput() ); - if ( rowCount == null ) { + Optional count = mq.getTupleCount( alg.getInput() ); + if ( count.isEmpty() ) { return null; } + double rowCount = count.get(); if ( alg.offset instanceof RexDynamicParam ) { return rowCount; } @@ -168,10 +156,12 @@ public Double getTupleCount( Sort alg, AlgMetadataQuery mq ) { public Double getTupleCount( EnumerableLimit alg, AlgMetadataQuery mq ) { - Double rowCount = mq.getTupleCount( alg.getInput() ); - if ( rowCount == null ) { + Optional count = mq.getTupleCount( alg.getInput() ); + if ( count.isEmpty() ) { return null; } + double rowCount = count.get(); + if ( alg.offset instanceof RexDynamicParam ) { return rowCount; } @@ -193,7 +183,7 @@ public Double getTupleCount( EnumerableLimit alg, AlgMetadataQuery mq ) { // Covers Converter, Interpreter public Double getTupleCount( SingleAlg alg, AlgMetadataQuery mq ) { - return mq.getTupleCount( alg.getInput() ); + return mq.getTupleCount( alg.getInput() ).orElse( Double.MAX_VALUE ); } @@ -208,7 +198,7 @@ public Double getTupleCount( SemiJoin alg, AlgMetadataQuery mq ) { return NumberUtil.multiply( mq.getSelectivity( alg.getLeft(), semiJoinSelectivity ), - mq.getTupleCount( alg.getLeft() ) ); + mq.getTupleCount( alg.getLeft() ).orElse( Double.MAX_VALUE ) ); } @@ -218,7 +208,7 @@ public Double getTupleCount( Aggregate alg, AlgMetadataQuery mq ) { // rowCount is the cardinality of the group by columns Double distinctRowCount = mq.getDistinctRowCount( alg.getInput(), groupKey, null ); if ( distinctRowCount == null ) { - distinctRowCount = mq.getTupleCount( alg.getInput() ) / 10; + distinctRowCount = mq.getTupleCount( alg.getInput() ).orElse( Double.MAX_VALUE ) / 10; } // Grouping sets multiply @@ -228,7 +218,7 @@ public Double getTupleCount( Aggregate alg, AlgMetadataQuery mq ) { } - public Double getTupleCount( RelScan alg, AlgMetadataQuery mq ) { + public Double getTupleCount( RelScan alg, AlgMetadataQuery mq ) { return alg.estimateTupleCount( mq ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdUtil.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdUtil.java index ab069a8a05..e4bc8f10e5 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdUtil.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMdUtil.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
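The metadata changes in this part of the diff all follow from AlgMetadataQuery#getTupleCount now returning Optional<Double> instead of a nullable Double. Call sites either propagate the absence, as the Sort handler above does, or fall back to Double.MAX_VALUE as a pessimistic bound. A minimal sketch of both consumer styles; the wrapper class and method names are hypothetical.

import java.util.Optional;
import org.polypheny.db.algebra.AlgNode;
import org.polypheny.db.algebra.metadata.AlgMetadataQuery;

final class TupleCountSketch {

    // Propagates "unknown" to the caller, mirroring AlgMdTupleCount#getTupleCount( Sort, ... ).
    static Double scaled( AlgNode input, AlgMetadataQuery mq, double factor ) {
        Optional<Double> count = mq.getTupleCount( input );
        if ( count.isEmpty() ) {
            return null; // no reliable estimate available
        }
        return count.get() * factor;
    }

    // Falls back to a pessimistic bound, mirroring the .orElse( Double.MAX_VALUE ) call sites above.
    static double pessimistic( AlgNode input, AlgMetadataQuery mq ) {
        return mq.getTupleCount( input ).orElse( Double.MAX_VALUE );
    }

}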
@@ -40,6 +40,7 @@ import java.util.ArrayList; import java.util.LinkedHashSet; import java.util.List; +import java.util.Optional; import java.util.Set; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; @@ -489,7 +490,7 @@ public static Double getJoinPopulationSize( AlgMetadataQuery mq, AlgNode joinRel mq.getPopulationSize( left, leftMask.build() ), mq.getPopulationSize( right, rightMask.build() ) ); - return numDistinctVals( population, mq.getTupleCount( joinRel ) ); + return numDistinctVals( population, mq.getTupleCount( joinRel ).orElse( Double.MAX_VALUE ) ); } @@ -548,7 +549,7 @@ public static Double getJoinDistinctRowCount( AlgMetadataQuery mq, AlgNode joinR mq.getDistinctRowCount( right, rightMask.build(), rightPred ) ); } - return AlgMdUtil.numDistinctVals( distRowCount, mq.getTupleCount( joinRel ) ); + return AlgMdUtil.numDistinctVals( distRowCount, mq.getTupleCount( joinRel ).orElse( Double.MAX_VALUE ) ); } @@ -558,7 +559,7 @@ public static Double getJoinDistinctRowCount( AlgMetadataQuery mq, AlgNode joinR public static double getUnionAllRowCount( AlgMetadataQuery mq, Union alg ) { double rowCount = 0; for ( AlgNode input : alg.getInputs() ) { - rowCount += mq.getTupleCount( input ); + rowCount += mq.getTupleCount( input ).orElse( Double.MAX_VALUE ); } return rowCount; } @@ -570,9 +571,14 @@ public static double getUnionAllRowCount( AlgMetadataQuery mq, Union alg ) { public static double getMinusRowCount( AlgMetadataQuery mq, Minus minus ) { // REVIEW jvs: I just pulled this out of a hat. final List inputs = minus.getInputs(); - double dRows = mq.getTupleCount( inputs.get( 0 ) ); + Optional rows = mq.getTupleCount( inputs.get( 0 ) ); + if ( rows.isEmpty() ) { + return Double.MAX_VALUE; + } + + double dRows = 0; for ( int i = 1; i < inputs.size(); i++ ) { - dRows -= 0.5 * mq.getTupleCount( inputs.get( i ) ); + dRows -= 0.5 * mq.getTupleCount( inputs.get( i ) ).orElse( Double.MAX_VALUE ); } if ( dRows < 0 ) { dRows = 0; @@ -586,18 +592,18 @@ public static double getMinusRowCount( AlgMetadataQuery mq, Minus minus ) { */ public static Double getJoinRowCount( AlgMetadataQuery mq, Join join, RexNode condition ) { // Row count estimates of 0 will be rounded up to 1. So, use maxRowCount where the product is very small. 
- final Double left = mq.getTupleCount( join.getLeft() ); - final Double right = mq.getTupleCount( join.getRight() ); - if ( left == null || right == null ) { + Optional left = mq.getTupleCount( join.getLeft() ); + Optional right = mq.getTupleCount( join.getRight() ); + if ( left.isEmpty() || right.isEmpty() ) { return null; } - if ( left <= 1D || right <= 1D ) { + if ( left.get() <= 1D || right.get() <= 1D ) { Double max = mq.getMaxRowCount( join ); if ( max != null && max <= 1D ) { return max; } } - double product = left * right; + double product = left.get() * right.get(); // TODO: correlation factor return product * mq.getSelectivity( join, condition ); @@ -609,11 +615,11 @@ public static Double getJoinRowCount( AlgMetadataQuery mq, Join join, RexNode co */ public static Double getSemiJoinRowCount( AlgMetadataQuery mq, AlgNode left, AlgNode right, JoinAlgType joinType, RexNode condition ) { // TODO: correlation factor - final Double leftCount = mq.getTupleCount( left ); - if ( leftCount == null ) { + Optional leftCount = mq.getTupleCount( left ); + if ( leftCount.isEmpty() ) { return null; } - return leftCount * RexUtil.getSelectivity( condition ); + return leftCount.get() * RexUtil.getSelectivity( condition ); } @@ -631,7 +637,7 @@ public static double estimateFilteredRows( AlgNode child, RexProgram program, Al public static double estimateFilteredRows( AlgNode child, RexNode condition, AlgMetadataQuery mq ) { - return mq.getTupleCount( child ) * mq.getSelectivity( child, condition ); + return mq.getTupleCount( child ).map( count -> count * mq.getSelectivity( child, condition ) ).orElse( Double.MAX_VALUE ); } @@ -687,21 +693,21 @@ public Double visitIndexRef( RexIndexRef var ) { if ( distinctRowCount == null ) { return null; } else { - return numDistinctVals( distinctRowCount, mq.getTupleCount( alg ) ); + return numDistinctVals( distinctRowCount, mq.getTupleCount( alg ).orElse( Double.MAX_VALUE ) ); } } @Override public Double visitLiteral( RexLiteral literal ) { - return numDistinctVals( 1.0, mq.getTupleCount( alg ) ); + return numDistinctVals( 1.0, mq.getTupleCount( alg ).orElse( Double.MAX_VALUE ) ); } @Override public Double visitCall( RexCall call ) { Double distinctRowCount; - Double rowCount = mq.getTupleCount( alg ); + Double rowCount = mq.getTupleCount( alg ).orElse( Double.MAX_VALUE ); if ( call.isA( Kind.MINUS_PREFIX ) ) { distinctRowCount = cardOfProjExpr( mq, alg, call.getOperands().get( 0 ) ); } else if ( call.isA( ImmutableList.of( Kind.PLUS, Kind.MINUS ) ) ) { diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMetadataQuery.java b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMetadataQuery.java index 25bb6529f5..7fc7d2af42 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMetadataQuery.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/AlgMetadataQuery.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -43,7 +43,9 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Optional; import java.util.Set; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgDistribution; @@ -66,7 +68,7 @@ * *

 * <ol>
 * <li>Add a static method getXyz specification to this class.
- * <li>Add unit tests to {@code org.polypheny.db.test.RelMetadataTest}.
+ * <li>Add unit tests to {@link AlgMetadataProvider}.
 * <li>Write a new provider class RelMdXyz in this package. Follow the pattern from an existing class such as {@link AlgMdColumnOrigins}, overloading on all of the logical algebra expressions to which the query applies.
 * <li>Add a {@code SOURCE} static member, similar to {@link AlgMdColumnOrigins#SOURCE}.
 * <li>Register the {@code SOURCE} object in {@link DefaultAlgMetadataProvider}.
  12. @@ -227,16 +229,16 @@ public Multimap, AlgNode> getNodeTypes( AlgNode alg ) { * @param alg the algebra expression * @return estimated tuple count, or null if no reliable estimate can be determined */ - public Double getTupleCount( AlgNode alg ) { + public Optional getTupleCount( AlgNode alg ) { for ( ; ; ) { try { Double result = rowCountHandler.getTupleCount( alg, this ); - return validateResult( result ); + return Optional.ofNullable( validateResult( result ) ); } catch ( JaninoRelMetadataProvider.NoHandler e ) { rowCountHandler = revise( e.algClass, TupleCount.DEF ); } catch ( CyclicMetadataException e ) { log.warn( "Cyclic metadata detected while computing row count for {}", alg ); - return null; + return Optional.empty(); } } } @@ -288,6 +290,8 @@ public AlgOptCost getCumulativeCost( AlgNode alg ) { return cumulativeCostHandler.getCumulativeCost( alg, this ); } catch ( JaninoRelMetadataProvider.NoHandler e ) { cumulativeCostHandler = revise( e.algClass, BuiltInMetadata.CumulativeCost.DEF ); + } catch ( CyclicMetadataException e ) { + return alg.getCluster().getPlanner().getCostFactory().makeInfiniteCost(); } } } @@ -305,6 +309,8 @@ public AlgOptCost getNonCumulativeCost( AlgNode alg ) { return nonCumulativeCostHandler.getNonCumulativeCost( alg, this ); } catch ( JaninoRelMetadataProvider.NoHandler e ) { nonCumulativeCostHandler = revise( e.algClass, BuiltInMetadata.NonCumulativeCost.DEF ); + } catch ( CyclicMetadataException e ) { + return alg.getCluster().getPlanner().getCostFactory().makeInfiniteCost(); } } } @@ -404,7 +410,7 @@ public Entity getTableOrigin( AlgNode alg ) { return null; } final Set colOrigins = getColumnOrigins( alg, 0 ); - if ( colOrigins == null || colOrigins.size() == 0 ) { + if ( colOrigins == null || colOrigins.isEmpty() ) { return null; } return colOrigins.iterator().next().getOriginTable(); @@ -807,6 +813,7 @@ private static boolean isNonNegative( Double result, boolean fail ) { } + @Nullable private static Double validateResult( Double result ) { if ( result == null ) { return null; diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/CyclicMetadataException.java b/core/src/main/java/org/polypheny/db/algebra/metadata/CyclicMetadataException.java index efbbaaffed..3bcfa00ffa 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/CyclicMetadataException.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/CyclicMetadataException.java @@ -34,22 +34,20 @@ package org.polypheny.db.algebra.metadata; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; + /** * Exception that indicates that a cycle has been detected while computing metadata. */ -public class CyclicMetadataException extends RuntimeException { - - /** - * Singleton instance. Since this exception is thrown for signaling purposes, rather than on an actual error, re-using a singleton instance saves the effort of constructing an exception instance. - */ - public static final CyclicMetadataException INSTANCE = new CyclicMetadataException(); +public class CyclicMetadataException extends GenericRuntimeException { /** * Creates a CyclicMetadataException. + * Has to be {@code public} used by code generation. 
*/ - private CyclicMetadataException() { - super(); + public CyclicMetadataException( String message ) { + super( message ); } } diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/JaninoRelMetadataProvider.java b/core/src/main/java/org/polypheny/db/algebra/metadata/JaninoRelMetadataProvider.java index 6d5832cd0d..80535de3a3 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/JaninoRelMetadataProvider.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/JaninoRelMetadataProvider.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -358,9 +358,9 @@ private static MetadataHandler load3( MetadataDef def .append( " if (v == " ) .append( NullSentinel.class.getName() ) .append( ".ACTIVE) {\n" ) - .append( " throw " ) + .append( " throw new " ) .append( CyclicMetadataException.class.getName() ) - .append( ".INSTANCE;\n" ) + .append( "(\"failed during load3.\");\n" ) .append( " }\n" ) .append( " if (v == " ) .append( NullSentinel.class.getName() ) diff --git a/core/src/main/java/org/polypheny/db/algebra/metadata/ReflectiveAlgMetadataProvider.java b/core/src/main/java/org/polypheny/db/algebra/metadata/ReflectiveAlgMetadataProvider.java index 6a4382a5d6..109eb626ce 100644 --- a/core/src/main/java/org/polypheny/db/algebra/metadata/ReflectiveAlgMetadataProvider.java +++ b/core/src/main/java/org/polypheny/db/algebra/metadata/ReflectiveAlgMetadataProvider.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -53,6 +53,7 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.rex.RexNode; import org.polypheny.db.util.BuiltInMethod; import org.polypheny.db.util.ImmutableNullableList; @@ -173,14 +174,15 @@ private static AlgMetadataProvider reflectiveSource( final MetadataHandler ta } key1 = List.of( args2 ); } - if ( mq.map.put( key1, NullSentinel.INSTANCE ) != null ) { - throw CyclicMetadataException.INSTANCE; + Object value = mq.map.put( key1, NullSentinel.INSTANCE ); + if ( value != null ) { + throw new CyclicMetadataException( String.format( "Already found key %s with value %s", key1, value ) ); } try { return handlerMethod.invoke( target, args1 ); } catch ( InvocationTargetException | UndeclaredThrowableException e ) { Util.throwIfUnchecked( e.getCause() ); - throw new RuntimeException( e.getCause() ); + throw new GenericRuntimeException( e.getCause() ); } finally { mq.map.remove( key1 ); } diff --git a/core/src/main/java/org/polypheny/db/algebra/operators/OperatorName.java b/core/src/main/java/org/polypheny/db/algebra/operators/OperatorName.java index c544fc47d3..188325ad64 100644 --- a/core/src/main/java/org/polypheny/db/algebra/operators/OperatorName.java +++ b/core/src/main/java/org/polypheny/db/algebra/operators/OperatorName.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
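Since getTupleCount now returns an Optional instead of a nullable Double, existing call sites need a small adaptation; a minimal sketch of the adapted pattern is shown below (the -1D fallback is illustrative and mirrors the GlobalStats code further down in this diff).

```java
import org.polypheny.db.algebra.AlgNode;
import org.polypheny.db.algebra.metadata.AlgMetadataQuery;

// Sketch of a call site adapted to the Optional-based getTupleCount.
final class TupleCountCallSite {

    static double tupleCountOrUnknown( AlgNode alg ) {
        AlgMetadataQuery mq = alg.getCluster().getMetadataQuery();
        // Previously: Double count = mq.getTupleCount( alg ), followed by a null check.
        // Now a missing estimate (including the cyclic-metadata case) is simply an empty Optional.
        return mq.getTupleCount( alg ).orElse( -1D );
    }
}
```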
@@ -1699,7 +1699,6 @@ public enum OperatorName { CYPHER_LIKE( LangFunctionOperator.class ), - CYPHER_GRAPH_ONLY_LABEL( LangFunctionOperator.class ), CYPHER_GEO_DISTANCE( LangFunctionOperator.class ), diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/PolyAlgDeclaration.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/PolyAlgDeclaration.java new file mode 100644 index 0000000000..16802f1473 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/PolyAlgDeclaration.java @@ -0,0 +1,541 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import lombok.Builder; +import lombok.Data; +import lombok.Getter; +import lombok.NonNull; +import lombok.Singular; +import org.apache.commons.lang3.function.TriFunction; +import org.polypheny.db.algebra.AlgDistribution; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.constant.SemiJoinType; +import org.polypheny.db.algebra.core.JoinAlgType; +import org.polypheny.db.algebra.core.common.Modify; +import org.polypheny.db.algebra.polyalg.arguments.AggArg; +import org.polypheny.db.algebra.polyalg.arguments.AnyArg; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.CollationArg; +import org.polypheny.db.algebra.polyalg.arguments.CorrelationArg; +import org.polypheny.db.algebra.polyalg.arguments.DoubleArg; +import org.polypheny.db.algebra.polyalg.arguments.EntityArg; +import org.polypheny.db.algebra.polyalg.arguments.EnumArg; +import org.polypheny.db.algebra.polyalg.arguments.FieldArg; +import org.polypheny.db.algebra.polyalg.arguments.IntArg; +import org.polypheny.db.algebra.polyalg.arguments.LaxAggArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; +import org.polypheny.db.algebra.polyalg.arguments.StringArg; +import org.polypheny.db.algebra.polyalg.arguments.WindowGroupArg; +import org.polypheny.db.catalog.logistic.DataModel; +import org.polypheny.db.plan.AlgCluster; +import org.polypheny.db.plan.Convention; +import org.polypheny.db.type.PolyType; + + +public class PolyAlgDeclaration { + + public final String opName; + public final ImmutableSet opAliases; + public final DataModel model; // null: common (can be used with nodes of any datamodel) + public final Convention convention; // null: no 
convention (i.e. not a physical operator) + private final int numInputs; // -1 if arbitrary amount is allowed + public final ImmutableSet opTags; + private final boolean isNotFullyImplemented; // + + public final ImmutableList posParams; + public final ImmutableList kwParams; + private final ImmutableMap paramLookup; + + private final TriFunction, AlgCluster, AlgNode> creator; + + + @Builder + public PolyAlgDeclaration( + @NonNull String opName, + @Singular ImmutableSet opAliases, + DataModel model, + Convention convention, + TriFunction, AlgCluster, AlgNode> creator, + @Singular ImmutableSet opTags, + int numInputs, + boolean isNotFullyImplemented, + @Singular ImmutableList params ) { + this.opName = opName; + this.opAliases = (opAliases != null) ? opAliases : ImmutableSet.of(); + this.model = model; + this.convention = convention; + this.creator = creator; + this.numInputs = numInputs; + this.isNotFullyImplemented = isNotFullyImplemented; + this.opTags = (opTags != null) ? opTags : ImmutableSet.of(); + params = (params != null) ? params : ImmutableList.of(); + + assert PolyAlgDeclaration.hasUniqueNames( params ); + assert PolyAlgDeclaration.hasRequiredTags( params ); + assert convention == null || this.opTags.contains( OperatorTag.PHYSICAL ); + + ImmutableMap.Builder bMap = ImmutableMap.builder(); + ImmutableList.Builder bPos = ImmutableList.builder(); + ImmutableList.Builder bKey = ImmutableList.builder(); + for ( Parameter p : params ) { + assert p.hasCompatibleSimpleType(); + assert p.hasValidDefault(); + + bMap.put( p.name, p ); + bMap.putAll( p.aliases.stream().collect( Collectors.toMap( a -> a, a -> p ) ) ); + + if ( p.isPositional() ) { + bPos.add( p ); + } else { + bKey.add( p ); + } + } + this.posParams = bPos.build(); + this.kwParams = bKey.build(); + this.paramLookup = bMap.build(); + } + + + public AlgNode createNode( PolyAlgArgs args, List children, AlgCluster cluster ) { + return creator.apply( args, children, cluster ); + } + + + /** + * Retrieves the positional parameter at the specified position. + * + * @param i The position of the parameter to retrieve. + * @return The parameter at the specified position, or {@code null} if the position is out of bounds. + */ + public Parameter getPos( int i ) { + if ( i < 0 || i >= posParams.size() ) { + return null; + } + return posParams.get( i ); + } + + + /** + * Retrieves the parameter (positional or keyword) associated with the specified name. + * It is also possible to specify an alias name. + * + * @param name The name of the parameter to retrieve. + * @return The parameter associated with the specified name, or {@code null} if no parameter is found. + */ + public Parameter getParam( String name ) { + return paramLookup.get( name ); + } + + + private static boolean hasUniqueNames( ImmutableList params ) { + Set names = new HashSet<>(); + for ( Parameter p : params ) { + if ( names.contains( p.name ) ) { + return false; + } + names.add( p.name ); + for ( String alias : p.aliases ) { + if ( names.contains( alias ) ) { + return false; + } + names.add( alias ); + } + } + return true; + } + + + private static boolean hasRequiredTags( ImmutableList params ) { + for ( Parameter p : params ) { + if ( p.requiresAlias && !p.tags.contains( ParamTag.ALIAS ) ) { + return false; + } + if ( p.tags.contains( ParamTag.HIDE_TRIVIAL ) && !p.tags.contains( ParamTag.ALIAS ) ) { + return false; + } + } + return true; + } + + + /** + * Checks whether this operator has exactly one positional parameter, which in addition must be multiValued. 
+ * If this is the case, it is safe for the multiValued parameter to omit the brackets, as it is possible to + * infer that any positional argument must belong to the multiValued argument. + * + * @return whether it is safe to unpack the values of the (only) positional argument + */ + public boolean canUnpackValues() { + return posParams.size() == 1 && getPos( 0 ).isMultiValued(); + } + + + public boolean containsParam( Parameter p ) { + return posParams.contains( p ) || kwParams.contains( p ); + } + + + public boolean hasParams() { + return !paramLookup.isEmpty(); + } + + + public boolean supportsNumberOfChildren( int n ) { + return numInputs == -1 || numInputs == n; + } + + + public boolean mightRequireAuxiliaryProject() { + return (numInputs == -1 || numInputs > 1) && + !opTags.contains( OperatorTag.PHYSICAL ) && + (model == DataModel.RELATIONAL || model == null); + } + + + public ObjectNode serialize( ObjectMapper mapper ) { + ObjectNode node = mapper.createObjectNode(); + node.put( "name", opName ); + + ArrayNode aliases = mapper.createArrayNode(); + for ( String alias : opAliases ) { + aliases.add( alias ); + } + node.set( "aliases", aliases ); + + if ( model == null ) { + node.put( "model", "COMMON" ); + } else { + node.put( "model", model.name() ); + } + + if ( convention != null ) { + node.put( "convention", convention.getName() ); + } + node.put( "numInputs", numInputs ); + + ArrayNode tags = mapper.createArrayNode(); + for ( OperatorTag tag : opTags ) { + tags.add( tag.name() ); + } + node.set( "tags", tags ); + + ArrayNode posArr = mapper.createArrayNode(); + for ( Parameter p : posParams ) { + posArr.add( p.serialize( mapper ) ); + } + if ( !posArr.isEmpty() && canUnpackValues() ) { + ((ObjectNode) posArr.get( 0 )).put( "canUnpackValues", true ); + } + node.set( "posParams", posArr ); + + ArrayNode kwArr = mapper.createArrayNode(); + for ( Parameter p : kwParams ) { + kwArr.add( p.serialize( mapper ) ); + } + node.set( "kwParams", kwArr ); + + if ( isNotFullyImplemented ) { + node.put( "notRegistered", true ); // disables editing for this node in the UI + } + + return node; + } + + + /** + * Depending on whether a defaultValue is specified, a Parameter can result in two types of corresponding arguments: + *
    • Positional arguments: An argument for a positional Parameter always has to be included in the proper position. It does not have a default value.
    • Keyword arguments: Arguments are preceded by the name of the parameter. Keyword arguments can be omitted, in which case the defaultValue is used.
    + * + * The int multiValued indicates that the argument is wrapped within multiValued number of nested lists. (0: arg, 1: [arg0, arg1, ...], 2: [[arg00, arg01, ...], ...]) + * The boolean requiresAlias can be useful if a key-value pair is expected. This alias corresponds to the "AS" clause + * and should not be confused with parameter name aliases. + */ + @Builder + @Data + public static class Parameter { + + @NonNull + private final String name; + @Singular + private final ImmutableSet aliases; + @Singular + private final ImmutableSet tags; + private final SimpleType simpleType; + @NonNull + private final ParamType type; + private final int multiValued; // 0: not multivalued (default). otherwise: nesting depth of lists + public final boolean requiresAlias; + private final PolyAlgArg defaultValue; // for multiValued parameters the default value should be a ListArg representing the outermost element + + + public boolean isPositional() { + return defaultValue == null; + } + + + public boolean isCompatible( ParamType type ) { + return this.type == type || (isMultiValued() && type == ParamType.LIST); + } + + + /** + * Checks if the parameter has a valid default value. + * This can either be no default value at all, or it is a {@link PolyAlgArg} of a compatible type. + * + * @return true if the default value is valid + */ + public boolean hasValidDefault() { + return isPositional() || isCompatible( defaultValue.getType() ); + } + + + public boolean hasCompatibleSimpleType() { + return this.simpleType == null || + (this.simpleType.isCompatible( type ) && + !(this.isPositional() && this.simpleType == SimpleType.HIDDEN)); + } + + + public boolean isMultiValued() { + return multiValued > 0; + } + + + public String getDefaultAsPolyAlg( AlgNode context, List inputFieldNames ) { + if ( isPositional() ) { + return null; + } + return defaultValue.toPolyAlg( context, inputFieldNames ); + } + + + public ObjectNode serialize( ObjectMapper mapper ) { + ObjectNode node = mapper.createObjectNode(); + node.put( "name", name ); + + ArrayNode aliasesArr = mapper.createArrayNode(); + for ( String alias : aliases ) { + aliasesArr.add( alias ); + } + node.set( "aliases", aliasesArr ); + + ArrayNode tagsArr = mapper.createArrayNode(); + for ( ParamTag tag : tags ) { + tagsArr.add( tag.name() ); + } + node.set( "tags", tagsArr ); + + node.put( "type", type.name() ); + if ( simpleType != null ) { + node.put( "simpleType", simpleType.name() ); + } + node.put( "multiValued", multiValued ); + node.put( "requiresAlias", requiresAlias ); + if ( !isPositional() ) { + node.set( "defaultValue", defaultValue.serializeWrapped( null, List.of(), mapper ) ); + node.put( "defaultPolyAlg", defaultValue.toPolyAlg( null, List.of() ) ); + } + node.put( "isEnum", type.isEnum() ); + + return node; + } + + } + + + /** + * When creating a new ParamType, you also need to + *
    • Create a corresponding {@link PolyAlgArg}
    • Write code for parsing the argument in {@link org.polypheny.db.algebra.polyalg.parser.PolyAlgToAlgConverter}
    • Add support for your new type to Polypheny-UI
    + */ + @Getter + public enum ParamType { + /** + * The default type. Should only be used if no other type fits better. + */ + ANY( AnyArg.class ), + INTEGER( IntArg.class ), + DOUBLE( DoubleArg.class ), + STRING( StringArg.class ), + + /** + * A boolean flag, either "true" or "false". + */ + BOOLEAN( BooleanArg.class ), + + /** + * A serialized RexNode + */ + REX( RexArg.class ), + AGGREGATE( AggArg.class ), + LAX_AGGREGATE( LaxAggArg.class ), + ENTITY( EntityArg.class ), + + // Every new enum also needs to be added to the PolyAlgToAlgConverter like any other new ParamType + JOIN_TYPE_ENUM( EnumArg.class, JoinAlgType.class ), + SEMI_JOIN_TYPE_ENUM( EnumArg.class, SemiJoinType.class ), + MODIFY_OP_ENUM( EnumArg.class, Modify.Operation.class ), + DISTRIBUTION_TYPE_ENUM( EnumArg.class, AlgDistribution.Type.class ), + DATAMODEL_ENUM( EnumArg.class, DataModel.class ), + POLY_TYPE_ENUM( EnumArg.class, PolyType.class ), + + /** + * A specific field (= column in the relational data model). + */ + FIELD( FieldArg.class ), + + /** + * The type of ListArg itself (should only return this value as type if it has no elements). + */ + LIST( ListArg.class ), + + /** + * + */ + COLLATION( CollationArg.class ), + + /** + * Correlation ID + */ + CORR_ID( CorrelationArg.class ), + + /** + * Window.Group + */ + WINDOW_GROUP( WindowGroupArg.class ); + + private final Class argClass; + private final Class> enumClass; + + + ParamType( Class argClass ) { + this( argClass, null ); + } + + + ParamType( Class argClass, Class> enumClass ) { + this.argClass = argClass; + this.enumClass = enumClass; + } + + + public boolean isEnum() { + return this.argClass == EnumArg.class; + } + + + public static List getEnumParamTypes() { + return Arrays.stream( ParamType.values() ).filter( ParamType::isEnum ).toList(); + } + + } + + + public enum OperatorTag { + LOGICAL, + PHYSICAL, + ALLOCATION, + + /** + * Operator should be hidden in simple mode. 
+ */ + ADVANCED + } + + + public enum ParamTag { + + /** + * Parameter allows for a (possibly optional) alias (not to be confused with a parameter name alias) + */ + ALIAS, + + /** + * Indicates that negative values are not permitted (typically together with IntArg) + */ + NON_NEGATIVE, + + /** + * For projects and some other operators it is useful to let the user hide any trivial arguments in the UI + */ + HIDE_TRIVIAL, + + /** + * Indicates that this parameter should be treated as a PolyNode + */ + POLY_NODE, + + /** + * Indicates that this parameter should be treated as a PolyPath + */ + POLY_PATH + } + + + public enum SimpleType { + HIDDEN, // do not show parameter in simple mode and use default value instead + REX_PREDICATE( ParamType.REX ), + REX_UINT( ParamType.REX ), // integer >= 0 + SIMPLE_COLLATION( ParamType.COLLATION ), + SIMPLE_AGG( ParamType.AGGREGATE ); + + private final Set compatible; // empty: compatible with all + + + SimpleType() { + this( Set.of() ); + } + + + SimpleType( ParamType compatible ) { + this( Set.of( compatible ) ); + } + + + SimpleType( Set compatible ) { + this.compatible = compatible; + } + + + public boolean isCompatible( ParamType type ) { + return compatible.isEmpty() || compatible.contains( type ); + } + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/PolyAlgMetadata.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/PolyAlgMetadata.java new file mode 100644 index 0000000000..cdd5bcf076 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/PolyAlgMetadata.java @@ -0,0 +1,224 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.util.HashMap; +import java.util.Map; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.plan.AlgOptCost; + +public class PolyAlgMetadata { + + private final GlobalStats globalStats; + private final ObjectMapper mapper; + + private final ArrayNode table; // simple key-value pairs + private final ArrayNode badges; // can be used for indicating max values etc. 
+ private final ObjectNode outConnection; // width < 0: use default width, otherwise between 0 (min) and 1 (max) + + + public PolyAlgMetadata( ObjectMapper mapper, GlobalStats globalStats ) { + this.mapper = mapper; + this.globalStats = globalStats; + this.table = mapper.createArrayNode(); + this.badges = mapper.createArrayNode(); + this.outConnection = mapper.createObjectNode(); + } + + + public PolyAlgMetadata addCosts( AlgOptCost nonCumulative, AlgOptCost cumulative, double tupleCount ) { + addMetadata( "tupleCount", "Tuple Count", tupleCount, false ); + addMetadata( "tuplesCost", "Processed Tuples", nonCumulative.getRows(), cumulative.getRows(), false, "Most Tuples Processed" ); + addMetadata( "cpuCost", "CPU Cost", nonCumulative.getCpu(), cumulative.getCpu(), false, "Most CPU" ); + setOutConnection( "tupleCount", false ); + return this; + } + + + public PolyAlgMetadata addMetadata( String key, String displayName, double value, boolean isCalculated ) { + return addMetadata( key, displayName, value, -1, isCalculated, null ); + } + + + /** + * Adds the given value to the metadata table. + * + * @param key unique key to identify the metadata + * @param displayName name that is displayed instead of the key + * @param value non-rounded value (non-cumulative) + * @param cumulativeValue cumulative value or -1 if no such value exists + * @param isCalculated whether the value is + * @param displayMaxName display name of the badge to be shown if this is the highest global value or null if no badge should be shown + * @return the PolyAlgMetadata for fluent chaining + */ + public PolyAlgMetadata addMetadata( String key, String displayName, double value, double cumulativeValue, boolean isCalculated, String displayMaxName ) { + ObjectNode row = mapper.createObjectNode(); + row.put( "key", key ); + row.put( "displayName", displayName ); + row.put( "value", value ); + if ( cumulativeValue >= 0 ) { + row.put( "cumulativeValue", cumulativeValue ); + } + row.put( "calculated", isCalculated ); + table.add( row ); + + if ( displayMaxName != null && globalStats.isMax( key, value ) ) { + addBadge( displayMaxName, key, BadgeLevel.DANGER ); + } + return this; + } + + + /** + * Sets the line width proportional to the value of this key divided by the global maximum. + * + * @param key the key of the stat to be used + * @return the PolyAlgMetadata for fluent chaining + */ + public PolyAlgMetadata setOutConnection( String key, boolean useCumulative ) { + double max = useCumulative ? globalStats.getCumulativeMax( key ) : globalStats.getMax( key ); + ObjectNode row = findTableEntry( key ); + if ( row != null && max > 1 && (!useCumulative || row.has( "cumulativeValue" )) ) { + double v = useCumulative ? 
row.get( "cumulativeValue" ).asDouble() : row.get( "value" ).asDouble(); + outConnection.put( "width", v / max ); + outConnection.put( "forKey", key ); + } + return this; + } + + + public PolyAlgMetadata addBadge( String content, String forKey, BadgeLevel level ) { + ObjectNode badge = mapper.createObjectNode(); + badge.put( "content", content ); + badge.put( "forKey", forKey ); + badge.put( "level", level.name() ); + badges.add( badge ); + return this; + } + + + public ObjectNode serialize() { + ObjectNode node = mapper.createObjectNode(); + node.put( "isAuxiliary", false ); + node.set( "table", table ); + node.set( "badges", badges ); + + if ( outConnection.has( "width" ) ) { + node.set( "outConnection", outConnection ); + } + return node; + } + + + public static ObjectNode getMetadataForAuxiliaryNode( ObjectMapper mapper ) { + ObjectNode node = mapper.createObjectNode(); + node.put( "isAuxiliary", true ); + return node; + } + + + private ObjectNode findTableEntry( String key ) { + for ( JsonNode node : table ) { + if ( node.isObject() ) { + ObjectNode row = (ObjectNode) node; + if ( row.get( "key" ).asText().equals( key ) ) { + return row; + } + } + } + return null; + } + + + public static class GlobalStats { + + private final static double EPS = 0.0000001; + private final Map maxValues = new HashMap<>(); + private final Map maxCumulativeValues = new HashMap<>(); + + + private GlobalStats() { + } + + + public static GlobalStats computeGlobalStats( AlgNode root ) { + GlobalStats stats = new GlobalStats(); + stats.updateGlobalStats( root ); + stats.setMaxCumulativeCosts( root.getCluster().getMetadataQuery().getCumulativeCost( root ) ); + return stats; + } + + + private void updateGlobalStats( AlgNode node ) { + for ( AlgNode child : node.getInputs() ) { + updateGlobalStats( child ); + } + AlgMetadataQuery mq = node.getCluster().getMetadataQuery(); + updateMaxCosts( mq.getNonCumulativeCost( node ), mq.getTupleCount( node ).orElse( -1D ) ); + } + + + public void updateMaxCosts( AlgOptCost nonCumulative, Double tupleCount ) { + update( "tupleCount", tupleCount ); + update( "tuplesCost", nonCumulative.getRows() ); + update( "cpuCost", nonCumulative.getCpu() ); + } + + + public void setMaxCumulativeCosts( AlgOptCost cumulative ) { + maxCumulativeValues.put( "tuplesCost", cumulative.getRows() ); + maxCumulativeValues.put( "cpuCost", cumulative.getCpu() ); + } + + + public void update( String key, Double value ) { + double curr = maxValues.getOrDefault( key, 0d ); + if ( value != null && value > curr ) { + maxValues.put( key, value ); + } + } + + + public boolean isMax( String key, double value ) { + return value > 0 && Math.abs( maxValues.getOrDefault( key, 0d ) - value ) < EPS; + } + + + public double getMax( String key ) { + return maxValues.getOrDefault( key, 0d ); + } + + + public double getCumulativeMax( String key ) { + return maxCumulativeValues.getOrDefault( key, 0d ); + } + + } + + + public enum BadgeLevel { + INFO, + WARN, + DANGER + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/PolyAlgRegistry.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/PolyAlgRegistry.java new file mode 100644 index 0000000000..ab433c0dd2 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/PolyAlgRegistry.java @@ -0,0 +1,636 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
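As a usage sketch for the PolyAlgMetadata and GlobalStats classes above: the wiring shown here is assumed, since how the resulting JSON is attached to the serialized plan is not part of this excerpt.

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.polypheny.db.algebra.AlgNode;
import org.polypheny.db.algebra.metadata.AlgMetadataQuery;
import org.polypheny.db.algebra.polyalg.PolyAlgMetadata;
import org.polypheny.db.algebra.polyalg.PolyAlgMetadata.GlobalStats;

// Sketch: building the metadata JSON for one node of a plan rooted at `root`.
final class PolyAlgMetadataExample {

    static ObjectNode metadataFor( AlgNode root, AlgNode node, ObjectMapper mapper ) {
        GlobalStats stats = GlobalStats.computeGlobalStats( root ); // walks the plan once to collect maxima
        AlgMetadataQuery mq = node.getCluster().getMetadataQuery();
        return new PolyAlgMetadata( mapper, stats )
                .addCosts(
                        mq.getNonCumulativeCost( node ),
                        mq.getCumulativeCost( node ),
                        mq.getTupleCount( node ).orElse( -1D ) )
                .serialize();
    }
}
```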
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.common.collect.ImmutableList; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.core.JoinAlgType; +import org.polypheny.db.algebra.enumerable.EnumerableAggregate; +import org.polypheny.db.algebra.enumerable.EnumerableCalc; +import org.polypheny.db.algebra.enumerable.EnumerableConvention; +import org.polypheny.db.algebra.enumerable.EnumerableInterpreter; +import org.polypheny.db.algebra.enumerable.EnumerableIntersect; +import org.polypheny.db.algebra.enumerable.EnumerableJoin; +import org.polypheny.db.algebra.enumerable.EnumerableLimit; +import org.polypheny.db.algebra.enumerable.EnumerableMergeJoin; +import org.polypheny.db.algebra.enumerable.EnumerableMinus; +import org.polypheny.db.algebra.enumerable.EnumerableProject; +import org.polypheny.db.algebra.enumerable.EnumerableSemiJoin; +import org.polypheny.db.algebra.enumerable.EnumerableSort; +import org.polypheny.db.algebra.enumerable.EnumerableTransformer; +import org.polypheny.db.algebra.enumerable.EnumerableUnion; +import org.polypheny.db.algebra.enumerable.EnumerableValues; +import org.polypheny.db.algebra.enumerable.common.EnumerableCollect; +import org.polypheny.db.algebra.enumerable.common.EnumerableContextSwitcher; +import org.polypheny.db.algebra.enumerable.common.EnumerableModifyCollect; +import org.polypheny.db.algebra.enumerable.lpg.EnumerableLpgMatch; +import org.polypheny.db.algebra.fun.AggFunction; +import org.polypheny.db.algebra.logical.common.LogicalBatchIterator; +import org.polypheny.db.algebra.logical.common.LogicalTransformer; +import org.polypheny.db.algebra.logical.document.LogicalDocumentAggregate; +import org.polypheny.db.algebra.logical.document.LogicalDocumentFilter; +import org.polypheny.db.algebra.logical.document.LogicalDocumentModify; +import org.polypheny.db.algebra.logical.document.LogicalDocumentProject; +import org.polypheny.db.algebra.logical.document.LogicalDocumentScan; +import org.polypheny.db.algebra.logical.document.LogicalDocumentSort; +import org.polypheny.db.algebra.logical.document.LogicalDocumentUnwind; +import org.polypheny.db.algebra.logical.document.LogicalDocumentValues; +import org.polypheny.db.algebra.logical.lpg.LogicalLpgAggregate; +import org.polypheny.db.algebra.logical.lpg.LogicalLpgFilter; +import org.polypheny.db.algebra.logical.lpg.LogicalLpgMatch; +import org.polypheny.db.algebra.logical.lpg.LogicalLpgModify; +import org.polypheny.db.algebra.logical.lpg.LogicalLpgProject; +import org.polypheny.db.algebra.logical.lpg.LogicalLpgScan; +import org.polypheny.db.algebra.logical.lpg.LogicalLpgSort; +import org.polypheny.db.algebra.logical.lpg.LogicalLpgTransformer; +import org.polypheny.db.algebra.logical.lpg.LogicalLpgUnion; +import 
org.polypheny.db.algebra.logical.lpg.LogicalLpgUnwind; +import org.polypheny.db.algebra.logical.lpg.LogicalLpgValues; +import org.polypheny.db.algebra.logical.relational.LogicalCalc; +import org.polypheny.db.algebra.logical.relational.LogicalModifyCollect; +import org.polypheny.db.algebra.logical.relational.LogicalRelAggregate; +import org.polypheny.db.algebra.logical.relational.LogicalRelCorrelate; +import org.polypheny.db.algebra.logical.relational.LogicalRelExchange; +import org.polypheny.db.algebra.logical.relational.LogicalRelFilter; +import org.polypheny.db.algebra.logical.relational.LogicalRelIntersect; +import org.polypheny.db.algebra.logical.relational.LogicalRelJoin; +import org.polypheny.db.algebra.logical.relational.LogicalRelMinus; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; +import org.polypheny.db.algebra.logical.relational.LogicalRelProject; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelSort; +import org.polypheny.db.algebra.logical.relational.LogicalRelUnion; +import org.polypheny.db.algebra.logical.relational.LogicalRelValues; +import org.polypheny.db.algebra.logical.relational.LogicalRelViewScan; +import org.polypheny.db.algebra.logical.relational.LogicalSortExchange; +import org.polypheny.db.algebra.logical.relational.LogicalWindow; +import org.polypheny.db.algebra.operators.OperatorName; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.OperatorTag; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamTag; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.Parameter; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.SimpleType; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.EnumArg; +import org.polypheny.db.algebra.polyalg.arguments.IntArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; +import org.polypheny.db.algebra.polyalg.arguments.StringArg; +import org.polypheny.db.catalog.logistic.DataModel; +import org.polypheny.db.interpreter.BindableConvention; +import org.polypheny.db.interpreter.Bindables.BindableScan; +import org.polypheny.db.plan.Convention; + +@Slf4j +public class PolyAlgRegistry { + + private static final Map, PolyAlgDeclaration> declarations = new HashMap<>(); + private static final Map> classes = new HashMap<>(); + private static ObjectNode serialized = null; + + + static { + populateDeclarationsMap(); + populateClassesMap(); + } + + + private static void populateDeclarationsMap() { + // logical operators can also be used as allocation operators + ImmutableList logAllTags = ImmutableList.of( OperatorTag.LOGICAL, OperatorTag.ALLOCATION ); + ImmutableList logAllProTags = ImmutableList.of( OperatorTag.LOGICAL, OperatorTag.ALLOCATION, OperatorTag.ADVANCED ); + + // RELATIONAL + declarations.put( LogicalRelProject.class, PolyAlgDeclaration.builder() + .creator( LogicalRelProject::create ).model( DataModel.RELATIONAL ) + .opName( "REL_PROJECT" ).opAliases( List.of( "PROJECT", "P", "REL_PROJECT#", "PROJECT#" ) ).numInputs( 1 ).opTags( logAllTags ) + .param( Parameter.builder().name( "projects" ).tags( List.of( ParamTag.ALIAS, ParamTag.HIDE_TRIVIAL ) ).multiValued( 1 ).type( ParamType.REX ).build() ) + .build() ); + declarations.put( LogicalRelScan.class, PolyAlgDeclaration.builder() + .creator( 
LogicalRelScan::create ).model( DataModel.RELATIONAL ) + .opName( "REL_SCAN" ).opAlias( "SCAN" ).numInputs( 0 ).opTags( logAllTags ) + .param( Parameter.builder().name( "entity" ).alias( "table" ).type( ParamType.ENTITY ).build() ) + .build() ); + declarations.put( LogicalRelViewScan.class, PolyAlgDeclaration.builder() + .creator( LogicalRelViewScan::create ).model( DataModel.RELATIONAL ) + .opName( "REL_VIEW_SCAN" ).opAlias( "VIEW_SCAN" ).numInputs( 0 ).opTags( logAllTags ) + .param( Parameter.builder().name( "entity" ).alias( "table" ).type( ParamType.ENTITY ).build() ) + .build() ); + declarations.put( LogicalRelFilter.class, PolyAlgDeclaration.builder() + .creator( LogicalRelFilter::create ).model( DataModel.RELATIONAL ) + .opName( "REL_FILTER" ).opAlias( "FILTER" ).numInputs( 1 ).opTags( logAllTags ) + .param( Parameter.builder().name( "condition" ).type( ParamType.REX ).simpleType( SimpleType.REX_PREDICATE ).build() ) + .param( Parameter.builder().name( "variables" ).type( ParamType.CORR_ID ).simpleType( SimpleType.HIDDEN ).multiValued( 1 ).defaultValue( ListArg.EMPTY ).build() ) + .build() ); + declarations.put( LogicalRelAggregate.class, PolyAlgDeclaration.builder() + .creator( LogicalRelAggregate::create ).model( DataModel.RELATIONAL ) + .opName( "REL_AGGREGATE" ).opAliases( List.of( "AGGREGATE", "AGG" ) ).numInputs( 1 ).opTags( logAllTags ) + .param( Parameter.builder().name( "group" ).type( ParamType.FIELD ).multiValued( 1 ).defaultValue( ListArg.EMPTY ).build() ) // select count(*) has no group + .param( Parameter.builder().name( "groups" ).type( ParamType.FIELD ).simpleType( SimpleType.HIDDEN ).multiValued( 2 ).defaultValue( ListArg.NESTED_EMPTY ).build() ) + .param( Parameter.builder().name( "aggregates" ).alias( "aggs" ).type( ParamType.AGGREGATE ).simpleType( SimpleType.SIMPLE_AGG ).multiValued( 1 ).defaultValue( ListArg.EMPTY ).build() ) + .build() ); + declarations.put( LogicalRelMinus.class, PolyAlgDeclaration.builder() + .creator( LogicalRelMinus::create ).model( DataModel.RELATIONAL ) + .opName( "REL_MINUS" ).opAlias( "MINUS" ).numInputs( -1 ).opTags( logAllTags ) + .param( Parameter.builder().name( "all" ).type( ParamType.BOOLEAN ).simpleType( SimpleType.HIDDEN ).defaultValue( BooleanArg.FALSE ).build() ) + .build() ); + declarations.put( LogicalRelUnion.class, PolyAlgDeclaration.builder() + .creator( LogicalRelUnion::create ).model( DataModel.RELATIONAL ) + .opName( "REL_UNION" ).opAlias( "UNION" ).numInputs( -1 ).opTags( logAllTags ) + .param( Parameter.builder().name( "all" ).type( ParamType.BOOLEAN ).simpleType( SimpleType.HIDDEN ).defaultValue( BooleanArg.FALSE ).build() ) + .build() ); + declarations.put( LogicalRelIntersect.class, PolyAlgDeclaration.builder() + .creator( LogicalRelIntersect::create ).model( DataModel.RELATIONAL ) + .opName( "REL_INTERSECT" ).opAlias( "INTERSECT" ).numInputs( -1 ).opTags( logAllTags ) + .param( Parameter.builder().name( "all" ).type( ParamType.BOOLEAN ).simpleType( SimpleType.HIDDEN ).defaultValue( BooleanArg.FALSE ).build() ) + .build() ); + declarations.put( LogicalModifyCollect.class, PolyAlgDeclaration.builder() + .creator( LogicalModifyCollect::create ).model( DataModel.RELATIONAL ) + .opName( "REL_MODIFY_COLLECT" ).opAlias( "MODIFY_COLLECT" ).numInputs( -1 ).opTags( logAllProTags ) + .param( Parameter.builder().name( "all" ).type( ParamType.BOOLEAN ).simpleType( SimpleType.HIDDEN ).defaultValue( BooleanArg.FALSE ).build() ) + .build() ); + declarations.put( LogicalRelSort.class, PolyAlgDeclaration.builder() + .creator( 
LogicalRelSort::create ).model( DataModel.RELATIONAL ) + .opName( "REL_SORT" ).opAlias( "SORT" ).numInputs( 1 ).opTags( logAllTags ) + .param( Parameter.builder().name( "order" ).aliases( List.of( "collation", "key", "sort" ) ).type( ParamType.COLLATION ).simpleType( SimpleType.SIMPLE_COLLATION ).multiValued( 1 ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "limit" ).alias( "fetch" ).type( ParamType.REX ).simpleType( SimpleType.REX_UINT ).defaultValue( RexArg.NULL ).build() ) + .param( Parameter.builder().name( "offset" ).type( ParamType.REX ).simpleType( SimpleType.HIDDEN ).defaultValue( RexArg.NULL ).build() ) + .build() ); + declarations.put( LogicalRelJoin.class, PolyAlgDeclaration.builder() + .creator( LogicalRelJoin::create ).model( DataModel.RELATIONAL ) + .opName( "REL_JOIN" ).opAlias( "JOIN" ).numInputs( 2 ).opTags( logAllTags ) + .param( Parameter.builder().name( "condition" ).alias( "on" ).type( ParamType.REX ).simpleType( SimpleType.REX_PREDICATE ).build() ) + .param( Parameter.builder().name( "type" ).type( ParamType.JOIN_TYPE_ENUM ).defaultValue( new EnumArg<>( JoinAlgType.INNER, ParamType.JOIN_TYPE_ENUM ) ).build() ) + .param( Parameter.builder().name( "variables" ).type( ParamType.CORR_ID ).simpleType( SimpleType.HIDDEN ).multiValued( 1 ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "semiJoinDone" ).type( ParamType.BOOLEAN ).simpleType( SimpleType.HIDDEN ).defaultValue( BooleanArg.FALSE ).build() ) + .build() ); + declarations.put( LogicalCalc.class, PolyAlgDeclaration.builder() + .model( DataModel.RELATIONAL ) + .opName( "REL_CALC" ).opAlias( "CALC" ).numInputs( 1 ).opTags( logAllProTags ) + .param( Parameter.builder().name( "exprs" ).type( ParamType.REX ).multiValued( 1 ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "projects" ).tag( ParamTag.ALIAS ).type( ParamType.REX ).multiValued( 1 ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "condition" ).type( ParamType.REX ).defaultValue( RexArg.NULL ).build() ) + .build() ); + declarations.put( LogicalRelModify.class, PolyAlgDeclaration.builder() + .creator( LogicalRelModify::create ).model( DataModel.RELATIONAL ) + .opName( "REL_MODIFY" ).opAlias( "MODIFY" ).numInputs( 1 ).opTags( logAllProTags ) + .param( Parameter.builder().name( "table" ).alias( "target" ).type( ParamType.ENTITY ).build() ) + .param( Parameter.builder().name( "operation" ).type( ParamType.MODIFY_OP_ENUM ).build() ) + .param( Parameter.builder().name( "targets" ).alias( "columns" ).multiValued( 1 ).type( ParamType.STRING ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "sources" ).multiValued( 1 ).type( ParamType.REX ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "flattened" ).type( ParamType.BOOLEAN ).defaultValue( BooleanArg.FALSE ).build() ) + .build() ); + declarations.put( LogicalRelValues.class, PolyAlgDeclaration.builder() + .creator( LogicalRelValues::create ).model( DataModel.RELATIONAL ) + .opName( "REL_VALUES" ).opAlias( "VALUES" ).numInputs( 0 ).opTags( logAllProTags ) + .param( Parameter.builder().name( "names" ).multiValued( 1 ).type( ParamType.STRING ).build() ) + .param( Parameter.builder().name( "tuples" ).multiValued( 2 ).type( ParamType.REX ).build() ) + .build() ); + declarations.put( LogicalRelCorrelate.class, PolyAlgDeclaration.builder() + .creator( LogicalRelCorrelate::create ).model( DataModel.RELATIONAL ) + .opName( "REL_CORRELATE" ).opAlias( 
"CORRELATE" ).numInputs( 2 ).opTags( logAllProTags ) + .param( Parameter.builder().name( "id" ).type( ParamType.CORR_ID ).build() ) + .param( Parameter.builder().name( "columns" ).type( ParamType.REX ).build() ) + .param( Parameter.builder().name( "joinType" ).alias( "type" ).type( ParamType.SEMI_JOIN_TYPE_ENUM ).build() ) + .build() ); + declarations.put( LogicalRelExchange.class, PolyAlgDeclaration.builder() + .creator( LogicalRelExchange::create ).model( DataModel.RELATIONAL ) + .opName( "REL_EXCHANGE" ).opAlias( "EXCHANGE" ).numInputs( 1 ).opTags( logAllProTags ) + .param( Parameter.builder().name( "distributionType" ).alias( "type" ).type( ParamType.DISTRIBUTION_TYPE_ENUM ).build() ) + .param( Parameter.builder().name( "numbers" ).multiValued( 1 ).type( ParamType.REX ).defaultValue( ListArg.EMPTY ).build() ) + .build() ); + declarations.put( LogicalSortExchange.class, PolyAlgDeclaration.builder() + .creator( LogicalSortExchange::create ).model( DataModel.RELATIONAL ) + .opName( "REL_SORT_EXCHANGE" ).opAlias( "SORT_EXCHANGE" ).numInputs( 1 ).opTags( logAllProTags ) + .param( Parameter.builder().name( "order" ).aliases( List.of( "collation", "sort", "key" ) ).multiValued( 1 ).type( ParamType.COLLATION ).simpleType( SimpleType.SIMPLE_COLLATION ).build() ) + .param( Parameter.builder().name( "distributionType" ).alias( "type" ).type( ParamType.DISTRIBUTION_TYPE_ENUM ).build() ) + .param( Parameter.builder().name( "numbers" ).multiValued( 1 ).type( ParamType.REX ).defaultValue( ListArg.EMPTY ).build() ) + .build() ); + declarations.put( LogicalWindow.class, PolyAlgDeclaration.builder() + .creator( LogicalWindow::create ).model( DataModel.RELATIONAL ) + .opName( "REL_WINDOW" ).opAlias( "WINDOW" ).numInputs( 1 ).opTags( logAllProTags ).isNotFullyImplemented( true ) + .param( Parameter.builder().name( "constants" ).multiValued( 1 ).type( ParamType.REX ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "groups" ).multiValued( 1 ).type( ParamType.WINDOW_GROUP ).defaultValue( ListArg.EMPTY ).build() ) + .build() ); + + // DOCUMENT + declarations.put( LogicalDocumentScan.class, PolyAlgDeclaration.builder() + .creator( LogicalDocumentScan::create ).model( DataModel.DOCUMENT ) + .opName( "DOC_SCAN" ).numInputs( 0 ).opTags( logAllTags ) + .param( Parameter.builder().name( "entity" ).type( ParamType.ENTITY ).build() ) + .build() ); + declarations.put( LogicalDocumentFilter.class, PolyAlgDeclaration.builder() + .creator( LogicalDocumentFilter::create ).model( DataModel.DOCUMENT ) + .opName( "DOC_FILTER" ).numInputs( 1 ).opTags( logAllTags ) + .param( Parameter.builder().name( "condition" ).type( ParamType.REX ).simpleType( SimpleType.REX_PREDICATE ).build() ) + .build() ); + declarations.put( LogicalDocumentSort.class, PolyAlgDeclaration.builder() + .creator( LogicalDocumentSort::create ).model( DataModel.DOCUMENT ) + .opName( "DOC_SORT" ).numInputs( 1 ).opTags( logAllTags ) + .param( Parameter.builder().name( "order" ).aliases( List.of( "collation", "key", "sort" ) ).type( ParamType.COLLATION ).simpleType( SimpleType.SIMPLE_COLLATION ).multiValued( 1 ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "targets" ).multiValued( 1 ).type( ParamType.REX ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "limit" ).alias( "fetch" ).type( ParamType.REX ).simpleType( SimpleType.REX_UINT ).defaultValue( RexArg.NULL ).build() ) + .param( Parameter.builder().name( "offset" ).type( ParamType.REX ).simpleType( SimpleType.HIDDEN 
).defaultValue( RexArg.NULL ).build() ) + .build() ); + declarations.put( LogicalDocumentUnwind.class, PolyAlgDeclaration.builder() + .creator( LogicalDocumentUnwind::create ).model( DataModel.DOCUMENT ) + .opName( "DOC_UNWIND" ).numInputs( 1 ).opTags( logAllProTags ) + .param( Parameter.builder().name( "path" ).type( ParamType.STRING ).build() ) + .build() ); + declarations.put( LogicalDocumentProject.class, PolyAlgDeclaration.builder() + .creator( LogicalDocumentProject::create ).model( DataModel.DOCUMENT ) + .opName( "DOC_PROJECT" ).numInputs( 1 ).opTags( logAllTags ) + .param( Parameter.builder().name( "includes" ).tag( ParamTag.ALIAS ).requiresAlias( true ).multiValued( 1 ).type( ParamType.REX ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "excludes" ).multiValued( 1 ).type( ParamType.STRING ).defaultValue( ListArg.EMPTY ).build() ) + .build() ); + declarations.put( LogicalDocumentAggregate.class, PolyAlgDeclaration.builder() + .creator( LogicalDocumentAggregate::create ).model( DataModel.DOCUMENT ) + .opName( "DOC_AGGREGATE" ).opAlias( "DOC_AGG" ).numInputs( 1 ).opTags( logAllTags ) + .param( Parameter.builder().name( "group" ).type( ParamType.REX ).defaultValue( RexArg.NULL ).build() ) + .param( Parameter.builder().name( "aggregates" ).alias( "aggs" ).multiValued( 1 ).type( ParamType.LAX_AGGREGATE ).defaultValue( ListArg.EMPTY ).build() ) + .build() ); + declarations.put( LogicalDocumentModify.class, PolyAlgDeclaration.builder() + .creator( LogicalDocumentModify::create ).model( DataModel.DOCUMENT ) + .opName( "DOC_MODIFY" ).numInputs( 1 ).opTags( logAllProTags ) + .param( Parameter.builder().name( "entity" ).type( ParamType.ENTITY ).build() ) + .param( Parameter.builder().name( "operation" ).type( ParamType.MODIFY_OP_ENUM ).build() ) + .param( Parameter.builder().name( "updates" ).tag( ParamTag.ALIAS ).requiresAlias( true ).multiValued( 1 ).type( ParamType.REX ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "removes" ).multiValued( 1 ).type( ParamType.STRING ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "renames" ).tag( ParamTag.ALIAS ).requiresAlias( true ).multiValued( 1 ).type( ParamType.STRING ).defaultValue( ListArg.EMPTY ).build() ) + .build() ); + declarations.put( LogicalDocumentValues.class, PolyAlgDeclaration.builder() + .creator( LogicalDocumentValues::create ).model( DataModel.DOCUMENT ) + .opName( "DOC_VALUES" ).numInputs( 0 ).opTags( logAllProTags ) + .param( Parameter.builder().name( "docs" ).alias( "documents" ).multiValued( 1 ).type( ParamType.STRING ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "dynamic" ).multiValued( 1 ).type( ParamType.REX ).simpleType( SimpleType.HIDDEN ).defaultValue( ListArg.EMPTY ).build() ) + .build() ); + + // GRAPH + declarations.put( LogicalLpgScan.class, PolyAlgDeclaration.builder() + .creator( LogicalLpgScan::create ).model( DataModel.GRAPH ) + .opName( "LPG_SCAN" ).numInputs( 0 ).opTags( logAllTags ) + .param( Parameter.builder().name( "entity" ).type( ParamType.ENTITY ).build() ) + .build() ); + declarations.put( LogicalLpgMatch.class, PolyAlgDeclaration.builder() + .creator( LogicalLpgMatch::create ).model( DataModel.GRAPH ) + .opName( "LPG_MATCH" ).numInputs( 1 ).opTags( logAllTags ) + .param( Parameter.builder().name( "matches" ).tag( ParamTag.ALIAS ).multiValued( 1 ).type( ParamType.REX ).build() ) + .build() ); + declarations.put( LogicalLpgFilter.class, PolyAlgDeclaration.builder() + .creator( 
LogicalLpgFilter::create ).model( DataModel.GRAPH ) + .opName( "LPG_FILTER" ).numInputs( 1 ).opTags( logAllTags ) + .param( Parameter.builder().name( "condition" ).type( ParamType.REX ).simpleType( SimpleType.REX_PREDICATE ).build() ) + .build() ); + declarations.put( LogicalLpgProject.class, PolyAlgDeclaration.builder() + .creator( LogicalLpgProject::create ).model( DataModel.GRAPH ) + .opName( "LPG_PROJECT" ).numInputs( 1 ).opTags( logAllTags ) + .param( Parameter.builder().name( "projects" ).tag( ParamTag.ALIAS ).multiValued( 1 ).type( ParamType.REX ).build() ) + .build() ); + declarations.put( LogicalLpgSort.class, PolyAlgDeclaration.builder() + .creator( LogicalLpgSort::create ).model( DataModel.GRAPH ) + .opName( "LPG_SORT" ).numInputs( 1 ).opTags( logAllTags ) + .param( Parameter.builder().name( "order" ).aliases( List.of( "collation", "key", "sort" ) ).type( ParamType.COLLATION ).simpleType( SimpleType.SIMPLE_COLLATION ).multiValued( 1 ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "limit" ).alias( "fetch" ).tag( ParamTag.NON_NEGATIVE ).type( ParamType.INTEGER ).defaultValue( IntArg.NULL ).build() ) + .param( Parameter.builder().name( "skip" ).alias( "offset" ).tag( ParamTag.NON_NEGATIVE ).type( ParamType.INTEGER ).simpleType( SimpleType.HIDDEN ).defaultValue( IntArg.NULL ).build() ) + .build() ); + declarations.put( LogicalLpgUnion.class, PolyAlgDeclaration.builder() + .creator( LogicalLpgUnion::create ).model( DataModel.GRAPH ) + .opName( "LPG_UNION" ).numInputs( -1 ).opTags( logAllTags ) + .param( Parameter.builder().name( "all" ).type( ParamType.BOOLEAN ).simpleType( SimpleType.HIDDEN ).defaultValue( BooleanArg.FALSE ).build() ) + .build() ); + declarations.put( LogicalLpgUnwind.class, PolyAlgDeclaration.builder() + .creator( LogicalLpgUnwind::create ).model( DataModel.GRAPH ) + .opName( "LPG_UNWIND" ).numInputs( 1 ).opTags( logAllProTags ) + .param( Parameter.builder().name( "index" ).tag( ParamTag.NON_NEGATIVE ).type( ParamType.INTEGER ).build() ) + .param( Parameter.builder().name( "alias" ).type( ParamType.STRING ).defaultValue( StringArg.NULL ).build() ) + .build() ); + declarations.put( LogicalLpgAggregate.class, PolyAlgDeclaration.builder() + .creator( LogicalLpgAggregate::create ).model( DataModel.GRAPH ) + .opName( "LPG_AGGREGATE" ).opAlias( "LPG_AGG" ).numInputs( 1 ).opTags( logAllTags ) + .param( Parameter.builder().name( "groups" ).alias( "group" ).multiValued( 1 ).type( ParamType.REX ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "aggregates" ).alias( "aggs" ).multiValued( 1 ).type( ParamType.LAX_AGGREGATE ).defaultValue( ListArg.EMPTY ).build() ) + .build() ); + declarations.put( LogicalLpgModify.class, PolyAlgDeclaration.builder() + .creator( LogicalLpgModify::create ).model( DataModel.GRAPH ) + .opName( "LPG_MODIFY" ).numInputs( 1 ).opTags( logAllProTags ) + .param( Parameter.builder().name( "entity" ).type( ParamType.ENTITY ).build() ) + .param( Parameter.builder().name( "operation" ).type( ParamType.MODIFY_OP_ENUM ).build() ) + .param( Parameter.builder().name( "updates" ).alias( "operations" ).multiValued( 1 ).type( ParamType.REX ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "ids" ).multiValued( 1 ).type( ParamType.STRING ).defaultValue( ListArg.EMPTY ).build() ) + .build() ); + declarations.put( LogicalLpgTransformer.class, PolyAlgDeclaration.builder() + .creator( LogicalLpgTransformer::create ).model( DataModel.GRAPH ) + .opName( "LPG_TRANSFORMER" ).numInputs( -1 
).opTags( ImmutableList.of( OperatorTag.ALLOCATION, OperatorTag.ADVANCED ) ) + .param( Parameter.builder().name( "operation" ).type( ParamType.MODIFY_OP_ENUM ).build() ) + .param( Parameter.builder().name( "order" ).multiValued( 1 ).type( ParamType.POLY_TYPE_ENUM ).defaultValue( ListArg.EMPTY ).build() ) + .build() ); + declarations.put( LogicalLpgValues.class, PolyAlgDeclaration.builder() + .creator( LogicalLpgValues::create ).model( DataModel.GRAPH ) + .opName( "LPG_VALUES" ).numInputs( 0 ).opTags( logAllProTags ) + .param( Parameter.builder().name( "nodes" ).multiValued( 1 ).type( ParamType.REX ).tag( ParamTag.POLY_NODE ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "edges" ).multiValued( 1 ).type( ParamType.REX ).tag( ParamTag.POLY_PATH ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "values" ).multiValued( 2 ).type( ParamType.REX ).defaultValue( ListArg.NESTED_EMPTY ).build() ) + .build() ); + + // Common + declarations.put( LogicalBatchIterator.class, PolyAlgDeclaration.builder() + .creator( LogicalBatchIterator::create ).model( null ) + .opName( "BATCH_ITERATOR" ).opAlias( "BATCH" ).numInputs( 1 ).opTags( logAllProTags ) + .build() ); + declarations.put( LogicalTransformer.class, PolyAlgDeclaration.builder() + .creator( LogicalTransformer::create ).model( null ) + .opName( "TRANSFORMER" ).numInputs( -1 ).opTags( ImmutableList.of( OperatorTag.ALLOCATION, OperatorTag.ADVANCED ) ) + .param( Parameter.builder().name( "out" ).alias( "outModel" ).type( ParamType.DATAMODEL_ENUM ).build() ) + .param( Parameter.builder().name( "names" ).multiValued( 1 ).type( ParamType.STRING ).defaultValue( ListArg.EMPTY ).build() ) + .build() ); + + // Physical + addEnumerableDeclarations(); + addBindableDeclarations(); + } + + + private static void addEnumerableDeclarations() { + ImmutableList physTags = ImmutableList.of( OperatorTag.PHYSICAL, OperatorTag.ADVANCED ); + Convention c = EnumerableConvention.INSTANCE; + + declarations.put( EnumerableProject.class, PolyAlgDeclaration.builder() + .creator( EnumerableProject::create ).model( null ) + .opName( "E_PROJECT" ).convention( c ).numInputs( 1 ).opTags( physTags ) + .params( getParams( LogicalRelProject.class ) ) + .build() ); + declarations.put( EnumerableInterpreter.class, PolyAlgDeclaration.builder() + .creator( EnumerableInterpreter::create ).model( null ) + .opName( "E_INTERPRETER" ).convention( c ).numInputs( 1 ).opTags( physTags ) + .param( Parameter.builder().name( "factor" ).tag( ParamTag.NON_NEGATIVE ).type( ParamType.DOUBLE ).build() ) + .build() ); + declarations.put( EnumerableAggregate.class, PolyAlgDeclaration.builder() + .creator( EnumerableAggregate::create ).model( null ) + .opName( "E_AGGREGATE" ).convention( c ).numInputs( 1 ).opTags( physTags ) + .params( getParams( LogicalRelAggregate.class ) ) + .build() ); + declarations.put( EnumerableCalc.class, PolyAlgDeclaration.builder() + .creator( EnumerableCalc::create ).model( null ) + .opName( "E_CALC" ).convention( c ).numInputs( 1 ).opTags( physTags ) + .params( getParams( LogicalCalc.class ) ) + .build() ); + declarations.put( EnumerableJoin.class, PolyAlgDeclaration.builder() + .creator( EnumerableJoin::create ).model( null ) + .opName( "E_JOIN" ).numInputs( 2 ).opTags( physTags ) + .param( Parameter.builder().name( "condition" ).alias( "on" ).type( ParamType.REX ).simpleType( SimpleType.REX_PREDICATE ).build() ) + .param( Parameter.builder().name( "type" ).type( ParamType.JOIN_TYPE_ENUM ).defaultValue( new EnumArg<>( 
JoinAlgType.INNER, ParamType.JOIN_TYPE_ENUM ) ).build() ) + .param( Parameter.builder().name( "variables" ).type( ParamType.CORR_ID ).simpleType( SimpleType.HIDDEN ).multiValued( 1 ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "leftKeys" ).multiValued( 1 ).type( ParamType.INTEGER ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "rightKeys" ).multiValued( 1 ).type( ParamType.INTEGER ).defaultValue( ListArg.EMPTY ).build() ) + .build() ); + declarations.put( EnumerableMergeJoin.class, PolyAlgDeclaration.builder() + .creator( EnumerableMergeJoin::create ).model( null ) + .opName( "E_MERGE_JOIN" ).numInputs( 2 ).opTags( physTags ) + .params( getParams( EnumerableJoin.class ) ) + .build() ); + declarations.put( EnumerableSemiJoin.class, PolyAlgDeclaration.builder() + .creator( EnumerableSemiJoin::create ).model( null ) + .opName( "E_SEMI_JOIN" ).numInputs( 2 ).opTags( physTags ) + .param( Parameter.builder().name( "condition" ).alias( "on" ).type( ParamType.REX ).simpleType( SimpleType.REX_PREDICATE ).build() ) + .param( Parameter.builder().name( "leftKeys" ).multiValued( 1 ).type( ParamType.INTEGER ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "rightKeys" ).multiValued( 1 ).type( ParamType.INTEGER ).defaultValue( ListArg.EMPTY ).build() ) + .build() ); + declarations.put( EnumerableSort.class, PolyAlgDeclaration.builder() + .creator( EnumerableSort::create ).model( null ) + .opName( "E_SORT" ).convention( c ).numInputs( 1 ).opTags( physTags ) + .params( getParams( LogicalRelSort.class ) ) + .build() ); + declarations.put( EnumerableUnion.class, PolyAlgDeclaration.builder() + .creator( EnumerableUnion::create ).model( null ) + .opName( "E_UNION" ).convention( c ).numInputs( -1 ).opTags( physTags ) + .params( getParams( LogicalRelUnion.class ) ) + .build() ); + declarations.put( EnumerableIntersect.class, PolyAlgDeclaration.builder() + .creator( EnumerableIntersect::create ).model( null ) + .opName( "E_INTERSECT" ).convention( c ).numInputs( -1 ).opTags( physTags ) + .params( getParams( LogicalRelIntersect.class ) ) + .build() ); + declarations.put( EnumerableMinus.class, PolyAlgDeclaration.builder() + .creator( EnumerableMinus::create ).model( null ) + .opName( "E_MINUS" ).convention( c ).numInputs( -1 ).opTags( physTags ) + .params( getParams( LogicalRelMinus.class ) ) + .build() ); + declarations.put( EnumerableValues.class, PolyAlgDeclaration.builder() + .creator( EnumerableValues::create ).model( DataModel.RELATIONAL ) + .opName( "E_VALUES" ).convention( c ).numInputs( 0 ).opTags( physTags ) + .params( getParams( LogicalRelValues.class ) ) + .build() ); + declarations.put( EnumerableLimit.class, PolyAlgDeclaration.builder() + .creator( EnumerableLimit::create ).model( null ) + .opName( "E_LIMIT" ).convention( c ).numInputs( 1 ).opTags( physTags ) + .param( Parameter.builder().name( "limit" ).alias( "fetch" ).type( ParamType.REX ).simpleType( SimpleType.REX_UINT ).defaultValue( RexArg.NULL ).build() ) + .param( Parameter.builder().name( "offset" ).type( ParamType.REX ).simpleType( SimpleType.HIDDEN ).defaultValue( RexArg.NULL ).build() ) + .build() ); + declarations.put( EnumerableTransformer.class, PolyAlgDeclaration.builder() + .creator( EnumerableTransformer::create ).model( null ) + .opName( "E_TRANSFORMER" ).convention( c ).numInputs( -1 ).opTags( physTags ) + .param( Parameter.builder().name( "out" ).alias( "outModel" ).type( ParamType.DATAMODEL_ENUM ).build() ) + .param( 
Parameter.builder().name( "names" ).multiValued( 1 ).type( ParamType.STRING ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "isCrossModel" ).type( ParamType.BOOLEAN ).defaultValue( BooleanArg.FALSE ).build() ) + .build() ); + declarations.put( EnumerableLpgMatch.class, PolyAlgDeclaration.builder() + .creator( EnumerableLpgMatch::create ).model( DataModel.GRAPH ) + .opName( "E_LPG_MATCH" ).convention( c ).numInputs( 1 ).opTags( physTags ) + .params( getParams( LogicalLpgMatch.class ) ) + .build() ); + declarations.put( EnumerableCollect.class, PolyAlgDeclaration.builder() + .creator( EnumerableCollect::create ).model( null ) + .opName( "E_COLLECT" ).convention( c ).numInputs( 1 ).opTags( physTags ) + .param( Parameter.builder().name( "field" ).type( ParamType.STRING ).build() ) + .build() ); + declarations.put( EnumerableModifyCollect.class, PolyAlgDeclaration.builder() + .creator( EnumerableModifyCollect::create ).model( null ) + .opName( "E_MODIFY_COLLECT" ).convention( c ).numInputs( -1 ).opTags( physTags ) + .params( getParams( LogicalModifyCollect.class ) ) + .build() ); + declarations.put( EnumerableContextSwitcher.class, PolyAlgDeclaration.builder() + .creator( EnumerableContextSwitcher::create ).model( null ) + .opName( "E_CONTEXT_SWITCHER" ).convention( c ).numInputs( 1 ).opTags( physTags ) + .build() ); + } + + + private static void addBindableDeclarations() { + ImmutableList physTags = ImmutableList.of( OperatorTag.PHYSICAL, OperatorTag.ADVANCED ); + Convention c = BindableConvention.INSTANCE; + + declarations.put( BindableScan.class, PolyAlgDeclaration.builder() + .creator( BindableScan::create ).model( DataModel.RELATIONAL ) + .opName( "BINDABLE_SCAN" ).convention( c ).numInputs( 0 ).opTags( physTags ) + .param( Parameter.builder().name( "entity" ).type( ParamType.ENTITY ).build() ) + .param( Parameter.builder().name( "filters" ).multiValued( 1 ).type( ParamType.REX ).defaultValue( ListArg.EMPTY ).build() ) + .param( Parameter.builder().name( "projects" ).multiValued( 1 ).type( ParamType.INTEGER ).defaultValue( ListArg.EMPTY ).build() ) + .build() ); + } + + + private static void populateClassesMap() { + for ( Map.Entry, PolyAlgDeclaration> entry : declarations.entrySet() ) { + Class clazz = entry.getKey(); + PolyAlgDeclaration declaration = entry.getValue(); + + assert !classes.containsKey( declaration.opName ); + classes.put( declaration.opName, clazz ); + for ( String alias : declaration.opAliases ) { + assert !classes.containsKey( alias ); + classes.put( alias, clazz ); + } + } + } + + + public static PolyAlgDeclaration getDeclaration( Class clazz ) { + return getDeclaration( clazz, DataModel.getDefault(), 0 ); + } + + + /** + * Retrieves the PolyAlgDeclaration associated with the specified class from a map of declarations, + * or returns a default PolyAlgDeclaration if none is found. + * + * @param clazz The class for which the PolyAlgDeclaration is being retrieved + * @param model the default DataModel to be used if the class is not registered + * @param numInputs The number of inputs associated with the PolyAlgDeclaration if a new one is created. + * @return The PolyAlgDeclaration associated with the specified class if found in the map, + * or a new PolyAlgDeclaration initialized with the class name and the specified number of inputs. 
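+ * For example (illustrative), {@code getDeclaration( LogicalRelProject.class, DataModel.RELATIONAL, 1 )}
+ * returns the registered declaration of the project operator, while a hypothetical unregistered class
+ * {@code MyCustomAlg.class} would yield a fallback declaration whose {@code opName} is {@code "MyCustomAlg"}.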
+ */ + public static PolyAlgDeclaration getDeclaration( Class clazz, DataModel model, int numInputs ) { + return declarations.getOrDefault( + clazz, + PolyAlgDeclaration.builder().opName( clazz.getSimpleName() ).model( model ).numInputs( numInputs ).build() ); + } + + + public static Class getClass( String opName ) { + return classes.get( opName ); + } + + + /** + * Retrieves the PolyAlgDeclaration associated with the specified operator + * or returns null if the operator is not known. + * It is also possible to use an alias for the operator name. + * + * @param opName The name of the operator or one of its aliases + * @return The PolyAlgDeclaration associated with the opName if it exists, + * or null otherwise. + */ + public static PolyAlgDeclaration getDeclaration( String opName ) { + return declarations.get( getClass( opName ) ); + } + + + /** + * Registers the specified declaration for the given AlgNode class. + * This is only allowed during the initialization of Polypheny-DB. + * As soon as a UI instance has requested the registry, this will result in an assertion error. + *

+ * General steps to follow for adding support for a new AlgNode:
+ * <ol>
+ *     <li>Create a {@link PolyAlgDeclaration} that declares the parameters and their corresponding types.
+ *     If no fitting {@link ParamType} exists yet, you can create a new one together with a corresponding {@link org.polypheny.db.algebra.polyalg.arguments.PolyAlgArg}</li>
+ *     <li>Implement {@link AlgNode#bindArguments()} or use the implementation of a superclass
+ *     (given that it is compatible with the declaration).</li>
+ *     <li>Write a static creator method in your AlgNode that has the signature {@code create( PolyAlgArgs args, List<AlgNode> children, AlgCluster cluster )}</li>
+ *     <li>Register the declaration once during initialization using this method.</li>
+ * </ol>
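+ * <p>
+ * A minimal sketch of such a registration (the operator, its creator method and its parameter are
+ * hypothetical and only illustrate the pattern):
+ * <pre>{@code
+ * PolyAlgRegistry.register( MyCustomFilter.class, PolyAlgDeclaration.builder()
+ *         .creator( MyCustomFilter::create ).model( DataModel.RELATIONAL )
+ *         .opName( "MY_CUSTOM_FILTER" ).numInputs( 1 )
+ *         .opTags( ImmutableList.of( OperatorTag.ADVANCED ) )
+ *         .param( Parameter.builder().name( "condition" ).type( ParamType.REX ).simpleType( SimpleType.REX_PREDICATE ).build() )
+ *         .build() );
+ * }</pre>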
    + * + * @param clazz The class for which the PolyAlgDeclaration is being registered + * @param decl The PolyAlgDeclaration to register + */ + public static void register( Class clazz, PolyAlgDeclaration decl ) { + if ( serialized != null ) { + log.warn( "PolyAlg operator was registered after the registry was already serialized!" ); + } + assert !declarations.containsKey( clazz ); + declarations.put( clazz, decl ); + + assert !classes.containsKey( decl.opName ); + classes.put( decl.opName, clazz ); + for ( String alias : decl.opAliases ) { + assert !classes.containsKey( alias ); + classes.put( alias, clazz ); + } + } + + + /** + * Retrieves a mutable list containing all parameters of a previously registered declaration. + * This can be useful when multiple operators share the same arguments. + * + * @param clazz the class whose declaration will be used + * @return a list containing all parameters of the declaration corresponding to the specified class + */ + public static List getParams( Class clazz ) { + PolyAlgDeclaration decl = declarations.get( clazz ); + List params = new ArrayList<>( decl.posParams ); + params.addAll( decl.kwParams ); + return params; + } + + + public static ObjectNode serialize() { + if ( serialized == null ) { + ObjectMapper mapper = new ObjectMapper(); + ObjectNode node = mapper.createObjectNode(); + + ObjectNode decls = mapper.createObjectNode(); + + for ( PolyAlgDeclaration decl : declarations.values() ) { + decls.set( decl.opName, decl.serialize( mapper ) ); + } + node.set( "declarations", decls ); + + ObjectNode enums = mapper.createObjectNode(); + for ( ParamType type : ParamType.getEnumParamTypes() ) { + ArrayNode values = mapper.createArrayNode(); + for ( Enum enumValue : type.getEnumClass().getEnumConstants() ) { + values.add( enumValue.name() ); + } + enums.set( type.name(), values ); + } + + ArrayNode values = mapper.createArrayNode(); + for ( OperatorName operator : OperatorName.values() ) { + if ( operator.getClazz() == AggFunction.class ) { + values.add( operator.name() ); + } + } + enums.set( "AggFunctionOperator", values ); + + node.set( "enums", enums ); + + serialized = node; + } + + return serialized; + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/PolyAlgUtils.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/PolyAlgUtils.java new file mode 100644 index 0000000000..8f3798eb2f --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/PolyAlgUtils.java @@ -0,0 +1,652 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.algebra.polyalg; + + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.common.collect.ImmutableList; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.util.ArrayList; +import java.util.List; +import java.util.Map.Entry; +import java.util.function.Function; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.NotImplementedException; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.constant.Kind; +import org.polypheny.db.algebra.constant.Syntax; +import org.polypheny.db.algebra.logical.relational.LogicalRelProject; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.algebra.polyalg.PolyAlgMetadata.GlobalStats; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArg; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; +import org.polypheny.db.languages.OperatorRegistry; +import org.polypheny.db.rex.RexCall; +import org.polypheny.db.rex.RexCorrelVariable; +import org.polypheny.db.rex.RexDigestIncludeType; +import org.polypheny.db.rex.RexDynamicParam; +import org.polypheny.db.rex.RexElementRef; +import org.polypheny.db.rex.RexFieldAccess; +import org.polypheny.db.rex.RexFieldCollation; +import org.polypheny.db.rex.RexIndexRef; +import org.polypheny.db.rex.RexLiteral; +import org.polypheny.db.rex.RexLocalRef; +import org.polypheny.db.rex.RexNameRef; +import org.polypheny.db.rex.RexNode; +import org.polypheny.db.rex.RexOver; +import org.polypheny.db.rex.RexPatternFieldRef; +import org.polypheny.db.rex.RexRangeRef; +import org.polypheny.db.rex.RexSubQuery; +import org.polypheny.db.rex.RexTableIndexRef; +import org.polypheny.db.rex.RexVisitor; +import org.polypheny.db.rex.RexWindow; +import org.polypheny.db.rex.RexWindowBound; +import org.polypheny.db.type.PolyType; +import org.polypheny.db.type.entity.PolyList; +import org.polypheny.db.type.entity.PolyString; +import org.polypheny.db.type.entity.PolyValue; +import org.polypheny.db.type.entity.document.PolyDocument; +import org.polypheny.db.type.entity.graph.GraphPropertyHolder; +import org.polypheny.db.type.entity.graph.PolyDictionary; +import org.polypheny.db.type.entity.graph.PolyEdge; +import org.polypheny.db.type.entity.graph.PolyEdge.EdgeDirection; +import org.polypheny.db.type.entity.graph.PolyNode; +import org.polypheny.db.type.entity.graph.PolyPath; +import org.polypheny.db.util.Pair; +import org.polypheny.db.util.Quadruple; +import org.polypheny.db.util.ValidatorUtil; + +@Slf4j +public class PolyAlgUtils { + + private static final Pattern CAST_PATTERN; + public static final String ELEMENT_REF_PREFIX = "$elem"; + + + static { + // matches group "my_field" in "CAST(my_field AS INTEGER)" + CAST_PATTERN = Pattern.compile( "^CAST\\(([^ )]+) AS.+\\)$", Pattern.CASE_INSENSITIVE ); + } + + + public static String appendAlias( String exp, String alias ) { + if ( alias == null || alias.equals( exp ) || isCastWithSameName( exp, alias ) ) { + return exp; + } + String sanitized = sanitizeIdentifier( alias ); + if ( sanitized.equals( exp ) ) { + return exp; + } + return exp + " AS " + sanitized; + } + + + public static String sanitizeIdentifier( String alias ) { + if ( (alias.startsWith( "'" ) && alias.endsWith( "'" )) || (alias.startsWith( "\"" ) && alias.endsWith( "\"" )) ) { + return alias; + } + if 
( alias.matches( "[a-zA-Z#$@öÖäÄüÜàÀçÇáÁèÈíÍîÎóÓòôÔÒíÍëËâÂïÏéÉñÑß.\\d]*" ) ) { + return alias; + } + return "\"" + alias + "\""; + } + + + /** + * Each element in exps is compared with the corresponding element in aliases. + * If they differ (and not just by a CAST expression), the alias is appended to the element, separated by the keyword {@code AS}. + * For example {@code AVG(age) AS average}. + * + * @param exps List of strings to be assigned an alias + * @param aliases List with each element being the alias for the corresponding value in exps + * @return Copy of the list exps with aliases appended where values differ + */ + public static List appendAliases( List exps, List aliases ) { + assert exps.size() == aliases.size(); + List list = new ArrayList<>(); + for ( int i = 0; i < exps.size(); i++ ) { + list.add( appendAlias( exps.get( i ), aliases.get( i ) ) ); + } + return list; + } + + + private static boolean isCastWithSameName( String exp, String alias ) { + Matcher m = CAST_PATTERN.matcher( exp ); + return m.find() && m.group( 1 ).equals( alias ); + } + + + /** + * Joins the values for a multivalued attribute into a single string. + * If values contains more than one element, the returned string is surrounded with brackets to represent a list. + * An empty list is indicated with {@code "[]"}. + * + * @param values the values to be joined + * @param omitBrackets whether the surrounding brackets in the case of multiple values should be omitted + * @return a string either representing a list containing all entries of values or a single value if values is of size 1 + */ + public static String joinMultiValued( List values, boolean omitBrackets ) { + if ( values.isEmpty() ) { + return "[]"; + } + String str = String.join( ", ", values ); + return (omitBrackets || values.size() <= 1) ? str : "[" + str + "]"; + } + + + public static String joinMultiValuedWithBrackets( List values ) { + String str = String.join( ", ", values ); + return "[" + str + "]"; + } + + + /** + * Returns a ListArg (with unpackValues = false) corresponding to the projects argument of an implicit PROJECT operator required to rename the fieldNames + * of child to the corresponding fieldNames in inputFieldNames. + * If the projection is non-trivial, the returned ListArg will contain {@code child.getTupleType().getFieldCount()} entries. + * + * @param child the child whose implicit projections should be generated + * @param inputFieldNames the names of the fields of all children after renaming + * @param startIndex index of the first field of child in inputFieldNames + * @return ListArg representing the projects argument or null if no projections are required. 
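+ * For example (illustrative): if the child produces fields {@code [id, name]} but the uniquified
+ * {@code inputFieldNames} list the corresponding entries as {@code [id, name0]} (with {@code startIndex == 0}),
+ * the returned argument projects field 0 as {@code id} and field 1 as {@code name0}.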
+ */ + public static ListArg getAuxProjections( AlgNode child, List inputFieldNames, int startIndex ) { + List from = new ArrayList<>(); + List to = new ArrayList<>(); + List names = child.getTupleType().getFieldNames(); + boolean isTrivial = true; + + for ( int i = 0; i < names.size(); i++ ) { + String name = names.get( i ); + String uniqueName = inputFieldNames.get( startIndex + i ); + from.add( RexIndexRef.of( i, child.getTupleType() ) ); + to.add( uniqueName ); + if ( name != null && !name.equals( uniqueName ) ) { + isTrivial = false; + } + } + if ( isTrivial ) { + return null; + } + return new ListArg<>( from, RexArg::new, to, false ); + } + + + public static List getInputFieldNamesList( AlgNode context ) { + if ( context == null ) { + return List.of(); + } + return context.getInputs().stream() + .flatMap( node -> node.getTupleType().getFieldNames().stream() ) + .toList(); + } + + + public static List uniquifiedInputFieldNames( AlgNode context ) { + List names = getInputFieldNamesList( context ); + return ValidatorUtil.uniquify( names, ValidatorUtil.ATTEMPT_SUGGESTER, true ); + } + + + public static List> getNestedListArgAsList( ListArg outerListArg ) { + List> outerList = new ArrayList<>(); + for ( List list : outerListArg.map( ListArg::getArgs ) ) { + if ( list.isEmpty() ) { + // empty inner lists are not supported + } else { + outerList.add( list ); + } + } + return outerList; + } + + + public static List> getNestedListArgAsList( ListArg outerListArg, Function mapper ) { + List> outerList = new ArrayList<>(); + for ( List list : outerListArg.map( ListArg::getArgs ) ) { + if ( list.isEmpty() ) { + outerList.add( List.of() ); + } else { + outerList.add( list.stream().map( mapper ).toList() ); + } + } + return outerList; + } + + + public static ImmutableList> toImmutableNestedList( List> nestedList ) { + ImmutableList.Builder> builder = ImmutableList.builder(); + + for ( List innerList : nestedList ) { + builder.add( ImmutableList.copyOf( innerList ) ); + } + + return builder.build(); + } + + + public static String digestWithNames( RexNode expr, List inputFieldNames ) { + return expr.accept( new NameReplacer( inputFieldNames ) ); + } + + + public static ObjectNode wrapInRename( AlgNode child, ListArg projections, AlgNode context, List inputFieldNames, ObjectMapper mapper, GlobalStats gs ) { + ObjectNode node = mapper.createObjectNode(); + PolyAlgDeclaration decl = PolyAlgRegistry.getDeclaration( LogicalRelProject.class ); + node.put( "opName", decl.opName ); + + ObjectNode argNode = mapper.createObjectNode(); + argNode.put( "type", ParamType.LIST.name() ); + argNode.set( "value", projections.serialize( context, inputFieldNames, mapper ) ); + + node.set( "arguments", mapper.createObjectNode().set( decl.getPos( 0 ).getName(), argNode ) ); + node.set( "metadata", PolyAlgMetadata.getMetadataForAuxiliaryNode( mapper ) ); + + node.set( "inputs", mapper.createArrayNode().add( child.serializePolyAlgebra( mapper, gs ) ) ); + return node; + } + + + public static PolyPath buildPolyPath( List nodes, List, PolyString, EdgeDirection>> edgeArgs ) { + List edges = new ArrayList<>(); + for ( int i = 0; i < edgeArgs.size(); i++ ) { + Quadruple, PolyString, EdgeDirection> e = edgeArgs.get( 0 ); + PolyNode source = nodes.get( i ); + PolyNode target = nodes.get( i + 1 ); + edges.add( new PolyEdge( e.a, e.b, source.id, target.id, e.d, e.c ) ); + } + return PolyPath.create( nodes.stream().map( n -> Pair.of( n.variableName, n ) ).toList(), + edges.stream().map( e -> Pair.of( e.variableName, e ) ).toList() ); 
+ } + + + public static class NameReplacer implements RexVisitor { + + private final List names; + + + public NameReplacer( List names ) { + this.names = names; + } + + + @Override + public String visitIndexRef( RexIndexRef inputRef ) { + return sanitizeIdentifier( names.get( inputRef.getIndex() ) ); + } + + + @Override + public String visitLocalRef( RexLocalRef localRef ) { + String type = localRef.getType().toString(); + if ( type.contains( "[" ) ) { + // DocumentTypes should probably be handled better + type = localRef.getType().getPolyType().getTypeName(); + } + return RexLocalRef.PREFIX + localRef.getIndex() + ":" + type; + } + + + @Override + public String visitLiteral( RexLiteral literal ) { + return visitLiteral( literal, RexDigestIncludeType.OPTIONAL ); + } + + + @Override + public String visitCall( RexCall call ) { + // This code follows call.toString(), but uses the visitor for nested RexNodes + + boolean withType = call.isA( Kind.CAST ) || call.isA( Kind.NEW_SPECIFICATION ); + final StringBuilder sb = new StringBuilder( OperatorRegistry.getUniqueName( call.op ) ); + if ( (!call.operands.isEmpty()) && (call.op.getSyntax() == Syntax.FUNCTION_ID) ) { + // Don't print params for empty arg list. For example, we want "SYSTEM_USER", not "SYSTEM_USER()". + } else { + sb.append( "(" ); + appendOperands( call, sb ); + if ( withType ) { + sb.append( " AS " ); // this is different to the syntax of type specification for literals to be closer to SQL syntax + sb.append( call.type.getFullTypeString() ); + } + sb.append( ")" ); + } + return sb.toString(); + } + + + @Override + public String visitOver( RexOver over ) { + log.warn( "Serialization is not yet correctly implemented for RexWindow." ); + boolean withType = over.isA( Kind.CAST ) || over.isA( Kind.NEW_SPECIFICATION ); + final StringBuilder sb = new StringBuilder( OperatorRegistry.getUniqueName( over.op ) ); + sb.append( "(" ); + if ( over.isDistinct() ) { + sb.append( "DISTINCT " ); + } + appendOperands( over, sb ); + sb.append( ")" ); + if ( withType ) { + sb.append( ":" ); + sb.append( over.type.getFullTypeString() ); + } + sb.append( " OVER (" ) + .append( visitRexWindow( over.getWindow() ) ) + .append( ")" ); + return sb.toString(); + } + + + @Override + public String visitCorrelVariable( RexCorrelVariable correlVariable ) { + return correlVariable.getName(); + } + + + @Override + public String visitDynamicParam( RexDynamicParam dynamicParam ) { + String type = dynamicParam.type.toString(); + if ( type.contains( "[" ) ) { + // DocumentTypes should probably be handled better + type = dynamicParam.type.getPolyType().getTypeName(); + } + return "?" + dynamicParam.getIndex() + ":" + type; + } + + + @Override + public String visitRangeRef( RexRangeRef rangeRef ) { + // Regular RexNode trees do not contain this construct + return rangeRef.toString(); + } + + + @Override + public String visitFieldAccess( RexFieldAccess fieldAccess ) { + return fieldAccess.getReferenceExpr().accept( this ) + "." 
+ fieldAccess.getField().getName(); + } + + + @Override + public String visitSubQuery( RexSubQuery subQuery ) { + /* final StringBuilder sb = new StringBuilder( OperatorRegistry.getUniqueName( subQuery.op ) ); + sb.append( "(" ); + for ( RexNode operand : subQuery.operands ) { + sb.append( operand ); + sb.append( ", " ); + } + sb.append( "{\n" ); + subQuery.alg.buildPolyAlgebra( sb ); + sb.append( "})" ); + return "subQuery: " + sb; */ + throw new NotImplementedException( "RexSubQuery can not yet be serialized to PolyAlgebra" ); + } + + + @Override + public String visitTableInputRef( RexTableIndexRef fieldRef ) { + throw new NotImplementedException( "tableInputRef can not yet be serialized to PolyAlgebra" ); + } + + + @Override + public String visitPatternFieldRef( RexPatternFieldRef fieldRef ) { + throw new NotImplementedException( "patternFieldRef can not yet be serialized to PolyAlgebra" ); + } + + + @Override + public String visitNameRef( RexNameRef nameRef ) { + String names = String.join( ".", nameRef.getNames() ); + if ( nameRef.getIndex().isPresent() ) { + return names + "@" + nameRef.getIndex().get(); + } + return names; + } + + + @Override + public String visitElementRef( RexElementRef elemRef ) { + if ( elemRef.type.getPolyType() != PolyType.DOCUMENT ) { + throw new NotImplementedException( "PolyAlg for RexElementRef is currently only supported for DocumentType" ); + } + return ELEMENT_REF_PREFIX + "(" + elemRef.getCollectionRef().accept( this ) + ")"; + } + + + private void appendOperands( RexCall call, StringBuilder sb ) { + for ( int i = 0; i < call.operands.size(); i++ ) { + if ( i > 0 ) { + sb.append( ", " ); + } + RexNode operand = call.operands.get( i ); + if ( !(operand instanceof RexLiteral) ) { + sb.append( operand.accept( this ) ); + continue; + } + // Type information might be omitted in certain cases to improve readability + // For instance, AND/OR arguments should be BOOLEAN, so AND(true, null) is better than AND(true, null:BOOLEAN), and we keep the same info +($0, 2) is better than +($0, 2:BIGINT). Note: if $0 has BIGINT, + // then 2 is expected to be of BIGINT type as well. 
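+ // In short: boolean operands of AND/OR never carry a type suffix, and for simple binary operators
+ // the literal's type is dropped when it matches the other operand's type (and that operand would
+ // not print a type itself).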
+ RexDigestIncludeType includeType = RexDigestIncludeType.OPTIONAL; + if ( (call.isA( Kind.AND ) || call.isA( Kind.OR )) && operand.getType().getPolyType() == PolyType.BOOLEAN ) { + includeType = RexDigestIncludeType.NO_TYPE; + } + if ( RexCall.SIMPLE_BINARY_OPS.contains( call.getKind() ) ) { + RexNode otherArg = call.operands.get( 1 - i ); + if ( (!(otherArg instanceof RexLiteral) || ((RexLiteral) otherArg).digestIncludesType() == RexDigestIncludeType.NO_TYPE) && RexCall.equalSansNullability( operand.getType(), otherArg.getType() ) ) { + includeType = RexDigestIncludeType.NO_TYPE; + } + } + sb.append( visitLiteral( (RexLiteral) operand, includeType ) ); + } + } + + + private String visitRexWindow( RexWindow window ) { + StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter( sw ); + int clauseCount = 0; + if ( !window.partitionKeys.isEmpty() ) { + clauseCount++; + pw.print( "PARTITION BY " ); + for ( int i = 0; i < window.partitionKeys.size(); i++ ) { + if ( i > 0 ) { + pw.print( ", " ); + } + RexNode partitionKey = window.partitionKeys.get( i ); + pw.print( partitionKey.accept( this ) ); + } + } + if ( window.orderKeys.size() > 0 ) { + if ( clauseCount++ > 0 ) { + pw.print( ' ' ); + } + pw.print( "ORDER BY " ); + for ( int i = 0; i < window.orderKeys.size(); i++ ) { + if ( i > 0 ) { + pw.print( ", " ); + } + RexFieldCollation orderKey = window.orderKeys.get( i ); + pw.print( orderKey.toString( this ) ); + } + } + if ( window.getLowerBound() == null ) { + // No ROWS or RANGE clause + } else if ( window.getUpperBound() == null ) { + if ( clauseCount++ > 0 ) { + pw.print( ' ' ); + } + if ( window.isRows() ) { + pw.print( "ROWS " ); + } else { + pw.print( "RANGE " ); + } + pw.print( visitRexWindowBound( window.getLowerBound() ) ); + } else { + if ( clauseCount++ > 0 ) { + pw.print( ' ' ); + } + if ( window.isRows() ) { + pw.print( "ROWS BETWEEN " ); + } else { + pw.print( "RANGE BETWEEN " ); + } + pw.print( visitRexWindowBound( window.getLowerBound() ) ); + pw.print( " AND " ); + pw.print( visitRexWindowBound( window.getUpperBound() ) ); + } + return sw.toString(); + } + + + private String visitRexWindowBound( RexWindowBound bound ) { + // at this point it is simply much easier to rely on the toString method of the RexWindowBound subclasses. + return bound.toString( this ); + } + + + private String visitLiteral( RexLiteral literal, RexDigestIncludeType includeType ) { + PolyValue value = literal.value; + String str = visitPolyValue( value ); + if ( str == null ) { + str = literal.computeDigest( includeType ); + } + return str; + } + + + private String visitPolyValue( PolyValue value ) { + if ( value.isNode() ) { + return visitPolyNode( value.asNode(), true ); + } else if ( value.isPath() ) { + return visitPolyPath( value.asPath(), true ); + } else if ( value.isEdge() ) { + return visitPolyEdge( value.asEdge(), true ); + } else if ( value.isList() ) { + return visitPolyList( value.asList() ); + } else if ( value.isDocument() ) { + return visitPolyDocument( value.asDocument() ); + } + return null; + } + + + private String visitPolyDocument( PolyDocument document ) { + return "PolyDocument " + document.toJson(); + } + + + private String visitPolyList( PolyList list ) { + return "PolyList " + list.toJson(); + } + + + private String visitPolyNode( PolyNode node, boolean withPrefix ) { + + String prefix = withPrefix ? 
"PolyNode " : ""; + return prefix + "(" + visitGraphLabelProps( node.labels, node.properties, node.variableName ) + ")"; + + } + + + private String visitPolyPath( PolyPath path, boolean withPrefix ) { + StringBuilder sb = new StringBuilder( withPrefix ? "PolyPath " : "" ); + for ( GraphPropertyHolder holder : path.getPath() ) { + if ( holder.isNode() ) { + sb.append( visitPolyNode( (PolyNode) holder, false ) ); + } else if ( holder.isEdge() ) { + sb.append( visitPolyEdge( (PolyEdge) holder, false ) ); + } + } + return sb.toString(); + } + + + private String visitPolyEdge( PolyEdge edge, boolean withPrefix ) { + + String left = "-", right = "-"; + switch ( edge.direction ) { + case LEFT_TO_RIGHT -> right += ">"; + case RIGHT_TO_LEFT -> left = "<" + left; + case NONE -> { + } + } + + StringBuilder sb = new StringBuilder(); + if ( withPrefix ) { + sb.append( "PolyEdge (" ).append( edge.left ).append( ")" ); + } + sb.append( left ); + String lp = visitGraphLabelProps( edge.labels, edge.properties, edge.variableName ); + if ( !lp.isEmpty() ) { + sb.append( "[" ) + .append( lp ) + .append( "]" ); + } + sb.append( right ); + if ( withPrefix ) { + sb.append( "(" ).append( edge.right ).append( ")" ); + } + return sb.toString(); + } + + + private String visitGraphLabelProps( PolyList lbls, PolyDictionary props, PolyString varName ) { + String name = (varName == null || varName.isNull()) ? "" : varName.toString(); + String labels = String.join( ":", lbls.stream().map( PolyString::toString ).toList() ); + String properties = visitPolyDictionary( props ); + if ( properties.equals( "{}" ) ) { + properties = ""; + } + String s = name; + + if ( !labels.isEmpty() ) { + s += ":" + labels; + } + if ( !s.isEmpty() && !properties.isEmpty() ) { + s += " "; + } + s += properties; + return s; + + } + + + private String visitPolyDictionary( PolyDictionary dict ) { + List propsList = new ArrayList<>(); + for ( Entry entry : dict.map.entrySet() ) { + PolyValue value = entry.getValue(); + String valueStr = visitPolyValue( value ); + if ( valueStr == null ) { + valueStr = switch ( value.type ) { + case VARCHAR, CHAR, TEXT -> value.asString().toTypedString( false ); + default -> value.toString(); + }; + } + propsList.add( entry.getKey().toString() + "=" + valueStr ); + } + return "{" + String.join( ", ", propsList ) + "}"; + } + + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/AggArg.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/AggArg.java new file mode 100644 index 0000000000..4d45cd0d94 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/AggArg.java @@ -0,0 +1,135 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.algebra.polyalg.arguments; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.util.List; +import lombok.Getter; +import lombok.NonNull; +import org.apache.commons.lang3.NotImplementedException; +import org.polypheny.db.algebra.AlgCollation; +import org.polypheny.db.algebra.AlgCollations; +import org.polypheny.db.algebra.AlgFieldCollation; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.core.Aggregate; +import org.polypheny.db.algebra.core.AggregateCall; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.algebra.polyalg.PolyAlgUtils; + +public class AggArg implements PolyAlgArg { + + @Getter + private final AggregateCall agg; + + + public AggArg( AggregateCall agg ) { + this.agg = agg; + } + + + @Override + public ParamType getType() { + return ParamType.AGGREGATE; + } + + + @Override + public String toPolyAlg( AlgNode context, @NonNull List inputFieldNames ) { + String str = aggToString( inputFieldNames ); + return PolyAlgUtils.appendAlias( str, getAggName( context ) ); + } + + + private String aggToString( List inputFieldNames ) { + StringBuilder buf = new StringBuilder( agg.getAggregation().getOperatorName().toString() ); + buf.append( "(" ); + List argList = agg.getArgList(); + AlgCollation collation = agg.getCollation(); + + if ( agg.isDistinct() ) { + buf.append( argList.isEmpty() ? "DISTINCT" : "DISTINCT " ); + } + int i = -1; + for ( Integer arg : argList ) { + if ( ++i > 0 ) { + buf.append( ", " ); + } + buf.append( inputFieldNames.get( arg ) ); + } + buf.append( ")" ); + if ( agg.isApproximate() ) { + buf.append( " APPROXIMATE" ); + } + if ( !collation.equals( AlgCollations.EMPTY ) ) { + throw new NotImplementedException( "Aggs using the WITHIN GROUP statement are not yet supported." 
); + /* + buf.append( " WITHIN GROUP (" ); + buf.append( collation ); + buf.append( ")" ); + */ + } + if ( agg.hasFilter() ) { + buf.append( " FILTER " ); + buf.append( inputFieldNames.get( agg.filterArg ) ); + } + return buf.toString(); + } + + + private String getAggName( AlgNode context ) { + String name = agg.getName(); + if ( name == null ) { + Aggregate instance = (Aggregate) context; + int i = instance.getAggCallList().indexOf( agg ); + if ( i != -1 ) { + i += instance.getGroupSet().asList().size(); + } + name = "$f" + i; + } + return name; + } + + + @Override + public ObjectNode serialize( AlgNode context, @NonNull List inputFieldNames, ObjectMapper mapper ) { + ObjectNode node = mapper.createObjectNode(); + node.put( "function", agg.getAggregation().getOperatorName().toString() ); + node.put( "distinct", agg.isDistinct() ); + node.put( "approximate", agg.isApproximate() ); + + ArrayNode argList = mapper.createArrayNode(); + for ( int idx : agg.getArgList() ) { + argList.add( inputFieldNames.get( idx ) ); + } + node.set( "argList", argList ); + + ArrayNode collList = mapper.createArrayNode(); + for ( AlgFieldCollation coll : agg.getCollation().getFieldCollations() ) { + collList.add( CollationArg.serialize( coll, inputFieldNames, mapper ) ); + } + node.set( "collList", collList ); + if ( agg.hasFilter() ) { + node.put( "filter", inputFieldNames.get( agg.filterArg ) ); + } + + node.put( "alias", getAggName( context ) ); + return node; + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/AnyArg.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/AnyArg.java new file mode 100644 index 0000000000..96baf3ae07 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/AnyArg.java @@ -0,0 +1,58 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg.arguments; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.util.List; +import lombok.NonNull; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; + +/** + * PolyAlgArgument implementation for the case where no other ParamType fits better. + * toPolyAlg() returns the default string representation of the object. 
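+ * For example (illustrative), {@code new AnyArg( 42 ).toPolyAlg( null, List.of() )} simply yields {@code "42"}.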
+ * This should be only used for the inner type of empty ListArgs + */ +public class AnyArg implements PolyAlgArg { + + private final Object arg; + + + public AnyArg( Object arg ) { + this.arg = arg; + } + + + @Override + public ParamType getType() { + return ParamType.ANY; + } + + + @Override + public String toPolyAlg( AlgNode context, @NonNull List inputFieldNames ) { + return arg.toString(); + } + + + @Override + public ObjectNode serialize( AlgNode context, @NonNull List inputFieldNames, ObjectMapper mapper ) { + return mapper.createObjectNode().put( "arg", arg.toString() ); + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/BooleanArg.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/BooleanArg.java new file mode 100644 index 0000000000..ac0a5d3b57 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/BooleanArg.java @@ -0,0 +1,61 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg.arguments; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.util.List; +import lombok.NonNull; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; + +public class BooleanArg implements PolyAlgArg { + + public static final BooleanArg FALSE = new BooleanArg( false ); + public static final BooleanArg TRUE = new BooleanArg( true ); + + private final boolean bool; + + + public BooleanArg( boolean bool ) { + this.bool = bool; + } + + + public boolean toBool() { + return bool; + } + + + @Override + public ParamType getType() { + return ParamType.BOOLEAN; + } + + + @Override + public String toPolyAlg( AlgNode context, @NonNull List inputFieldNames ) { + return Boolean.toString( bool ); + } + + + @Override + public ObjectNode serialize( AlgNode context, @NonNull List inputFieldNames, ObjectMapper mapper ) { + return mapper.createObjectNode().put( "arg", bool ); + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/CollationArg.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/CollationArg.java new file mode 100644 index 0000000000..e911266fa4 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/CollationArg.java @@ -0,0 +1,82 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg.arguments; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.util.List; +import lombok.Getter; +import lombok.NonNull; +import org.polypheny.db.algebra.AlgFieldCollation; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; + +public class CollationArg implements PolyAlgArg { + + public static final CollationArg NULL = new CollationArg( null ); + + @Getter + private final AlgFieldCollation coll; + + + public CollationArg( AlgFieldCollation coll ) { + this.coll = coll; + } + + + @Override + public ParamType getType() { + return ParamType.COLLATION; + } + + + @Override + public String toPolyAlg( AlgNode context, @NonNull List inputFieldNames ) { + if ( coll == null ) { + return ""; + } + int idx = coll.getFieldIndex(); + String str = inputFieldNames.size() > idx ? inputFieldNames.get( idx ) : Integer.toString( idx ); + boolean notDefaultNullDir = coll.nullDirection != coll.direction.defaultNullDirection(); + if ( coll.direction != AlgFieldCollation.Direction.ASCENDING || notDefaultNullDir ) { + str += " " + coll.direction.shortString; + if ( notDefaultNullDir ) { + str += " " + coll.nullDirection.toString(); + } + } + return str; + } + + + @Override + public ObjectNode serialize( AlgNode context, @NonNull List inputFieldNames, ObjectMapper mapper ) { + return serialize( coll, inputFieldNames, mapper ); + } + + + public static ObjectNode serialize( AlgFieldCollation coll, @NonNull List inputFieldNames, ObjectMapper mapper ) { + ObjectNode node = mapper.createObjectNode(); + if ( coll != null ) { + int idx = coll.getFieldIndex(); + node.put( "field", inputFieldNames.size() > idx ? inputFieldNames.get( idx ) : Integer.toString( idx ) ); + node.put( "direction", coll.direction.shortString ); + node.put( "nullDirection", coll.nullDirection.toString() ); + } + return node; + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/CorrelationArg.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/CorrelationArg.java new file mode 100644 index 0000000000..ba1b1f47a4 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/CorrelationArg.java @@ -0,0 +1,56 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.algebra.polyalg.arguments; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.util.List; +import lombok.Getter; +import lombok.NonNull; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.core.CorrelationId; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; + +public class CorrelationArg implements PolyAlgArg { + + @Getter + private final CorrelationId corrId; + + + public CorrelationArg( CorrelationId corrId ) { + this.corrId = corrId; + } + + + @Override + public ParamType getType() { + return ParamType.CORR_ID; + } + + + @Override + public String toPolyAlg( AlgNode context, @NonNull List inputFieldNames ) { + return String.valueOf( corrId.getId() ); + } + + + @Override + public ObjectNode serialize( AlgNode context, @NonNull List inputFieldNames, ObjectMapper mapper ) { + return mapper.createObjectNode().put( "arg", corrId.getId() ); + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/DoubleArg.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/DoubleArg.java new file mode 100644 index 0000000000..70e2784a4d --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/DoubleArg.java @@ -0,0 +1,61 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg.arguments; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.util.List; +import lombok.Getter; +import lombok.NonNull; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; + +public class DoubleArg implements PolyAlgArg { + + public static final DoubleArg ZERO = new DoubleArg( 0d ); + public static final DoubleArg NULL = new DoubleArg( null ); + + @Getter + private final Double arg; + + + public DoubleArg( Double arg ) { + this.arg = arg; + } + + + @Override + public ParamType getType() { + return ParamType.DOUBLE; + } + + + @Override + public String toPolyAlg( AlgNode context, @NonNull List inputFieldNames ) { + if ( arg == null ) { + return ""; + } + return Double.toString( arg ); + } + + + @Override + public ObjectNode serialize( AlgNode context, @NonNull List inputFieldNames, ObjectMapper mapper ) { + return mapper.createObjectNode().put( "arg", arg ); + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/EntityArg.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/EntityArg.java new file mode 100644 index 0000000000..d293b8dd0d --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/EntityArg.java @@ -0,0 +1,143 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg.arguments; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.util.List; +import java.util.Optional; +import lombok.Getter; +import lombok.NonNull; +import org.polypheny.db.adapter.AdapterManager; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.catalog.entity.Entity; +import org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.logical.LogicalGraph.SubstitutionGraph; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; +import org.polypheny.db.catalog.logistic.DataModel; +import org.polypheny.db.catalog.snapshot.Snapshot; +import org.polypheny.db.type.entity.PolyString; + +public class EntityArg implements PolyAlgArg { + + @Getter + private final Entity entity; + + private final String namespaceName; + private String entityName; // for graphs, entityName is null + + // in the case of an AllocationEntity: + private String partitionName; + + + /** + * Creates an EntityArg for an entity which is used in an AlgNode with the specified DataModel. + */ + public EntityArg( Entity entity, Snapshot snapshot, DataModel model ) { + this.namespaceName = getNamespaceName( entity, snapshot ); + this.entity = entity; + + if ( model == DataModel.GRAPH || entity.dataModel == DataModel.GRAPH ) { + // origin or target data model is graph + if ( entity instanceof SubstitutionGraph sub && !sub.names.isEmpty() ) { + this.entityName = String.join( ".", sub.names.stream().map( PolyString::toString ).toList() ); + } else { + this.entityName = null; + } + } else { + this.entityName = entity.getName(); + } + + if ( entity instanceof AllocationEntity e ) { + if ( e.dataModel != DataModel.GRAPH ) { + this.entityName = snapshot.getLogicalEntity( e.logicalId ).orElseThrow().name; + } else if ( !e.name.startsWith( AllocationEntity.PREFIX ) ) { + this.entityName = e.name; + } + this.partitionName = snapshot.alloc().getPartition( e.partitionId ).orElseThrow().name; + } + } + + + private String getNamespaceName( Entity entity, Snapshot snapshot ) { + String nsName; + try { + nsName = entity.getNamespaceName(); + } catch ( UnsupportedOperationException e ) { + Optional ns = snapshot.getNamespace( entity.namespaceId ); + nsName = ns.map( LogicalNamespace::getName ).orElse( null ); + } + return nsName; + } + + + @Override + public ParamType getType() { + return ParamType.ENTITY; + } + + + @Override + public String toPolyAlg( AlgNode context, @NonNull List inputFieldNames ) { + if ( entity instanceof PhysicalEntity e ) { + return getAdapterName( e.adapterId ) + "." + e.id; + } + String name = getFullName(); + if ( entity instanceof AllocationEntity e ) { + return name + "@" + getAdapterName( e.adapterId ) + "." 
+ e.partitionId; + } + return name; + } + + + private String getAdapterName( Long adapterId ) { + return AdapterManager.getInstance().getAdapter( adapterId ).orElseThrow().getUniqueName(); + } + + + private String getFullName() { + if ( entityName == null ) { + return namespaceName; + } + return namespaceName + "." + entityName; + } + + + @Override + public ObjectNode serialize( AlgNode context, @NonNull List inputFieldNames, ObjectMapper mapper ) { + ObjectNode node = mapper.createObjectNode(); + + node.put( "fullName", getFullName() ); + + if ( entity instanceof AllocationEntity e ) { + node.put( "adapterName", getAdapterName( e.adapterId ) ); + node.put( "partitionId", String.valueOf( e.partitionId ) ); + if ( partitionName != null && !partitionName.isEmpty() ) { + node.put( "partitionName", partitionName ); + } + } else if ( entity instanceof PhysicalEntity e ) { + node.put( "adapterName", getAdapterName( e.adapterId ) ); + node.put( "physicalId", e.id ); + } + + return node; + } + + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/EnumArg.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/EnumArg.java new file mode 100644 index 0000000000..a2c90dea8d --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/EnumArg.java @@ -0,0 +1,69 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.algebra.polyalg.arguments; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.util.List; +import lombok.Getter; +import lombok.NonNull; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; + +public class EnumArg> implements PolyAlgArg { + + @Getter + private final E arg; + private final ParamType type; + + + public EnumArg( E arg, ParamType enumType ) { + assert enumType.isEnum(); + + this.arg = arg; + this.type = enumType; + } + + + @Override + public ParamType getType() { + return type; + } + + + @Override + public String toPolyAlg( AlgNode context, @NonNull List inputFieldNames ) { + return arg.name(); + } + + + @Override + public ObjectNode serialize( AlgNode context, @NonNull List inputFieldNames, ObjectMapper mapper ) { + ObjectNode node = mapper.createObjectNode(); + node.put( "arg", arg.name() ); + node.put( "enum", arg.getDeclaringClass().getSimpleName() ); + return node; + } + + + @Override + public ObjectNode serializeWrapped( AlgNode context, @NonNull List inputFieldNames, ObjectMapper mapper ) { + ObjectNode node = PolyAlgArg.super.serializeWrapped( context, inputFieldNames, mapper ); + return node.put( "isEnum", true ); + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/FieldArg.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/FieldArg.java new file mode 100644 index 0000000000..e8ed0a711a --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/FieldArg.java @@ -0,0 +1,58 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.algebra.polyalg.arguments; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.util.List; +import lombok.Getter; +import lombok.NonNull; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; + +public class FieldArg implements PolyAlgArg { + + @Getter + private final int field; + + + public FieldArg( int field ) { + this.field = field; + } + + + @Override + public ParamType getType() { + return ParamType.FIELD; + } + + + @Override + public String toPolyAlg( AlgNode context, @NonNull List inputFieldNames ) { + if ( inputFieldNames.size() > field ) { + return inputFieldNames.get( field ); + } + return Integer.toString( field ); + } + + + @Override + public ObjectNode serialize( AlgNode context, @NonNull List inputFieldNames, ObjectMapper mapper ) { + return mapper.createObjectNode().put( "arg", toPolyAlg( context, inputFieldNames ) ); + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/IntArg.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/IntArg.java new file mode 100644 index 0000000000..f986e0a2a9 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/IntArg.java @@ -0,0 +1,61 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg.arguments; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.util.List; +import lombok.Getter; +import lombok.NonNull; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; + +public class IntArg implements PolyAlgArg { + + public static final IntArg ZERO = new IntArg( 0 ); + public static final IntArg NULL = new IntArg( null ); + + @Getter + private final Integer arg; + + + public IntArg( Integer arg ) { + this.arg = arg; + } + + + @Override + public ParamType getType() { + return ParamType.INTEGER; + } + + + @Override + public String toPolyAlg( AlgNode context, @NonNull List inputFieldNames ) { + if ( arg == null ) { + return ""; + } + return Integer.toString( arg ); + } + + + @Override + public ObjectNode serialize( AlgNode context, @NonNull List inputFieldNames, ObjectMapper mapper ) { + return mapper.createObjectNode().put( "arg", arg ); + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/LaxAggArg.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/LaxAggArg.java new file mode 100644 index 0000000000..69cd35dedc --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/LaxAggArg.java @@ -0,0 +1,75 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg.arguments; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.util.List; +import lombok.Getter; +import lombok.NonNull; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.core.LaxAggregateCall; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.algebra.polyalg.PolyAlgUtils; + +public class LaxAggArg implements PolyAlgArg { + + @Getter + private final LaxAggregateCall agg; + + + public LaxAggArg( LaxAggregateCall agg ) { + this.agg = agg; + } + + + @Override + public ParamType getType() { + return ParamType.LAX_AGGREGATE; + } + + + @Override + public String toPolyAlg( AlgNode context, @NonNull List inputFieldNames ) { + String str = aggToString( inputFieldNames ); + return PolyAlgUtils.appendAlias( str, agg.name ); + } + + + private String aggToString( List inputFieldNames ) { + StringBuilder sb = new StringBuilder( agg.function.toString() ); + sb.append( "(" ); + if ( agg.getInput().isPresent() ) { + sb.append( PolyAlgUtils.digestWithNames( agg.getInput().get(), inputFieldNames ) ); + } + sb.append( ")" ); + return sb.toString(); + } + + + @Override + public ObjectNode serialize( AlgNode context, @NonNull List inputFieldNames, ObjectMapper mapper ) { + ObjectNode node = mapper.createObjectNode(); + node.put( "function", agg.function.toString() ); + if ( agg.getInput().isPresent() ) { + node.put( "input", PolyAlgUtils.digestWithNames( agg.getInput().get(), inputFieldNames ) ); + } + node.put( "alias", agg.name ); + return node; + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/ListArg.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/ListArg.java new file mode 100644 index 0000000000..45b84a0e80 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/ListArg.java @@ -0,0 +1,174 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.algebra.polyalg.arguments; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.Collectors; +import lombok.Getter; +import lombok.NonNull; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.algebra.polyalg.PolyAlgUtils; +import org.polypheny.db.util.Pair; + +public class ListArg implements PolyAlgArg { + + public static final ListArg EMPTY = new ListArg<>( List.of(), List.of() ); + public static final ListArg> NESTED_EMPTY = new ListArg<>( List.of( EMPTY ) ); + + @Getter + private final List args; + private final List aliases; + private final boolean unpackValues; + private final boolean containsListArg; // if a list is inside a list, the lists need brackets to be unambiguous + + + public ListArg( List args, List aliases, boolean unpackValues ) { + this.args = args; + this.aliases = aliases; + this.unpackValues = unpackValues; + this.containsListArg = !args.isEmpty() && args.get( 0 ) instanceof ListArg; + } + + + public ListArg( List args ) { + this( args, (List) null, false ); + } + + + public ListArg( List args, List aliases ) { + this( args, aliases, false ); + } + + + public ListArg( List rawArgs, Function converter ) { + this( rawArgs, converter, null, false ); + } + + + public ListArg( List rawArgs, Function converter, boolean unpackValues ) { + this( rawArgs, converter, null, unpackValues ); + } + + + public ListArg( List rawArgs, Function converter, List aliases ) { + this( rawArgs.stream().map( converter ).toList(), aliases, false ); + } + + + public ListArg( List rawArgs, Function converter, List aliases, boolean unpackValues ) { + this( rawArgs.stream().map( converter ).toList(), aliases, unpackValues ); + } + + + public ListArg( Map rawArgs, Function converter ) { + this( rawArgs, converter, false ); + } + + + public ListArg( Map rawArgs, Function converter, boolean unpackValues ) { + this( new ArrayList<>( rawArgs.values() ), converter, new ArrayList<>( rawArgs.keySet() ), unpackValues ); + } + + + @Override + public ParamType getType() { + if ( args.isEmpty() ) { + return ParamType.LIST; + } + return args.get( 0 ).getType(); + } + + + @Override + public String toPolyAlg( AlgNode context, @NonNull List inputFieldNames ) { + return toPolyAlg( context, inputFieldNames, false ); + } + + + public String toPolyAlg( AlgNode context, @NonNull List inputFieldNames, boolean enforceBrackets ) { + List strArgs = containsListArg ? + args.stream().map( a -> ((ListArg) a).toPolyAlg( context, inputFieldNames, true ) ).toList() : + args.stream().map( a -> a.toPolyAlg( context, inputFieldNames ) ).toList(); + + if ( aliases != null ) { + strArgs = PolyAlgUtils.appendAliases( strArgs, aliases ); + } + if ( containsListArg || enforceBrackets ) { + return PolyAlgUtils.joinMultiValuedWithBrackets( strArgs ); + } + return PolyAlgUtils.joinMultiValued( strArgs, unpackValues ); + } + + + @Override + public ObjectNode serialize( AlgNode context, @NonNull List inputFieldNames, ObjectMapper mapper ) { + ObjectNode node = mapper.createObjectNode(); + ArrayNode argsNode = mapper.createArrayNode(); + + String type = containsListArg ? 
ParamType.LIST.name() : getType().name(); + for ( int i = 0; i < args.size(); i++ ) { + E element = this.args.get( i ); + ObjectNode elementNode = element.serializeWrapped( context, inputFieldNames, mapper ); + + if ( aliases != null ) { + ObjectNode innerArg = (ObjectNode) elementNode.get( "value" ); + if ( !innerArg.has( "alias" ) ) { + innerArg.put( "alias", aliases.get( i ) ); + } + } + argsNode.add( elementNode ); + } + node.put( "innerType", type ); // redundant, but might be useful since all children must have the same type + node.set( "args", argsNode ); + + return node; + } + + + @Override + public ObjectNode serializeWrapped( AlgNode context, @NonNull List inputFieldNames, ObjectMapper mapper ) { + ObjectNode node = PolyAlgArg.super.serializeWrapped( context, inputFieldNames, mapper ); + + // overwrite type, since on this level we are not interested in the inner type + return node.put( "type", ParamType.LIST.name() ); + } + + + public List map( Function mapper ) { + return args.stream().map( mapper ).toList(); + } + + + public Map toStringKeyedMap( Function keyMapper, Function valueMapper ) { + List aliases = this.map( keyMapper ); + List values = this.map( valueMapper ); + return Pair.zip( aliases, values ).stream().collect( Collectors.toMap( e -> e.left, e -> e.right ) ); + } + + + public boolean isEmpty() { + return args.isEmpty(); + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/PolyAlgArg.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/PolyAlgArg.java new file mode 100644 index 0000000000..0079f7152d --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/PolyAlgArg.java @@ -0,0 +1,81 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg.arguments; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.util.List; +import lombok.NonNull; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; + +public interface PolyAlgArg { + + /** + * Return the ParamType this argument is meant for. + * In case of multivalued arguments like lists, this returns the type of the underlying elements. + * If this type can not be inferred (e.g. because it is an empty list), the inherent type of the multivalued argument + * (i.e. ParamType.LIST) is returned. + * + * @return the ParamType of this argument or its children in case of multivalued arguments. + */ + + ParamType getType(); + + /** + * Returns the PolyAlg representation of this argument. 
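+ * <p>As an illustrative (not normative) example, a {@code FieldArg} typically renders as the
+ * corresponding entry of {@code inputFieldNames}, while a {@code RexArg} delegates to
+ * {@code PolyAlgUtils.digestWithNames} so that field references appear by name.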
+ * + * @param context the AlgNode this argument belongs to + * @param inputFieldNames list of field names of all children with duplicate names uniquified + * @return string representing the PolyAlg representation of this argument under the given context + */ + String toPolyAlg( AlgNode context, @NonNull List inputFieldNames ); + + + /** + * Returns the JSON-serialized PolyAlg representation of this argument. + * While implementations are free to decide the structure of the ObjectNode, + * one convention is that if the argument has an alias name, then it should be set + * with the key {@code alias}. This ensures that the alias can also be set by a ListArg wrapping this argument. + * + * @param context the AlgNode this argument belongs to + * @param inputFieldNames list of field names of all children with duplicate names uniquified + * @param mapper the ObjectMapper used for creating JsonNodes + * @return a ObjectNode representing the JSON-serialized PolyAlg representation of this argument under the given context + */ + ObjectNode serialize( AlgNode context, @NonNull List inputFieldNames, ObjectMapper mapper ); + + /** + * Returns a wrapped PolyAlg representation of this argument. + * This is useful as it introduces a common structure for all implementations. + * The wrapped representation adds attributes for inferring the structure of the wrapped argument (like its type). + * The serialized argument itself (whose structure can vary) can be found under the key {@code value}. + * + * @param context the AlgNode this argument belongs to + * @param inputFieldNames list of field names of all children with duplicate names uniquified + * @param mapper the ObjectMapper used for creating JsonNodes + * @return a ObjectNode representing the JSON-serialized PolyAlg representation of this argument under the given context + */ + default ObjectNode serializeWrapped( AlgNode context, @NonNull List inputFieldNames, ObjectMapper mapper ) { + String type = this.getType().name(); + ObjectNode argNode = mapper.createObjectNode(); + argNode.put( "type", type ); + argNode.set( "value", this.serialize( context, inputFieldNames, mapper ) ); + return argNode; + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/PolyAlgArgs.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/PolyAlgArgs.java new file mode 100644 index 0000000000..4c94c3d597 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/PolyAlgArgs.java @@ -0,0 +1,200 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.algebra.polyalg.arguments; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.StringJoiner; +import lombok.Getter; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.Parameter; + +/** + * Represents the parameters with corresponding values (PolyAlgArg) for an AlgNode instance. + * It is used as an intermediary representation when serializing or parsing between AlgNodes and PolyAlgebra. + */ +public class PolyAlgArgs { + + @Getter + private final PolyAlgDeclaration decl; + private final Map args = new HashMap<>(); + + + public PolyAlgArgs( PolyAlgDeclaration declaration ) { + this.decl = declaration; + } + + + public String toPolyAlgebra( AlgNode context, List inputFieldNames ) { + StringJoiner joiner = new StringJoiner( ", ", "[", "]" ); + + for ( Parameter p : decl.posParams ) { + assert args.containsKey( p ); + PolyAlgArg arg = getArg( p ); + joiner.add( arg.toPolyAlg( context, inputFieldNames ) ); + } + for ( Parameter p : decl.kwParams ) { + if ( args.containsKey( p ) ) { + PolyAlgArg arg = getArg( p ); + String value = arg.toPolyAlg( context, inputFieldNames ); + if ( !p.getDefaultAsPolyAlg( context, inputFieldNames ).equals( value ) ) { + joiner.add( p.getName() + "=" + value ); + } + } + } + return joiner.toString(); + } + + + public ObjectNode serialize( AlgNode context, List inputFieldNames, ObjectMapper mapper ) { + ObjectNode node = mapper.createObjectNode(); + for ( Map.Entry entry : args.entrySet() ) { + Parameter parameter = entry.getKey(); + PolyAlgArg arg = entry.getValue(); + node.set( parameter.getName(), arg.serializeWrapped( context, inputFieldNames, mapper ) ); + } + return node; + } + + + public boolean contains( Parameter p ) { + return args.containsKey( p ); + } + + + /** + * Validates if all required parameters are present and adds default values for missing keyword parameters if specified. + * + * @param addDefaultOnMissingKw {@code true} to add default values for missing keyword parameters + * @return {@code true} if all required parameters are present, {@code false} otherwise. + */ + public boolean validate( boolean addDefaultOnMissingKw ) { + for ( Parameter p : decl.posParams ) { + if ( !args.containsKey( p ) ) { + return false; + } + } + for ( Parameter p : decl.kwParams ) { + if ( !args.containsKey( p ) || args.get( p ) == null ) { + if ( addDefaultOnMissingKw ) { + put( p, p.getDefaultValue() ); + } else { + return false; + } + } + } + return true; + } + + + public PolyAlgArgs put( String name, PolyAlgArg arg ) { + return put( decl.getParam( name ), arg ); + } + + + public PolyAlgArgs put( int pos, PolyAlgArg arg ) { + return put( decl.getPos( pos ), arg ); + } + + + public PolyAlgArgs putWithCheck( Parameter p, PolyAlgArg arg ) { + if ( decl.containsParam( p ) ) { + put( p, arg ); + } + return this; + } + + + /** + * Inserts an argument for the specified parameter. + * If you are not sure whether that parameter belongs to the declaration, it is better to use {@link #putWithCheck}. 
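+ * <p>Hypothetical usage sketch (the parameter name {@code "condition"} and the local variables
+ * are illustrative only and not taken from an actual declaration):
+ * <pre>{@code
+ * PolyAlgArgs args = new PolyAlgArgs( decl );
+ * args.put( 0, new ListArg<>( projectArgs ) )
+ *         .put( "condition", new RexArg( condition ) );
+ * }</pre>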
+ * + * @return this instance for chaining calls + */ + public PolyAlgArgs put( Parameter p, PolyAlgArg arg ) { + assert p.isCompatible( arg.getType() ); + + args.put( p, arg ); + return this; + } + + + private PolyAlgArg getArg( Parameter p ) { + return args.get( p ); + } + + + public PolyAlgArg getArg( String name ) { + return getArg( decl.getParam( name ) ); + } + + + public PolyAlgArg getArg( int pos ) { + return getArg( decl.getPos( pos ) ); + } + + + public T getArg( String name, Class type ) { + return getArg( decl.getParam( name ), type ); + } + + + public T getArg( int pos, Class type ) { + return getArg( decl.getPos( pos ), type ); + } + + + public ListArg getListArg( int pos, Class type ) { + ListArg listArg = getArg( pos, ListArg.class ); + assert listArg.isEmpty() || type.isInstance( listArg.getArgs().get( 0 ) ); // an empty ListArg is of type EMPTY_LIST + return (ListArg) listArg; + } + + + public ListArg getListArg( String name, Class type ) { + ListArg listArg = getArg( name, ListArg.class ); + assert listArg.isEmpty() || type.isInstance( listArg.getArgs().get( 0 ) ); // an empty ListArg is of type EMPTY_LIST + return (ListArg) listArg; + } + + + public > EnumArg getEnumArg( int pos, Class type ) { + EnumArg enumArg = getArg( pos, EnumArg.class ); + assert type.isInstance( enumArg.getArg() ); + return (EnumArg) enumArg; + } + + + public > EnumArg getEnumArg( String name, Class type ) { + EnumArg enumArg = getArg( name, EnumArg.class ); + assert type.isInstance( enumArg.getArg() ); + return (EnumArg) enumArg; + } + + + private T getArg( Parameter p, Class type ) { + PolyAlgArg arg = getArg( p ); + assert type.isInstance( arg ); + return type.cast( arg ); + } + + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/RexArg.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/RexArg.java new file mode 100644 index 0000000000..9e67f99569 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/RexArg.java @@ -0,0 +1,103 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg.arguments; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.util.List; +import lombok.Getter; +import lombok.NonNull; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.algebra.polyalg.PolyAlgUtils; +import org.polypheny.db.rex.RexNode; + +public class RexArg implements PolyAlgArg { + + public static final RexArg NULL = new RexArg( null ); + + @Getter + private final RexNode node; + @Getter + private final String alias; + private List inputFieldNames = null; + + + /** + * Use this constructor if you want to specify an alias on level of a single argument. 
+ * If this argument is part of a ListArg, it is often more convenient to specify all aliases + * as a single list in the ListArg constructor. In that case, alias should be {@code null}. + * + * @param node the RexNode corresponding to this argument + * @param alias the alias name of this argument or {@code null} if no alias should be used. + */ + public RexArg( RexNode node, String alias ) { + this.node = node; + this.alias = alias; + } + + + public RexArg( RexNode node ) { + this( node, (String) null ); + } + + + /** + * Creates a RexArg for the given RexNode that uses the specified list of inputFieldNames during serialization. + * This can be useful for leaf nodes, since the inputFieldNames cannot be derived from the child node. + * + * @param node the RexNode corresponding to this argument + * @param inputFieldNames the list of names to be used for serialization + */ + public RexArg( RexNode node, @NonNull List inputFieldNames ) { + this( node ); + this.inputFieldNames = inputFieldNames; + } + + + @Override + public ParamType getType() { + return ParamType.REX; + } + + + @Override + public String toPolyAlg( AlgNode context, @NonNull List inputFieldNames ) { + return PolyAlgUtils.appendAlias( rexAsString( inputFieldNames ), alias ); + } + + + private String rexAsString( @NonNull List inputFieldNames ) { + String str = node == null ? "" : node.toString(); + if ( node == null ) { + return str; + } + return PolyAlgUtils.digestWithNames( node, this.inputFieldNames == null ? inputFieldNames : this.inputFieldNames ); + } + + + @Override + public ObjectNode serialize( AlgNode context, @NonNull List inputFieldNames, ObjectMapper mapper ) { + ObjectNode node = mapper.createObjectNode(); + node.put( "rex", rexAsString( inputFieldNames ) ); + if ( alias != null ) { + node.put( "alias", alias ); + } + return node; + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/StringArg.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/StringArg.java new file mode 100644 index 0000000000..a403e677f8 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/StringArg.java @@ -0,0 +1,73 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.algebra.polyalg.arguments; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.util.List; +import lombok.Getter; +import lombok.NonNull; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.algebra.polyalg.PolyAlgUtils; + +@Getter +public class StringArg implements PolyAlgArg { + + public static final StringArg NULL = new StringArg( null ); + + private final String arg; + private final String alias; + + + public StringArg( String arg ) { + this( arg, null ); + } + + + public StringArg( String arg, String alias ) { + this.arg = arg; + this.alias = alias; + } + + + @Override + public ParamType getType() { + return ParamType.STRING; + } + + + @Override + public String toPolyAlg( AlgNode context, @NonNull List inputFieldNames ) { + if ( arg == null ) { + return ""; + } + return PolyAlgUtils.appendAlias( arg, alias ); + } + + + @Override + public ObjectNode serialize( AlgNode context, @NonNull List inputFieldNames, ObjectMapper mapper ) { + ObjectNode node = mapper.createObjectNode(); + node.put( "arg", arg ); + if ( alias != null ) { + node.put( "alias", alias ); + } + return node; + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/WindowGroupArg.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/WindowGroupArg.java new file mode 100644 index 0000000000..5dced1a131 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/arguments/WindowGroupArg.java @@ -0,0 +1,78 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.algebra.polyalg.arguments; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.util.List; +import lombok.Getter; +import lombok.NonNull; +import org.apache.commons.lang3.NotImplementedException; +import org.polypheny.db.algebra.AlgFieldCollation; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.core.Window.Group; +import org.polypheny.db.algebra.core.Window.RexWinAggCall; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.algebra.polyalg.PolyAlgUtils; + +public class WindowGroupArg implements PolyAlgArg { + + @Getter + private final Group group; + + + public WindowGroupArg( Group group ) { + this.group = group; + } + + + @Override + public ParamType getType() { + return ParamType.WINDOW_GROUP; + } + + + @Override + public String toPolyAlg( AlgNode context, @NonNull List inputFieldNames ) { + throw new NotImplementedException( "WindowGroupArg can not yet be serialized to PolyAlgebra" ); + } + + + @Override + public ObjectNode serialize( AlgNode context, @NonNull List inputFieldNames, ObjectMapper mapper ) { + ObjectNode node = mapper.createObjectNode(); + + node.put( "isRows", group.isRows ); + node.put( "lowerBound", group.lowerBound.toString() ); + node.put( "upperBound", group.upperBound.toString() ); + + ArrayNode aggCalls = mapper.createArrayNode(); + for ( RexWinAggCall call : group.aggCalls ) { + aggCalls.add( PolyAlgUtils.digestWithNames( call, inputFieldNames ) ); + } + node.set( "aggCalls", aggCalls ); + + ArrayNode collList = mapper.createArrayNode(); + for ( AlgFieldCollation coll : group.orderKeys.getFieldCollations() ) { + collList.add( CollationArg.serialize( coll, inputFieldNames, mapper ) ); + } + node.set( "orderKeys", collList ); + return node; + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/PolyAlgAbstractParserImpl.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/PolyAlgAbstractParserImpl.java new file mode 100644 index 0000000000..4cce888cef --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/PolyAlgAbstractParserImpl.java @@ -0,0 +1,35 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg.parser; + +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgNode; +import org.polypheny.db.languages.NodeParseException; +import org.polypheny.db.languages.ParserImpl; + +public abstract class PolyAlgAbstractParserImpl implements ParserImpl { + + public abstract PolyAlgNode parsePolyAlgEof() throws Exception; + + /** + * Removes or transforms misleading information from a parse exception or error, and converts to {@link NodeParseException}. 
+ * + * @param ex dirty excn + * @return clean excn + */ + public abstract PolyAlgParseException normalizeException( Throwable ex ); + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/PolyAlgParseException.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/PolyAlgParseException.java new file mode 100644 index 0000000000..8ba3122726 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/PolyAlgParseException.java @@ -0,0 +1,37 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg.parser; + +import org.polypheny.db.languages.NodeParseException; +import org.polypheny.db.languages.ParserPos; + +public class PolyAlgParseException extends NodeParseException { + + /** + * Creates a PolyAlgParseException. + * + * @param message Message + * @param pos Position + * @param expectedTokenSequences Token sequences + * @param tokenImages Token images + * @param parserException Parser exception + */ + public PolyAlgParseException( String message, ParserPos pos, int[][] expectedTokenSequences, String[] tokenImages, Throwable parserException ) { + super( message, pos, expectedTokenSequences, tokenImages, parserException ); + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/PolyAlgParser.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/PolyAlgParser.java new file mode 100644 index 0000000000..3cca239ed9 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/PolyAlgParser.java @@ -0,0 +1,64 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.algebra.polyalg.parser; + +import java.io.Reader; +import org.polypheny.db.languages.NodeParseException; +import org.polypheny.db.languages.Parser; +import org.polypheny.db.languages.ParserImpl; +import org.polypheny.db.nodes.Node; +import org.polypheny.db.util.SourceStringReader; + +public class PolyAlgParser implements Parser { + + private final PolyAlgAbstractParserImpl parser; + + + public PolyAlgParser( PolyAlgAbstractParserImpl parser ) { + this.parser = parser; + } + + + public static PolyAlgParser create( String polyAlg ) { + return create( new SourceStringReader( polyAlg ) ); + } + + + public static PolyAlgParser create( Reader reader ) { + ParserImpl parser = PolyAlgParserImpl.FACTORY.getParser( reader ); + return new PolyAlgParser( (PolyAlgAbstractParserImpl) parser ); + + } + + + @Override + public Node parseQuery() throws NodeParseException { + try { + return parser.parsePolyAlgEof(); + + } catch ( Throwable ex ) { + throw parser.normalizeException( ex ); + } + } + + + @Override + public Node parseStmt() throws NodeParseException { + return parseQuery(); + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/PolyAlgToAlgConverter.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/PolyAlgToAlgConverter.java new file mode 100644 index 0000000000..0fba55802e --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/PolyAlgToAlgConverter.java @@ -0,0 +1,678 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.algebra.polyalg.parser; + +import com.google.common.collect.ImmutableList; +import java.math.BigDecimal; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Objects; +import java.util.Set; +import java.util.stream.IntStream; +import org.apache.commons.lang3.NotImplementedException; +import org.polypheny.db.algebra.AlgCollation; +import org.polypheny.db.algebra.AlgCollations; +import org.polypheny.db.algebra.AlgDistribution; +import org.polypheny.db.algebra.AlgFieldCollation; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.AlgRoot; +import org.polypheny.db.algebra.constant.Kind; +import org.polypheny.db.algebra.constant.SemiJoinType; +import org.polypheny.db.algebra.core.AggregateCall; +import org.polypheny.db.algebra.core.CorrelationId; +import org.polypheny.db.algebra.core.JoinAlgType; +import org.polypheny.db.algebra.core.LaxAggregateCall; +import org.polypheny.db.algebra.core.Sort; +import org.polypheny.db.algebra.core.common.Modify; +import org.polypheny.db.algebra.fun.AggFunction; +import org.polypheny.db.algebra.operators.OperatorName; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.Parameter; +import org.polypheny.db.algebra.polyalg.PolyAlgRegistry; +import org.polypheny.db.algebra.polyalg.arguments.AggArg; +import org.polypheny.db.algebra.polyalg.arguments.AnyArg; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.CollationArg; +import org.polypheny.db.algebra.polyalg.arguments.CorrelationArg; +import org.polypheny.db.algebra.polyalg.arguments.DoubleArg; +import org.polypheny.db.algebra.polyalg.arguments.EntityArg; +import org.polypheny.db.algebra.polyalg.arguments.EnumArg; +import org.polypheny.db.algebra.polyalg.arguments.FieldArg; +import org.polypheny.db.algebra.polyalg.arguments.IntArg; +import org.polypheny.db.algebra.polyalg.arguments.LaxAggArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; +import org.polypheny.db.algebra.polyalg.arguments.StringArg; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgAliasedArgument; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgExpression; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgExpressionExtension; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgExpressionExtension.ExtensionType; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgLiteral; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgLiteral.LiteralType; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgNamedArgument; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgNode; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgNodeList; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgOperator; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.algebra.type.DocumentType; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.Entity; +import org.polypheny.db.catalog.entity.LogicalAdapter; +import 
org.polypheny.db.catalog.entity.allocation.AllocationEntity; +import org.polypheny.db.catalog.entity.allocation.AllocationPlacement; +import org.polypheny.db.catalog.entity.logical.LogicalEntity; +import org.polypheny.db.catalog.entity.logical.LogicalGraph.SubstitutionGraph; +import org.polypheny.db.catalog.entity.logical.LogicalNamespace; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.catalog.logistic.DataModel; +import org.polypheny.db.catalog.snapshot.Snapshot; +import org.polypheny.db.information.InformationPolyAlg.PlanType; +import org.polypheny.db.nodes.Operator; +import org.polypheny.db.plan.AlgCluster; +import org.polypheny.db.rex.RexBuilder; +import org.polypheny.db.rex.RexCall; +import org.polypheny.db.rex.RexElementRef; +import org.polypheny.db.rex.RexIndexRef; +import org.polypheny.db.rex.RexLiteral; +import org.polypheny.db.rex.RexLocalRef; +import org.polypheny.db.rex.RexNameRef; +import org.polypheny.db.rex.RexNode; +import org.polypheny.db.tools.AlgBuilder; +import org.polypheny.db.type.PolyType; +import org.polypheny.db.type.entity.PolyString; +import org.polypheny.db.util.DateString; +import org.polypheny.db.util.Pair; +import org.polypheny.db.util.TimeString; +import org.polypheny.db.util.TimestampString; + +/** + * Converter class, which transforms PolyAlg in its PolyAlgNode form to an equal AlgNode + */ +public class PolyAlgToAlgConverter { + + private final Snapshot snapshot; + private final AlgCluster cluster; + private final RexBuilder builder; + private final PlanType planType; + + + public PolyAlgToAlgConverter( PlanType planType, Snapshot snapshot, AlgCluster cluster ) { + this.snapshot = snapshot; + this.cluster = cluster; + this.builder = cluster.getRexBuilder(); + this.planType = planType; + } + + + public AlgRoot convert( PolyAlgNode root ) { + AlgNode node = buildNode( (PolyAlgOperator) root ); + + // Wrap {@link AlgNode} into a RelRoot + final AlgDataType tupleType = node.getTupleType(); + final List> fields = Pair.zip( IntStream.range( 0, tupleType.getFieldCount() ).boxed().toList(), tupleType.getFieldNames() ); + final AlgCollation collation = + node instanceof Sort + ? ((Sort) node).collation + : AlgCollations.EMPTY; + return new AlgRoot( node, tupleType, Kind.SELECT, fields, collation ); + } + + + private AlgNode buildNode( PolyAlgOperator operator ) { + PolyAlgDeclaration decl = Objects.requireNonNull( PolyAlgRegistry.getDeclaration( operator.getOpName() ), "'" + operator.getOpName() + "' is not a registered PolyAlg Operator." 
); + + List children = operator.getChildren().stream() + .map( this::buildNode ) + .toList(); + if ( !decl.supportsNumberOfChildren( children.size() ) ) { + throw new GenericRuntimeException( "Invalid number of children for '" + decl.opName + "'" ); + } + + PolyAlgArgs args = buildArgs( decl, operator.getArguments(), new Context( children, decl.model ) ); + return decl.createNode( args, children, cluster ); + } + + + private PolyAlgArgs buildArgs( PolyAlgDeclaration decl, List namedArgs, Context ctx ) { + PolyAlgArgs converted = new PolyAlgArgs( decl ); + + boolean noMorePosArgs = false; + Set usedParams = new HashSet<>(); + + boolean canUnpack = decl.canUnpackValues(); + List argsToCombine = new ArrayList<>(); // only used if(canUnpack) to temporarily store posArgs + + Parameter p; + for ( int i = 0; i < namedArgs.size(); i++ ) { + PolyAlgNamedArgument namedArg = namedArgs.get( i ); + String name = namedArg.getName(); + PolyAlgAliasedArgument aliasedArg = namedArg.getAliasedArg(); + + if ( namedArg.isPositionalArg() ) { + if ( noMorePosArgs ) { + throw new GenericRuntimeException( "Positional argument after keyword argument encountered for " + decl.opName ); + } + if ( canUnpack ) { + argsToCombine.add( aliasedArg ); + continue; + } else { + p = decl.getPos( i ); + if ( p == null ) { + throw new GenericRuntimeException( "Too many positional arguments were given for " + decl.opName ); + } + } + } else { + noMorePosArgs = true; + + p = decl.getParam( name ); + if ( p == null ) { + throw new GenericRuntimeException( "Unexpected keyword argument '" + name + "' for " + decl.opName ); + } + } + + if ( usedParams.contains( p ) ) { + throw new GenericRuntimeException( "Argument " + p.getName() + " was used more than once for " + decl.opName ); + } + usedParams.add( p ); + converted.put( p, buildArg( p, aliasedArg, ctx ) ); + } + + if ( !argsToCombine.isEmpty() ) { + p = decl.getPos( 0 ); + PolyAlgAliasedArgument firstArg = argsToCombine.get( 0 ); + if ( argsToCombine.size() == 1 ) { + converted.put( p, buildArg( p, firstArg, ctx ) ); + } else { + PolyAlgNodeList listArg = new PolyAlgNodeList( argsToCombine, firstArg.getPos() ); + converted.put( p, buildList( p, listArg, null, ctx, 0 ) ); + } + } + if ( !converted.validate( true ) ) { + throw new GenericRuntimeException( "Missing positional argument for " + decl.opName ); + } + + return converted; + } + + + private PolyAlgArg buildArg( Parameter p, PolyAlgAliasedArgument aliasedArg, Context ctx ) { + PolyAlgNode arg = aliasedArg.getArg(); + String alias = aliasedArg.getAlias(); + + if ( p.isMultiValued() ) { + if ( arg instanceof PolyAlgNodeList ) { + return buildList( p, (PolyAlgNodeList) arg, alias, ctx, 0 ); + } + return buildList( p, new PolyAlgNodeList( List.of( aliasedArg ), arg.getPos() ), alias, ctx, 0 ); + } + + return convertArg( p, aliasedArg, ctx, 0 ); + } + + + private PolyAlgArg buildList( Parameter p, PolyAlgNodeList listArg, String alias, Context ctx, int depth ) { + List args = new ArrayList<>(); + for ( PolyAlgNode node : listArg.getPolyAlgList() ) { + PolyAlgAliasedArgument aliasedArg = (PolyAlgAliasedArgument) node; + args.add( convertArg( p, aliasedArg, ctx, depth + 1 ) ); // aliasedArg is within a list, so we increase its depth by 1 + } + // We do not specify aliases for the entire list. Instead, this should happen on an element level (if necessary). 
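+ // Nested lists are handled by recursion: convertArg() calls back into buildList() with depth + 1,
+ // and the final depth is checked against p.getMultiValued() before the expression is converted.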
+ return new ListArg<>( args );
+ }
+
+
+ private PolyAlgArg convertArg( Parameter p, PolyAlgAliasedArgument aliasedArg, Context ctx, int depth ) {
+ if ( aliasedArg.getArg() instanceof PolyAlgExpression ) {
+ // no more nested args
+ if ( depth != p.getMultiValued() ) {
+ throw new GenericRuntimeException( "Invalid depth for list argument " + p.getName() );
+ }
+ return convertExpression( p, (PolyAlgExpression) aliasedArg.getArg(), aliasedArg.getAlias(), ctx );
+ } else if ( aliasedArg.getArg() instanceof PolyAlgNodeList ) {
+ return buildList( p, (PolyAlgNodeList) aliasedArg.getArg(), aliasedArg.getAlias(), ctx, depth );
+ } else {
+ throw new GenericRuntimeException( "This PolyAlgNode type is currently not supported" );
+ }
+ }
+
+
+ /**
+ * Converts the PolyAlgExpression for the specified parameter to a PolyAlgArg of the same parameter type.
+ * The logic for converting a new parameter type should be added here.
+ *
+ * @param p the target parameter
+ * @param exp the expression to convert
+ * @param alias an optional alias (null if not specified)
+ * @param ctx any additional context
+ * @return The converted PolyAlgArg instance whose getType() method must be compatible with p.
+ */
+ private PolyAlgArg convertExpression( Parameter p, PolyAlgExpression exp, String alias, Context ctx ) {
+ if ( p.requiresAlias && (alias == null || alias.isEmpty()) ) {
+ throw new GenericRuntimeException( "Missing alias for " + p.getName() );
+ }
+ ParamType pType = p.getType();
+ return switch ( pType ) {
+ case ANY -> new AnyArg( exp.toString() );
+ case INTEGER -> new IntArg( exp.toInt( p.getTags() ) );
+ case DOUBLE -> new DoubleArg( exp.toDouble( p.getTags() ) );
+ case BOOLEAN -> new BooleanArg( exp.toBoolean() );
+ case STRING -> new StringArg( exp.toString() );
+ case REX -> {
+ RexNode node = convertRexNode( exp, ctx );
+ yield new RexArg( node, alias == null ? exp.getDefaultAlias() : alias );
+ }
+ case AGGREGATE -> new AggArg( convertAggCall( exp, alias, ctx ) );
+ case LAX_AGGREGATE -> new LaxAggArg( convertLaxAggCall( exp, alias, ctx ) );
+ case ENTITY -> new EntityArg( convertEntity( exp, ctx ), snapshot, ctx.getNonNullDataModel() );
+ case JOIN_TYPE_ENUM -> new EnumArg<>( exp.toEnum( JoinAlgType.class ), pType );
+ case SEMI_JOIN_TYPE_ENUM -> new EnumArg<>( exp.toEnum( SemiJoinType.class ), pType );
+ case MODIFY_OP_ENUM -> new EnumArg<>( exp.toEnum( Modify.Operation.class ), pType );
+ case DISTRIBUTION_TYPE_ENUM -> new EnumArg<>( exp.toEnum( AlgDistribution.Type.class ), pType );
+ case DATAMODEL_ENUM -> new EnumArg<>( exp.toEnum( DataModel.class ), pType );
+ case POLY_TYPE_ENUM -> new EnumArg<>( exp.toEnum( PolyType.class ), pType );
+ case FIELD -> new FieldArg( ctx.getFieldOrdinalOrThrow( exp.toIdentifier() ) );
+ case LIST -> ListArg.EMPTY;
+ case COLLATION -> new CollationArg( convertCollation( exp, ctx ) );
+ case CORR_ID -> new CorrelationArg( new CorrelationId( exp.toString() ) );
+ case WINDOW_GROUP -> throw new NotImplementedException( "Parsing of WindowGroup arguments is not yet supported."
); + default -> throw new IllegalStateException( "Unexpected value: " + p.getType() ); + }; + } + + + private RexNode convertRexNode( PolyAlgExpression exp, Context ctx ) { + if ( exp.isElementRef() ) { + RexNode collectionRef = convertRexNode( exp.getOnlyChild(), ctx ); + return new RexElementRef( collectionRef, DocumentType.ofDoc() ); + } else if ( exp.isCall() ) { + return convertRexCall( exp, ctx ); + } else if ( exp.isSingleLiteral() ) { + PolyAlgLiteral literal = exp.getLiterals().get( 0 ); + return convertRexLiteral( literal, exp.getAlgDataType(), ctx ); + } + throw new GenericRuntimeException( "Invalid RexNode: " + exp ); + } + + + private RexNode convertRexCall( PolyAlgExpression exp, Context ctx ) { + Operator operator = exp.getOperator( ctx.getNonNullDataModel() ); + if ( operator.getOperatorName() == OperatorName.CAST ) { + RexNode child = convertRexNode( exp.getOnlyChild(), ctx ); + return new RexCall( exp.getAlgDataTypeForCast(), operator, ImmutableList.of( child ) ); + } + // TODO: handle other special kinds of calls (Kind.NEW_SPECIFICATION can also specify cast type...) + List operands = exp.getChildExps().stream().map( e -> convertRexNode( e, ctx ) ).toList(); + + return builder.makeCall( operator, operands ); + } + + + /** + * Converts a PolyAlgLiteral into an appropriate RexNode. + * This does not have to be a RexLiteral, but can also be a RexIndexRef or RexDynamicParam or ... + * + * @param literal the PolyAlgLiteral to be converted + * @param type the AlgDataType specified in the PolyAlgebra + * @param ctx Context + * @return A RexNode representing the specified PolyAlgLiteral + */ + private RexNode convertRexLiteral( PolyAlgLiteral literal, AlgDataType type, Context ctx ) { + + // first handle cases where explicit type doesn't matter + if ( literal.getType() == LiteralType.CORRELATION_VAR ) { + Pair pair = literal.toCorrelationFieldAccess(); + RexNode corr = builder.makeCorrel( ctx.getChildTupleType( 0 ), pair.left ); + return builder.makeFieldAccess( corr, pair.right, true ); + } + + if ( type == null ) { + // no explicit type information, so we can only guess which one from the LiteralType the parser detected: + return switch ( literal.getType() ) { + case QUOTED -> { + String str = literal.toUnquotedString(); + int idx = ctx.getFieldOrdinal( str ); + // limitation: cannot have string literal in double quotes called the same as a field, as we would always pick the field + yield idx >= 0 && literal.isDoubleQuoted() ? RexIndexRef.of( idx, ctx.fields ) : builder.makeLiteral( str ); + } + case NUMBER -> AlgBuilder.literal( literal.toNumber(), builder ); + case BOOLEAN -> AlgBuilder.literal( literal.toBoolean(), builder ); + case NULL -> AlgBuilder.literal( null, builder ); + case POLY_VALUE -> builder.makeLiteral( literal.toPolyValue() ); + case STRING -> { + String str = literal.toString(); + DataModel dataModel = ctx.getNonNullDataModel(); + if ( dataModel == DataModel.DOCUMENT || (dataModel == DataModel.GRAPH && str.contains( "@" )) ) { + String[] idxSplit = str.split( "@", 2 ); + Integer idx = null; + if ( idxSplit.length == 2 ) { + idx = Integer.parseInt( idxSplit[1] ); + } + yield RexNameRef.create( List.of( idxSplit[0].split( "\\." 
) ), idx, ctx.children.get( 0 ).getTupleType() ); + } else { + // indexRef + int idx = ctx.getFieldOrdinalOrThrow( str ); + yield RexIndexRef.of( idx, ctx.fields ); + } + } + default -> throw new GenericRuntimeException( "Invalid Literal: '" + literal + "'" ); + }; + } else { + if ( literal.getType() == LiteralType.DYNAMIC_PARAM ) { + return builder.makeDynamicParam( type, literal.toDynamicParam() ); + } + if ( literal.getType() == LiteralType.NULL ) { + return builder.makeNullLiteral( type ); + } + if ( literal.getType() == LiteralType.LOCAL_REF ) { + return new RexLocalRef( literal.toLocalRef(), type ); + } + + String str = literal.toUnquotedString(); + return switch ( type.getPolyType() ) { + case BOOLEAN -> builder.makeLiteral( literal.toBoolean() ); + case TINYINT, SMALLINT, INTEGER, BIGINT, DECIMAL -> builder.makeExactLiteral( new BigDecimal( str ), type ); + case FLOAT, REAL, DOUBLE -> builder.makeApproxLiteral( new BigDecimal( str ), type ); + case DATE -> builder.makeDateLiteral( new DateString( str ) ); + case TIME -> builder.makeTimeLiteral( new TimeString( str ), type.getPrecision() ); + case TIMESTAMP -> builder.makeTimestampLiteral( new TimestampString( str ), type.getPrecision() ); + case CHAR, VARCHAR -> builder.makeLiteral( PolyString.of( str ), type, type.getPolyType() ); + case NULL -> builder.constantNull(); + case NODE -> builder.makeLiteral( literal.toPolyValue(), type ); + case DOCUMENT -> builder.makeDocumentLiteral( switch ( literal.getType() ) { + case NUMBER -> ((RexLiteral) AlgBuilder.literal( literal.toNumber(), builder )).value; + case BOOLEAN -> ((RexLiteral) AlgBuilder.literal( literal.toBoolean(), builder )).value; + default -> PolyString.of( str ); + } ); + default -> throw new GenericRuntimeException( "Unsupported type: " + type.getFullTypeString() ); + }; + } + + } + + + private Entity convertEntity( PolyAlgExpression exp, Context ctx ) { + if ( planType == PlanType.PHYSICAL ) { + PhysicalEntity e = getPhysicalEntity( exp.toIdentifier() ); + ctx.updateFieldNamesIfEmpty( e ); + return e; + } + String[] atSplit = exp.toIdentifier().split( "@", 2 ); + + String[] names = atSplit[0].split( "\\.", 2 ); + GenericRuntimeException exception = new GenericRuntimeException( "Invalid entity name: " + String.join( ".", names ) ); + + String namespaceName; + String rest = null; // contains everything after the first "." 
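+ // atSplit[0] has the form "namespace" or "namespace.entity"; atSplit[1] (if present) identifies the
+ // allocation placement as "adapterName" or "adapterName.partition" (see getAllocationEntity below).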
+ if ( names.length == 2 ) { + namespaceName = names[0]; + rest = names[1]; + } else if ( names.length == 1 ) { + namespaceName = names[0]; + } else { + throw exception; // names.length == 0 + } + + LogicalNamespace ns = snapshot.getNamespace( namespaceName ).orElseThrow( () -> new GenericRuntimeException( "no namespace named " + namespaceName ) ); + + if ( planType == PlanType.ALLOCATION ) { + AllocationEntity e = getAllocationEntity( atSplit, ns, rest ); + if ( ns.dataModel == DataModel.GRAPH && rest != null ) { + return e.withName( rest ); + } + return e; + } + + return switch ( ns.dataModel ) { + case RELATIONAL -> { + if ( rest == null ) { + yield new SubstitutionGraph( ns.id, "sub", false, ns.caseSensitive, List.of() ); + } else if ( ctx.getNonNullDataModel() == DataModel.GRAPH ) { + yield getSubGraph( ns, rest ); + } + yield snapshot.rel().getTable( ns.id, rest ).orElseThrow( () -> exception ); + } + case DOCUMENT -> { + if ( rest == null ) { + yield new SubstitutionGraph( ns.id, "sub", false, ns.caseSensitive, List.of() ); + } else if ( ctx.getNonNullDataModel() == DataModel.GRAPH ) { + yield getSubGraph( ns, rest ); + } + yield snapshot.doc().getCollection( ns.id, rest ).orElseThrow( () -> exception ); + } + case GRAPH -> { + if ( rest == null ) { + yield snapshot.graph().getGraph( ns.id ).orElseThrow( () -> new GenericRuntimeException( "no graph with id " + ns.id ) ); + } else { + yield getSubGraph( ns, rest ); + } + } + }; + } + + + private SubstitutionGraph getSubGraph( LogicalNamespace ns, String namesStr ) { + List subNames = Arrays.asList( namesStr.split( "\\." ) ); + String name = subNames.size() == 1 ? subNames.get( 0 ) : "sub"; + return new SubstitutionGraph( ns.id, name, false, ns.caseSensitive, subNames.stream().map( PolyString::of ).toList() ); + } + + + private AllocationEntity getAllocationEntity( String[] atSplit, LogicalNamespace ns, String entityName ) { + // atSplit has structure [ns.entity, adapterName.partition] + GenericRuntimeException exception = new GenericRuntimeException( "Invalid AllocationEntity: " + String.join( "@", atSplit ) ); + if ( atSplit.length != 2 ) { + throw exception; + } + LogicalEntity logicalEntity = getLogicalEntity( entityName, ns ); + String[] apSplit = atSplit[1].split( "\\.", 2 ); // [adapterName, partition] + LogicalAdapter adapter = snapshot.getAdapter( apSplit[0] ).orElseThrow( () -> exception ); + AllocationPlacement placement = snapshot.alloc().getPlacement( adapter.id, logicalEntity.id ).orElseThrow( () -> exception ); + + if ( apSplit.length == 1 ) { + List entities = snapshot.alloc().getAllocsOfPlacement( placement.id ); + if ( entities.isEmpty() ) { + throw exception; + } + return entities.get( 0 ); + } + try { + return snapshot.alloc().getAlloc( placement.id, Long.parseLong( apSplit[1] ) ).orElseThrow( () -> exception ); + } catch ( NumberFormatException e ) { + long partitionId = snapshot.alloc().getPartitionFromName( logicalEntity.id, apSplit[1] ).orElseThrow( () -> exception ).id; + return snapshot.alloc().getAlloc( placement.id, partitionId ).orElseThrow( () -> exception ); + } + + } + + + private LogicalEntity getLogicalEntity( String entityName, LogicalNamespace ns ) { + if ( entityName == null ) { + if ( ns.dataModel == DataModel.GRAPH ) { + return snapshot.graph().getGraph( ns.id ).orElseThrow( + () -> new GenericRuntimeException( "Cannot find entity: " + ns.name ) ); + } else { + throw new GenericRuntimeException( "Entity name must not be null for a non-graph namespace." 
); + } + } + return snapshot.getLogicalEntity( ns.id, entityName ).orElseThrow( + () -> new GenericRuntimeException( "Cannot find entity: " + ns.name + "." + entityName ) ); + } + + + private PhysicalEntity getPhysicalEntity( String s ) { + GenericRuntimeException exception = new GenericRuntimeException( "Invalid PhysicalEntity: " + s ); + + String[] apSplit = s.split( "\\.", 2 ); // [adapterName, physicalId] + LogicalAdapter adapter = snapshot.getAdapter( apSplit[0] ).orElseThrow( () -> exception ); + long physicalId = Long.parseLong( apSplit[1] ); + return Catalog.getInstance().getAdapterCatalog( adapter.id ).orElseThrow( () -> exception ).getPhysical( physicalId ); + } + + + private AlgFieldCollation convertCollation( PolyAlgExpression exp, Context ctx ) { + List literals = exp.getLiterals(); + int size = literals.size(); + int fieldIndex = ctx.getFieldOrdinalOrThrow( literals.get( 0 ).toString() ); + return switch ( size ) { + case 1 -> new AlgFieldCollation( fieldIndex ); + case 2 -> new AlgFieldCollation( fieldIndex, literals.get( 1 ).toDirection() ); + case 3 -> new AlgFieldCollation( + fieldIndex, + literals.get( 1 ).toDirection(), + literals.get( 2 ).toNullDirection() ); + default -> throw new GenericRuntimeException( "Too many values for AlgFieldCollation" ); + }; + } + + + private AggregateCall convertAggCall( PolyAlgExpression exp, String name, Context ctx ) { + AggFunction f = exp.getAggFunction(); + List args = new ArrayList<>(); + boolean isDistinct = false; + for ( PolyAlgExpression child : exp.getChildExps() ) { + String fieldName = child.getLastLiteral().toString(); + if ( child.getLiterals().size() == 2 && child.getLiterals().get( 0 ).toString().equals( "DISTINCT" ) ) { + isDistinct = true; + } + args.add( ctx.getFieldOrdinalOrThrow( fieldName ) ); + } + + int filter = -1; + PolyAlgExpressionExtension extension = exp.getExtension( ExtensionType.FILTER ); + if ( extension != null ) { + PolyAlgLiteral filterLiteral = extension.getLiterals().get( 0 ); + filter = ctx.getFieldOrdinalOrThrow( filterLiteral.toString() ); + } + boolean isApproximate = exp.getExtension( ExtensionType.APPROXIMATE ) != null; + // TODO: parse WITHIN clause for Collation (low priority, since not supported by practically all AggFunctions) + return AggregateCall.create( f, isDistinct, isApproximate, args, filter, AlgCollations.EMPTY, + 0, ctx.children.get( 0 ), null, name ); // type can be null with this create method + } + + + private LaxAggregateCall convertLaxAggCall( PolyAlgExpression exp, String name, Context ctx ) { + RexNode input = null; + if ( !exp.getChildExps().isEmpty() ) { + input = convertRexNode( exp.getOnlyChild(), ctx ); + } + if ( name == null ) { + name = exp.getDefaultAlias(); + } + return LaxAggregateCall.create( name, exp.getAggFunction(), input ); + } + + + private static final class Context { + + private final List children; + private List fieldNames; + private List fields; + private final DataModel dataModel; + + + private Context( List children, DataModel dataModel ) { + this.children = children; + this.fieldNames = children.stream() + .flatMap( node -> node.getTupleType().getFieldNames().stream() ) + .toList(); + this.fields = children.stream() + .flatMap( node -> node.getTupleType().getFields().stream() ) + .toList(); + this.dataModel = dataModel; + } + + + private AlgNode getChildFromFieldOrdinal( int ord ) { + int offset = 0; + for ( AlgNode child : children ) { + int count = child.getTupleType().getFieldCount(); + if ( ord < offset + count ) { + return child; + } + offset 
+= count; + } + throw new GenericRuntimeException( "Invalid field index" ); + } + + + private int getFieldOrdinalOrThrow( String fieldName ) { + int idx = getFieldOrdinal( fieldName ); + if ( idx < 0 ) { + throw new GenericRuntimeException( "Invalid field name: '" + fieldName + "'" ); + } + return idx; + } + + + private int getFieldOrdinal( String fieldName ) { + return fieldNames.indexOf( fieldName ); + } + + + private AlgDataType getDataTypeFromFieldName( String fieldName ) { + int ord = getFieldOrdinalOrThrow( fieldName ); + int offset = 0; + for ( AlgNode child : children ) { + int count = child.getTupleType().getFieldCount(); + if ( ord < offset + count ) { + return child.getTupleType().getFields().get( ord - offset ).getType(); + } + offset += count; + } + throw new GenericRuntimeException( "Invalid field index" ); + } + + + public AlgDataType getChildTupleType( int idx ) { + return children.get( idx ).getTupleType(); + } + + + /** + * The data model could be null for common AlgNodes. + * In this case this method returns the default DataModel. + * + * @return the DataModel of this context or the default DataModel if it is null + */ + public DataModel getNonNullDataModel() { + return Objects.requireNonNullElse( dataModel, DataModel.getDefault() ); + } + + + /** + * In the case of a leaf node (e.g. a SCAN operation), fieldNames are empty. + * We can manually add the fieldNames given the scanned entity to allow other arguments to use a field name instead of an index. + * + * @param e The entity that defines the field names for this node + */ + public void updateFieldNamesIfEmpty( PhysicalEntity e ) { + if ( fieldNames.isEmpty() ) { + fieldNames = e.getTupleType().getFieldNames(); + fields = e.getTupleType().getFields(); + } + } + + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgAliasedArgument.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgAliasedArgument.java new file mode 100644 index 0000000000..eea7f3c9b5 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgAliasedArgument.java @@ -0,0 +1,35 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.algebra.polyalg.parser.nodes; + +import lombok.Getter; +import org.polypheny.db.languages.ParserPos; + +@Getter +public class PolyAlgAliasedArgument extends PolyAlgNode { + + private final PolyAlgNode arg; + private final String alias; + + + public PolyAlgAliasedArgument( PolyAlgNode arg, String alias, ParserPos pos ) { + super( pos ); + this.arg = arg; + this.alias = alias; + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgDataType.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgDataType.java new file mode 100644 index 0000000000..bc32ee75fc --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgDataType.java @@ -0,0 +1,79 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg.parser.nodes; + +import java.util.List; +import java.util.StringJoiner; +import lombok.NonNull; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.languages.ParserPos; +import org.polypheny.db.type.PolyType; + +public class PolyAlgDataType extends PolyAlgNode { + + private final String type; + private final List args; // precision / scale + private final boolean nullable, isArray; + + + public PolyAlgDataType( @NonNull String type, @NonNull List args, boolean nullable, boolean isArray, ParserPos pos ) { + super( pos ); + this.type = type; + this.args = args; + this.nullable = nullable; + this.isArray = isArray; + } + + + public AlgDataType toAlgDataType() { + AlgDataTypeFactory factory = AlgDataTypeFactory.DEFAULT; + PolyType polyType = getPolyType(); + AlgDataType dataType = switch ( args.size() ) { + case 0 -> factory.createPolyType( polyType ); + case 1 -> factory.createPolyType( polyType, args.get( 0 ) ); + case 2 -> factory.createPolyType( polyType, args.get( 0 ), args.get( 1 ) ); + default -> throw new GenericRuntimeException( "Unexpected number of type arguments: " + args.size() ); + }; + if ( isArray ) { + dataType = factory.createArrayType( dataType, -1 ); + } + return factory.createTypeWithNullability( dataType, nullable ); + } + + + public PolyType getPolyType() { + return PolyType.get( type ); + } + + + @Override + public String toString() { + String str = type; + if ( !args.isEmpty() ) { + str += "("; + StringJoiner joiner = new StringJoiner( ", " ); + for ( Integer arg : args ) { + joiner.add( arg.toString() ); + } + str += joiner + ")"; + } + return str; + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgExpression.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgExpression.java new file mode 100644 index 0000000000..1a10e3222a --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgExpression.java @@ 
-0,0 +1,288 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg.parser.nodes; + +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.StringJoiner; +import lombok.Getter; +import lombok.NonNull; +import lombok.Setter; +import org.polypheny.db.algebra.fun.AggFunction; +import org.polypheny.db.algebra.operators.OperatorName; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamTag; +import org.polypheny.db.algebra.polyalg.PolyAlgUtils; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgExpressionExtension.ExtensionType; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.catalog.logistic.DataModel; +import org.polypheny.db.languages.OperatorRegistry; +import org.polypheny.db.languages.ParserPos; +import org.polypheny.db.nodes.Operator; + +/** + * One-size-fits-all class for any RexNodes or even literals not wrapped in a RexNode + */ + +@Getter +public class PolyAlgExpression extends PolyAlgNode { + + private final List literals; + private final List childExps; + private final PolyAlgDataType dataType; // the normal data type specification which is indicated by : after literals + private final Map extensions; + @Setter + private PolyAlgDataType cast; // unlike this.type, this.cast appears in the form CAST(field as INTEGER) + + + public PolyAlgExpression( + @NonNull List literals, List childExps, PolyAlgDataType dataType, + @NonNull List extensions, ParserPos pos ) { + super( pos ); + if ( literals.isEmpty() ) { + throw new GenericRuntimeException( "Expression must have at least one literal" ); + } + + this.literals = literals; + this.childExps = childExps; + this.dataType = dataType; + this.extensions = new HashMap<>(); + for ( PolyAlgExpressionExtension ext : extensions ) { + this.extensions.put( ext.getType(), ext ); + } + + } + + + public boolean isCall() { + // if childExps is an empty list, we have a call with 0 arguments + return childExps != null; + } + + + public boolean isElementRef() { + return isCall() && getLiteralsAsString().equals( PolyAlgUtils.ELEMENT_REF_PREFIX ); + } + + + public boolean isSingleLiteral() { + return !isCall() && !hasExtensions() && literals.size() == 1; + } + + + public boolean hasDataType() { + return dataType != null; + } + + + public boolean hasExtensions() { + return !extensions.isEmpty(); + } + + + public PolyAlgExpressionExtension getExtension( ExtensionType type ) { + return extensions.get( type ); + } + + + /** + * Return the explicitly stated AlgDataType for this PolyAlgExpression or {@code null} if it has none. + * + * @return the stated AlgDataType of this expression + */ + public AlgDataType getAlgDataType() { + return dataType == null ? 
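/*
 * Illustrative example (not part of the original patch): how a parsed type specification is
 * mapped to an AlgDataType by PolyAlgDataType.toAlgDataType() above. For a hypothetical
 * nullable "VARCHAR(255) ARRAY",
 *     new PolyAlgDataType( "VARCHAR", List.of( 255 ), true, true, pos ).toAlgDataType()
 * first builds the basic type via factory.createPolyType( PolyType.VARCHAR, 255 ), then wraps
 * it into an array with unbounded cardinality (createArrayType( ..., -1 )) and finally applies
 * nullability through factory.createTypeWithNullability( ..., true ).
 */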
null : dataType.toAlgDataType();
+    }
+
+
+    /**
+     * Return the explicitly stated AlgDataType to be used in the CAST operator.
+     *
+     * @return the stated AlgDataType of this expression
+     */
+    public AlgDataType getAlgDataTypeForCast() {
+        PolyAlgDataType cast = getOnlyChild().getCast();
+        if ( cast == null ) {
+            throw new GenericRuntimeException( "No AlgDataType to cast to was specified" );
+        }
+        return cast.toAlgDataType();
+    }
+
+
+    public int toInt( Set<ParamTag> constraints ) {
+        if ( !isSingleLiteral() ) {
+            throw new GenericRuntimeException( "Not a valid integer: " + this );
+        }
+        int i = literals.get( 0 ).toInt();
+        if ( i < 0 && constraints.contains( ParamTag.NON_NEGATIVE ) ) {
+            throw new GenericRuntimeException( "Integer value must not be negative!" );
+        }
+        return i;
+    }
+
+
+    public Double toDouble( Set<ParamTag> constraints ) {
+        if ( !isSingleLiteral() ) {
+            throw new GenericRuntimeException( "Not a valid double: " + this );
+        }
+        double d = literals.get( 0 ).toNumber().doubleValue();
+        if ( d < 0 && constraints.contains( ParamTag.NON_NEGATIVE ) ) {
+            throw new GenericRuntimeException( "Double value must not be negative!" );
+        }
+        return d;
+    }
+
+
+    public Number toNumber() {
+        if ( !isSingleLiteral() ) {
+            throw new GenericRuntimeException( "Not a valid number: " + this );
+        }
+        return literals.get( 0 ).toNumber();
+    }
+
+
+    public boolean toBoolean() {
+        if ( !isSingleLiteral() ) {
+            throw new GenericRuntimeException( "Not a valid boolean: " + this );
+        }
+        return literals.get( 0 ).toBoolean();
+    }
+
+
+    public <T extends Enum<T>> T toEnum( Class<T> enumClass ) {
+        if ( !isSingleLiteral() ) {
+            throw new GenericRuntimeException( "Not a valid enum: " + this );
+        }
+        return Enum.valueOf( enumClass, literals.get( 0 ).toString() );
+    }
+
+
+    public String toIdentifier() {
+        if ( !isSingleLiteral() ) {
+            throw new GenericRuntimeException( "Not a valid identifier: " + this );
+        }
+        return literals.get( 0 ).toUnquotedString();
+    }
+
+
+    public List<String> getLiteralsAsStrings() {
+        return literals.stream().map( PolyAlgLiteral::toString ).toList();
+    }
+
+
+    public String getLiteralsAsString() {
+        return String.join( " ", getLiteralsAsStrings() );
+    }
+
+
+    public PolyAlgLiteral getLastLiteral() {
+        return literals.get( literals.size() - 1 );
+    }
+
+
+    public PolyAlgExpression getOnlyChild() {
+        if ( childExps.size() != 1 ) {
+            throw new GenericRuntimeException( "Unexpected number of child expressions: " + childExps.size() );
+        }
+        return childExps.get( 0 );
+    }
+
+
+    public String getDefaultAlias() {
+        if ( isCastOperator() ) {
+            return getOnlyChild().getDefaultAlias();
+        }
+        return toString();
+    }
+
+
+    public boolean isCastOperator() {
+        if ( isCall() ) {
+            try {
+                return getOperator( DataModel.RELATIONAL ).getOperatorName() == OperatorName.CAST;
+            } catch ( GenericRuntimeException ignored ) {
+            }
+        }
+        return false;
+    }
+
+
+    @Override
+    public String toString() {
+        StringBuilder sb = new StringBuilder();
+        for ( PolyAlgLiteral literal : literals ) {
+            sb.append( literal.toUnquotedString() );
+        }
+        if ( isCall() ) {
+            StringJoiner joiner = new StringJoiner( ", " );
+            for ( PolyAlgExpression e : childExps ) {
+                joiner.add( e.toString() );
+            }
+            sb.append( "(" ).append( joiner ).append( ")" );
+        }
+        if ( hasDataType() ) {
+            sb.append( ":" ).append( dataType );
+        }
+        if ( hasExtensions() ) {
+            StringJoiner joiner = new StringJoiner( " " );
+            for ( PolyAlgExpressionExtension extension : extensions.values() ) {
+                joiner.add( extension.toString() );
+            }
+            sb.append( joiner );
+        }
+        return sb.toString();
+    }
+
+
+    public Operator
getOperator( DataModel model ) { + String str = getLiteralsAsString(); + String upper = str.toUpperCase( Locale.ROOT ); + + Operator op = OperatorRegistry.getFromUniqueName( model, upper ); + if ( op == null ) { + if ( model == DataModel.RELATIONAL ) { + // We might have a common operator -> also test GRAPH and DOCUMENT + op = OperatorRegistry.getFromUniqueName( DataModel.GRAPH, upper ); + if ( op == null ) { + op = OperatorRegistry.getFromUniqueName( DataModel.DOCUMENT, upper ); + } + } else { + op = OperatorRegistry.getFromUniqueName( DataModel.RELATIONAL, upper ); // generic operator fallback + } + if ( op == null ) { + throw new GenericRuntimeException( "Operator '" + str + "' is not yet supported" ); + } + } + return op; + } + + + public AggFunction getAggFunction() { + String str = getLiteralsAsString(); + OperatorName opName = OperatorName.valueOf( str.toUpperCase( Locale.ROOT ) ); + if ( opName.getClazz() != AggFunction.class ) { + throw new GenericRuntimeException( "Operator '" + str + "' is not a valid aggregate function" ); + } + return OperatorRegistry.getAgg( opName ); + } + + +} + diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgExpressionExtension.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgExpressionExtension.java new file mode 100644 index 0000000000..891e0fcb51 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgExpressionExtension.java @@ -0,0 +1,54 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg.parser.nodes; + +import java.util.List; +import java.util.StringJoiner; +import lombok.Getter; + +@Getter +public class PolyAlgExpressionExtension { + + private final List literals; + private final ExtensionType type; + + + public PolyAlgExpressionExtension( List literals, ExtensionType type ) { + this.literals = literals; + this.type = type; + } + + + @Override + public String toString() { + StringJoiner joiner = new StringJoiner( " " ); + for ( PolyAlgLiteral literal : literals ) { + joiner.add( literal.toString() ); + } + return joiner.toString(); + } + + + public enum ExtensionType { + FILTER, + WITHIN_GROUP, + APPROXIMATE, + WITH, + OVER; + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgLiteral.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgLiteral.java new file mode 100644 index 0000000000..d8e089edbb --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgLiteral.java @@ -0,0 +1,164 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg.parser.nodes; + +import java.util.Locale; +import lombok.Getter; +import lombok.NonNull; +import org.polypheny.db.algebra.AlgFieldCollation; +import org.polypheny.db.algebra.AlgFieldCollation.Direction; +import org.polypheny.db.algebra.AlgFieldCollation.NullDirection; +import org.polypheny.db.algebra.core.CorrelationId; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.languages.ParserPos; +import org.polypheny.db.rex.RexLocalRef; +import org.polypheny.db.type.entity.PolyValue; +import org.polypheny.db.util.Pair; + +public class PolyAlgLiteral extends PolyAlgNode { + + private final Object object; + private final String str; + @Getter + private final LiteralType type; + + + public PolyAlgLiteral( @NonNull Object o, @NonNull LiteralType type, ParserPos pos ) { + super( pos ); + this.object = o; + this.str = o.toString(); + this.type = type; + } + + + public void checkType( LiteralType type ) { + if ( this.type != type ) { + throw new GenericRuntimeException( "Not a valid " + this.type + ": '" + str + "'" ); + } + } + + + public int toInt() { + checkType( LiteralType.NUMBER ); + return Integer.parseInt( str ); + } + + + public boolean toBoolean() { + checkType( LiteralType.BOOLEAN ); + return Boolean.parseBoolean( str ); + } + + + public Number toNumber() { + checkType( LiteralType.NUMBER ); + + Number num; + double dbl = Double.parseDouble( str ); + num = dbl; + if ( dbl % 1 == 0 ) { + num = Integer.parseInt( str ); + } + return num; + } + + + public AlgFieldCollation.Direction toDirection() { + checkType( LiteralType.DIRECTION ); + return switch ( str.toUpperCase( Locale.ROOT ) ) { + case "ASC" -> Direction.ASCENDING; + case "DESC" -> Direction.DESCENDING; + case "SASC" -> Direction.STRICTLY_ASCENDING; + case "SDESC" -> Direction.STRICTLY_DESCENDING; + case "CLU" -> Direction.CLUSTERED; + default -> throw new IllegalArgumentException( "'" + str + "' is not a valid direction" ); + }; + } + + + public AlgFieldCollation.NullDirection toNullDirection() { + checkType( LiteralType.NULL_DIRECTION ); + return NullDirection.valueOf( str.toUpperCase( Locale.ROOT ) ); + } + + + public Pair toCorrelationFieldAccess() { + checkType( LiteralType.CORRELATION_VAR ); + String[] parts = str.split( "\\.", 2 ); + if ( parts.length != 2 ) { + throw new GenericRuntimeException( "Missing field access in '" + str + "'" ); + } + return Pair.of( new CorrelationId( parts[0] ), parts[1] ); + } + + + public int toLocalRef() { + checkType( LiteralType.LOCAL_REF ); + return Integer.parseInt( str.substring( RexLocalRef.PREFIX.length() ) ); + } + + + @Override + public String toString() { + return str; + } + + + public String toUnquotedString() { + if ( type == LiteralType.QUOTED ) { + return str.substring( 1, str.length() - 1 ); + } + return str; + } + + + public boolean isDoubleQuoted() { + return type == LiteralType.QUOTED && str.charAt( 0 ) == '"'; + } + + + public int toDynamicParam() { + checkType( LiteralType.DYNAMIC_PARAM ); + return Integer.parseInt( str.substring( 1 ) ); // str looks like "?0" + } + + + 
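    // Illustrative usage sketch (not part of the original patch): converting parsed literals
    // into typed values with the API defined in this class. Values and positions are made up.
    @SuppressWarnings( "unused" )
    private static void literalConversionExample() {
        ParserPos pos = new ParserPos( 0, 0, 0, 0 );

        // a sort direction token such as "DESC"
        PolyAlgLiteral direction = new PolyAlgLiteral( "DESC", LiteralType.DIRECTION, pos );
        assert direction.toDirection() == Direction.DESCENDING;

        // a quoted identifier keeps its quotes in str; toUnquotedString() strips them
        PolyAlgLiteral quoted = new PolyAlgLiteral( "'empid'", LiteralType.QUOTED, pos );
        assert quoted.toUnquotedString().equals( "empid" );

        // numeric tokens are converted on access
        PolyAlgLiteral number = new PolyAlgLiteral( "42", LiteralType.NUMBER, pos );
        assert number.toInt() == 42;
    }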
public PolyValue toPolyValue() { + checkType( LiteralType.POLY_VALUE ); + return (PolyValue) object; + } + + + public enum LiteralType { + + QUOTED, + NUMBER, + BOOLEAN, + NULL, + DIRECTION, // AlgFieldCollation.Direction + NULL_DIRECTION, // AlgFieldCollation.NullDirection + DYNAMIC_PARAM, + CORRELATION_VAR, + LOCAL_REF, + POLY_VALUE, + STRING; // This is the least specific type and is used e.g. for field or entity names + + public static LiteralType DEFAULT = STRING; + + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgNamedArgument.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgNamedArgument.java new file mode 100644 index 0000000000..419ed72f37 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgNamedArgument.java @@ -0,0 +1,40 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg.parser.nodes; + +import lombok.Getter; +import org.polypheny.db.languages.ParserPos; + +@Getter +public class PolyAlgNamedArgument extends PolyAlgNode { + + private final String name; + private final PolyAlgAliasedArgument aliasedArg; + + + public PolyAlgNamedArgument( String name, PolyAlgAliasedArgument aliasedArg, ParserPos pos ) { + super( pos ); + this.name = name; + this.aliasedArg = aliasedArg; + } + + + public boolean isPositionalArg() { + return name == null; + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgNode.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgNode.java new file mode 100644 index 0000000000..68723d935b --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgNode.java @@ -0,0 +1,84 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.algebra.polyalg.parser.nodes; + +import java.util.Set; +import lombok.Getter; +import org.jetbrains.annotations.Nullable; +import org.polypheny.db.algebra.constant.Kind; +import org.polypheny.db.languages.ParserPos; +import org.polypheny.db.languages.QueryLanguage; +import org.polypheny.db.nodes.Node; +import org.polypheny.db.nodes.NodeVisitor; +import org.polypheny.db.util.Litmus; + +/** + * At this point this class + */ +@Getter +public abstract class PolyAlgNode implements Node { + + protected final ParserPos pos; + + + protected PolyAlgNode( ParserPos pos ) { + this.pos = pos; + } + + + @Override + public Node clone( ParserPos pos ) { + return null; + } + + + @Override + public Kind getKind() { + return null; + } + + + @Override + public QueryLanguage getLanguage() { + return null; + } + + + @Override + public boolean isA( Set category ) { + return category.contains( this.getKind() ); + } + + + @Override + public boolean equalsDeep( Node node, Litmus litmus ) { + return false; + } + + + @Override + public @Nullable String getEntity() { + return null; + } + + + @Override + public R accept( NodeVisitor visitor ) { + return null; + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgNodeList.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgNodeList.java new file mode 100644 index 0000000000..c47f66b0bb --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgNodeList.java @@ -0,0 +1,95 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.algebra.polyalg.parser.nodes; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import lombok.Getter; +import org.jetbrains.annotations.NotNull; +import org.polypheny.db.languages.ParserPos; +import org.polypheny.db.nodes.Node; +import org.polypheny.db.nodes.NodeList; + +public class PolyAlgNodeList extends PolyAlgNode implements NodeList { + + private final List list; + @Getter + private final List polyAlgList; + + + public PolyAlgNodeList( ParserPos pos ) { + super( pos ); + list = new ArrayList<>(); + polyAlgList = new ArrayList<>(); + } + + + public PolyAlgNodeList( Collection collection, ParserPos pos ) { + super( pos ); + list = new ArrayList<>( collection ); + polyAlgList = new ArrayList<>( collection.stream().map( e -> (PolyAlgNode) e ).toList() ); + } + + + @Override + public List getList() { + return list; + } + + + @Override + public void add( Node node ) { + list.add( node ); + polyAlgList.add( (PolyAlgNode) node ); + + } + + + @Override + public Node get( int n ) { + return list.get( n ); + } + + + @Override + public Node set( int n, Node node ) { + polyAlgList.set( n, (PolyAlgNode) node ); + return list.set( n, node ); + } + + + @Override + public int size() { + return list.size(); + } + + + @Override + public Node[] toArray() { + return list.toArray( new Node[0] ); + } + + + @NotNull + @Override + public Iterator iterator() { + return list.iterator(); + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgOperator.java b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgOperator.java new file mode 100644 index 0000000000..0a244e2113 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/algebra/polyalg/parser/nodes/PolyAlgOperator.java @@ -0,0 +1,39 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.algebra.polyalg.parser.nodes; + +import java.util.List; +import lombok.Getter; +import org.polypheny.db.languages.ParserPos; + +@Getter +public class PolyAlgOperator extends PolyAlgNode { + + private final String opName; + private final List arguments; + private final List children; + + + public PolyAlgOperator( String opName, List arguments, List children, ParserPos pos ) { + super( pos ); + + this.opName = opName; + this.arguments = arguments == null ? List.of() : arguments; + this.children = children == null ? 
List.of() : children; + } + +} diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/LoptOptimizeJoinRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/LoptOptimizeJoinRule.java index ee7d0e33ed..e5df35e124 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/LoptOptimizeJoinRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/LoptOptimizeJoinRule.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -42,6 +42,7 @@ import java.util.List; import java.util.ListIterator; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.TreeSet; import org.polypheny.db.algebra.AlgNode; @@ -1398,14 +1399,13 @@ private boolean swapInputs( AlgMetadataQuery mq, LoptMultiJoin multiJoin, LoptJo return !multiJoin.isLeftFactorInRemovableSelfJoin( ((LoptJoinTree.Leaf) left.getFactorTree()).getId() ); } - final Double leftRowCount = mq.getTupleCount( left.getJoinTree() ); - final Double rightRowCount = mq.getTupleCount( right.getJoinTree() ); + Optional leftRowCount = mq.getTupleCount( left.getJoinTree() ); + Optional rightRowCount = mq.getTupleCount( right.getJoinTree() ); // The left side is smaller than the right if it has fewer rows, or if it has the same number of rows as the right (excluding roundoff), but fewer columns. - if ( (leftRowCount != null) - && (rightRowCount != null) - && ((leftRowCount < rightRowCount) - || ((Math.abs( leftRowCount - rightRowCount ) + if ( (leftRowCount.isPresent()) && (rightRowCount.isPresent()) + && ((leftRowCount.get() < rightRowCount.get()) + || ((Math.abs( leftRowCount.get() - rightRowCount.get() ) < AlgOptUtil.EPSILON) && (rowWidthCost( left.getJoinTree() ) < rowWidthCost( right.getJoinTree() )))) ) { diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java b/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java index 22e10fab74..77968d8e07 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/LoptSemiJoinOptimizer.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -531,7 +531,7 @@ private double computeScore( AlgNode factRel, AlgNode dimRel, SemiJoin semiJoin } // Compute the cost of doing an extra relScan on the dimension table, including the distinct sort on top of the relScan; if the dimension columns are already unique, no need to add on the dup removal cost. - final Double dimSortCost = mq.getTupleCount( dimRel ); + final Double dimSortCost = mq.getTupleCount( dimRel ).orElse( Double.MAX_VALUE ); final Double dupRemCost = uniq ? 
0 : dimSortCost; final AlgOptCost dimCost = mq.getCumulativeCost( dimRel ); if ( (dimSortCost == null) || (dupRemCost == null) || (dimCost == null) ) { diff --git a/core/src/main/java/org/polypheny/db/algebra/rules/MultiJoinOptimizeBushyRule.java b/core/src/main/java/org/polypheny/db/algebra/rules/MultiJoinOptimizeBushyRule.java index 2fc2338e31..2085bdacf7 100644 --- a/core/src/main/java/org/polypheny/db/algebra/rules/MultiJoinOptimizeBushyRule.java +++ b/core/src/main/java/org/polypheny/db/algebra/rules/MultiJoinOptimizeBushyRule.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -110,7 +110,7 @@ public void onMatch( AlgOptRuleCall call ) { int x = 0; for ( int i = 0; i < multiJoin.getNumJoinFactors(); i++ ) { final AlgNode alg = multiJoin.getJoinFactor( i ); - double cost = mq.getTupleCount( alg ); + double cost = mq.getTupleCount( alg ).orElse( Double.MAX_VALUE ); vertexes.add( new LeafVertex( i, alg, cost, x ) ); x += alg.getTupleType().getFieldCount(); } @@ -123,7 +123,7 @@ public void onMatch( AlgOptRuleCall call ) { // Comparator that chooses the best edge. A "good edge" is one that has a large difference in the number of rows on LHS and RHS. final Comparator edgeComparator = - new Comparator() { + new Comparator<>() { @Override public int compare( LoptMultiJoin.Edge e0, LoptMultiJoin.Edge e1 ) { return Double.compare( rowCountDiff( e0 ), rowCountDiff( e1 ) ); diff --git a/core/src/main/java/org/polypheny/db/algebra/type/AlgDataTypeImpl.java b/core/src/main/java/org/polypheny/db/algebra/type/AlgDataTypeImpl.java index 6516e35704..ecf6f10164 100644 --- a/core/src/main/java/org/polypheny/db/algebra/type/AlgDataTypeImpl.java +++ b/core/src/main/java/org/polypheny/db/algebra/type/AlgDataTypeImpl.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -40,7 +40,6 @@ import java.nio.charset.Charset; import java.util.ArrayList; import java.util.List; -import java.util.stream.Collectors; import org.polypheny.db.nodes.IntervalQualifier; import org.polypheny.db.type.BasicPolyType; import org.polypheny.db.type.PolyType; @@ -69,7 +68,7 @@ protected AlgDataTypeImpl( List fields ) { if ( fields != null ) { // Create a defensive copy of the list. this.fields = ImmutableList.copyOf( fields ); - this.ids = fields.stream().map( AlgDataTypeField::getId ).collect( Collectors.toList() ); + this.ids = fields.stream().map( AlgDataTypeField::getId ).toList(); } else { this.fields = null; this.ids = null; @@ -161,7 +160,10 @@ public List getFields() { @Override public List getFieldNames() { - return fields.stream().map( AlgDataTypeField::getName ).collect( Collectors.toList() ); + if ( fields == null ) { + return ImmutableList.of(); + } + return fields.stream().map( AlgDataTypeField::getName ).toList(); } @@ -172,10 +174,10 @@ public List getFieldIds() { public List getPhysicalFieldNames() { - // TODO MV: Is there a more efficient way for doing this? 
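// Illustrative sketch of the row-count pattern used in the join-rule changes above (the call
// sites show that AlgMetadataQuery#getTupleCount now returns Optional<Double>): an unknown
// tuple count is treated as "infinitely expensive" instead of being null-checked. The helper
// name below is made up for illustration.
static double tupleCountOrMax( AlgMetadataQuery mq, AlgNode alg ) {
    return mq.getTupleCount( alg ).orElse( Double.MAX_VALUE );
}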
- List l = new ArrayList<>(); - fields.forEach( f -> l.add( f.getPhysicalName() ) ); - return l; + if ( fields == null ) { + return ImmutableList.of(); + } + return fields.stream().map( AlgDataTypeField::getPhysicalName ).toList(); } @@ -207,8 +209,7 @@ public boolean isStruct() { @Override public boolean equals( Object obj ) { - if ( obj instanceof AlgDataTypeImpl ) { - final AlgDataTypeImpl that = (AlgDataTypeImpl) obj; + if ( obj instanceof AlgDataTypeImpl that ) { return this.digest.equals( that.digest ); } return false; diff --git a/core/src/main/java/org/polypheny/db/algebra/type/DocumentType.java b/core/src/main/java/org/polypheny/db/algebra/type/DocumentType.java index a49b875501..bcab8802a3 100644 --- a/core/src/main/java/org/polypheny/db/algebra/type/DocumentType.java +++ b/core/src/main/java/org/polypheny/db/algebra/type/DocumentType.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -36,6 +36,7 @@ public class DocumentType implements AlgDataType, AlgDataTypeFamily { public static final String DOCUMENT_ID = "_id"; public static final String DOCUMENT_DATA = "_data"; + public static final String DOCUMENT_FIELD = "d"; public StructKind structKind; @@ -94,7 +95,7 @@ public static AlgDataTypeFieldImpl getRelationalId() { public static AlgDataType ofDoc() { - return new DocumentType( List.of( new AlgDataTypeFieldImpl( -1L, "d", 0, DocumentType.ofId() ) ) ); + return new DocumentType( List.of( new AlgDataTypeFieldImpl( -1L, DOCUMENT_FIELD, 0, DocumentType.ofId() ) ) ); } @@ -105,7 +106,7 @@ public static DocumentType ofIncludes( Map includes ) public static AlgDataType ofCrossRelational() { return new AlgRecordType( List.of( - new AlgDataTypeFieldImpl( -1L, "d", 0, AlgDataTypeFactory.DEFAULT.createPolyType( PolyType.DOCUMENT ) ) + new AlgDataTypeFieldImpl( -1L, DOCUMENT_FIELD, 0, AlgDataTypeFactory.DEFAULT.createPolyType( PolyType.DOCUMENT ) ) ) ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java index 7932743170..8d1eb64dae 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/allocation/AllocationEntity.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
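// Illustrative example for the DocumentType change above (not part of the original patch):
// the single document column of a cross-model row type is now addressed through the
// DOCUMENT_FIELD constant instead of the string literal "d".
AlgDataType crossModelRow = DocumentType.ofCrossRelational();
String docColumn = crossModelRow.getFieldNames().get( 0 );   // equals DocumentType.DOCUMENT_FIELD, i.e. "d"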
@@ -41,6 +41,8 @@ @JsonTypeInfo(use = Id.CLASS) public abstract class AllocationEntity extends Entity { + public static String PREFIX = "$alloc$"; + @Serialize @JsonProperty public long adapterId; @@ -68,7 +70,7 @@ protected AllocationEntity( long namespaceId, long adapterId, DataModel type ) { - super( id, "$alloc$" + id, namespaceId, EntityType.ENTITY, type, true ); + super( id, PREFIX + id, namespaceId, EntityType.ENTITY, type, true ); this.adapterId = adapterId; this.logicalId = logicalId; this.partitionId = partitionId; diff --git a/core/src/main/java/org/polypheny/db/functions/CrossModelFunctions.java b/core/src/main/java/org/polypheny/db/functions/CrossModelFunctions.java index 568daeab9b..1656ee0b0e 100644 --- a/core/src/main/java/org/polypheny/db/functions/CrossModelFunctions.java +++ b/core/src/main/java/org/polypheny/db/functions/CrossModelFunctions.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -187,11 +187,6 @@ public static Enumerable nodesToCollection( Enumerable } - public static PolyValue cypherOnlyLabelGraph( PolyValue value, PolyString label ) { - return null; - } - - /** * Modify operation, which inserts edge properties. * diff --git a/core/src/main/java/org/polypheny/db/interpreter/Bindables.java b/core/src/main/java/org/polypheny/db/interpreter/Bindables.java index 88042a17c3..9fda48677f 100644 --- a/core/src/main/java/org/polypheny/db/interpreter/Bindables.java +++ b/core/src/main/java/org/polypheny/db/interpreter/Bindables.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -74,10 +74,18 @@ import org.polypheny.db.algebra.logical.relational.LogicalWindow; import org.polypheny.db.algebra.metadata.AlgMdCollation; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.EntityArg; +import org.polypheny.db.algebra.polyalg.arguments.IntArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.Entity; +import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgOptCost; import org.polypheny.db.plan.AlgOptRule; @@ -218,6 +226,14 @@ public static BindableScan create( AlgCluster cluster, Entity entity, List children, AlgCluster cluster ) { + return create( cluster, + args.getArg( 0, EntityArg.class ).getEntity(), + args.getListArg( "filters", RexArg.class ).map( RexArg::getNode ), + args.getListArg( "projects", IntArg.class ).map( IntArg::getArg ) ); + } + + @Override public AlgDataType deriveRowType() { final AlgDataTypeFactory.Builder builder = getCluster().getTypeFactory().builder(); @@ -293,6 +309,20 @@ public boolean isImplementationCacheable() { return false; } + + @Override + public PolyAlgArgs bindArguments() { + PolyAlgArgs args = new PolyAlgArgs( getPolyAlgDeclaration() ); + List fieldNames = entity.getTupleType().getFieldNames(); + PolyAlgArg filtersArg = new ListArg<>( filters, f -> new RexArg( f, fieldNames ), args.getDecl().canUnpackValues() ); + PolyAlgArg projectsArg = new ListArg<>( projects, IntArg::new, args.getDecl().canUnpackValues() ); + + args.put( "entity", new EntityArg( entity, Catalog.snapshot(), DataModel.RELATIONAL ) ) + .put( "filters", filtersArg ) + .put( "projects", projectsArg ); + return args; + } + } diff --git a/core/src/main/java/org/polypheny/db/languages/LanguageManager.java b/core/src/main/java/org/polypheny/db/languages/LanguageManager.java index dc47b543d0..ddcfe1b4a9 100644 --- a/core/src/main/java/org/polypheny/db/languages/LanguageManager.java +++ b/core/src/main/java/org/polypheny/db/languages/LanguageManager.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
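// Illustrative summary of the PolyAlg round trip added to BindableScan above (argument names
// taken from bindArguments()): serialization exposes the named arguments "entity", "filters"
// and "projects"; when parsing, the new static create( PolyAlgArgs, ... ) overload reads them
// back, e.g.
//     args.getArg( 0, EntityArg.class ).getEntity()
//     args.getListArg( "filters", RexArg.class ).map( RexArg::getNode )
//     args.getListArg( "projects", IntArg.class ).map( IntArg::getArg )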
@@ -37,8 +37,11 @@ import org.polypheny.db.processing.Processor; import org.polypheny.db.processing.QueryContext; import org.polypheny.db.processing.QueryContext.ParsedQueryContext; +import org.polypheny.db.processing.QueryContext.PhysicalQueryContext; +import org.polypheny.db.processing.QueryContext.TranslatedQueryContext; import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.Transaction; +import org.polypheny.db.type.entity.PolyValue; import org.polypheny.db.util.DeadlockException; import org.polypheny.db.util.Pair; @@ -98,29 +101,39 @@ public List anyPrepareQuery( QueryContext context, Statem context.getInformationTarget().accept( statement.getTransaction().getQueryAnalyzer() ); } - if ( transaction.isAnalyze() ) { - statement.getOverviewDuration().start( "Parsing" ); - } List parsedQueries; - try { - // handle empty query - if ( context.getQuery().trim().isEmpty() ) { - throw new GenericRuntimeException( String.format( "%s query is empty", context.getLanguage().serializedName() ) ); + if ( context instanceof ParsedQueryContext ) { + parsedQueries = List.of( (ParsedQueryContext) context ); + } else { + try { + if ( transaction.isAnalyze() ) { + statement.getOverviewDuration().start( "Parsing" ); + } + + // handle empty query + if ( context.getQuery().trim().isEmpty() ) { + throw new GenericRuntimeException( String.format( "%s query is empty", context.getLanguage().serializedName() ) ); + } + + parsedQueries = context.getLanguage().parser().apply( context ); + } catch ( Throwable e ) { + if ( transaction.isAnalyze() ) { + transaction.getQueryAnalyzer().attachStacktrace( e ); + } + cancelTransaction( transaction, String.format( "Error on preparing query: %s", e.getMessage() ) ); + context.removeTransaction( transaction ); + return List.of( ImplementationContext.ofError( e, ParsedQueryContext.fromQuery( context.getQuery(), null, context ), statement ) ); } - parsedQueries = context.getLanguage().parser().apply( context ); - } catch ( Throwable e ) { if ( transaction.isAnalyze() ) { - transaction.getQueryAnalyzer().attachStacktrace( e ); + statement.getOverviewDuration().stop( "Parsing" ); } - cancelTransaction( transaction, String.format( "Error on preparing query: %s", e.getMessage() ) ); - context.removeTransaction( transaction ); - return List.of( ImplementationContext.ofError( e, ParsedQueryContext.fromQuery( context.getQuery(), null, context ), statement ) ); + } - if ( transaction.isAnalyze() ) { - statement.getOverviewDuration().stop( "Parsing" ); + if ( context instanceof TranslatedQueryContext ) { + return implementTranslatedQuery( statement, transaction, (TranslatedQueryContext) context ); } Processor processor = context.getLanguage().processorSupplier().get(); @@ -236,7 +249,12 @@ private static void cancelTransaction( @Nullable Transaction transaction, @Nulla public List anyQuery( QueryContext context ) { - List prepared = anyPrepareQuery( context, context.getTransactions().get( context.getTransactions().size() - 1 ) ); + List prepared; + if ( context instanceof TranslatedQueryContext ) { + prepared = anyPrepareQuery( context, context.getStatement() ); + } else { + prepared = anyPrepareQuery( context, context.getTransactions().get( context.getTransactions().size() - 1 ) ); + } List executedContexts = new ArrayList<>(); for ( ImplementationContext implementation : prepared ) { @@ -261,6 +279,37 @@ public List anyQuery( QueryContext context ) { } + private List implementTranslatedQuery( Statement statement, Transaction transaction, 
TranslatedQueryContext translated ) { + try { + PolyImplementation implementation; + + if ( translated instanceof PhysicalQueryContext physical ) { + for ( int i = 0; i < physical.getDynamicValues().size(); i++ ) { + PolyValue v = physical.getDynamicValues().get( i ); + AlgDataType type = physical.getDynamicTypes().get( i ); + statement.getDataContext().addParameterValues( i, type, List.of( v ) ); + } + implementation = statement.getQueryProcessor().prepareQuery( physical.getRoot(), translated.isRouted(), true, true ); + } else { + implementation = statement.getQueryProcessor().prepareQuery( translated.getRoot(), translated.isRouted(), true ); + } + + return List.of( new ImplementationContext( implementation, translated, statement, null ) ); + } catch ( Throwable e ) { + if ( transaction.isAnalyze() ) { + transaction.getQueryAnalyzer().attachStacktrace( e ); + } + if ( !(e instanceof DeadlockException) ) { + // we only log unexpected cases with stacktrace + log.warn( "Caught exception: ", e ); + } + + cancelTransaction( transaction, String.format( "Caught %s exception: %s", e.getClass().getSimpleName(), e.getMessage() ) ); + return List.of( (ImplementationContext.ofError( e, translated, statement )) ); + } + } + + public static List toQueryNodes( QueryContext queries ) { Processor processor = queries.getLanguage().processorSupplier().get(); List splitQueries = processor.splitStatements( queries.getQuery() ); diff --git a/core/src/main/java/org/polypheny/db/languages/OperatorRegistry.java b/core/src/main/java/org/polypheny/db/languages/OperatorRegistry.java index 36c6ee731b..575f0ff391 100644 --- a/core/src/main/java/org/polypheny/db/languages/OperatorRegistry.java +++ b/core/src/main/java/org/polypheny/db/languages/OperatorRegistry.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,6 +21,8 @@ import java.util.stream.Collectors; import org.polypheny.db.algebra.fun.AggFunction; import org.polypheny.db.algebra.operators.OperatorName; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.nodes.BinaryOperator; import org.polypheny.db.nodes.Operator; import org.polypheny.db.util.Pair; @@ -37,6 +39,11 @@ public class OperatorRegistry { private static final Map> registry = new HashMap<>(); + // Maps an Operator to a unique name (either op.getName() or op.getOperatorName().name() in case of colliding names within a QueryLanguage. + // Exception: the operator with the shortest OperatorName can use op.getName() + private static Map uniqueNames = null; + private static Map, Operator> nameLookup = null; // maps a query language and uniqueName to the operator + static { // we register a new map for each language per default @@ -176,4 +183,92 @@ public static void remove( QueryLanguage language ) { registry.remove( language ); } + + /** + * Returns the operator registered with a QueryLanguage of the given DataModel + * and name equal the unique name registered in uniqueNames. + * Calling this method the first time creates a lookup table based on the current state of the registry. + * If new operators are registered after that point, they cannot be found. 
+ * + * @param model the data model of the query language the operator was registered with + * @param name the unique name of the operator (either op.getName() or op.getOperatorName().name() in case of collisions) + * @return the specified operator or {@code null} if no such operator is registered. + */ + public static Operator getFromUniqueName( DataModel model, String name ) { + if ( nameLookup == null ) { + buildNameLookup(); + } + QueryLanguage ql = switch ( model ) { + case RELATIONAL -> null; + case DOCUMENT -> QueryLanguage.from( "mongo" ); + case GRAPH -> QueryLanguage.from( "cypher" ); + }; + return nameLookup.get( Pair.of( ql, name ) ); + } + + + /** + * Returns the unique name (within its query language) the given operator. + * By default, the unique name is equal to op.getName(). In case multiple operators have the same name, all but one have to change their unique name to op.getOperatorName().name(). + * The operator that can keep using op.getName() is the one whose OperatorName is the shortest (and first in lexical order in case of same length). + * Example: OperatorName.PLUS has unique name "+", while OperatorName.DATETIME_PLUS has unique name "DATETIME_PLUS" (since PLUS is shorter than DATETIME_PLUS). + * + * Calling this method the first time creates a lookup table based on the current state of the registry. + * If new operators are registered after that point, they cannot be found. + * + * @param op the operator + * @return the unique name of the specified operator (either equal to op.getName() or op.getOperatorName().name() in case of collisions) + */ + public static String getUniqueName( Operator op ) { + if ( uniqueNames == null ) { + buildNameLookup(); + } + return uniqueNames.get( op ); + } + + + private static void buildNameLookup() { + Map, Operator> nameLookup = new HashMap<>(); + Map uniqueNames = new HashMap<>(); + for ( Map.Entry, Operator> entry : OperatorRegistry.getAllOperators().entrySet() ) { + QueryLanguage ql = entry.getKey().left; + String opName = entry.getKey().right.name(); // this is not the same as op.getName()! + Operator op = entry.getValue(); + + if ( op != null ) { + String uniqueName = op.getName(); + if ( uniqueName.isEmpty() ) { + uniqueName = op.getOperatorName().name(); + } + + Operator prevOp = nameLookup.get( Pair.of( ql, uniqueName ) ); + if ( prevOp != null ) { + String prevOpName = prevOp.getOperatorName().name(); + int d = prevOpName.length() - opName.length(); + if ( d == 0 && prevOpName.compareTo( opName ) == 0 ) { + throw new GenericRuntimeException( "Cannot have two operators registered for the same query language and OperatorName." ); + } + + if ( d > 0 || (d == 0 && prevOpName.compareTo( opName ) > 0) ) { + // found shorter name -> new operator takes priority and can use op.getName() as uniqueName. 
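// (Illustrative trace of the rule described in the Javadoc above: if DATETIME_PLUS was
//  registered first under the unique name "+", and PLUS is processed afterwards, then
//  prevOpName = "DATETIME_PLUS" is longer than opName = "PLUS", so d > 0: DATETIME_PLUS is
//  re-registered under "DATETIME_PLUS" and PLUS takes over the short unique name "+".)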
+ nameLookup.put( Pair.of( ql, prevOpName ), prevOp ); + uniqueNames.put( prevOp, prevOpName ); + } else { + uniqueName = opName; + } + } + nameLookup.put( Pair.of( ql, uniqueName ), op ); + uniqueNames.put( op, uniqueName ); + } + } + + // prevent overwriting in case lookups are created concurrently + if ( OperatorRegistry.nameLookup == null ) { + OperatorRegistry.nameLookup = nameLookup; + } + if ( OperatorRegistry.uniqueNames == null ) { + OperatorRegistry.uniqueNames = uniqueNames; + } + } + } diff --git a/core/src/main/java/org/polypheny/db/nodes/DeserializeFunctionOperator.java b/core/src/main/java/org/polypheny/db/nodes/DeserializeFunctionOperator.java index 8c678cf787..1626f2396c 100644 --- a/core/src/main/java/org/polypheny/db/nodes/DeserializeFunctionOperator.java +++ b/core/src/main/java/org/polypheny/db/nodes/DeserializeFunctionOperator.java @@ -60,7 +60,7 @@ public AlgDataType inferReturnType( OperatorBinding opBinding ) { @Override public AlgDataType deriveType( Validator validator, ValidatorScope scope, Call call ) { - return null; + return AlgDataTypeFactory.DEFAULT.createPolyType( PolyType.DOCUMENT ); } diff --git a/core/src/main/java/org/polypheny/db/nodes/LangFunctionOperator.java b/core/src/main/java/org/polypheny/db/nodes/LangFunctionOperator.java index 63a2935354..d8f6763974 100644 --- a/core/src/main/java/org/polypheny/db/nodes/LangFunctionOperator.java +++ b/core/src/main/java/org/polypheny/db/nodes/LangFunctionOperator.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -17,6 +17,9 @@ package org.polypheny.db.nodes; import java.util.List; +import java.util.function.Function; +import javax.annotation.Nullable; +import org.jetbrains.annotations.NotNull; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.constant.Syntax; import org.polypheny.db.algebra.type.AlgDataType; @@ -25,11 +28,40 @@ import org.polypheny.db.languages.ParserPos; import org.polypheny.db.nodes.validate.Validator; import org.polypheny.db.nodes.validate.ValidatorScope; +import org.polypheny.db.type.PolyType; public class LangFunctionOperator extends OperatorImpl { - public LangFunctionOperator( String name, Kind kind ) { + private final @NotNull Function fromBindingCreator; + private final @NotNull Function, AlgDataType> fromOperandsCreator; + + + public LangFunctionOperator( String name, Kind kind, @NotNull Function fromBindingCreator, @NotNull Function, AlgDataType> fromOperandsCreator ) { super( name, kind, null, null, null ); + this.fromOperandsCreator = fromOperandsCreator; + this.fromBindingCreator = fromBindingCreator; + } + + + public LangFunctionOperator( String name, Kind kind, PolyType returnType, @Nullable PolyType returnComponentType ) { + this( name, kind, (op -> fromFixedTyped( returnType, returnComponentType )), (types -> fromFixedTyped( returnType, returnComponentType )) ); + } + + + private static AlgDataType fromFixedTyped( PolyType returnType, @Nullable PolyType returnComponentType ) { + if ( returnComponentType != null ) { + return AlgDataTypeFactory.DEFAULT.createArrayType( AlgDataTypeFactory.DEFAULT.createPolyType( returnComponentType ), -1 ); + } + + return switch ( returnType ) { + case VARCHAR -> AlgDataTypeFactory.DEFAULT.createPolyType( returnType, 2050 ); + default -> AlgDataTypeFactory.DEFAULT.createPolyType( returnType ); + }; + } + + + public 
LangFunctionOperator( String name, Kind kind, PolyType returnType ) { + this( name, kind, returnType, null ); } @@ -47,7 +79,7 @@ public Call createCall( Literal functionQualifier, ParserPos pos, Node... operan @Override public AlgDataType inferReturnType( OperatorBinding opBinding ) { - throw new GenericRuntimeException( "Not Implemented" ); + return fromBindingCreator.apply( opBinding ); } @@ -59,7 +91,7 @@ public AlgDataType deriveType( Validator validator, ValidatorScope scope, Call c @Override public AlgDataType inferReturnType( AlgDataTypeFactory typeFactory, List operandTypes ) { - return null; + return fromOperandsCreator.apply( operandTypes ); } } diff --git a/core/src/main/java/org/polypheny/db/nodes/OperatorBinding.java b/core/src/main/java/org/polypheny/db/nodes/OperatorBinding.java index c69882037b..61abc589c4 100644 --- a/core/src/main/java/org/polypheny/db/nodes/OperatorBinding.java +++ b/core/src/main/java/org/polypheny/db/nodes/OperatorBinding.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -113,7 +113,7 @@ public boolean isOperandLiteral( int ordinal, boolean allowCast ) { * @return collected list */ public List collectOperandTypes() { - return new AbstractList() { + return new AbstractList<>() { @Override public AlgDataType get( int index ) { return getOperandType( index ); diff --git a/core/src/main/java/org/polypheny/db/plan/hep/HepAlgVertex.java b/core/src/main/java/org/polypheny/db/plan/hep/HepAlgVertex.java index 131a88694a..61a82c9de6 100644 --- a/core/src/main/java/org/polypheny/db/plan/hep/HepAlgVertex.java +++ b/core/src/main/java/org/polypheny/db/plan/hep/HepAlgVertex.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -91,7 +91,7 @@ public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { @Override public double estimateTupleCount( AlgMetadataQuery mq ) { - return mq.getTupleCount( currentAlg ); + return mq.getTupleCount( currentAlg ).orElse( Double.MAX_VALUE ); } diff --git a/core/src/main/java/org/polypheny/db/plan/volcano/AlgSubset.java b/core/src/main/java/org/polypheny/db/plan/volcano/AlgSubset.java index eed119c71e..4d96d1ae5f 100644 --- a/core/src/main/java/org/polypheny/db/plan/volcano/AlgSubset.java +++ b/core/src/main/java/org/polypheny/db/plan/volcano/AlgSubset.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
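// Illustrative use of the LangFunctionOperator constructors added above (operator names and
// the chosen Kind are hypothetical): a fixed VARCHAR return type is materialized as
// VARCHAR(2050), and a non-null component type yields an unbounded array of that type.
LangFunctionOperator exampleToUpper = new LangFunctionOperator( "EXAMPLE_TO_UPPER", Kind.OTHER_FUNCTION, PolyType.VARCHAR );
LangFunctionOperator exampleSplit = new LangFunctionOperator( "EXAMPLE_SPLIT", Kind.OTHER_FUNCTION, PolyType.ARRAY, PolyType.VARCHAR );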
@@ -194,9 +194,9 @@ public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { @Override public double estimateTupleCount( AlgMetadataQuery mq ) { if ( best != null ) { - return mq.getTupleCount( best ); + return mq.getTupleCount( best ).orElse( Double.MAX_VALUE ); } else { - return mq.getTupleCount( set.alg ); + return mq.getTupleCount( set.alg ).orElse( Double.MAX_VALUE ); } } diff --git a/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java b/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java index e9f1919dda..736cf39c89 100644 --- a/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java +++ b/core/src/main/java/org/polypheny/db/plan/volcano/VolcanoPlanner.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -1089,7 +1089,7 @@ public void dump( PrintWriter pw ) { pw.print( ", importance=" + importance ); } AlgMetadataQuery mq = alg.getCluster().getMetadataQuery(); - pw.print( ", rowcount=" + mq.getTupleCount( alg ) ); + pw.print( ", rowcount=" + mq.getTupleCount( alg ).map( Object::toString ).orElse( "INFINITY" ) ); pw.println( ", cumulative cost=" + getCost( alg, mq ) ); } } @@ -1100,7 +1100,7 @@ public void dump( PrintWriter pw ) { /** * Re-computes the digest of a {@link AlgNode}. - * + *
<p>
    * Since an algebra expression's digest contains the identifiers of its children, this method needs to be called * when the child has been renamed, for example if the child's set merges with another. * diff --git a/core/src/main/java/org/polypheny/db/processing/QueryContext.java b/core/src/main/java/org/polypheny/db/processing/QueryContext.java index a984b93db7..b961611dcf 100644 --- a/core/src/main/java/org/polypheny/db/processing/QueryContext.java +++ b/core/src/main/java/org/polypheny/db/processing/QueryContext.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -28,6 +28,8 @@ import lombok.extern.slf4j.Slf4j; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; +import org.polypheny.db.algebra.AlgRoot; +import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.information.InformationManager; @@ -36,6 +38,7 @@ import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.Transaction; import org.polypheny.db.transaction.TransactionManager; +import org.polypheny.db.type.entity.PolyValue; @Slf4j @Value @@ -96,6 +99,7 @@ public void removeTransaction( Transaction transaction ) { @EqualsAndHashCode(callSuper = true) @Value + @NonFinal @SuperBuilder(toBuilder = true) public static class ParsedQueryContext extends QueryContext { @@ -144,6 +148,73 @@ public Optional getQueryNode() { } + @EqualsAndHashCode(callSuper = true) + @Value + @NonFinal + @SuperBuilder(toBuilder = true) + public static class TranslatedQueryContext extends ParsedQueryContext { + + AlgRoot root; + boolean isRouted; + + + // A TranslatedQueryContext is not associated with a specific a namespaceId or queryNode + public static TranslatedQueryContext fromQuery( String query, AlgRoot root, boolean isRouted, QueryContext context ) { + return TranslatedQueryContext.builder() + .query( query ) + .queryNode( null ) + .language( context.language ) + .isAnalysed( context.isAnalysed ) + .usesCache( context.usesCache ) + .userId( context.userId ) + .origin( context.getOrigin() ) + .batch( context.batch ) + .statement( context.statement ) + .transactions( context.transactions ) + .transactionManager( context.transactionManager ) + .informationTarget( context.informationTarget ) + .root( root ) + .isRouted( isRouted ) + .build(); + } + + } + + + @EqualsAndHashCode(callSuper = true) + @Value + @SuperBuilder(toBuilder = true) + public static class PhysicalQueryContext extends TranslatedQueryContext { + + List dynamicValues; + List dynamicTypes; + + + // AlgRoot represents a physical execution plan + public static PhysicalQueryContext fromQuery( String query, AlgRoot root, List dynamicValues, List dynamicTypes, QueryContext context ) { + return PhysicalQueryContext.builder() + .query( query ) + .queryNode( null ) + .language( context.language ) + .isAnalysed( context.isAnalysed ) + .usesCache( context.usesCache ) + .userId( context.userId ) + .origin( context.getOrigin() ) + .batch( context.batch ) + .statement( context.statement ) + .transactions( context.transactions ) + .transactionManager( context.transactionManager ) + .informationTarget( context.informationTarget ) + .root( root ) + .dynamicValues( dynamicValues ) + .dynamicTypes( dynamicTypes ) + .isRouted( 
true ) + .build(); + } + + } + + public T addTransaction( Transaction transaction ) { if ( transaction == null ) { return (T) this; diff --git a/core/src/main/java/org/polypheny/db/processing/QueryProcessor.java b/core/src/main/java/org/polypheny/db/processing/QueryProcessor.java index c918dc1719..d7e4629d8a 100644 --- a/core/src/main/java/org/polypheny/db/processing/QueryProcessor.java +++ b/core/src/main/java/org/polypheny/db/processing/QueryProcessor.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -33,6 +33,23 @@ public interface QueryProcessor { */ PolyImplementation prepareQuery( AlgRoot logicalRoot, boolean withMonitoring ); + /** + * @param logicalRoot Logical query plan. + * @param isRouted Indicated whether query already routed. + * @param withMonitoring Activates or deactivates the monitoring. + * @return prepared PolyphenyDbSignature + */ + PolyImplementation prepareQuery( AlgRoot logicalRoot, boolean isRouted, boolean withMonitoring ); + + /** + * @param root Logical query plan. + * @param isRouted Indicated whether query already routed. + * @param isPhysical Indicates whether root is a physical query plan + * @param withMonitoring Activates or deactivates the monitoring. + * @return prepared PolyphenyDbSignature + */ + PolyImplementation prepareQuery( AlgRoot root, boolean isRouted, boolean isPhysical, boolean withMonitoring ); + /** * @param logicalRoot Logical query plan. * @param parameters Row type (required with prepared statements). @@ -51,6 +68,17 @@ public interface QueryProcessor { */ PolyImplementation prepareQuery( AlgRoot logicalRoot, AlgDataType parameters, boolean isRouted, boolean isSubquery, boolean withMonitoring ); + /** + * @param root Logical or physical query plan + * @param parameters Row type (required with prepared statements). + * @param isRouted Indicates whether query already routed. + * @param isPhysical Indicates whether root is a physical query plan + * @param isSubquery Indicates whether the query is a subquery (used with constraint enforcement) + * @param withMonitoring Activates or deactivates the monitoring. + * @return prepared PolyphenyDbSignature + */ + PolyImplementation prepareQuery( AlgRoot root, AlgDataType parameters, boolean isRouted, boolean isPhysical, boolean isSubquery, boolean withMonitoring ); + /** * @return Gets the planner. */ diff --git a/core/src/main/java/org/polypheny/db/rex/RexBuilder.java b/core/src/main/java/org/polypheny/db/rex/RexBuilder.java index 10e52ac557..b56a3fc376 100644 --- a/core/src/main/java/org/polypheny/db/rex/RexBuilder.java +++ b/core/src/main/java/org/polypheny/db/rex/RexBuilder.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
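For context on how the new TranslatedQueryContext is meant to be used: it wraps a plan that was produced outside the regular parser pipeline so that execution can skip parsing and validation. A hedged sketch that mirrors the flow used by PolyAlgParsingTest later in this patch (query, root and qc are assumed to already exist):

    // Wrap an already translated AlgRoot and hand it to the regular execution entry point.
    TranslatedQueryContext translated = TranslatedQueryContext.fromQuery( query, root, /* isRouted */ false, qc );
    List<ExecutedContext> results = LanguageManager.getINSTANCE().anyQuery( translated );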
@@ -62,6 +62,7 @@ import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.algebra.type.DocumentType; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.nodes.Function.FunctionType; @@ -763,6 +764,29 @@ public RexLiteral makeLiteral( PolyValue o, AlgDataType type, PolyType typeName } + /** + * Creates a literal for the specified PolyValue with no precision or scale. + */ + public RexLiteral makeLiteral( PolyValue o ) { + AlgDataType type = switch ( o.getType() ) { + case PATH -> typeFactory.createPathType( o.asPath().getPathType( + typeFactory.createPolyType( PolyType.NODE ), + typeFactory.createPolyType( PolyType.EDGE ) + ) ); + default -> typeFactory.createPolyType( o.getType() ); + }; + return makeLiteral( o, type ); + } + + + /** + * Creates a literal for the specified PolyValue and AlgDataType + */ + public RexLiteral makeLiteral( PolyValue o, AlgDataType type ) { + return new RexLiteral( o, type, o.getType() ); + } + + /** * Creates a boolean literal. */ @@ -1043,6 +1067,11 @@ public RexLiteral makeIntervalLiteral( PolyInterval interval, IntervalQualifier } + public RexLiteral makeDocumentLiteral( PolyValue v ) { + return makeLiteral( v, new DocumentType() ); + } + + /** * Creates a reference to a dynamic parameter * @@ -1418,14 +1447,6 @@ public RexCall makeHasLabel( String label ) { } - public RexCall makeLabelFilter( String label ) { - return new RexCall( - typeFactory.createPolyType( PolyType.BOOLEAN ), - OperatorRegistry.get( QueryLanguage.from( "cypher" ), OperatorName.CYPHER_GRAPH_ONLY_LABEL ), - List.of( makeInputRef( typeFactory.createPolyType( PolyType.GRAPH ), 0 ), makeLiteral( label ) ) ); - } - - public RexCall makeToJson( RexNode node ) { return new RexCall( typeFactory.createPolyType( PolyType.VARCHAR, 2024 ), diff --git a/core/src/main/java/org/polypheny/db/rex/RexCall.java b/core/src/main/java/org/polypheny/db/rex/RexCall.java index 75bd38e075..af98ad7613 100644 --- a/core/src/main/java/org/polypheny/db/rex/RexCall.java +++ b/core/src/main/java/org/polypheny/db/rex/RexCall.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
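The new makeLiteral overloads above derive the AlgDataType from the PolyValue itself. A short usage sketch; PolyInteger.of and AlgDataTypeFactory.DEFAULT are assumed from the existing type system and are only used for illustration:

    AlgDataTypeFactory typeFactory = AlgDataTypeFactory.DEFAULT;
    RexBuilder rexBuilder = new RexBuilder( typeFactory );
    RexLiteral five = rexBuilder.makeLiteral( PolyInteger.of( 5 ) );                                                     // type inferred from the value
    RexLiteral asBigint = rexBuilder.makeLiteral( PolyInteger.of( 5 ), typeFactory.createPolyType( PolyType.BIGINT ) );  // explicit type, no extra conversion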
@@ -69,7 +69,7 @@ public class RexCall extends RexNode { public final ImmutableList operands; public final AlgDataType type; - private static final Set SIMPLE_BINARY_OPS; + public static final Set SIMPLE_BINARY_OPS; static { @@ -138,7 +138,7 @@ protected final StringBuilder appendOperands( StringBuilder sb ) { * @param b second type * @return true if the types are equal or the only difference is nullability */ - private static boolean equalSansNullability( AlgDataType a, AlgDataType b ) { + public static boolean equalSansNullability( AlgDataType a, AlgDataType b ) { String x = a.getFullTypeString(); String y = b.getFullTypeString(); if ( x.length() < y.length() ) { diff --git a/core/src/main/java/org/polypheny/db/rex/RexFieldCollation.java b/core/src/main/java/org/polypheny/db/rex/RexFieldCollation.java index dc0ffe1f29..fbadf6de47 100644 --- a/core/src/main/java/org/polypheny/db/rex/RexFieldCollation.java +++ b/core/src/main/java/org/polypheny/db/rex/RexFieldCollation.java @@ -71,7 +71,12 @@ public RexFieldCollation( RexNode left, Set right ) { @Override public String toString() { - final String s = left.toString(); + return toString( null ); + } + + + public String toString( RexVisitor visitor ) { + final String s = visitor == null ? left.toString() : left.accept( visitor ); if ( right.isEmpty() ) { return s; } diff --git a/core/src/main/java/org/polypheny/db/rex/RexLiteral.java b/core/src/main/java/org/polypheny/db/rex/RexLiteral.java index 04cf738b1b..29bdd8564d 100644 --- a/core/src/main/java/org/polypheny/db/rex/RexLiteral.java +++ b/core/src/main/java/org/polypheny/db/rex/RexLiteral.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
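Making equalSansNullability public lets other modules (for instance the PolyAlgebra machinery added in this patch) compare types while ignoring nullability. A hedged sketch, assuming the factory's createTypeWithNullability behaves as in the existing type system:

    AlgDataTypeFactory typeFactory = AlgDataTypeFactory.DEFAULT;
    AlgDataType varchar = typeFactory.createPolyType( PolyType.VARCHAR, 10 );
    AlgDataType nullableVarchar = typeFactory.createTypeWithNullability( varchar, true );
    boolean sameModuloNullability = RexCall.equalSansNullability( varchar, nullableVarchar );  // true: only nullability differs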
@@ -245,7 +245,7 @@ public String computeDigest( RexDigestIncludeType includeType ) { * @return true if {@link RexDigestIncludeType#OPTIONAL} digest would include data type * @see RexCall#computeDigest(boolean) */ - RexDigestIncludeType digestIncludesType() { + public RexDigestIncludeType digestIncludesType() { return shouldIncludeType( value, type ); } @@ -509,7 +509,11 @@ private static void printAsJava( PolyValue value, PrintWriter pw, PolyType typeN break; case DOCUMENT: // assert value.isDocument(); documents can be any PolyValue - pw.println( value ); + if ( !value.isDocument() ) { + printAsJava( value, pw, value.getType(), java ); + } else { + pw.println( value ); + } break; default: assert valueMatchesType( value, typeName, true ); diff --git a/core/src/main/java/org/polypheny/db/rex/RexLocalRef.java b/core/src/main/java/org/polypheny/db/rex/RexLocalRef.java index 26e3cefe84..8a45a8fa85 100644 --- a/core/src/main/java/org/polypheny/db/rex/RexLocalRef.java +++ b/core/src/main/java/org/polypheny/db/rex/RexLocalRef.java @@ -50,9 +50,11 @@ */ public class RexLocalRef extends RexSlot { + public static final String PREFIX = "$t"; + // array of common names, to reduce memory allocations @SuppressWarnings("MismatchedQueryAndUpdateOfCollection") - private static final List NAMES = new SelfPopulatingList( "$t", 30 ); + private static final List NAMES = new SelfPopulatingList( PREFIX, 30 ); /** diff --git a/core/src/main/java/org/polypheny/db/rex/RexWindowBound.java b/core/src/main/java/org/polypheny/db/rex/RexWindowBound.java index c7f0a2c5c7..f68d27980d 100644 --- a/core/src/main/java/org/polypheny/db/rex/RexWindowBound.java +++ b/core/src/main/java/org/polypheny/db/rex/RexWindowBound.java @@ -136,6 +136,18 @@ public RexWindowBound accept( RexVisitor visitor ) { } + /** + * Returns a string representation of the bound using the given visitor to transform + * any RexNode in the process to a string. + * + * @param visitor the RexVisitor used to transform RexNodes into strings + * @return String representation of this bound + */ + public String toString( RexVisitor visitor ) { + return toString(); + } + + /** * Implements UNBOUNDED PRECEDING/FOLLOWING bound. */ @@ -301,6 +313,15 @@ public String toString() { } + @Override + public String toString( RexVisitor visitor ) { + if ( visitor == null ) { + return toString(); + } + return offset.accept( visitor ) + " " + kind.toString(); + } + + @Override public boolean equals( Object o ) { if ( this == o ) { diff --git a/core/src/main/java/org/polypheny/db/runtime/Unit.java b/core/src/main/java/org/polypheny/db/runtime/Unit.java index 361b29c79f..ef465597ce 100644 --- a/core/src/main/java/org/polypheny/db/runtime/Unit.java +++ b/core/src/main/java/org/polypheny/db/runtime/Unit.java @@ -36,8 +36,8 @@ /** * Synthetic record with zero fields. - * - * Since all instances are identical, {@code Unit} is a singleton. + *
<p>
    + * Since all instances are identical, {@link Unit} is a singleton. */ public class Unit implements Comparable { diff --git a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java index c17b21c71a..2678bea70e 100644 --- a/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java +++ b/core/src/main/java/org/polypheny/db/tools/AlgBuilder.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -429,6 +429,11 @@ private int inputOffset( int inputCount, int inputOrdinal ) { */ public RexNode literal( Object value ) { final RexBuilder rexBuilder = cluster.getRexBuilder(); + return literal( value, rexBuilder ); + } + + + public static RexNode literal( Object value, RexBuilder rexBuilder ) { if ( value == null ) { return rexBuilder.constantNull(); } else if ( value instanceof Boolean ) { diff --git a/core/src/main/java/org/polypheny/db/transaction/Statement.java b/core/src/main/java/org/polypheny/db/transaction/Statement.java index 009523a6da..0c655ef26e 100644 --- a/core/src/main/java/org/polypheny/db/transaction/Statement.java +++ b/core/src/main/java/org/polypheny/db/transaction/Statement.java @@ -41,6 +41,13 @@ public interface Statement { StatementEvent getMonitoringEvent(); + /** + * Get the index of this statement in the list of statements for that transaction + * + * @return the index this statement has in the statements list of the transaction + */ + long getIndex(); + void setMonitoringEvent( StatementEvent event ); void close(); diff --git a/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java b/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java index 3d8c42f17e..c82e000baa 100644 --- a/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java +++ b/core/src/main/java/org/polypheny/db/util/BuiltInMethod.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -543,7 +543,6 @@ public enum BuiltInMethod { X_MODEL_ITEM( CrossModelFunctions.class, "docItem", String.class, String.class ), SINGLE_TO_ARRAY_ENUMERABLE( Functions.class, "singleToArray", Enumerable.class ), - X_MODEL_GRAPH_ONLY_LABEL( CrossModelFunctions.class, "cypherOnlyLabelGraph", PolyValue.class, PolyString.class ), TO_JSON( PolyValue.class, "toPolyJson" ); private static final String toIntOptional = "toIntOptional"; diff --git a/core/src/main/java/org/polypheny/db/util/Quadruple.java b/core/src/main/java/org/polypheny/db/util/Quadruple.java new file mode 100644 index 0000000000..842b96411f --- /dev/null +++ b/core/src/main/java/org/polypheny/db/util/Quadruple.java @@ -0,0 +1,72 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
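The static AlgBuilder.literal( Object, RexBuilder ) extracted above makes the value-to-RexNode conversion reusable without constructing a full AlgBuilder. Illustrative sketch only:

    RexBuilder rexBuilder = new RexBuilder( AlgDataTypeFactory.DEFAULT );
    RexNode boolLiteral = AlgBuilder.literal( true, rexBuilder );
    RexNode nullLiteral = AlgBuilder.literal( null, rexBuilder );  // falls through to rexBuilder.constantNull()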
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.util; + +import lombok.EqualsAndHashCode; +import lombok.Value; +import lombok.experimental.NonFinal; + +@Value(staticConstructor = "of") +@EqualsAndHashCode +@NonFinal +public class Quadruple implements Comparable> { + + public A a; + public B b; + public C c; + public D d; + + + public Quadruple( A a, B b, C c, D d ) { + this.a = a; + this.b = b; + this.c = c; + this.d = d; + } + + + @Override + public int compareTo( Quadruple o ) { + //noinspection unchecked + int cmp = compare( (Comparable) this.a, (Comparable) o.a ); + if ( cmp == 0 ) { + //noinspection unchecked + cmp = compare( (Comparable) this.b, (Comparable) o.b ); + if ( cmp == 0 ) { + //noinspection unchecked + cmp = compare( (Comparable) this.c, (Comparable) o.c ); + if ( cmp == 0 ) { + //noinspection unchecked + cmp = compare( (Comparable) this.d, (Comparable) o.d ); + } + } + } + return cmp; + } + + + private static > int compare( C c1, C c2 ) { + if ( c1 == null ) { + return (c2 == null) ? 0 : -1; + } else if ( c2 == null ) { + return 1; + } else { + return c1.compareTo( c2 ); + } + } + +} diff --git a/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java b/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java index e06dcb4d83..9c58652fa8 100644 --- a/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java +++ b/core/src/main/java/org/polypheny/db/util/ValidatorUtil.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -115,17 +115,9 @@ private static void addFields( List fieldList, List null, - c -> c ); - LanguageManager.getINSTANCE().addQueryLanguage( language ); - // Initialize index manager initializeIndexManager(); diff --git a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java index e3b2934e77..5d172cfaab 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java +++ b/dbms/src/main/java/org/polypheny/db/processing/AbstractQueryProcessor.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
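Quadruple compares element-wise in the order a, b, c, d and sorts null elements first. A usage sketch with illustrative values:

    Quadruple<Integer, String, Integer, Integer> q1 = new Quadruple<>( 1, "a", null, 4 );
    Quadruple<Integer, String, Integer, Integer> q2 = new Quadruple<>( 1, "a", 3, 4 );
    int cmp = q1.compareTo( q2 );  // negative: the null third element sorts before the non-null one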
@@ -17,6 +17,8 @@ package org.polypheny.db.processing; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import java.lang.reflect.Type; @@ -73,6 +75,7 @@ import org.polypheny.db.algebra.logical.relational.LogicalRelProject; import org.polypheny.db.algebra.logical.relational.LogicalRelScan; import org.polypheny.db.algebra.logical.relational.LogicalRelValues; +import org.polypheny.db.algebra.polyalg.PolyAlgMetadata.GlobalStats; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.catalog.Catalog; @@ -89,7 +92,8 @@ import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationManager; import org.polypheny.db.information.InformationPage; -import org.polypheny.db.information.InformationQueryPlan; +import org.polypheny.db.information.InformationPolyAlg; +import org.polypheny.db.information.InformationPolyAlg.PlanType; import org.polypheny.db.interpreter.BindableConvention; import org.polypheny.db.interpreter.Interpreters; import org.polypheny.db.monitoring.events.DmlEvent; @@ -199,6 +203,18 @@ public PolyImplementation prepareQuery( AlgRoot logicalRoot, boolean withMonitor } + @Override + public PolyImplementation prepareQuery( AlgRoot logicalRoot, boolean isRouted, boolean withMonitoring ) { + return prepareQuery( logicalRoot, logicalRoot.alg.getCluster().getTypeFactory().builder().build(), isRouted, false, false, withMonitoring ); + } + + + @Override + public PolyImplementation prepareQuery( AlgRoot logicalRoot, boolean isRouted, boolean isPhysical, boolean withMonitoring ) { + return prepareQuery( logicalRoot, logicalRoot.alg.getCluster().getTypeFactory().builder().build(), isRouted, isPhysical, false, withMonitoring ); + } + + @Override public PolyImplementation prepareQuery( AlgRoot logicalRoot, AlgDataType parameterRowType, boolean withMonitoring ) { return prepareQuery( logicalRoot, parameterRowType, false, false, withMonitoring ); @@ -207,14 +223,23 @@ public PolyImplementation prepareQuery( AlgRoot logicalRoot, AlgDataType paramet @Override public PolyImplementation prepareQuery( AlgRoot logicalRoot, AlgDataType parameterRowType, boolean isRouted, boolean isSubquery, boolean withMonitoring ) { - if ( statement.getTransaction().isAnalyze() ) { - attachQueryPlans( logicalRoot ); + return prepareQuery( logicalRoot, parameterRowType, isRouted, false, isSubquery, withMonitoring ); + } + + + @Override + public PolyImplementation prepareQuery( AlgRoot root, AlgDataType parameterRowType, boolean isRouted, boolean isPhysical, boolean isSubquery, boolean withMonitoring ) { + if ( isPhysical ) { + return implementPhysicalPlan( root, parameterRowType ); + } + if ( !isRouted && statement.getTransaction().isAnalyze() ) { + attachPolyAlgPlan( root.alg ); } if ( statement.getTransaction().isAnalyze() ) { statement.getOverviewDuration().start( "Processing" ); } - final ProposedImplementations proposedImplementations = prepareQueries( logicalRoot, parameterRowType, isRouted, isSubquery ); + final ProposedImplementations proposedImplementations = prepareQueries( root, parameterRowType, isRouted, isSubquery ); if ( statement.getTransaction().isAnalyze() ) { statement.getOverviewDuration().stop( "Processing" ); @@ -235,17 +260,36 @@ public PolyImplementation prepareQuery( AlgRoot logicalRoot, AlgDataType paramet } - private void attachQueryPlans( 
AlgRoot logicalRoot ) { - InformationManager queryAnalyzer = statement.getTransaction().getQueryAnalyzer(); - InformationPage page = new InformationPage( "Logical Query Plan" ).setLabel( "plans" ); - page.fullWidth(); - InformationGroup group = new InformationGroup( page, "Logical Query Plan" ); - queryAnalyzer.addPage( page ); - queryAnalyzer.addGroup( group ); - InformationQueryPlan informationQueryPlan = new InformationQueryPlan( - group, - AlgOptUtil.dumpPlan( "Logical Query Plan", logicalRoot.alg, ExplainFormat.JSON, ExplainLevel.ALL_ATTRIBUTES ) ); - queryAnalyzer.registerInformation( informationQueryPlan ); + private void attachPolyAlgPlan( AlgNode alg ) { + ObjectMapper objectMapper = new ObjectMapper(); + GlobalStats gs = GlobalStats.computeGlobalStats( alg ); + try { + ObjectNode objectNode = alg.serializePolyAlgebra( objectMapper, gs ); + String jsonString = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString( objectNode ); + + InformationManager queryAnalyzer = statement.getTransaction().getQueryAnalyzer(); + InformationPage page = new InformationPage( "Logical Query Plan" ).setStmtLabel( statement.getIndex() ); + page.fullWidth(); + InformationGroup group = new InformationGroup( page, "Logical Query Plan" ); + queryAnalyzer.addPage( page ); + queryAnalyzer.addGroup( group ); + + InformationPolyAlg infoPolyAlg = new InformationPolyAlg( group, jsonString, PlanType.LOGICAL ); + if ( shouldAttachTextualPolyAlg() ) { + // when testing, we want to access the human-readable form + String serialized = alg.buildPolyAlgebra( (String) null ); + if ( serialized == null ) { + throw new GenericRuntimeException( "Could not serialize PolyAlgebra" ); + } + infoPolyAlg.setTextualPolyAlg( serialized ); + } + + queryAnalyzer.registerInformation( infoPolyAlg ); + + } catch ( Exception e ) { + throw new GenericRuntimeException( e.getMessage(), e ); + } + } @@ -519,6 +563,22 @@ private ProposedImplementations prepareQueries( AlgRoot logicalRoot, AlgDataType } + private PolyImplementation implementPhysicalPlan( AlgRoot root, AlgDataType parameterRowType ) { + final Convention resultConvention = ENABLE_BINDABLE ? 
BindableConvention.INSTANCE : EnumerableConvention.INSTANCE; + + PreparedResult preparedResult = implement( root, parameterRowType ); + UiRoutingPageUtil.addPhysicalPlanPage( root.alg, statement.getTransaction().getQueryAnalyzer(), statement.getIndex(), shouldAttachTextualPolyAlg() ); + return createPolyImplementation( + preparedResult, + root.kind, + root.alg, + root.validatedRowType, + resultConvention, + new ExecutionTimeMonitor(), + Objects.requireNonNull( root.alg.getTraitSet().getTrait( ModelTraitDef.INSTANCE ) ).dataModel() ); + } + + @NotNull private List routePlans( AlgRoot indexLookupRoot, LogicalQueryInformation logicalQueryInformation, List plans, boolean isAnalyze ) { if ( RuntimeConfig.ROUTING_PLAN_CACHING.getBoolean() && !indexLookupRoot.kind.belongsTo( Kind.DML ) ) { @@ -906,7 +966,7 @@ private List route( AlgRoot logicalRoot, Statement statemen .toList(); proposedPlans.addAll( plans ); } catch ( Throwable e ) { - log.warn( String.format( "Router: %s was not able to route the query.", router.getClass().getSimpleName() ) ); + log.warn( "Router: {} was not able to route the query.", router.getClass().getSimpleName() ); } } @@ -1417,7 +1477,8 @@ private Pair selectPlan( ProposedImplem if ( statement.getTransaction().isAnalyze() ) { UiRoutingPageUtil.outputSingleResult( proposed.plans.get( 0 ), - statement.getTransaction().getQueryAnalyzer() ); + statement.getTransaction().getQueryAnalyzer(), + statement.getIndex(), shouldAttachTextualPolyAlg() ); addGeneratedCodeToQueryAnalyzer( proposed.plans.get( 0 ).generatedCodes() ); } return new Pair<>( proposed.plans.get( 0 ).result(), proposed.plans.get( 0 ).proposedRoutingPlan() ); @@ -1435,7 +1496,8 @@ private Pair selectPlan( ProposedImplem if ( statement.getTransaction().isAnalyze() ) { AlgNode optimalNode = proposed.plans.get( index ).optimalNode(); - UiRoutingPageUtil.addPhysicalPlanPage( optimalNode, statement.getTransaction().getQueryAnalyzer() ); + UiRoutingPageUtil.addPhysicalPlanPage( optimalNode, statement.getTransaction().getQueryAnalyzer(), + statement.getIndex(), shouldAttachTextualPolyAlg() ); addGeneratedCodeToQueryAnalyzer( proposed.plans.get( index ).generatedCodes() ); } @@ -1447,7 +1509,7 @@ private Pair selectPlan( ProposedImplem private void addGeneratedCodeToQueryAnalyzer( String code ) { if ( code != null ) { InformationManager queryAnalyzer = statement.getTransaction().getQueryAnalyzer(); - InformationPage page = new InformationPage( "Implementation" ); + InformationPage page = new InformationPage( "Implementation" ).setStmtLabel( statement.getIndex() ); page.fullWidth(); InformationGroup group = new InformationGroup( page, "Java Code" ); queryAnalyzer.addPage( page ); @@ -1481,6 +1543,11 @@ private CachedProposedRoutingPlan selectCachedPlan( List parse( String query ) { - throw new GenericRuntimeException( AlgProcessor.class.getSimpleName() + " does not support string representation!" ); - } - - - @Override - public Pair validate( Transaction transaction, Node parsed, boolean addDefaultValues ) { - throw new GenericRuntimeException( AlgProcessor.class.getSimpleName() + " does not support validation!" 
); - } - - - @Override - public AlgRoot translate( Statement statement, ParsedQueryContext context ) { - try { - return AlgRoot.of( QueryPlanBuilder.buildFromJsonAlg( statement, context.getQuery() ), Kind.SELECT ); - } catch ( JsonProcessingException e ) { - throw new GenericRuntimeException( e ); - } - } - - - @Override - public PolyImplementation prepareDdl( Statement statement, ExecutableStatement node, ParsedQueryContext context ) { - throw new GenericRuntimeException( AlgProcessor.class.getSimpleName() + " AlgProcessor does not support DDLs!" ); - } - - - @Override - public void unlock( Statement statement ) { - throw new GenericRuntimeException( AlgProcessor.class.getSimpleName() + " does not support DML or DDLs and should therefore not lock." ); - } - - - @Override - public void lock( Statement statement ) throws DeadlockException { - throw new GenericRuntimeException( AlgProcessor.class.getSimpleName() + " does not support DML or DDLs and should therefore not lock." ); - } - - - @Override - public String getQuery( Node parsed, QueryParameters parameters ) { - return parameters.getQuery(); - } - - - @Override - public AlgDataType getParameterRowType( Node left ) { - throw new GenericRuntimeException( AlgProcessor.class.getSimpleName() + " does not support getParameterRowType!" ); - } - - - @Override - public List splitStatements( String statements ) { - throw new GenericRuntimeException( "not implemented" ); - } - -} diff --git a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java index 5560d51982..130c09c991 100644 --- a/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java +++ b/dbms/src/main/java/org/polypheny/db/processing/ConstraintEnforceAttacher.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
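Tying the AbstractQueryProcessor changes together: when isPhysical is set, routing and the Volcano optimization pass are skipped and the plan goes straight to implementation. A hedged call sketch, where physicalRoot is assumed to be an AlgRoot over physical operators:

    // Mirrors the new isPhysical branch added above.
    PolyImplementation implementation = statement.getQueryProcessor()
            .prepareQuery( physicalRoot, /* isRouted */ true, /* isPhysical */ true, /* withMonitoring */ false );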
@@ -16,6 +16,8 @@ package org.polypheny.db.processing; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableSet; import java.util.ArrayList; import java.util.HashMap; @@ -33,8 +35,6 @@ import org.polypheny.db.PolyImplementation; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgRoot; -import org.polypheny.db.algebra.constant.ExplainFormat; -import org.polypheny.db.algebra.constant.ExplainLevel; import org.polypheny.db.algebra.constant.Kind; import org.polypheny.db.algebra.core.JoinAlgType; import org.polypheny.db.algebra.core.Project; @@ -69,9 +69,9 @@ import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationManager; import org.polypheny.db.information.InformationPage; -import org.polypheny.db.information.InformationQueryPlan; +import org.polypheny.db.information.InformationPolyAlg; +import org.polypheny.db.information.InformationPolyAlg.PlanType; import org.polypheny.db.languages.OperatorRegistry; -import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.rex.RexDynamicParam; import org.polypheny.db.rex.RexFieldAccess; @@ -585,16 +585,21 @@ public RexNode visitFieldAccess( RexFieldAccess fieldAccess ) { AlgRoot enforcementRoot = new AlgRoot( lceRoot, logicalRoot.validatedRowType, logicalRoot.kind, logicalRoot.fields, logicalRoot.collation ); // Send the generated tree with all unoptimized constraint enforcement checks to the UI if ( statement.getTransaction().isAnalyze() ) { - InformationManager queryAnalyzer = statement.getTransaction().getQueryAnalyzer(); - InformationPage page = new InformationPage( "Constraint Enforcement Plan" ).setLabel( "plans" ); - page.fullWidth(); - InformationGroup group = new InformationGroup( page, "Constraint Enforcement Plan" ); - queryAnalyzer.addPage( page ); - queryAnalyzer.addGroup( group ); - InformationQueryPlan informationQueryPlan = new InformationQueryPlan( - group, - AlgOptUtil.dumpPlan( "Constraint Enforcement Plan", enforcementRoot.alg, ExplainFormat.JSON, ExplainLevel.ALL_ATTRIBUTES ) ); - queryAnalyzer.registerInformation( informationQueryPlan ); + ObjectMapper objectMapper = new ObjectMapper(); + try { + ObjectNode objectNode = enforcementRoot.alg.serializePolyAlgebra( objectMapper ); + String jsonString = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString( objectNode ); + InformationManager queryAnalyzer = statement.getTransaction().getQueryAnalyzer(); + InformationPage page = new InformationPage( "Constraint Enforcement Plan" ).setStmtLabel( statement.getIndex() ); + page.fullWidth(); + InformationGroup group = new InformationGroup( page, "Constraint Enforcement Plan" ); + queryAnalyzer.addPage( page ); + queryAnalyzer.addGroup( group ); + InformationPolyAlg infoPolyAlg = new InformationPolyAlg( group, jsonString, PlanType.LOGICAL ); + queryAnalyzer.registerInformation( infoPolyAlg ); + } catch ( Exception e ) { + e.printStackTrace(); + } } return enforcementRoot; } diff --git a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java index 32bd3fa08b..e8d3d67353 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java +++ b/dbms/src/main/java/org/polypheny/db/routing/UiRoutingPageUtil.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed 
under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,24 +16,30 @@ package org.polypheny.db.routing; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; import com.google.common.collect.ImmutableList; import java.util.List; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.atomic.AtomicInteger; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgRoot; -import org.polypheny.db.algebra.constant.ExplainFormat; -import org.polypheny.db.algebra.constant.ExplainLevel; +import org.polypheny.db.algebra.polyalg.PolyAlgMetadata.GlobalStats; import org.polypheny.db.catalog.entity.logical.LogicalCollection; import org.polypheny.db.catalog.entity.logical.LogicalEntity; import org.polypheny.db.catalog.entity.logical.LogicalGraph; import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationManager; import org.polypheny.db.information.InformationPage; -import org.polypheny.db.information.InformationQueryPlan; +import org.polypheny.db.information.InformationPolyAlg; +import org.polypheny.db.information.InformationPolyAlg.PlanType; import org.polypheny.db.information.InformationTable; import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptUtil; import org.polypheny.db.processing.util.Plan; import org.polypheny.db.routing.ColumnDistribution.FullPartition; import org.polypheny.db.routing.ColumnDistribution.PartialPartition; @@ -47,9 +53,18 @@ @Slf4j public class UiRoutingPageUtil { + private static final int RUNNERS = 1; + private static final ExecutorService executorService = Executors.newFixedThreadPool( RUNNERS ); + private static final AtomicInteger counter = new AtomicInteger( 0 ); - public static void outputSingleResult( Plan plan, InformationManager queryAnalyzer ) { - addPhysicalPlanPage( plan.optimalNode(), queryAnalyzer ); + + public static int runningTasks() { + return counter.get(); + } + + + public static void outputSingleResult( Plan plan, InformationManager queryAnalyzer, long stmtIdx, boolean attachTextualPlan ) { + addPhysicalPlanPage( plan.optimalNode(), queryAnalyzer, stmtIdx, attachTextualPlan ); InformationPage page = queryAnalyzer.getPage( "routing" ); if ( page == null ) { @@ -57,35 +72,51 @@ public static void outputSingleResult( Plan plan, InformationManager queryAnalyz } addSelectedAdapterTable( queryAnalyzer, plan.proposedRoutingPlan(), page ); final AlgRoot root = plan.proposedRoutingPlan().getRoutedRoot(); - addRoutedPlanPage( root.alg, queryAnalyzer ); + addRoutedPolyPlanPage( root.alg, queryAnalyzer, stmtIdx, false, attachTextualPlan ); } - public static void addPhysicalPlanPage( AlgNode optimalNode, InformationManager queryAnalyzer ) { - new Thread( () -> { - InformationPage page = new InformationPage( "Physical Query Plan" ).setLabel( "plans" ); - page.fullWidth(); - InformationGroup group = new InformationGroup( page, "Physical Query Plan" ); - queryAnalyzer.addPage( page ); - queryAnalyzer.addGroup( group ); - InformationQueryPlan informationQueryPlan = new InformationQueryPlan( - group, - AlgOptUtil.dumpPlan( "Physical Query Plan", optimalNode, ExplainFormat.JSON, 
ExplainLevel.ALL_ATTRIBUTES ) ); - queryAnalyzer.registerInformation( informationQueryPlan ); - } ).start(); + public static void addPhysicalPlanPage( AlgNode optimalNode, InformationManager queryAnalyzer, long stmtIdx, boolean attachTextualPlan ) { + counter.incrementAndGet(); + executorService.submit( () -> { + try { + addRoutedPolyPlanPage( optimalNode, queryAnalyzer, stmtIdx, true, attachTextualPlan ); + } catch ( Throwable t ) { + log.error( "Error adding routing plan", t ); + } + counter.decrementAndGet(); + } ); + } - private static void addRoutedPlanPage( AlgNode routedNode, InformationManager queryAnalyzer ) { - InformationPage page = new InformationPage( "Routed Query Plan" ).setLabel( "plans" ); + private static void addRoutedPolyPlanPage( AlgNode routedNode, InformationManager queryAnalyzer, long stmtIdx, boolean isPhysical, boolean attachTextualPlan ) { + ObjectMapper objectMapper = new ObjectMapper(); + GlobalStats gs = GlobalStats.computeGlobalStats( routedNode ); + String prefix = isPhysical ? "Physical" : "Routed"; + + ObjectNode objectNode = routedNode.serializePolyAlgebra( objectMapper, gs ); + String jsonString; + try { + jsonString = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString( objectNode ); + } catch ( JsonProcessingException e ) { + throw new GenericRuntimeException( e ); + } + + InformationPage page = new InformationPage( prefix + " Query Plan" ).setStmtLabel( stmtIdx ); page.fullWidth(); - InformationGroup group = new InformationGroup( page, "Routed Query Plan" ); + InformationGroup group = new InformationGroup( page, prefix + " Query Plan" ); queryAnalyzer.addPage( page ); queryAnalyzer.addGroup( group ); - InformationQueryPlan informationQueryPlan = new InformationQueryPlan( - group, - AlgOptUtil.dumpPlan( "Routed Query Plan", routedNode, ExplainFormat.JSON, ExplainLevel.ALL_ATTRIBUTES ) ); - queryAnalyzer.registerInformation( informationQueryPlan ); + InformationPolyAlg infoPolyAlg = new InformationPolyAlg( group, jsonString, isPhysical ? PlanType.PHYSICAL : PlanType.ALLOCATION ); + if ( attachTextualPlan ) { + String serialized = routedNode.buildPolyAlgebra( (String) null ); + if ( serialized == null ) { + throw new GenericRuntimeException( "Unable to serialize routing plan" ); + } + infoPolyAlg.setTextualPolyAlg( serialized ); + } + queryAnalyzer.registerInformation( infoPolyAlg ); } @@ -193,7 +224,7 @@ public static void addRoutingAndPlanPage( if ( selectedPlan instanceof ProposedRoutingPlan plan ) { addSelectedAdapterTable( queryAnalyzer, plan, page ); AlgRoot root = plan.getRoutedRoot(); - addRoutedPlanPage( root.alg, queryAnalyzer ); + addRoutedPolyPlanPage( root.alg, queryAnalyzer, statement.getIndex(), false, statement.getTransaction().getOrigin().equals( "PolyAlgParsingTest" ) ); } } diff --git a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java index 7510fd4e03..7a4f0ddf34 100644 --- a/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java +++ b/dbms/src/main/java/org/polypheny/db/routing/routers/BaseRouter.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
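Because addPhysicalPlanPage now serializes plans on a background executor, callers that need the pages to be present (such as the round-trip test added below) can poll the task counter. Minimal sketch, interrupt handling omitted:

    // Wait until all queued plan pages have been written to the query analyzer.
    while ( UiRoutingPageUtil.runningTasks() > 0 ) {
        Thread.sleep( 100 );
    }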
@@ -48,6 +48,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgDataTypeFieldImpl; import org.polypheny.db.algebra.type.AlgRecordType; +import org.polypheny.db.algebra.type.DocumentType; import org.polypheny.db.algebra.type.GraphType; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.Entity; @@ -285,7 +286,7 @@ public AlgNode buildJoinedScan( ColumnDistribution columnDistribution, RoutingCo // todo dl: remove after RowType refactor if ( catalog.getSnapshot().getNamespace( columnDistribution.getTable().namespaceId ).orElseThrow().dataModel == DataModel.DOCUMENT ) { - AlgDataType rowType = new AlgRecordType( List.of( new AlgDataTypeFieldImpl( 1L, "d", 0, context.getCluster().getTypeFactory().createPolyType( PolyType.DOCUMENT ) ) ) ); + AlgDataType rowType = new AlgRecordType( List.of( new AlgDataTypeFieldImpl( 1L, DocumentType.DOCUMENT_FIELD, 0, context.getCluster().getTypeFactory().createPolyType( PolyType.DOCUMENT ) ) ) ); builder.push( LogicalTransformer.create( node.getCluster(), List.of( node ), diff --git a/dbms/src/main/java/org/polypheny/db/transaction/StatementImpl.java b/dbms/src/main/java/org/polypheny/db/transaction/StatementImpl.java index 7f22093c0f..f691df8bd2 100644 --- a/dbms/src/main/java/org/polypheny/db/transaction/StatementImpl.java +++ b/dbms/src/main/java/org/polypheny/db/transaction/StatementImpl.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -47,6 +47,7 @@ public class StatementImpl implements Statement { private final long id; @Getter private final TransactionImpl transaction; + private final List fileInputHandles = new ArrayList<>(); private QueryProcessor queryProcessor; @@ -145,6 +146,12 @@ public StatementEvent getMonitoringEvent() { } + @Override + public long getIndex() { + return id; + } + + @Override public void setMonitoringEvent( StatementEvent event ) { this.statementEvent = event; @@ -155,6 +162,7 @@ private InformationDuration initDuration( String title, int order ) { InformationManager im = transaction.getQueryAnalyzer(); if ( executionTimePage == null ) { executionTimePage = new InformationPage( "Execution Time", "Query processing & execution time" ); + executionTimePage.setStmtLabel( id ); im.addPage( executionTimePage ); } InformationGroup group = new InformationGroup( executionTimePage, title ); diff --git a/dbms/src/test/java/org/polypheny/db/planner/PlannerTest.java b/dbms/src/test/java/org/polypheny/db/planner/PlannerTest.java index 2475f09f3f..1d0cf18d39 100644 --- a/dbms/src/test/java/org/polypheny/db/planner/PlannerTest.java +++ b/dbms/src/test/java/org/polypheny/db/planner/PlannerTest.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
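The new Statement.getIndex() feeds the per-statement labels on analyzer pages, so the UI can group the plans of multi-statement transactions. Sketch using names from this patch:

    InformationPage page = new InformationPage( "Physical Query Plan" ).setStmtLabel( statement.getIndex() );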
@@ -40,7 +40,8 @@ import org.polypheny.db.rex.RexBuilder; import org.polypheny.db.type.PolyTypeFactoryImpl; -public class PlannerTest { +public class +PlannerTest { static final Convention PHYS_CALLING_CONVENTION = new Convention.Impl( "PHYS", AlgNode.class ) { diff --git a/dbms/src/test/java/org/polypheny/db/polyalg/PolyAlgParsingTest.java b/dbms/src/test/java/org/polypheny/db/polyalg/PolyAlgParsingTest.java new file mode 100644 index 0000000000..935d0d455d --- /dev/null +++ b/dbms/src/test/java/org/polypheny/db/polyalg/PolyAlgParsingTest.java @@ -0,0 +1,543 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.polyalg; + +import static java.lang.String.format; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.List; +import lombok.extern.slf4j.Slf4j; +import org.jetbrains.annotations.NotNull; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.polypheny.db.ResultIterator; +import org.polypheny.db.TestHelper; +import org.polypheny.db.TestHelper.JdbcConnection; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.AlgRoot; +import org.polypheny.db.algebra.logical.relational.LogicalRelFilter; +import org.polypheny.db.algebra.logical.relational.LogicalRelProject; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelSort; +import org.polypheny.db.algebra.polyalg.PolyAlgRegistry; +import org.polypheny.db.algebra.polyalg.parser.PolyAlgParser; +import org.polypheny.db.algebra.polyalg.parser.PolyAlgToAlgConverter; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgNode; +import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.logistic.DataModel; +import org.polypheny.db.catalog.snapshot.Snapshot; +import org.polypheny.db.cypher.CypherTestTemplate; +import org.polypheny.db.information.Information; +import org.polypheny.db.information.InformationPolyAlg; +import org.polypheny.db.information.InformationPolyAlg.PlanType; +import org.polypheny.db.languages.LanguageManager; +import org.polypheny.db.languages.NodeParseException; +import org.polypheny.db.languages.QueryLanguage; +import org.polypheny.db.mql.MqlTestTemplate; +import org.polypheny.db.plan.AlgCluster; +import org.polypheny.db.plan.volcano.VolcanoPlanner; +import org.polypheny.db.processing.ImplementationContext.ExecutedContext; +import 
org.polypheny.db.processing.QueryContext; +import org.polypheny.db.processing.QueryContext.TranslatedQueryContext; +import org.polypheny.db.rex.RexBuilder; +import org.polypheny.db.routing.UiRoutingPageUtil; +import org.polypheny.db.transaction.Statement; +import org.polypheny.db.transaction.Transaction; +import org.polypheny.db.transaction.TransactionException; +import org.polypheny.db.transaction.TransactionManager; +import org.polypheny.db.transaction.TransactionManagerImpl; +import org.polypheny.db.type.entity.PolyValue; + + +@Slf4j +@SuppressWarnings("SqlNoDataSourceInspection") +public class PolyAlgParsingTest { + + private static final String ORIGIN = "PolyAlgParsingTest"; + private static final String GRAPH_NAME = "polyAlgGraph"; + private static final String DOC_NAME = MqlTestTemplate.namespace; + private static final String DOC_COLL = "polyalgdocs"; + + + @BeforeAll + public static void start() throws SQLException { + //noinspection ResultOfMethodCallIgnored + TestHelper.getInstance(); + addTestData(); + + } + + + @AfterAll + public static void tearDown() throws SQLException { + try ( JdbcConnection jdbcConnection = new JdbcConnection( true ) ) { + Connection connection = jdbcConnection.getConnection(); + try ( java.sql.Statement statement = connection.createStatement() ) { + statement.executeUpdate( "DROP TABLE polyalg_test" ); + } + connection.commit(); + connection.close(); + } + CypherTestTemplate.deleteData( GRAPH_NAME ); + MqlTestTemplate.dropCollection( DOC_COLL ); + } + + + private static void addTestData() throws SQLException { + try ( JdbcConnection jdbcConnection = new JdbcConnection( false ) ) { + Connection connection = jdbcConnection.getConnection(); + try ( java.sql.Statement statement = connection.createStatement() ) { + statement.executeUpdate( "CREATE TABLE polyalg_test( id INTEGER NOT NULL, name VARCHAR(39), foo INTEGER, gender VARCHAR(39), PRIMARY KEY (id))" ); + statement.executeUpdate( "INSERT INTO polyalg_test VALUES (1, 'Hans', 5, 'Male')" ); + statement.executeUpdate( "INSERT INTO polyalg_test VALUES (2, 'Alice', 7, 'Female')" ); + statement.executeUpdate( "INSERT INTO polyalg_test VALUES (3, 'Bob', 4, 'Male')" ); + statement.executeUpdate( "INSERT INTO polyalg_test VALUES (4, 'Saskia', 6, 'Female')" ); + statement.executeUpdate( "INSERT INTO polyalg_test VALUES (5, 'Rebecca', 6, 'Female')" ); + statement.executeUpdate( "INSERT INTO polyalg_test VALUES (6, 'Georg', 9, 'Male')" ); + connection.commit(); + } + connection.close(); + } + + CypherTestTemplate.createGraph( GRAPH_NAME ); + CypherTestTemplate.execute( "CREATE (p:Person {name: 'Ann', age: 45, depno: 13})", GRAPH_NAME ); + CypherTestTemplate.execute( "CREATE (p:Person {name: 'Bob', age: 31, depno: 13})", GRAPH_NAME ); + CypherTestTemplate.execute( "CREATE (p:Person {name: 'Hans', age: 55, depno: 7})", GRAPH_NAME ); + CypherTestTemplate.execute( "CREATE (p:Person {name: 'Max'})-[rel:OWNER_OF]->(a:Animal {name:'Kira', age:3, type:'dog'})", GRAPH_NAME ); + + MqlTestTemplate.initDatabase( DOC_NAME ); + MqlTestTemplate.createCollection( DOC_COLL, DOC_NAME ); + List docs = List.of( + "{\"item\": \"journal\", \"qty\": 25, \"tags\": [\"blank\", \"red\"], \"dim_cm\": [14, 21]}", + "{\"item\": \"notebook\", \"qty\": 50, \"tags\": [\"red\", \"blank\"], \"dim_cm\": [14, 21]}", + "{ \"item\": \"paper\", \"qty\": 100, \"tags\": [\"red\", \"blank\", \"plain\"], \"dim_cm\": [14, 21], }", + "{\"item\": \"planner\", \"qty\": 75, \"tags\": [\"blank\", \"red\"], \"dim_cm\": [22.85, 30]}", + "{\"item\": \"postcard\", 
\"qty\": 45, \"tags\": [\"blue\"], \"dim_cm\": [10, 15.25]}" + ); + MqlTestTemplate.insertMany( docs, DOC_COLL ); + } + + + private static void testCypherRoundTrip( String query ) throws NodeParseException { + testQueryRoundTrip( query, QueryLanguage.from( "cypher" ), GRAPH_NAME ); + } + + + private static void testSqlRoundTrip( String query ) throws NodeParseException { + testQueryRoundTrip( query, QueryLanguage.from( "sql" ), null ); + } + + + private static void testMqlRoundTrip( String query ) throws NodeParseException { + testQueryRoundTrip( query, QueryLanguage.from( "mql" ), DOC_NAME ); + } + + + /** + * Executes the query and checks whether both the result and the (logical and allocation) PolyAlgebra are still equal after one round-trip. + * For this we do the following: + * Query -> AlgNode1 -> Result1 + *
<p>
    + * AlgNode1 -> PolyAlg1 -> AlgNode2 -> Result2 + * <p>
    + * AlgNode2 -> PolyAlg2 + * <p>
    + * Then we check whether PolyAlg1 equals PolyAlg2 and Result1 equals Result2. + */ + private static void testQueryRoundTrip( String query, QueryLanguage ql, String namespace ) throws NodeParseException { + long ns = namespace == null ? Catalog.defaultNamespaceId : Catalog.snapshot().getNamespace( namespace ).orElseThrow().id; + TransactionManager transactionManager = TransactionManagerImpl.getInstance(); + Transaction transaction = transactionManager.startTransaction( Catalog.defaultUserId, ns, true, ORIGIN ); + + try { + + QueryContext qc = QueryContext.builder() + .query( query ) + .language( ql ) + .isAnalysed( true ) + .usesCache( true ) + .origin( ORIGIN ) + .namespaceId( ns ) + .batch( -1 ) + .transactionManager( transactionManager ) + .transactions( List.of( transaction ) ).build(); + List executedContexts = LanguageManager.getINSTANCE().anyQuery( qc ); + String result = getResultAsString( executedContexts, ql.dataModel() ); + + String logical = null, allocation = null, physical = null; + + int tries = 5; + try { + // plans are serialized in a separate thread, which might take some time + while ( UiRoutingPageUtil.runningTasks() > 0 && tries-- > 0 ) { + Thread.sleep( 2000 ); + } + if ( tries == 0 ) { + throw new RuntimeException( "Took too long to set all plans" ); + } + + for ( Information info : transaction.getQueryAnalyzer().getInformationArray() ) { + if ( info instanceof InformationPolyAlg polyInfo && polyInfo.getTextualPolyAlg() != null ) { + switch ( PlanType.valueOf( polyInfo.planType ) ) { + case LOGICAL -> logical = polyInfo.getTextualPolyAlg(); + case ALLOCATION -> allocation = polyInfo.getTextualPolyAlg(); + case PHYSICAL -> physical = polyInfo.getTextualPolyAlg(); + } + } + } + + assertNotNull( logical ); + assertNotNull( allocation ); + assertNotNull( physical ); // Physical is not yet tested further since it is only partially implemented + + } catch ( InterruptedException e ) { + throw new RuntimeException( e ); + } finally { + transaction.commit(); // execute PolyAlg creates new transaction, as long as only DQLs are tested like this + } + if ( transactionManager.getNumberOfActiveTransactions() > 0 ) { + throw new RuntimeException(); + } + + // Check that parsing and executing again returns the same result + String resultFromLogical = executePolyAlg( logical, PlanType.LOGICAL, ql ); + assertEquals( result, resultFromLogical, "Result from query does not match result when executing the logical plan." ); + String resultFromAllocation = executePolyAlg( allocation, PlanType.ALLOCATION, ql ); + assertEquals( result, resultFromAllocation, "Result from query does not match result when executing the allocation plan." ); + } catch ( Exception e ) { + transaction.rollback( "Error during testing round trip: " + e.getMessage() ); + throw new RuntimeException( e ); + } + + } + + + /** + * Parses the given polyAlg into a plan of the specified type. + * The plan is then executed and the result is returned as a string. + * The plan is also serialized back to polyAlgebra to check whether it is equal to before. 
+ */ + private static String executePolyAlg( String polyAlg, PlanType planType, QueryLanguage ql ) throws NodeParseException { + assert planType != PlanType.PHYSICAL : "Physical plan is not yet supported by this helper function"; + + TransactionManager transactionManager = TransactionManagerImpl.getInstance(); + Transaction transaction = transactionManager.startTransaction( Catalog.defaultUserId, Catalog.defaultNamespaceId, true, ORIGIN ); + Statement statement = transaction.createStatement(); + try { + AlgRoot root = buildFromPolyAlg( polyAlg, planType, statement ); + assertEqualAfterRoundtrip( polyAlg, root.alg ); + + QueryContext qc = QueryContext.builder() + .query( polyAlg ) + .language( ql ) + .isAnalysed( true ) + .usesCache( true ) + .origin( ORIGIN ) + .batch( -1 ) + .transactionManager( transactionManager ) + .transactions( List.of( transaction ) ) + .statement( statement ).build(); + TranslatedQueryContext translated = TranslatedQueryContext.fromQuery( polyAlg, root, planType == PlanType.ALLOCATION, qc ); + List executedContexts = LanguageManager.getINSTANCE().anyQuery( translated ); + String result = getResultAsString( executedContexts, ql.dataModel() ); + try { + transaction.commit(); + } catch ( TransactionException e ) { + throw new RuntimeException( e ); + } + return result; + + } catch ( Exception e ) { + transaction.rollback( "Error during execution of polyAlg: " + e.getMessage() ); + throw new RuntimeException( e ); + } + } + + + private static String getResultAsString( List executedContexts, DataModel dataModel ) { + assertEquals( 1, executedContexts.size() ); + ExecutedContext context = executedContexts.get( 0 ); + + assertTrue( context.getException().isEmpty(), "Query resulted in an exception" ); + try { + + @NotNull ResultIterator iter = context.getIterator(); + String tupleType = context.getImplementation().tupleType.toString(); + List> rows = iter.getAllRowsAndClose(); + + StringBuilder sb = new StringBuilder( tupleType ); + + for ( List row : rows ) { + sb.append( "\n" ); + for ( PolyValue v : row ) { + String json = v == null ? "NULL" : v.toJson(); + if ( json.contains( "\"id\"" ) ) { + try { + ObjectMapper objectMapper = new ObjectMapper(); + JsonNode jsonNode = objectMapper.readTree( json ); + ((ObjectNode) jsonNode).remove( "id" ); + json = objectMapper.writeValueAsString( jsonNode ); + } catch ( JsonProcessingException ignored ) { + } + } + sb.append( json ).append( "," ); + } + } + return sb.toString(); + } catch ( Exception e ) { + throw new RuntimeException( e ); + } + } + + + private static AlgRoot buildFromPolyAlg( String polyAlg, PlanType planType, Statement statement ) throws NodeParseException { + Snapshot snapshot = statement.getTransaction().getSnapshot(); + RexBuilder rexBuilder = new RexBuilder( statement.getTransaction().getTypeFactory() ); + AlgCluster cluster = AlgCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, null, snapshot ); + PolyAlgToAlgConverter converter = new PolyAlgToAlgConverter( planType, snapshot, cluster ); + return converter.convert( (PolyAlgNode) PolyAlgParser.create( polyAlg ).parseQuery() ); + } + + + /** + * Get AlgNode tree for the given (logical) PolyAlgebra. 
+ */ + private static AlgRoot buildFromPolyAlg( String polyAlg ) throws NodeParseException { + AlgCluster cluster = AlgCluster.create( new VolcanoPlanner(), new RexBuilder( AlgDataTypeFactory.DEFAULT ), null, null ); + PolyAlgToAlgConverter converter = new PolyAlgToAlgConverter( PlanType.LOGICAL, Catalog.snapshot(), cluster ); + return converter.convert( (PolyAlgNode) PolyAlgParser.create( polyAlg ).parseQuery() ); + } + + + private static String toPolyAlg( AlgNode node ) { + StringBuilder sb = new StringBuilder(); + node.buildPolyAlgebra( sb, "" ); + return sb.toString(); + } + + + private static void assertEqualAfterRoundtrip( String polyAlgBefore, AlgNode node ) { + // Remove any whitespaces before comparing + String polyAlgAfter = toPolyAlg( node ); + assertEquals( polyAlgBefore.replaceAll( "\\s", "" ), polyAlgAfter.replaceAll( "\\s", "" ) ); + } + + + @Test + public void projectPolyAlgTest() throws NodeParseException { + String REL_PROJECT = PolyAlgRegistry.getDeclaration( LogicalRelProject.class ).opName; + String REL_FILTER = PolyAlgRegistry.getDeclaration( LogicalRelFilter.class ).opName; + String REL_SCAN = PolyAlgRegistry.getDeclaration( LogicalRelScan.class ).opName; + + String polyAlg = format( """ + %s[id, name, foo]( + %s[>(foo, 5)]( + %s[public.polyalg_test])) + """, REL_PROJECT, REL_FILTER, REL_SCAN ); + AlgNode node = buildFromPolyAlg( polyAlg ).alg; + assertEqualAfterRoundtrip( polyAlg, node ); + } + + + @Test + public void opAliasPolyAlgTest() throws NodeParseException { + String REL_PROJECT = PolyAlgRegistry.getDeclaration( LogicalRelProject.class ).opName; + String ALIAS = PolyAlgRegistry.getDeclaration( LogicalRelProject.class ).opAliases.iterator().next(); + String REL_SCAN = PolyAlgRegistry.getDeclaration( LogicalRelScan.class ).opName; + + String polyAlg = format( """ + %s[foo, name]( + %s[public.polyalg_test]) + """, ALIAS, REL_SCAN ); + AlgNode node = buildFromPolyAlg( polyAlg ).alg; + String polyAlgAfter = toPolyAlg( node ); + assertEquals( polyAlg.replace( ALIAS + "[", REL_PROJECT + "[" ).replaceAll( "\\s", "" ), + polyAlgAfter.replaceAll( "\\s", "" ) ); + } + + + @Test + public void paramAliasPolyAlgTest() throws NodeParseException { + String REL_SORT = PolyAlgRegistry.getDeclaration( LogicalRelSort.class ).opName; + String REL_SCAN = PolyAlgRegistry.getDeclaration( LogicalRelScan.class ).opName; + String LIMIT_ALIAS = PolyAlgRegistry.getDeclaration( LogicalRelSort.class ).getParam( "limit" ).getAliases().iterator().next(); + String polyAlg = format( """ + %s[%s=2]( + %s[public.polyalg_test]) + """, REL_SORT, LIMIT_ALIAS, REL_SCAN ); + AlgNode node = buildFromPolyAlg( polyAlg ).alg; + String polyAlgAfter = toPolyAlg( node ); + assertEquals( polyAlg.replace( LIMIT_ALIAS + "=", "limit=" ).replaceAll( "\\s", "" ), + polyAlgAfter.replaceAll( "\\s", "" ) ); + } + + + @Test + public void sqlProjectFilterTest() throws NodeParseException { + testSqlRoundTrip( "SELECT id, name AS first_name FROM polyalg_test WHERE foo <= 6" ); + } + + + @Test + public void sqlDistinctAggregateTest() throws NodeParseException { + testSqlRoundTrip( "SELECT gender, COUNT(distinct foo) FROM polyalg_test GROUP BY gender" ); + } + + + @Test + public void sqlFilterAggregateTest() throws NodeParseException { + testSqlRoundTrip( "SELECT gender, COUNT(foo) FILTER(WHERE foo < 5) as filtered FROM polyalg_test GROUP BY gender" ); + } + + + @Test + public void sqlAggregateWithNullNameTest() throws NodeParseException { + testSqlRoundTrip( "SELECT id, gender FROM polyalg_test as e WHERE EXISTS ( 
SELECT 1 FROM polyalg_test WHERE gender = 'Male' AND id = e.id );" ); + } + + + @Test + public void sqlUnionTest() throws NodeParseException { + testSqlRoundTrip( "(SELECT id FROM polyalg_test) UNION (SELECT foo FROM polyalg_test)" ); + } + + + @Test + public void sqlCastTest() throws NodeParseException { + testSqlRoundTrip( "SELECT id, CAST(foo as VARCHAR(3)), 14.2 FROM polyalg_test" ); + } + + + @Test + public void sqlSortTest() throws NodeParseException { + testSqlRoundTrip( "SELECT * FROM polyalg_test ORDER BY foo desc" ); + } + + + @Test + public void sqlJoinWithRenameTest() throws NodeParseException { + testSqlRoundTrip( "SELECT * FROM polyalg_test t1 JOIN polyalg_test t2 ON t1.id=t2.foo" ); + } + + + @Test + public void sqlInsertTest() throws NodeParseException { + testSqlRoundTrip( "INSERT INTO polyalg_test VALUES (7, 'Mike', 12, 'Male')" ); + } + + + @Test + public void sqlAliasWithSpaceFilterTest() throws NodeParseException { + testSqlRoundTrip( "SELECT *, 'foo value' FROM (SELECT foo AS \"foo value\" FROM polyalg_test) WHERE \"foo value\" < 10" ); + } + + + @Test + public void cypherRelCrossModelTest() throws NodeParseException { + testQueryRoundTrip( "MATCH (n) where n.foo < 6 RETURN n ORDER BY n.name LIMIT 3", QueryLanguage.from( "cypher" ), null ); + testQueryRoundTrip( "MATCH (n:polyalg_test {gender: 'Female'}) where n.foo < 6 RETURN n ORDER BY n.name LIMIT 3", QueryLanguage.from( "cypher" ), null ); + } + + + @Test + public void cypherExtractFromPathTest() throws NodeParseException { + testQueryRoundTrip( "MATCH (n)-[r]->(m) RETURN r", QueryLanguage.from( "cypher" ), null ); + } + + + @Test + public void mongoRelCrossModelTest() throws NodeParseException { + testQueryRoundTrip( "db.polyalg_test.find({'gender': 'Female'})", QueryLanguage.from( "mql" ), null ); + } + + + @Test + public void cypherMatchNodeTest() throws NodeParseException { + testCypherRoundTrip( "MATCH (n:Person) RETURN n ORDER BY n.name" ); + } + + + @Test + public void cypherMatchPathTest() throws NodeParseException { + testCypherRoundTrip( "MATCH (n:Person)-[rel:OWNER_OF]->(a:Animal) RETURN n" ); + } + + + @Test + public void cypherCreateTest() throws NodeParseException { + testCypherRoundTrip( "CREATE (c:Car {color: 'red'}), (p:Person {name: 'Martin'}), (p)-[:OWNS_CAR]->(c)" ); + } + + + @Test + public void sqlLpgCrossModelTest() throws NodeParseException { + testQueryRoundTrip( "SELECT * FROM " + GRAPH_NAME + ".Person", QueryLanguage.from( "sql" ), GRAPH_NAME ); + } + + + @Test + public void mongoLpgCrossModelTest() throws NodeParseException { + testQueryRoundTrip( "db.Person.find({})", QueryLanguage.from( "mql" ), GRAPH_NAME ); + } + + + @Test + public void mongoFindTest() throws NodeParseException { + testMqlRoundTrip( "db." + DOC_COLL + ".find({item: 'journal'})" ); + } + + + @Test + public void mongoArrayFindTest() throws NodeParseException { + testMqlRoundTrip( "db." + DOC_COLL + ".find( { tags: [\"red\", \"blank\"] } )" ); + } + + + @Test + public void mongoElementRefTest() throws NodeParseException { + testMqlRoundTrip( "db." + DOC_COLL + ".find({\"dim_cm\": {\"$elemMatch\": {\"$gt\": 22}}})" ); + } + + + @Test + public void mongoInsertTest() throws NodeParseException { + testMqlRoundTrip( "db." + DOC_COLL + ".insertOne({item: \"canvas\"})" ); + } + + + @Test + public void sqlDocCrossModelTest() throws NodeParseException { + testQueryRoundTrip( "SELECT * FROM " + DOC_NAME + "." 
+ DOC_COLL, QueryLanguage.from( "sql" ), DOC_NAME ); + } + + + @Test + public void cypherDocCrossModelTest() throws NodeParseException { + testQueryRoundTrip( "MATCH (n:" + DOC_COLL + ") RETURN n", QueryLanguage.from( "cypher" ), DOC_NAME ); + } + +} diff --git a/information/src/main/java/org/polypheny/db/information/InformationManager.java b/information/src/main/java/org/polypheny/db/information/InformationManager.java index abd34311c6..c9e94326b3 100644 --- a/information/src/main/java/org/polypheny/db/information/InformationManager.java +++ b/information/src/main/java/org/polypheny/db/information/InformationManager.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -257,6 +257,11 @@ public String getPageList() { } + public Information[] getInformationArray() { + return this.informationMap.values().toArray( new Information[0] ); + } + + /** * Get a page from the Information Manager with a certain id. * diff --git a/information/src/main/java/org/polypheny/db/information/InformationPage.java b/information/src/main/java/org/polypheny/db/information/InformationPage.java index 311ffb04b8..47acbf384d 100644 --- a/information/src/main/java/org/polypheny/db/information/InformationPage.java +++ b/information/src/main/java/org/polypheny/db/information/InformationPage.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -172,6 +172,12 @@ public void overrideWith( final InformationPage page ) { } + public InformationPage setStmtLabel( long stmtIdx ) { + this.label = "Query " + (stmtIdx + 1); + return this; + } + + /** * Convert page to Json using the custom TypeAdapter * diff --git a/information/src/main/java/org/polypheny/db/information/InformationPolyAlg.java b/information/src/main/java/org/polypheny/db/information/InformationPolyAlg.java new file mode 100644 index 0000000000..cd8755570e --- /dev/null +++ b/information/src/main/java/org/polypheny/db/information/InformationPolyAlg.java @@ -0,0 +1,62 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.information; + +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.UUID; +import lombok.Getter; +import lombok.Setter; + +public class InformationPolyAlg extends Information { + + @JsonProperty + public String jsonPolyAlg; + + @JsonProperty + public String planType; + + @Getter + @Setter + private String textualPolyAlg; // this is only not null during testing, where it is desirable to have the human-readable PolyAlgebra + + + /** + * Constructor + * + * @param group The InformationGroup to which this information belongs + */ + public InformationPolyAlg( final InformationGroup group, final String jsonPolyAlg, final PlanType planType ) { + super( UUID.randomUUID().toString(), group.getId() ); + this.jsonPolyAlg = jsonPolyAlg; + this.planType = planType.name(); + fullWidth( true ); + } + + + public void updatePolyAlg( final String queryPlan ) { + this.jsonPolyAlg = queryPlan; + notifyManager(); + } + + + public enum PlanType { + LOGICAL, + ALLOCATION, + PHYSICAL + } + +} diff --git a/information/src/main/java/org/polypheny/db/information/InformationQueryPlan.java b/information/src/main/java/org/polypheny/db/information/InformationQueryPlan.java deleted file mode 100644 index 929736ec94..0000000000 --- a/information/src/main/java/org/polypheny/db/information/InformationQueryPlan.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2019-2024 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.information; - - -import com.fasterxml.jackson.annotation.JsonProperty; -import java.util.UUID; - - -/** - * An information object containing a query plan. This class is mainly used for the debugger in the UI. 
- */ -public class InformationQueryPlan extends Information { - - @JsonProperty - public String queryPlan; - - - /** - * Constructor - * - * @param group The InformationGroup to which this information belongs - */ - public InformationQueryPlan( final InformationGroup group, final String queryPlan ) { - this( group.getId(), queryPlan ); - fullWidth( true ); - } - - - /** - * Constructor - * - * @param groupId The id of the InformationGroup to which this information belongs - */ - public InformationQueryPlan( final String groupId, final String queryPlan ) { - this( UUID.randomUUID().toString(), groupId, queryPlan ); - } - - - /** - * Constructor - * - * @param id Unique id for this information object - * @param group The id of the InformationGroup to which this information belongs - */ - public InformationQueryPlan( final String id, final String group, final String queryPlan ) { - super( id, group ); - this.queryPlan = queryPlan; - } - - - public void updateQueryPlan( final String queryPlan ) { - this.queryPlan = queryPlan; - notifyManager(); - } - -} diff --git a/information/src/main/java/org/polypheny/db/webui/UiTestingMonitoringPage.java b/information/src/main/java/org/polypheny/db/webui/UiTestingMonitoringPage.java index 556f6e4c7e..8b09370287 100644 --- a/information/src/main/java/org/polypheny/db/webui/UiTestingMonitoringPage.java +++ b/information/src/main/java/org/polypheny/db/webui/UiTestingMonitoringPage.java @@ -24,7 +24,6 @@ import org.polypheny.db.information.InformationManager; import org.polypheny.db.information.InformationPage; import org.polypheny.db.information.InformationProgress; -import org.polypheny.db.information.InformationQueryPlan; import org.polypheny.db.information.InformationText; public class UiTestingMonitoringPage { @@ -66,9 +65,8 @@ public class UiTestingMonitoringPage { Information i7 = new InformationProgress( g2, "progval", 30 ); Information i8 = new InformationText( g2, "This is Text!!" ); - Information i9 = new InformationQueryPlan( g2, "THIS IS QUERY PLAN" ); - im.registerInformation( i1, i2, i3, i4, i5, i6, i7, i8, i9 ); + im.registerInformation( i1, i2, i3, i4, i5, i6, i7, i8 ); } } diff --git a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java index 30d6de23df..e5e510982e 100644 --- a/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java +++ b/monitoring/src/main/java/org/polypheny/db/monitoring/statistics/StatisticsManagerImpl.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -1041,7 +1041,7 @@ private synchronized void updateRelCalls( long tableId, MonitoringType kind, Tab entityStatistic.put( tableId, statisticTable ); break; default: - log.warn( "Currently, only SELECT, INSERT, DELETE and UPDATE are available in Statistics." ); + log.warn( "Currently, only SELECT, INSERT, DELETE and UPDATE are available in Statistics. 
Found " + kind ); } } diff --git a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailSort.java b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailSort.java index 7fa567c104..5d01dd72db 100644 --- a/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailSort.java +++ b/plugins/cottontail-adapter/src/main/java/org/polypheny/db/adapter/cottontail/algebra/CottontailSort.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -58,7 +58,7 @@ public CottontailSort( AlgCluster cluster, AlgTraitSet traits, AlgNode child, Al @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { - final double rowCount = mq.getTupleCount( this ) + 0.01; + final double rowCount = mq.getTupleCount( this ).orElse( Double.MAX_VALUE ) + 0.01; return planner.getCostFactory().makeCost( rowCount, 0, 0 ); } diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherFromPathUtil.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherFromPathUtil.java new file mode 100644 index 0000000000..93b18344fa --- /dev/null +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherFromPathUtil.java @@ -0,0 +1,65 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.polypheny.db.cypher;
+
+import java.util.List;
+import org.polypheny.db.algebra.type.AlgDataType;
+import org.polypheny.db.algebra.type.AlgDataTypeField;
+import org.polypheny.db.catalog.exceptions.GenericRuntimeException;
+import org.polypheny.db.nodes.OperatorBinding;
+import org.polypheny.db.rex.RexCallBinding;
+import org.polypheny.db.rex.RexLiteral;
+import org.polypheny.db.type.PathType;
+
+public class CypherFromPathUtil {
+
+    public static AlgDataType inferReturnType( OperatorBinding opBinding ) {
+        return inferReturnType( opBinding.collectOperandTypes(), opBinding );
+    }
+
+
+    public static AlgDataType inferReturnType( List<AlgDataType> operandTypes ) {
+        if ( operandTypes.size() < 2 || !(operandTypes.get( 0 ) instanceof PathType pathType) ) {
+            throw new GenericRuntimeException( "Could not get element to derive type for extract from path" );
+        }
+        AlgDataType toExtract = operandTypes.get( 1 );
+        String targetName = toExtract.getFieldNames().get( 0 );
+
+        for ( AlgDataTypeField element : pathType.getFields() ) {
+            if ( element.getName().equals( targetName ) ) {
+                return element.getType();
+            }
+        }
+        throw new GenericRuntimeException( "Could not get element to derive type for extract from path" );
+    }
+
+
+    public static AlgDataType inferReturnType( List<AlgDataType> operandTypes, OperatorBinding opBinding ) {
+        if ( operandTypes.size() < 2 || !(operandTypes.get( 0 ) instanceof PathType pathType) || !(opBinding instanceof RexCallBinding callBinding) || !(callBinding.getOperands().get( 1 ) instanceof RexLiteral extract) ) {
+            throw new GenericRuntimeException( "Could not get element to derive type for extract from path" );
+        }
+        String targetName = extract.value.asString().value;
+
+        for ( AlgDataTypeField element : pathType.getFields() ) {
+            if ( element.getName().equals( targetName ) ) {
+                return element.getType();
+            }
+        }
+        throw new GenericRuntimeException( "Could not get element to derive type for extract from path" );
+    }
+
+}
diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherPathMatchUtil.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherPathMatchUtil.java
new file mode 100644
index 0000000000..f2fba7eba0
--- /dev/null
+++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherPathMatchUtil.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2019-2025 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.polypheny.db.cypher; + +import java.util.List; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.nodes.OperatorBinding; +import org.polypheny.db.type.PathType; + +public class CypherPathMatchUtil { + + public static AlgDataType inferReturnType( OperatorBinding opBinding ) { + return inferReturnType( opBinding.collectOperandTypes() ); + } + + + public static AlgDataType inferReturnType( List operandTypes ) { + if ( operandTypes.size() < 2 || !(operandTypes.get( 1 ) instanceof PathType) ) { + throw new GenericRuntimeException( "Could not get element to derive type for path match" ); + } + return operandTypes.get( 1 ); + } + +} diff --git a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherRegisterer.java b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherRegisterer.java index 2377610312..e48972f6e4 100644 --- a/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherRegisterer.java +++ b/plugins/cypher-language/src/main/java/org/polypheny/db/cypher/CypherRegisterer.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -25,6 +25,7 @@ import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.nodes.LangFunctionOperator; import org.polypheny.db.nodes.Operator; +import org.polypheny.db.type.PolyType; public class CypherRegisterer { @@ -38,53 +39,51 @@ public static void registerOperators() { throw new GenericRuntimeException( "Cypher operators were already registered." ); } - register( OperatorName.CYPHER_LIKE, new LangFunctionOperator( OperatorName.CYPHER_LIKE.name(), Kind.LIKE ) ); + register( OperatorName.CYPHER_LIKE, new LangFunctionOperator( OperatorName.CYPHER_LIKE.name(), Kind.LIKE, PolyType.BOOLEAN ) ); - register( OperatorName.CYPHER_HAS_PROPERTY, new LangFunctionOperator( OperatorName.CYPHER_HAS_PROPERTY.name(), Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_HAS_PROPERTY, new LangFunctionOperator( OperatorName.CYPHER_HAS_PROPERTY.name(), Kind.CYPHER_FUNCTION, PolyType.BOOLEAN ) ); - register( OperatorName.CYPHER_HAS_LABEL, new LangFunctionOperator( OperatorName.CYPHER_HAS_LABEL.name(), Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_HAS_LABEL, new LangFunctionOperator( OperatorName.CYPHER_HAS_LABEL.name(), Kind.CYPHER_FUNCTION, PolyType.BOOLEAN ) ); - register( OperatorName.CYPHER_GRAPH_ONLY_LABEL, new LangFunctionOperator( OperatorName.CYPHER_GRAPH_ONLY_LABEL.name(), Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_PATH_MATCH, new LangFunctionOperator( OperatorName.CYPHER_PATH_MATCH.name(), Kind.CYPHER_FUNCTION, CypherPathMatchUtil::inferReturnType, CypherPathMatchUtil::inferReturnType ) ); - register( OperatorName.CYPHER_PATH_MATCH, new LangFunctionOperator( OperatorName.CYPHER_PATH_MATCH.name(), Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_NODE_EXTRACT, new LangFunctionOperator( OperatorName.CYPHER_NODE_EXTRACT.name(), Kind.CYPHER_FUNCTION, PolyType.NODE ) ); - register( OperatorName.CYPHER_NODE_EXTRACT, new LangFunctionOperator( OperatorName.CYPHER_NODE_EXTRACT.name(), Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_EXTRACT_FROM_PATH, new LangFunctionOperator( OperatorName.CYPHER_EXTRACT_FROM_PATH.name(), Kind.CYPHER_FUNCTION, 
CypherFromPathUtil::inferReturnType, CypherFromPathUtil::inferReturnType ) ); - register( OperatorName.CYPHER_EXTRACT_FROM_PATH, new LangFunctionOperator( OperatorName.CYPHER_EXTRACT_FROM_PATH.name(), Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_NODE_MATCH, new LangFunctionOperator( OperatorName.CYPHER_NODE_MATCH.name(), Kind.CYPHER_FUNCTION, PolyType.NODE ) ); - register( OperatorName.CYPHER_NODE_MATCH, new LangFunctionOperator( OperatorName.CYPHER_NODE_MATCH.name(), Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_EXTRACT_PROPERTY, new LangFunctionOperator( OperatorName.CYPHER_EXTRACT_PROPERTY.name(), Kind.CYPHER_FUNCTION, PolyType.ANY ) ); - register( OperatorName.CYPHER_EXTRACT_PROPERTY, new LangFunctionOperator( OperatorName.CYPHER_EXTRACT_PROPERTY.name(), Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_EXTRACT_PROPERTIES, new LangFunctionOperator( OperatorName.CYPHER_EXTRACT_PROPERTIES.name(), Kind.CYPHER_FUNCTION, PolyType.ANY ) ); - register( OperatorName.CYPHER_EXTRACT_PROPERTIES, new LangFunctionOperator( OperatorName.CYPHER_EXTRACT_PROPERTIES.name(), Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_EXTRACT_ID, new LangFunctionOperator( OperatorName.CYPHER_EXTRACT_ID.name(), Kind.CYPHER_FUNCTION, PolyType.VARCHAR ) ); - register( OperatorName.CYPHER_EXTRACT_ID, new LangFunctionOperator( OperatorName.CYPHER_EXTRACT_ID.name(), Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_EXTRACT_LABELS, new LangFunctionOperator( OperatorName.CYPHER_EXTRACT_LABELS.name(), Kind.CYPHER_FUNCTION, PolyType.ARRAY, PolyType.VARCHAR ) ); - register( OperatorName.CYPHER_EXTRACT_LABELS, new LangFunctionOperator( OperatorName.CYPHER_EXTRACT_LABELS.name(), Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_HAS_LABEL, new LangFunctionOperator( OperatorName.CYPHER_HAS_LABEL.name(), Kind.CYPHER_FUNCTION, PolyType.BOOLEAN ) ); - register( OperatorName.CYPHER_HAS_LABEL, new LangFunctionOperator( OperatorName.CYPHER_HAS_LABEL.name(), Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_TO_LIST, new LangFunctionOperator( OperatorName.CYPHER_TO_LIST.name(), Kind.CYPHER_FUNCTION, PolyType.ARRAY, PolyType.ANY ) ); - register( OperatorName.CYPHER_TO_LIST, new LangFunctionOperator( OperatorName.CYPHER_TO_LIST.name(), Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_ADJUST_EDGE, new LangFunctionOperator( OperatorName.CYPHER_ADJUST_EDGE.name(), Kind.CYPHER_FUNCTION, PolyType.EDGE ) ); - register( OperatorName.CYPHER_ADJUST_EDGE, new LangFunctionOperator( OperatorName.CYPHER_ADJUST_EDGE.name(), Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_SET_LABELS, new LangFunctionOperator( OperatorName.CYPHER_SET_LABELS.name(), Kind.CYPHER_FUNCTION, PolyType.ANY ) ); - register( OperatorName.CYPHER_SET_LABELS, new LangFunctionOperator( OperatorName.CYPHER_SET_LABELS.name(), Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_SET_PROPERTY, new LangFunctionOperator( OperatorName.CYPHER_SET_PROPERTY.name(), Kind.CYPHER_FUNCTION, PolyType.ANY ) ); - register( OperatorName.CYPHER_SET_PROPERTY, new LangFunctionOperator( OperatorName.CYPHER_SET_PROPERTY.name(), Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_SET_PROPERTIES, new LangFunctionOperator( OperatorName.CYPHER_SET_PROPERTIES.name(), Kind.CYPHER_FUNCTION, PolyType.ANY ) ); - register( OperatorName.CYPHER_SET_PROPERTIES, new LangFunctionOperator( OperatorName.CYPHER_SET_PROPERTIES.name(), Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_REMOVE_PROPERTY, new 
LangFunctionOperator( OperatorName.CYPHER_REMOVE_PROPERTY.name(), Kind.CYPHER_FUNCTION, PolyType.ANY ) ); - register( OperatorName.CYPHER_REMOVE_PROPERTY, new LangFunctionOperator( OperatorName.CYPHER_REMOVE_PROPERTY.name(), Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_REMOVE_LABELS, new LangFunctionOperator( OperatorName.CYPHER_REMOVE_LABELS.name(), Kind.CYPHER_FUNCTION, PolyType.ANY ) ); - register( OperatorName.CYPHER_REMOVE_LABELS, new LangFunctionOperator( OperatorName.CYPHER_REMOVE_LABELS.name(), Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_GEO_DISTANCE, new LangFunctionOperator( "GEO_DISTANCE", Kind.CYPHER_FUNCTION, PolyType.GEOMETRY ) ); - register( OperatorName.CYPHER_GEO_DISTANCE, new LangFunctionOperator( "GEO_DISTANCE", Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_GEO_CONTAINS, new LangFunctionOperator( "GEO_CONTAINS", Kind.CYPHER_FUNCTION, PolyType.BOOLEAN ) ); - register( OperatorName.CYPHER_GEO_CONTAINS, new LangFunctionOperator( "GEO_CONTAINS", Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_GEO_INTERSECTS, new LangFunctionOperator( "GEO_INTERSECTS", Kind.CYPHER_FUNCTION, PolyType.BOOLEAN ) ); - register( OperatorName.CYPHER_GEO_INTERSECTS, new LangFunctionOperator( "GEO_INTERSECTS", Kind.CYPHER_FUNCTION ) ); - - register( OperatorName.CYPHER_GEO_WITHIN, new LangFunctionOperator( "GEO_WITHIN", Kind.CYPHER_FUNCTION ) ); + register( OperatorName.CYPHER_GEO_WITHIN, new LangFunctionOperator( "GEO_WITHIN", Kind.CYPHER_FUNCTION, PolyType.BOOLEAN ) ); isInit = true; } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcAdapterFramework.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcAdapterFramework.java index 8da62b2ce7..4dd06268ec 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcAdapterFramework.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcAdapterFramework.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -16,6 +16,39 @@ package org.polypheny.db.adapter.jdbc; +import com.google.common.collect.ImmutableList; +import org.polypheny.db.adapter.jdbc.JdbcRules.JdbcAggregate; +import org.polypheny.db.adapter.jdbc.JdbcRules.JdbcCalc; +import org.polypheny.db.adapter.jdbc.JdbcRules.JdbcFilter; +import org.polypheny.db.adapter.jdbc.JdbcRules.JdbcIntersect; +import org.polypheny.db.adapter.jdbc.JdbcRules.JdbcJoin; +import org.polypheny.db.adapter.jdbc.JdbcRules.JdbcMinus; +import org.polypheny.db.adapter.jdbc.JdbcRules.JdbcProject; +import org.polypheny.db.adapter.jdbc.JdbcRules.JdbcSort; +import org.polypheny.db.adapter.jdbc.JdbcRules.JdbcTableModify; +import org.polypheny.db.adapter.jdbc.JdbcRules.JdbcUnion; +import org.polypheny.db.adapter.jdbc.JdbcRules.JdbcValues; +import org.polypheny.db.algebra.core.JoinAlgType; +import org.polypheny.db.algebra.logical.relational.LogicalCalc; +import org.polypheny.db.algebra.logical.relational.LogicalRelAggregate; +import org.polypheny.db.algebra.logical.relational.LogicalRelIntersect; +import org.polypheny.db.algebra.logical.relational.LogicalRelMinus; +import org.polypheny.db.algebra.logical.relational.LogicalRelModify; +import org.polypheny.db.algebra.logical.relational.LogicalRelProject; +import org.polypheny.db.algebra.logical.relational.LogicalRelScan; +import org.polypheny.db.algebra.logical.relational.LogicalRelSort; +import org.polypheny.db.algebra.logical.relational.LogicalRelUnion; +import org.polypheny.db.algebra.logical.relational.LogicalRelValues; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.OperatorTag; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.ParamType; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.Parameter; +import org.polypheny.db.algebra.polyalg.PolyAlgDeclaration.SimpleType; +import org.polypheny.db.algebra.polyalg.PolyAlgRegistry; +import org.polypheny.db.algebra.polyalg.arguments.EnumArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.catalog.logistic.DataModel; +import org.polypheny.db.plan.Convention; import org.polypheny.db.plugins.PluginContext; import org.polypheny.db.plugins.PolyPlugin; @@ -27,6 +60,81 @@ public class JdbcAdapterFramework extends PolyPlugin { */ public JdbcAdapterFramework( PluginContext context ) { super( context ); + registerPolyAlg(); + } + + + private void registerPolyAlg() { + ImmutableList physTags = ImmutableList.of( OperatorTag.PHYSICAL, OperatorTag.ADVANCED ); + Convention c = JdbcConvention.NONE; // TODO: set correct convention + + PolyAlgRegistry.register( JdbcProject.class, PolyAlgDeclaration.builder() + .creator( JdbcProject::create ).model( DataModel.RELATIONAL ) + .opName( "JDBC_PROJECT" ).convention( c ).numInputs( 1 ).opTags( physTags ) + .params( PolyAlgRegistry.getParams( LogicalRelProject.class ) ) + .build() ); + PolyAlgRegistry.register( JdbcToEnumerableConverter.class, PolyAlgDeclaration.builder() + .creator( JdbcToEnumerableConverter::create ).model( DataModel.RELATIONAL ) + .opName( "JDBC_TO_E" ).convention( c ).numInputs( 1 ).opTags( physTags ) + .build() ); + PolyAlgRegistry.register( JdbcJoin.class, PolyAlgDeclaration.builder() + .creator( JdbcJoin::create ).model( DataModel.RELATIONAL ) + .opName( "JDBC_JOIN" ).convention( c ).numInputs( 2 ).opTags( physTags ) + .param( Parameter.builder().name( "condition" ).alias( "on" ).type( ParamType.REX ).simpleType( SimpleType.REX_PREDICATE ).build() ) + .param( Parameter.builder().name( "type" 
).type( ParamType.JOIN_TYPE_ENUM ).defaultValue( new EnumArg<>( JoinAlgType.INNER, ParamType.JOIN_TYPE_ENUM ) ).build() ) + .param( Parameter.builder().name( "variables" ).type( ParamType.CORR_ID ).simpleType( SimpleType.HIDDEN ).multiValued( 1 ).defaultValue( ListArg.EMPTY ).build() ) + .build() ); + PolyAlgRegistry.register( JdbcCalc.class, PolyAlgDeclaration.builder() + .creator( JdbcCalc::create ).model( DataModel.RELATIONAL ) + .opName( "JDBC_CALC" ).convention( c ).numInputs( 1 ).opTags( physTags ) + .params( PolyAlgRegistry.getParams( LogicalCalc.class ) ) + .build() ); + PolyAlgRegistry.register( JdbcFilter.class, PolyAlgDeclaration.builder() + .creator( JdbcFilter::create ).model( DataModel.RELATIONAL ) + .opName( "JDBC_FILTER" ).convention( c ).numInputs( 1 ).opTags( physTags ) + .param( Parameter.builder().name( "condition" ).type( ParamType.REX ).simpleType( SimpleType.REX_PREDICATE ).build() ) + .build() ); + PolyAlgRegistry.register( JdbcAggregate.class, PolyAlgDeclaration.builder() + .creator( JdbcAggregate::create ).model( DataModel.RELATIONAL ) + .opName( "JDBC_AGGREGATE" ).convention( c ).numInputs( 1 ).opTags( physTags ) + .params( PolyAlgRegistry.getParams( LogicalRelAggregate.class ) ) + .build() ); + PolyAlgRegistry.register( JdbcSort.class, PolyAlgDeclaration.builder() + .creator( JdbcSort::create ).model( DataModel.RELATIONAL ) + .opName( "JDBC_SORT" ).convention( c ).numInputs( 1 ).opTags( physTags ) + .params( PolyAlgRegistry.getParams( LogicalRelSort.class ) ) + .build() ); + PolyAlgRegistry.register( JdbcUnion.class, PolyAlgDeclaration.builder() + .creator( JdbcUnion::create ).model( DataModel.RELATIONAL ) + .opName( "JDBC_UNION" ).convention( c ).numInputs( -1 ).opTags( physTags ) + .params( PolyAlgRegistry.getParams( LogicalRelUnion.class ) ) + .build() ); + PolyAlgRegistry.register( JdbcIntersect.class, PolyAlgDeclaration.builder() + .creator( JdbcIntersect::create ).model( DataModel.RELATIONAL ) + .opName( "JDBC_INTERSECT" ).convention( c ).numInputs( -1 ).opTags( physTags ) + .params( PolyAlgRegistry.getParams( LogicalRelIntersect.class ) ) + .build() ); + PolyAlgRegistry.register( JdbcMinus.class, PolyAlgDeclaration.builder() + .creator( JdbcMinus::create ).model( DataModel.RELATIONAL ) + .opName( "JDBC_MINUS" ).convention( c ).numInputs( -1 ).opTags( physTags ) + .params( PolyAlgRegistry.getParams( LogicalRelMinus.class ) ) + .build() ); + PolyAlgRegistry.register( JdbcTableModify.class, PolyAlgDeclaration.builder() + .creator( JdbcTableModify::create ).model( DataModel.RELATIONAL ) + .opName( "JDBC_MODIFY" ).convention( c ).numInputs( 1 ).opTags( physTags ) + .params( PolyAlgRegistry.getParams( LogicalRelModify.class ) ) + .build() ); + PolyAlgRegistry.register( JdbcValues.class, PolyAlgDeclaration.builder() + .creator( JdbcValues::create ).model( DataModel.RELATIONAL ) + .opName( "JDBC_VALUES" ).convention( c ).numInputs( 0 ).opTags( physTags ) + .params( PolyAlgRegistry.getParams( LogicalRelValues.class ) ) + .build() ); + PolyAlgRegistry.register( JdbcScan.class, PolyAlgDeclaration.builder() + .creator( JdbcScan::create ).model( DataModel.RELATIONAL ) + .opName( "JDBC_SCAN" ).convention( c ).numInputs( 0 ).opTags( physTags ) + .params( PolyAlgRegistry.getParams( LogicalRelScan.class ) ) + .build() ); + } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java index 24000a464b..386a5b24a8 100644 --- 
a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcRules.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -46,6 +46,7 @@ import org.polypheny.db.adapter.jdbc.rel2sql.SqlImplementor.Result; import org.polypheny.db.algebra.AbstractAlgNode; import org.polypheny.db.algebra.AlgCollation; +import org.polypheny.db.algebra.AlgCollationTraitDef; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgWriter; import org.polypheny.db.algebra.InvalidAlgException; @@ -68,9 +69,15 @@ import org.polypheny.db.algebra.core.Values; import org.polypheny.db.algebra.core.relational.RelModify; import org.polypheny.db.algebra.logical.relational.LogicalRelValues; +import org.polypheny.db.algebra.metadata.AlgMdCollation; import org.polypheny.db.algebra.metadata.AlgMdUtil; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.operators.OperatorName; +import org.polypheny.db.algebra.polyalg.arguments.BooleanArg; +import org.polypheny.db.algebra.polyalg.arguments.EntityArg; +import org.polypheny.db.algebra.polyalg.arguments.ListArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; +import org.polypheny.db.algebra.polyalg.arguments.RexArg; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.nodes.Function; @@ -91,6 +98,7 @@ import org.polypheny.db.rex.RexNode; import org.polypheny.db.rex.RexOver; import org.polypheny.db.rex.RexProgram; +import org.polypheny.db.rex.RexUtil; import org.polypheny.db.rex.RexVisitorImpl; import org.polypheny.db.schema.document.DocumentRules; import org.polypheny.db.schema.trait.ModelTrait; @@ -102,7 +110,11 @@ import org.polypheny.db.tools.AlgBuilderFactory; import org.polypheny.db.type.PolyType; import org.polypheny.db.util.ImmutableBitSet; +import org.polypheny.db.util.Pair; +import org.polypheny.db.util.Quadruple; +import org.polypheny.db.util.Triple; import org.polypheny.db.util.UnsupportedRexCallVisitor; +import org.polypheny.db.util.ValidatorUtil; import org.polypheny.db.util.trace.PolyphenyDbTrace; import org.slf4j.Logger; @@ -323,6 +335,16 @@ public JdbcJoin( } + public static JdbcJoin create( PolyAlgArgs args, List children, AlgCluster cluster ) { + Triple, JoinAlgType> extracted = extractArgs( args ); + try { + return new JdbcJoin( cluster, children.get( 0 ).getTraitSet(), children.get( 0 ), children.get( 1 ), extracted.left, extracted.middle, extracted.right ); + } catch ( InvalidAlgException e ) { + throw new RuntimeException( e ); + } + } + + @Override public JdbcJoin copy( AlgTraitSet traitSet, RexNode condition, AlgNode left, AlgNode right, JoinAlgType joinType, boolean semiJoinDone ) { try { @@ -337,9 +359,12 @@ public JdbcJoin copy( AlgTraitSet traitSet, RexNode condition, AlgNode left, Alg @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { // We always "build" the - double rowCount = mq.getTupleCount( this ); + Optional rowCount = mq.getTupleCount( this ); + if ( rowCount.isEmpty() ) { + return planner.getCostFactory().makeInfiniteCost(); + } - return planner.getCostFactory().makeCost( rowCount, 0, 0 ); + return planner.getCostFactory().makeCost( rowCount.get(), 
0, 0 ); } @@ -409,6 +434,12 @@ public JdbcCalc( AlgCluster cluster, AlgTraitSet traitSet, AlgNode input, RexPro } + public static JdbcCalc create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return new JdbcCalc( cluster, children.get( 0 ).getTraitSet(), children.get( 0 ), + Calc.getProgramFromArgs( args, children.get( 0 ), cluster.getRexBuilder() ) ); + } + + @Override public AlgWriter explainTerms( AlgWriter pw ) { return program.explainCalc( super.explainTerms( pw ) ); @@ -423,8 +454,8 @@ public double estimateTupleCount( AlgMetadataQuery mq ) { @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { - double dRows = mq.getTupleCount( this ); - double dCpu = mq.getTupleCount( getInput() ) * program.getExprCount(); + double dRows = mq.getTupleCount( this ).orElse( Double.MAX_VALUE ); + double dCpu = mq.getTupleCount( getInput() ).orElse( Double.MAX_VALUE ) * program.getExprCount(); double dIo = 0; return planner.getCostFactory().makeCost( dRows, dCpu, dIo ); } @@ -594,6 +625,27 @@ public JdbcProject( AlgCluster cluster, AlgTraitSet traitSet, AlgNode input, Lis } + public static JdbcProject create( final AlgNode input, final List projects, AlgDataType rowType ) { + final AlgCluster cluster = input.getCluster(); + final AlgMetadataQuery mq = cluster.getMetadataQuery(); + final AlgTraitSet traitSet = input.getTraitSet().replaceIfs( AlgCollationTraitDef.INSTANCE, () -> AlgMdCollation.project( mq, input, projects ) ); + return new JdbcProject( cluster, traitSet, input, projects, rowType ); + } + + + static AlgNode create( AlgNode child, List projects, List fieldNames ) { + final AlgCluster cluster = child.getCluster(); + final AlgDataType rowType = RexUtil.createStructType( cluster.getTypeFactory(), projects, fieldNames, ValidatorUtil.F_SUGGESTER ); + return create( child, projects, rowType ); + } + + + public static AlgNode create( PolyAlgArgs args, List children, AlgCluster cluster ) { + ListArg projects = args.getListArg( 0, RexArg.class ); + return create( children.get( 0 ), projects.map( RexArg::getNode ), projects.map( RexArg::getAlias ) ); + } + + @Override public JdbcProject copy( AlgTraitSet traitSet, AlgNode input, List projects, AlgDataType rowType ) { return new JdbcProject( getCluster(), traitSet, input, projects, rowType ); @@ -787,6 +839,12 @@ public JdbcFilter( AlgCluster cluster, AlgTraitSet traitSet, AlgNode input, RexN } + public static JdbcFilter create( PolyAlgArgs args, List children, AlgCluster cluster ) { + RexArg condition = args.getArg( "condition", RexArg.class ); + return new JdbcFilter( cluster, children.get( 0 ).getTraitSet(), children.get( 0 ), condition.getNode() ); + } + + @Override public JdbcFilter copy( AlgTraitSet traitSet, AlgNode input, RexNode condition ) { return new JdbcFilter( getCluster(), traitSet, input, condition ); @@ -877,6 +935,17 @@ public JdbcAggregate( } + public static JdbcAggregate create( PolyAlgArgs args, List children, AlgCluster cluster ) { + Triple, List> extracted = extractArgs( args ); + try { + return new JdbcAggregate( cluster, children.get( 0 ).getTraitSet(), children.get( 0 ), false, + extracted.left, extracted.middle, extracted.right ); + } catch ( InvalidAlgException e ) { + throw new RuntimeException( e ); + } + } + + @Override public JdbcAggregate copy( AlgTraitSet traitSet, @@ -957,6 +1026,12 @@ public JdbcSort( AlgCluster cluster, AlgTraitSet traitSet, AlgNode input, AlgCol } + public static JdbcSort create( PolyAlgArgs args, List children, AlgCluster cluster ) { + Triple 
extracted = extractArgs( args ); + return new JdbcSort( cluster, children.get( 0 ).getTraitSet(), children.get( 0 ), extracted.left, extracted.middle, extracted.right ); + } + + @Override public JdbcSort copy( AlgTraitSet traitSet, AlgNode newInput, AlgCollation newCollation, ImmutableList nodes, RexNode offset, RexNode fetch ) { return new JdbcSort( getCluster(), traitSet, newInput, newCollation, offset, fetch ); @@ -1005,6 +1080,11 @@ public JdbcUnion( AlgCluster cluster, AlgTraitSet traitSet, List inputs } + public static JdbcUnion create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return new JdbcUnion( cluster, children.get( 0 ).getTraitSet(), children, args.getArg( "all", BooleanArg.class ).toBool() ); + } + + @Override public JdbcUnion copy( AlgTraitSet traitSet, List inputs, boolean all ) { return new JdbcUnion( getCluster(), traitSet, inputs, all ); @@ -1063,6 +1143,11 @@ public JdbcIntersect( AlgCluster cluster, AlgTraitSet traitSet, List in } + public static JdbcIntersect create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return new JdbcIntersect( cluster, children.get( 0 ).getTraitSet(), children, args.getArg( "all", BooleanArg.class ).toBool() ); + } + + @Override public JdbcIntersect copy( AlgTraitSet traitSet, List inputs, boolean all ) { return new JdbcIntersect( getCluster(), traitSet, inputs, all ); @@ -1114,6 +1199,11 @@ public JdbcMinus( AlgCluster cluster, AlgTraitSet traitSet, List inputs } + public static JdbcMinus create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return new JdbcMinus( cluster, children.get( 0 ).getTraitSet(), children, args.getArg( "all", BooleanArg.class ).toBool() ); + } + + @Override public JdbcMinus copy( AlgTraitSet traitSet, List inputs, boolean all ) { return new JdbcMinus( getCluster(), traitSet, inputs, all ); @@ -1203,6 +1293,14 @@ public JdbcTableModify( } + public static JdbcTableModify create( PolyAlgArgs args, List children, AlgCluster cluster ) { + EntityArg entity = args.getArg( "table", EntityArg.class ); + Quadruple, List, Boolean> extracted = extractArgs( args ); + return new JdbcTableModify( cluster, children.get( 0 ).getTraitSet(), (JdbcTable) entity.getEntity(), children.get( 0 ), + extracted.a, extracted.b, extracted.c, extracted.d ); + } + + @Override public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { double cost = super.computeSelfCost( planner, mq ).getCosts(); @@ -1264,6 +1362,12 @@ public static class JdbcValues extends Values implements JdbcAlg { } + public static JdbcValues create( PolyAlgArgs args, List children, AlgCluster cluster ) { + Pair>> extracted = extractArgs( args, cluster ); + return new JdbcValues( cluster, extracted.left, extracted.right, cluster.traitSetOf( ModelTrait.RELATIONAL ) ); + } + + @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { assert inputs.isEmpty(); diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java index ac3bf8741b..f71ffe86aa 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcScan.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the 
License. @@ -39,6 +39,8 @@ import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.EntityArg; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.plan.AlgCluster; import org.polypheny.db.plan.AlgOptCost; import org.polypheny.db.plan.AlgPlanner; @@ -61,11 +63,17 @@ public AlgOptCost computeSelfCost( AlgPlanner planner, AlgMetadataQuery mq ) { public JdbcScan( AlgCluster cluster, JdbcTable jdbcTable, JdbcConvention jdbcConvention ) { - super( cluster, cluster.traitSetOf( jdbcConvention ).replace( ModelTrait.RELATIONAL ), jdbcTable ); + super( cluster, cluster.traitSetOf( jdbcConvention ).plus( ModelTrait.RELATIONAL ), jdbcTable ); this.jdbcTable = jdbcTable; } + public static JdbcScan create( PolyAlgArgs args, List children, AlgCluster cluster ) { + JdbcTable table = (JdbcTable) args.getArg( "entity", EntityArg.class ).getEntity(); + return new JdbcScan( cluster, table, table.getSchema().getConvention() ); + } + + @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { assert inputs.isEmpty(); diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcToEnumerableConverter.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcToEnumerableConverter.java index bfd3b148ed..5a97524ec6 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcToEnumerableConverter.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/JdbcToEnumerableConverter.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -63,10 +63,12 @@ import org.polypheny.db.algebra.convert.ConverterImpl; import org.polypheny.db.algebra.enumerable.EnumerableAlg; import org.polypheny.db.algebra.enumerable.EnumerableAlgImplementor; +import org.polypheny.db.algebra.enumerable.EnumerableConvention; import org.polypheny.db.algebra.enumerable.JavaTupleFormat; import org.polypheny.db.algebra.enumerable.PhysType; import org.polypheny.db.algebra.enumerable.PhysTypeImpl; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.polyalg.arguments.PolyAlgArgs; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.config.RuntimeConfig; @@ -149,6 +151,11 @@ protected JdbcToEnumerableConverter( AlgCluster cluster, AlgTraitSet traits, Alg } + public static AlgNode create( PolyAlgArgs args, List children, AlgCluster cluster ) { + return new JdbcToEnumerableConverter( cluster, children.get( 0 ).getTraitSet().replace( EnumerableConvention.INSTANCE ), children.get( 0 ) ); + } + + @Override public AlgNode copy( AlgTraitSet traitSet, List inputs ) { return new JdbcToEnumerableConverter( getCluster(), traitSet, AbstractAlgNode.sole( inputs ) ); diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/MongoLanguagePlugin.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/MongoLanguagePlugin.java index d90388580c..0b3ab13366 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/MongoLanguagePlugin.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/MongoLanguagePlugin.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -41,6 +41,7 @@ import org.polypheny.db.plugins.PolyPluginManager; import org.polypheny.db.processing.QueryContext; import org.polypheny.db.processing.QueryContext.ParsedQueryContext; +import org.polypheny.db.type.PolyType; import org.polypheny.db.util.Pair; import org.polypheny.db.webui.crud.LanguageCrud; @@ -138,73 +139,78 @@ private static List anyQuerySplitter( QueryContext context ) } + public String preprocessing( String query, QueryContext context ) { + return query; + } + + public static void registerOperators() { if ( isInit ) { throw new GenericRuntimeException( "Mql operators were already registered." 
); } - register( OperatorName.MQL_EQUALS, new LangFunctionOperator( "MQL_EQUALS", Kind.EQUALS ) ); + register( OperatorName.MQL_EQUALS, new LangFunctionOperator( "MQL_EQUALS", Kind.EQUALS, PolyType.BOOLEAN ) ); - register( OperatorName.MQL_SIZE_MATCH, new LangFunctionOperator( "MQL_SIZE_MATCH", Kind.MQL_SIZE_MATCH ) ); + register( OperatorName.MQL_SIZE_MATCH, new LangFunctionOperator( "MQL_SIZE_MATCH", Kind.MQL_SIZE_MATCH, PolyType.BOOLEAN ) ); - register( OperatorName.MQL_JSON_MATCH, new LangFunctionOperator( "MQL_JSON_MATCH", Kind.EQUALS ) ); + register( OperatorName.MQL_JSON_MATCH, new LangFunctionOperator( "MQL_JSON_MATCH", Kind.EQUALS, PolyType.BOOLEAN ) ); - register( OperatorName.MQL_REGEX_MATCH, new LangFunctionOperator( "MQL_REGEX_MATCH", Kind.MQL_REGEX_MATCH ) ); + register( OperatorName.MQL_REGEX_MATCH, new LangFunctionOperator( "MQL_REGEX_MATCH", Kind.MQL_REGEX_MATCH, PolyType.BOOLEAN ) ); - register( OperatorName.MQL_TYPE_MATCH, new LangFunctionOperator( "MQL_TYPE_MATCH", Kind.MQL_TYPE_MATCH ) ); + register( OperatorName.MQL_TYPE_MATCH, new LangFunctionOperator( "MQL_TYPE_MATCH", Kind.MQL_TYPE_MATCH, PolyType.BOOLEAN ) ); - register( OperatorName.MQL_QUERY_VALUE, new LangFunctionOperator( "MQL_QUERY_VALUE", Kind.MQL_QUERY_VALUE ) ); + register( OperatorName.MQL_QUERY_VALUE, new LangFunctionOperator( "MQL_QUERY_VALUE", Kind.MQL_QUERY_VALUE, PolyType.DOCUMENT ) ); - register( OperatorName.MQL_SLICE, new LangFunctionOperator( "MQL_SLICE", Kind.MQL_SLICE ) ); + register( OperatorName.MQL_SLICE, new LangFunctionOperator( "MQL_SLICE", Kind.MQL_SLICE, PolyType.DOCUMENT ) ); - register( OperatorName.MQL_ITEM, new LangFunctionOperator( "MQL_ITEM", Kind.MQL_ITEM ) ); + register( OperatorName.MQL_ITEM, new LangFunctionOperator( "MQL_ITEM", Kind.MQL_ITEM, PolyType.DOCUMENT ) ); - register( OperatorName.MQL_ADD_FIELDS, new LangFunctionOperator( "MQL_ADD_FIELDS", Kind.MQL_ADD_FIELDS ) ); + register( OperatorName.MQL_ADD_FIELDS, new LangFunctionOperator( "MQL_ADD_FIELDS", Kind.MQL_ADD_FIELDS, PolyType.DOCUMENT ) ); - register( OperatorName.MQL_UPDATE_MIN, new LangFunctionOperator( "MQL_UPDATE_MIN", Kind.MIN ) ); + register( OperatorName.MQL_UPDATE_MIN, new LangFunctionOperator( "MQL_UPDATE_MIN", Kind.MIN, PolyType.DOCUMENT ) ); - register( OperatorName.MQL_UPDATE_MAX, new LangFunctionOperator( "MQL_UPDATE_MAX", Kind.MAX ) ); + register( OperatorName.MQL_UPDATE_MAX, new LangFunctionOperator( "MQL_UPDATE_MAX", Kind.MAX, PolyType.DOCUMENT ) ); - register( OperatorName.MQL_UPDATE_ADD_TO_SET, new LangFunctionOperator( "MQL_UPDATE_ADD_TO_SET", Kind.MQL_ADD_FIELDS ) ); + register( OperatorName.MQL_UPDATE_ADD_TO_SET, new LangFunctionOperator( "MQL_UPDATE_ADD_TO_SET", Kind.MQL_ADD_FIELDS, PolyType.DOCUMENT ) ); - register( OperatorName.MQL_UPDATE_RENAME, new LangFunctionOperator( "MQL_UPDATE_RENAME", Kind.MQL_UPDATE_RENAME ) ); + register( OperatorName.MQL_UPDATE_RENAME, new LangFunctionOperator( "MQL_UPDATE_RENAME", Kind.MQL_UPDATE_RENAME, PolyType.DOCUMENT ) ); - register( OperatorName.MQL_UPDATE_REPLACE, new LangFunctionOperator( "MQL_UPDATE_REPLACE", Kind.MQL_UPDATE_REPLACE ) ); + register( OperatorName.MQL_UPDATE_REPLACE, new LangFunctionOperator( "MQL_UPDATE_REPLACE", Kind.MQL_UPDATE_REPLACE, PolyType.DOCUMENT ) ); - register( OperatorName.MQL_REMOVE, new LangFunctionOperator( "MQL_UPDATE_REMOVE", Kind.MQL_UPDATE_REMOVE ) ); + register( OperatorName.MQL_REMOVE, new LangFunctionOperator( "MQL_UPDATE_REMOVE", Kind.MQL_UPDATE_REMOVE, PolyType.DOCUMENT ) ); - register( OperatorName.MQL_UPDATE, new 
LangFunctionOperator( "MQL_UPDATE", Kind.MQL_UPDATE ) ); + register( OperatorName.MQL_UPDATE, new LangFunctionOperator( "MQL_UPDATE", Kind.MQL_UPDATE, PolyType.DOCUMENT ) ); - register( OperatorName.MQL_ELEM_MATCH, new LangFunctionOperator( "MQL_ELEM_MATCH", Kind.MQL_ELEM_MATCH ) ); + register( OperatorName.MQL_ELEM_MATCH, new LangFunctionOperator( "MQL_ELEM_MATCH", Kind.MQL_ELEM_MATCH, PolyType.BOOLEAN ) ); - register( OperatorName.MQL_UNWIND, new LangFunctionOperator( "UNWIND", Kind.UNWIND ) ); + register( OperatorName.MQL_UNWIND, new LangFunctionOperator( "UNWIND", Kind.UNWIND, PolyType.DOCUMENT ) ); - register( OperatorName.MQL_EXISTS, new LangFunctionOperator( "MQL_EXISTS", Kind.MQL_EXISTS ) ); + register( OperatorName.MQL_EXISTS, new LangFunctionOperator( "MQL_EXISTS", Kind.MQL_EXISTS, PolyType.BOOLEAN ) ); - register( OperatorName.MQL_LT, new LangFunctionOperator( "MQL_LT", Kind.LESS_THAN ) ); + register( OperatorName.MQL_LT, new LangFunctionOperator( "MQL_LT", Kind.LESS_THAN, PolyType.BOOLEAN ) ); - register( OperatorName.MQL_GT, new LangFunctionOperator( "MQL_GT", Kind.GREATER_THAN ) ); + register( OperatorName.MQL_GT, new LangFunctionOperator( "MQL_GT", Kind.GREATER_THAN, PolyType.BOOLEAN ) ); - register( OperatorName.MQL_LTE, new LangFunctionOperator( "MQL_LTE", Kind.LESS_THAN_OR_EQUAL ) ); + register( OperatorName.MQL_LTE, new LangFunctionOperator( "MQL_LTE", Kind.LESS_THAN_OR_EQUAL, PolyType.BOOLEAN ) ); - register( OperatorName.MQL_GTE, new LangFunctionOperator( "MQL_GTE", Kind.GREATER_THAN_OR_EQUAL ) ); + register( OperatorName.MQL_GTE, new LangFunctionOperator( "MQL_GTE", Kind.GREATER_THAN_OR_EQUAL, PolyType.BOOLEAN ) ); - register( OperatorName.MQL_NOT_UNSET, new LangFunctionOperator( "MQL_NOT_UNSET", Kind.OTHER ) ); + register( OperatorName.MQL_NOT_UNSET, new LangFunctionOperator( "MQL_NOT_UNSET", Kind.OTHER, PolyType.DOCUMENT ) ); - register( OperatorName.MQL_MERGE, new LangFunctionOperator( OperatorName.MQL_MERGE.name(), Kind.OTHER ) ); + register( OperatorName.MQL_MERGE, new LangFunctionOperator( OperatorName.MQL_MERGE.name(), Kind.OTHER, PolyType.DOCUMENT ) ); - register( OperatorName.MQL_REPLACE_ROOT, new LangFunctionOperator( OperatorName.MQL_REPLACE_ROOT.name(), Kind.OTHER ) ); + register( OperatorName.MQL_REPLACE_ROOT, new LangFunctionOperator( OperatorName.MQL_REPLACE_ROOT.name(), Kind.OTHER, PolyType.DOCUMENT ) ); - register( OperatorName.MQL_PROJECT_INCLUDES, new LangFunctionOperator( OperatorName.MQL_PROJECT_INCLUDES.name(), Kind.OTHER ) ); + register( OperatorName.MQL_PROJECT_INCLUDES, new LangFunctionOperator( OperatorName.MQL_PROJECT_INCLUDES.name(), Kind.OTHER, PolyType.DOCUMENT ) ); register( OperatorName.DESERIALIZE, new DeserializeFunctionOperator( "DESERIALIZE_DOC" ) ); - register( OperatorName.EXTRACT_NAME, new LangFunctionOperator( "EXTRACT_NAME", Kind.EXTRACT ) ); + register( OperatorName.EXTRACT_NAME, new LangFunctionOperator( "EXTRACT_NAME", Kind.EXTRACT, PolyType.DOCUMENT ) ); - register( OperatorName.REMOVE_NAMES, new LangFunctionOperator( "REMOVE_NAMES", Kind.EXTRACT ) ); + register( OperatorName.REMOVE_NAMES, new LangFunctionOperator( "REMOVE_NAMES", Kind.EXTRACT, PolyType.DOCUMENT ) ); - register( OperatorName.PLUS, new LangFunctionOperator( OperatorName.PLUS.name(), Kind.PLUS ) ); + register( OperatorName.PLUS, new LangFunctionOperator( OperatorName.PLUS.name(), Kind.PLUS, PolyType.DOCUMENT ) ); - register( OperatorName.MINUS, new LangFunctionOperator( OperatorName.MINUS.name(), Kind.MINUS ) ); + register( OperatorName.MINUS, new 
LangFunctionOperator( OperatorName.MINUS.name(), Kind.MINUS, PolyType.DOCUMENT ) ); isInit = true; } diff --git a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java index 4998139d05..edc8372c9f 100644 --- a/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java +++ b/plugins/mql-language/src/main/java/org/polypheny/db/languages/mql2alg/MqlToAlgConverter.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -1664,7 +1664,7 @@ private RexNode convertExpr( BsonValue bsonValue, String parentKey, AlgDataType */ private RexNode convertJsonSchema( BsonValue bsonValue, AlgDataType rowType ) { if ( bsonValue.isDocument() ) { - return new RexCall( nullableAny, OperatorRegistry.get( QueryLanguage.from( MONGO ), OperatorName.MQL_JSON_MATCH ), Collections.singletonList( RexIndexRef.of( getIndexOfParentField( "d", rowType ), rowType ) ) ); + return new RexCall( nullableAny, OperatorRegistry.get( QueryLanguage.from( MONGO ), OperatorName.MQL_JSON_MATCH ), Collections.singletonList( RexIndexRef.of( getIndexOfParentField( DocumentType.DOCUMENT_FIELD, rowType ), rowType ) ) ); } else { throw new GenericRuntimeException( "After $jsonSchema there needs to follow a document" ); } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlLanguagePlugin.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlLanguagePlugin.java index ec725ebbb8..e387aafca2 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlLanguagePlugin.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/SqlLanguagePlugin.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -2519,12 +2519,12 @@ public void unparse( SqlWriter writer, SqlCall call, int leftPrec, int rightPrec */ register( OperatorName.CROSS_MODEL_ITEM, - new LangFunctionOperator( OperatorName.CROSS_MODEL_ITEM.name(), Kind.CROSS_MODEL_ITEM ) ); + new LangFunctionOperator( OperatorName.CROSS_MODEL_ITEM.name(), Kind.CROSS_MODEL_ITEM, PolyType.DOCUMENT ) ); /* * Operator which transforms a value to JSON. */ - register( OperatorName.TO_JSON, new LangFunctionOperator( OperatorName.TO_JSON.name(), Kind.OTHER ) ); + register( OperatorName.TO_JSON, new LangFunctionOperator( OperatorName.TO_JSON.name(), Kind.OTHER, PolyType.TEXT ) ); // GEO functions register( OperatorName.ST_GEOMFROMTEXT, new SqlStGeomFromText() ); diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlAggFunction.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlAggFunction.java index dba428b82c..60fc6e0e1b 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlAggFunction.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlAggFunction.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
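The hunks above give every LangFunctionOperator an explicit PolyType return type: boolean-valued predicates such as MQL_EXISTS or MQL_JSON_MATCH declare BOOLEAN, document-producing operators declare DOCUMENT, and TO_JSON declares TEXT. The following is a hedged sketch of how such a registered operator is looked up and wrapped into a RexCall, in the style of the MqlToAlgConverter hunk above; the class name, method name and the booleanType parameter are illustrative, while the registry calls and import paths are taken from elsewhere in this diff.

// Hedged sketch, not Polypheny source: looks up a registered MQL operator and builds a
// boolean RexCall with it, mirroring the pattern used in MqlToAlgConverter above.
import java.util.List;
import org.polypheny.db.algebra.operators.OperatorName;
import org.polypheny.db.algebra.type.AlgDataType;
import org.polypheny.db.languages.OperatorRegistry;
import org.polypheny.db.languages.QueryLanguage;
import org.polypheny.db.nodes.Operator;
import org.polypheny.db.rex.RexCall;
import org.polypheny.db.rex.RexNode;

final class MqlPredicateSketch {

    // booleanType should be a (nullable) BOOLEAN AlgDataType created by the caller,
    // matching the PolyType.BOOLEAN the operator now declares at registration time.
    static RexNode existsPredicate( AlgDataType booleanType, RexNode field ) {
        Operator operator = OperatorRegistry.get( QueryLanguage.from( "mongo" ), OperatorName.MQL_EXISTS );
        return new RexCall( booleanType, operator, List.of( field ) );
    }

}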
@@ -46,7 +46,7 @@ public abstract class SqlAggFunction extends SqlFunction implements Context, Agg /** * Creates a built-in or user-defined SqlAggFunction or window function. - * + * <p>
    * A user-defined function will have a value for {@code sqlIdentifier}; for a built-in function it will be null. */ protected SqlAggFunction( diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlFunction.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlFunction.java index 7f19554519..908169ee73 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlFunction.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/SqlFunction.java @@ -21,6 +21,7 @@ import java.util.List; import java.util.Objects; import javax.annotation.Nonnull; +import lombok.Getter; import org.apache.calcite.linq4j.function.Functions; import org.polypheny.db.algebra.constant.FunctionCategory; import org.polypheny.db.algebra.constant.Kind; @@ -46,8 +47,20 @@ public class SqlFunction extends SqlOperator implements Function { private final FunctionCategory category; + /** + * -- GETTER -- + * + * @return fully qualified name of function, or null for a builtin function + */ + @Getter private final SqlIdentifier sqlIdentifier; + /** + * -- GETTER -- + * + * @return List of parameter types, or null for builtin function + */ + @Getter private final List paramTypes; @@ -124,14 +137,6 @@ public SqlSyntax getSqlSyntax() { } - /** - * @return fully qualified name of function, or null for a builtin function - */ - public SqlIdentifier getSqlIdentifier() { - return sqlIdentifier; - } - - @Override public SqlIdentifier getNameAsId() { if ( sqlIdentifier != null ) { @@ -141,14 +146,6 @@ public SqlIdentifier getNameAsId() { } - /** - * @return List of parameter types, or null for builtin function - */ - public List getParamTypes() { - return paramTypes; - } - - /** * Returns a list of parameter names. * diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 0a08993d50..f8599042ae 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
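The SqlFunction change above swaps the hand-written getSqlIdentifier() and getParamTypes() accessors for Lombok's @Getter and moves their javadoc into a "-- GETTER --" section that Lombok copies onto the generated getter. A minimal, self-contained sketch of that pattern, using a hypothetical FunctionMetadata class rather than the real SqlFunction:

// Minimal sketch of the Lombok pattern; FunctionMetadata and its fields are illustrative.
import java.util.List;
import lombok.Getter;

class FunctionMetadata {

    /**
     * -- GETTER --
     *
     * @return fully qualified name of the function, or null for a built-in function
     */
    @Getter
    private final String qualifiedName;

    /**
     * -- GETTER --
     *
     * @return list of parameter type names, or null for a built-in function
     */
    @Getter
    private final List<String> paramTypeNames;


    FunctionMetadata( String qualifiedName, List<String> paramTypeNames ) {
        this.qualifiedName = qualifiedName;
        this.paramTypeNames = paramTypeNames;
    }

}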
@@ -18,6 +18,7 @@ import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.gson.Gson; @@ -62,7 +63,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; -import java.util.stream.IntStream; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; import javax.servlet.MultipartConfigElement; @@ -74,8 +74,6 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.eclipse.jetty.websocket.api.Session; -import org.polypheny.db.PolyImplementation; -import org.polypheny.db.ResultIterator; import org.polypheny.db.adapter.AbstractAdapterSetting; import org.polypheny.db.adapter.AbstractAdapterSettingDirectory; import org.polypheny.db.adapter.Adapter; @@ -88,14 +86,8 @@ import org.polypheny.db.adapter.DataStore.FunctionalIndexInfo; import org.polypheny.db.adapter.index.IndexManager; import org.polypheny.db.adapter.java.AdapterTemplate; -import org.polypheny.db.algebra.AlgCollation; -import org.polypheny.db.algebra.AlgCollations; import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.AlgRoot; -import org.polypheny.db.algebra.constant.Kind; -import org.polypheny.db.algebra.core.Sort; -import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.algebra.polyalg.PolyAlgRegistry; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.LogicalAdapter; import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType; @@ -120,11 +112,9 @@ import org.polypheny.db.catalog.logistic.ForeignKeyOption; import org.polypheny.db.catalog.logistic.NameGenerator; import org.polypheny.db.catalog.logistic.PartitionType; -import org.polypheny.db.catalog.logistic.PlacementType; import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; -import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.docker.AutoDocker; import org.polypheny.db.docker.DockerInstance; import org.polypheny.db.docker.DockerManager; @@ -148,6 +138,7 @@ import org.polypheny.db.information.InformationPage; import org.polypheny.db.information.InformationText; import org.polypheny.db.languages.LanguageManager; +import org.polypheny.db.languages.NodeParseException; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.monitoring.events.StatementEvent; import org.polypheny.db.partition.PartitionFunctionInfo; @@ -204,7 +195,6 @@ import org.polypheny.db.webui.models.catalog.PolyTypeModel; import org.polypheny.db.webui.models.catalog.SnapshotModel; import org.polypheny.db.webui.models.catalog.UiColumnDefinition; -import org.polypheny.db.webui.models.requests.AlgRequest; import org.polypheny.db.webui.models.requests.BatchUpdateRequest; import org.polypheny.db.webui.models.requests.BatchUpdateRequest.Update; import org.polypheny.db.webui.models.requests.ColumnRequest; @@ -212,6 +202,7 @@ import org.polypheny.db.webui.models.requests.EditTableRequest; import org.polypheny.db.webui.models.requests.PartitioningRequest; import org.polypheny.db.webui.models.requests.PartitioningRequest.ModifyPartitionRequest; +import org.polypheny.db.webui.models.requests.PolyAlgRequest; import org.polypheny.db.webui.models.requests.UIRequest; import 
org.polypheny.db.webui.models.results.RelationalResult; import org.polypheny.db.webui.models.results.RelationalResult.RelationalResultBuilder; @@ -2362,199 +2353,6 @@ void addForeignKey( final Context ctx ) { } - // helper for relAlg materialized View - private TimeUnit getFreshnessType( String freshnessId ) { - return switch ( freshnessId ) { - case "min", "minutes" -> TimeUnit.MINUTES; - case "hours" -> TimeUnit.HOURS; - case "sec", "seconds" -> TimeUnit.SECONDS; - case "days", "day" -> TimeUnit.DAYS; - case "millisec", "milliseconds" -> TimeUnit.MILLISECONDS; - default -> TimeUnit.MINUTES; - }; - } - - - /** - * Execute a logical plan coming from the Web-Ui plan builder - */ - RelationalResult executeAlg( final AlgRequest request, Session session ) { - Transaction transaction = getTransaction( request.analyze, request.useCache, this ); - transaction.getQueryAnalyzer().setSession( session ); - - Statement statement = transaction.createStatement(); - long executionTime = 0; - long temp = 0; - - InformationManager queryAnalyzer = transaction.getQueryAnalyzer().observe( this ); - - AlgNode result; - try { - temp = System.nanoTime(); - result = QueryPlanBuilder.buildFromTree( request.topNode, statement ); - } catch ( Exception e ) { - log.error( "Caught exception while building the plan builder tree", e ); - return RelationalResult.builder().error( e.getMessage() ).build(); - } - - // Wrap {@link AlgNode} into a AlgRoot - final AlgDataType rowType = result.getTupleType(); - final List> fields = Pair.zip( IntStream.range( 0, rowType.getFieldCount() ).boxed().toList(), rowType.getFieldNames() ); - final AlgCollation collation = - result instanceof Sort - ? ((Sort) result).collation - : AlgCollations.EMPTY; - AlgRoot root = new AlgRoot( result, result.getTupleType(), Kind.SELECT, fields, collation ); - - // Prepare - PolyImplementation polyImplementation = statement.getQueryProcessor().prepareQuery( root, true ); - - if ( request.createView ) { - - String viewName = request.viewName; - boolean replace = false; - String viewType; - - if ( request.freshness != null ) { - viewType = "Materialized View"; - DataStore store = AdapterManager.getInstance().getStore( request.store ).orElseThrow(); - List> stores = new ArrayList<>(); - stores.add( store ); - - PlacementType placementType = PlacementType.MANUAL; - - List columns = new ArrayList<>(); - root.alg.getTupleType().getFields().forEach( f -> columns.add( f.getName() ) ); - - // Default Namespace - long namespaceId = transaction.getDefaultNamespace().id; - - MaterializedCriteria materializedCriteria; - if ( request.freshness.toUpperCase().equalsIgnoreCase( CriteriaType.INTERVAL.toString() ) ) { - materializedCriteria = new MaterializedCriteria( CriteriaType.INTERVAL, Integer.parseInt( request.interval ), getFreshnessType( request.timeUnit ) ); - } else if ( request.freshness.toUpperCase().equalsIgnoreCase( CriteriaType.UPDATE.toString() ) ) { - materializedCriteria = new MaterializedCriteria( CriteriaType.UPDATE, Integer.parseInt( request.interval ) ); - } else if ( request.freshness.toUpperCase().equalsIgnoreCase( CriteriaType.MANUAL.toString() ) ) { - materializedCriteria = new MaterializedCriteria( CriteriaType.MANUAL ); - } else { - materializedCriteria = new MaterializedCriteria(); - } - - Gson gson = new Gson(); - - DdlManager.getInstance().createMaterializedView( - viewName, - namespaceId, - root, - replace, - statement, - stores, - placementType, - columns, - materializedCriteria, - gson.toJson( request.topNode ), - QueryLanguage.from( 
"rel" ), - false, - false - ); - } else { - viewType = "View"; - List> store = null; - PlacementType placementType = PlacementType.AUTOMATIC; - - List columns = new ArrayList<>(); - root.alg.getTupleType().getFields().forEach( f -> columns.add( f.getName() ) ); - - // Default Namespace - long namespaceId = transaction.getDefaultNamespace().id; - - Gson gson = new Gson(); - - DdlManager.getInstance().createView( - viewName, - namespaceId, - root.alg, - root.collation, - replace, - statement, - placementType, - columns, - gson.toJson( request.topNode ), - QueryLanguage.from( "rel" ) - ); - } - try { - transaction.commit(); - } catch ( TransactionException e ) { - String error = "Caught exception while creating View from Planbuilder. " + e; - - transaction.rollback( error ); - throw e; - } - - return RelationalResult.builder().query( "Created " + viewType + " \"" + viewName + "\" from logical query plan" ).build(); - } - - List> rows; - try { - ResultIterator iterator = polyImplementation.execute( statement, getPageSize() ); - rows = iterator.getNextBatch(); - iterator.close(); - } catch ( Exception e ) { - log.error( "Caught exception while iterating the plan builder tree", e ); - return RelationalResult.builder().error( e.getMessage() ).build(); - } - - UiColumnDefinition[] header = new UiColumnDefinition[polyImplementation.getTupleType().getFieldCount()]; - int counter = 0; - for ( AlgDataTypeField col : polyImplementation.getTupleType().getFields() ) { - header[counter++] = UiColumnDefinition.builder() - .name( col.getName() ) - .dataType( col.getType().getFullTypeString() ) - .nullable( col.getType().isNullable() ) - .precision( col.getType().getPrecision() ).build(); - } - - List data = LanguageCrud.computeResultData( rows, List.of( header ), statement.getTransaction() ); - - try { - executionTime += System.nanoTime() - temp; - transaction.commit(); - } catch ( TransactionException e ) { - String error = "Caught exception while iterating the plan builder tree. " + e; - - transaction.rollback( error ); - throw e; - } - RelationalResult finalResult = RelationalResult.builder() - .header( header ) - .data( data.toArray( new String[0][] ) ) - .xid( transaction.getXid().toString() ) - .query( "Execute logical query plan" ) - .build(); - - if ( queryAnalyzer != null ) { - InformationPage p1 = new InformationPage( "Query analysis", "Analysis of the query." 
); - InformationGroup g1 = new InformationGroup( p1, "Execution time" ); - InformationText text; - if ( executionTime < 1e4 ) { - text = new InformationText( g1, String.format( "Execution time: %d nanoseconds", executionTime ) ); - } else { - long millis = TimeUnit.MILLISECONDS.convert( executionTime, TimeUnit.NANOSECONDS ); - // format time: see: https://stackoverflow.com/questions/625433/how-to-convert-milliseconds-to-x-mins-x-seconds-in-java#answer-625444 - DateFormat df = new SimpleDateFormat( "m 'min' s 'sec' S 'ms'" ); - String durationText = df.format( new Date( millis ) ); - text = new InformationText( g1, String.format( "Execution time: %s", durationText ) ); - } - queryAnalyzer.addPage( p1 ); - queryAnalyzer.addGroup( g1 ); - queryAnalyzer.registerInformation( text ); - } - - return finalResult; - } - - /** * Create or drop a namespace */ @@ -2927,6 +2725,29 @@ public static Transaction getTransaction( boolean analyze, boolean useCache, Cru } + public void getPolyAlgRegistry( Context ctx ) { + ctx.json( PolyAlgRegistry.serialize() ); + } + + + /** + * @return a serialized version of the plan built from the given polyAlgRequest + * @throws NodeParseException if the parser is not able to construct the intermediary PolyAlgNode tree + * @throws RuntimeException if polyAlg cannot be parsed into a valid AlgNode tree + */ + public void buildPlanFromPolyAlg( final Context ctx ) { + PolyAlgRequest request = ctx.bodyAsClass( PolyAlgRequest.class ); + try { + AlgNode node = PolyPlanBuilder.buildFromPolyAlg( request.polyAlg, request.planType ).alg; + ctx.json( node.serializePolyAlgebra( new ObjectMapper() ) ); + } catch ( Exception e ) { + //e.printStackTrace(); + ctx.json( Map.of( "errorMsg", e.getMessage() ) ); + ctx.status( 400 ); + } + } + + void createDockerInstance( final Context ctx ) { try { CreateDockerRequest req = ctx.bodyAsClass( CreateDockerRequest.class ); @@ -2941,9 +2762,12 @@ void createDockerInstance( final Context ctx ) { ); ctx.json( new CreateDockerResponse( res.orElse( null ), DockerManager.getInstance().getDockerInstancesMap() ) ); - } catch ( DockerUserException e ) { + } catch ( + DockerUserException e ) { ctx.status( e.getStatus() ).result( e.getMessage() ); } + + } diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index c164835ae8..aa6373a9d6 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
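The two handlers above back the routes added to HttpServer right below: GET /getPolyAlgRegistry returns the serialized operator registry, and POST /buildPolyPlan parses a textual PolyAlgebra plan and answers with its serialized form, or with status 400 and an errorMsg field if parsing fails. A hedged client-side sketch using the JDK HttpClient; the host, port, plan string and the "LOGICAL" plan type value are assumptions, only the paths and the polyAlg/planType request fields come from this diff.

// Illustrative client for the new Web-UI backend routes; the address is an assumption.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class PolyAlgEndpointClient {

    public static void main( String[] args ) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        String base = "http://localhost:7659"; // assumed host and port of the Polypheny HTTP server

        // Fetch the serialized PolyAlgRegistry used by the UI plan editor.
        HttpRequest registry = HttpRequest.newBuilder( URI.create( base + "/getPolyAlgRegistry" ) ).GET().build();
        System.out.println( client.send( registry, HttpResponse.BodyHandlers.ofString() ).body() );

        // Build a plan from a textual PolyAlgebra expression; the plan string and the
        // "LOGICAL" plan type are placeholders for whatever the UI would actually send.
        String body = "{\"polyAlg\": \"<serialized PolyAlgebra plan>\", \"planType\": \"LOGICAL\"}";
        HttpRequest build = HttpRequest.newBuilder( URI.create( base + "/buildPolyPlan" ) )
                .header( "Content-Type", "application/json" )
                .POST( HttpRequest.BodyPublishers.ofString( body ) )
                .build();
        HttpResponse<String> response = client.send( build, HttpResponse.BodyHandlers.ofString() );
        // Status 400 with an {"errorMsg": ...} body means the PolyAlg could not be parsed.
        System.out.println( response.statusCode() + ": " + response.body() );
    }

}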
@@ -353,6 +353,10 @@ private void attachRoutes( Javalin webuiServer, Crud crud ) { webuiServer.get( "/product", ctx -> ctx.result( "Polypheny-DB" ) ); + webuiServer.get( "/getPolyAlgRegistry", crud::getPolyAlgRegistry ); + + webuiServer.post( "/buildPolyPlan", crud::buildPlanFromPolyAlg ); + webuiServer.get( "/isReady", ctx -> ctx.result( String.valueOf( isReady ) ) ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/PolyPlanBuilder.java b/webui/src/main/java/org/polypheny/db/webui/PolyPlanBuilder.java new file mode 100644 index 0000000000..3f5a5cdf1e --- /dev/null +++ b/webui/src/main/java/org/polypheny/db/webui/PolyPlanBuilder.java @@ -0,0 +1,152 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.webui; + +import java.util.ArrayList; +import java.util.List; +import org.apache.commons.lang3.NotImplementedException; +import org.polypheny.db.algebra.AlgRoot; +import org.polypheny.db.algebra.polyalg.parser.PolyAlgParser; +import org.polypheny.db.algebra.polyalg.parser.PolyAlgToAlgConverter; +import org.polypheny.db.algebra.polyalg.parser.nodes.PolyAlgNode; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.AlgDataTypeFactory; +import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.catalog.snapshot.Snapshot; +import org.polypheny.db.information.InformationPolyAlg.PlanType; +import org.polypheny.db.languages.NodeParseException; +import org.polypheny.db.plan.AlgCluster; +import org.polypheny.db.plan.volcano.VolcanoPlanner; +import org.polypheny.db.rex.RexBuilder; +import org.polypheny.db.transaction.Statement; +import org.polypheny.db.type.PolyType; +import org.polypheny.db.type.entity.PolyBoolean; +import org.polypheny.db.type.entity.PolyString; +import org.polypheny.db.type.entity.PolyValue; +import org.polypheny.db.type.entity.numerical.PolyBigDecimal; +import org.polypheny.db.type.entity.numerical.PolyDouble; +import org.polypheny.db.type.entity.numerical.PolyFloat; +import org.polypheny.db.type.entity.numerical.PolyInteger; +import org.polypheny.db.type.entity.numerical.PolyLong; +import org.polypheny.db.type.entity.temporal.PolyDate; +import org.polypheny.db.type.entity.temporal.PolyTime; +import org.polypheny.db.type.entity.temporal.PolyTimestamp; +import org.polypheny.db.util.Pair; + +public class PolyPlanBuilder { + + private PolyPlanBuilder() { + // This is a utility class + } + + + /** + * Creates a AlgNode tree from the given PolyAlg representation + * + * @param polyAlg string representing the AlgNode tree serialized as PolyAlg + * @param statement transaction statement + * @return AlgRoot with {@code AlgRoot.alg} being the top node of tree + * @throws NodeParseException if the parser is not able to construct the intermediary PolyAlgNode tree + * @throws RuntimeException if polyAlg cannot be parsed into a valid AlgNode tree + */ + public static AlgRoot 
buildFromPolyAlg( String polyAlg, PlanType planType, Statement statement ) throws NodeParseException { + Snapshot snapshot = statement.getTransaction().getSnapshot(); + RexBuilder rexBuilder = new RexBuilder( statement.getTransaction().getTypeFactory() ); + AlgCluster cluster = AlgCluster.create( statement.getQueryProcessor().getPlanner(), rexBuilder, null, snapshot ); + return buildFromPolyAlg( polyAlg, planType, snapshot, cluster ); + } + + + /** + * Creates an AlgNode tree from the given PolyAlg representation that is not related to a statement. + * + * @param polyAlg string representing the AlgNode tree serialized as PolyAlg + * @return AlgRoot with {@code AlgRoot.alg} being the top node of the tree + * @throws NodeParseException if the parser is not able to construct the intermediary PolyAlgNode tree + * @throws RuntimeException if polyAlg cannot be parsed into a valid AlgNode tree + */ + public static AlgRoot buildFromPolyAlg( String polyAlg, PlanType planType ) throws NodeParseException { + Snapshot snapshot = Catalog.snapshot(); + AlgCluster cluster = AlgCluster.create( + new VolcanoPlanner(), new RexBuilder( AlgDataTypeFactory.DEFAULT ), null, snapshot ); + return buildFromPolyAlg( polyAlg, planType, snapshot, cluster ); + } + + + private static AlgRoot buildFromPolyAlg( String polyAlg, PlanType planType, Snapshot snapshot, AlgCluster cluster ) throws NodeParseException { + PolyAlgToAlgConverter converter = new PolyAlgToAlgConverter( planType, snapshot, cluster ); + + PolyAlgParser parser = PolyAlgParser.create( polyAlg ); + PolyAlgNode node = (PolyAlgNode) parser.parseQuery(); + return converter.convert( node ); + + } + + + public static Pair<List<PolyValue>, List<AlgDataType>> translateDynamicParams( List<String> vals, List<String> types ) { + if ( vals.size() != types.size() ) { + throw new GenericRuntimeException( "Number of values does not match number of types!" ); + } + + AlgDataTypeFactory factory = AlgDataTypeFactory.DEFAULT; + List<PolyValue> translatedVals = new ArrayList<>(); + List<AlgDataType> translatedTypes = new ArrayList<>(); + for ( int i = 0; i < vals.size(); i++ ) { + String s = vals.get( i ); + AlgDataType t = convertType( types.get( i ), factory ); + + PolyValue value = switch ( t.getPolyType() ) { + case BOOLEAN -> PolyBoolean.of( Boolean.parseBoolean( s ) ); + case TINYINT, SMALLINT, INTEGER -> PolyInteger.of( Integer.parseInt( s ) ); + case BIGINT -> PolyLong.of( Long.parseLong( s ) ); + case DECIMAL -> PolyBigDecimal.of( s ); + case FLOAT, REAL -> PolyFloat.of( Float.parseFloat( s ) ); + case DOUBLE -> PolyDouble.of( Double.parseDouble( s ) ); + case DATE -> PolyDate.of( Long.parseLong( s ) ); + case TIME -> PolyTime.of( Long.parseLong( s ) ); + case TIMESTAMP -> PolyTimestamp.of( Long.parseLong( s ) ); + case CHAR, VARCHAR -> PolyString.of( s ); + default -> throw new NotImplementedException(); + }; + translatedVals.add( value ); + translatedTypes.add( t ); + + + } + return Pair.of( translatedVals, translatedTypes ); + } + + + private static AlgDataType convertType( String t, AlgDataTypeFactory factory ) { + //e.g.
t = "CHAR(5)" + String[] parts = t.split( "\\(" ); + PolyType type = PolyType.valueOf( parts[0] ); + if ( parts.length == 1 ) { + return factory.createPolyType( type ); + } + String[] args = parts[1].substring( 0, parts[1].length() - 1 ).split( "," ); + + return switch ( args.length ) { + case 1 -> factory.createPolyType( type, Integer.parseInt( args[0].trim() ) ); + case 2 -> factory.createPolyType( type, Integer.parseInt( args[0].trim() ), Integer.parseInt( args[1].trim() ) ); + default -> throw new GenericRuntimeException( "Unexpected number of type arguments: " + args.length ); + }; + + } + +} diff --git a/webui/src/main/java/org/polypheny/db/webui/QueryPlanBuilder.java b/webui/src/main/java/org/polypheny/db/webui/QueryPlanBuilder.java deleted file mode 100644 index fc1e37c0ff..0000000000 --- a/webui/src/main/java/org/polypheny/db/webui/QueryPlanBuilder.java +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Copyright 2019-2024 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.webui; - - -import com.fasterxml.jackson.core.JsonProcessingException; -import java.util.ArrayList; -import java.util.List; -import org.apache.commons.lang3.math.NumberUtils; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.operators.OperatorName; -import org.polypheny.db.languages.OperatorRegistry; -import org.polypheny.db.nodes.Operator; -import org.polypheny.db.rex.RexNode; -import org.polypheny.db.tools.AlgBuilder; -import org.polypheny.db.transaction.Statement; -import org.polypheny.db.webui.models.SortDirection; -import org.polypheny.db.webui.models.SortState; -import org.polypheny.db.webui.models.UIAlgNode; - - -public class QueryPlanBuilder { - - private QueryPlanBuilder() { - // This is a utility class - } - - - private static AlgBuilder createAlgBuilder( final Statement statement ) { - return AlgBuilder.create( statement ); - } - - - /** - * Build a tree using the AlgBuilder - * - * @param topNode top node from the tree from the user interface, with its children - * @param statement transaction - */ - public static AlgNode buildFromTree( final UIAlgNode topNode, final Statement statement ) { - AlgBuilder b = createAlgBuilder( statement ); - buildStep( b, topNode ); - return b.build(); - } - - - public static AlgNode buildFromJsonAlg( Statement statement, String json ) throws JsonProcessingException { - AlgBuilder b = createAlgBuilder( statement ); - return buildFromTree( HttpServer.mapper.readValue( json, UIAlgNode.class ), statement ); - } - - - /** - * Set up the{@link AlgBuilder} recursively - */ - private static AlgBuilder buildStep( AlgBuilder builder, final UIAlgNode node ) { - if ( node.children != null ) { - for ( UIAlgNode n : node.children ) { - builder = buildStep( builder, n ); - } - } - - String[] field1 = null; - String[] field2 = null; - if ( node.col1 != null ) { - field1 = node.col1.split( "\\." ); - } - if ( node.col2 != null ) { - field2 = node.col2.split( "\\." 
); - } - switch ( node.type ) { - case "Scan": - return builder.relScan( node.entityName.split( "\\." ) ).as( node.entityName.split( "\\." )[1] ); - case "Join": - return builder.join( node.join, builder.call( getOperator( node.operator ), builder.field( node.inputCount, field1[0], field1[1] ), builder.field( node.inputCount, field2[0], field2[1] ) ) ); - case "Filter": - String[] field = node.field.split( "\\." ); - if ( NumberUtils.isCreatable( node.filter ) ) { - Number filter; - double dbl = Double.parseDouble( node.filter ); - filter = dbl; - if ( dbl % 1 == 0 ) { - filter = Integer.parseInt( node.filter ); - } - return builder.filter( builder.call( getOperator( node.operator ), builder.field( node.inputCount, field[0], field[1] ), builder.literal( filter ) ) ); - } else { - return builder.filter( builder.call( getOperator( node.operator ), builder.field( node.inputCount, field[0], field[1] ), builder.literal( node.filter ) ) ); - } - case "Project": - List fields = getFields( node.fields, node.inputCount, builder ); - builder.project( fields ); - return builder; - case "Aggregate": - AlgBuilder.AggCall aggregation; - String[] aggFields = node.field.split( "\\." ); - aggregation = switch ( node.aggregation ) { - case "SUM" -> builder.sum( false, node.alias, builder.field( node.inputCount, aggFields[0], aggFields[1] ) ); - case "COUNT" -> builder.count( false, node.alias, builder.field( node.inputCount, aggFields[0], aggFields[1] ) ); - case "AVG" -> builder.avg( false, node.alias, builder.field( node.inputCount, aggFields[0], aggFields[1] ) ); - case "MAX" -> builder.max( node.alias, builder.field( node.inputCount, aggFields[0], aggFields[1] ) ); - case "MIN" -> builder.min( node.alias, builder.field( node.inputCount, aggFields[0], aggFields[1] ) ); - default -> throw new IllegalArgumentException( "unknown aggregate type" ); - }; - if ( node.groupBy == null || node.groupBy.equals( "" ) ) { - return builder.aggregate( builder.groupKey(), aggregation ); - } else { - return builder.aggregate( builder.groupKey( node.groupBy ), aggregation ); - } - case "Sort": - ArrayList columns = new ArrayList<>(); - for ( SortState s : node.sortColumns ) { - String[] sortField = s.column.split( "\\." ); - if ( s.direction == SortDirection.DESC ) { - columns.add( builder.desc( builder.field( node.inputCount, sortField[0], sortField[1] ) ) ); - } else { - columns.add( builder.field( node.inputCount, sortField[0], sortField[1] ) ); - } - } - return builder.sort( columns ); - case "Union": - return builder.union( node.all, node.inputCount ); - case "Minus": - return builder.minus( node.all ); - case "Intersect": - return builder.intersect( node.all, node.inputCount ); - default: - throw new IllegalArgumentException( "PlanBuilder node of type '" + node.type + "' is not supported yet." ); - } - } - - - private static List getFields( String[] fields, int inputCount, AlgBuilder builder ) { - List nodes = new ArrayList<>(); - for ( String f : fields ) { - if ( f.isEmpty() ) { - continue; - } - String[] field = f.split( "\\." 
); - nodes.add( builder.field( inputCount, field[0], field[1] ) ); - } - return nodes; - } - - - /** - * Parse an operator and return it as SqlOperator - * - * @param operator operator for a filter condition - * @return parsed operator as SqlOperator - */ - private static Operator getOperator( final String operator ) { - return switch ( operator ) { - case "=" -> OperatorRegistry.get( OperatorName.EQUALS ); - case "!=", "<>" -> OperatorRegistry.get( OperatorName.NOT_EQUALS ); - case "<" -> OperatorRegistry.get( OperatorName.LESS_THAN ); - case "<=" -> OperatorRegistry.get( OperatorName.LESS_THAN_OR_EQUAL ); - case ">" -> OperatorRegistry.get( OperatorName.GREATER_THAN ); - case ">=" -> OperatorRegistry.get( OperatorName.GREATER_THAN_OR_EQUAL ); - default -> throw new IllegalArgumentException( "Operator '" + operator + "' is not supported." ); - }; - } - -} diff --git a/webui/src/main/java/org/polypheny/db/webui/WebSocket.java b/webui/src/main/java/org/polypheny/db/webui/WebSocket.java index ceab839c40..5445b9021a 100644 --- a/webui/src/main/java/org/polypheny/db/webui/WebSocket.java +++ b/webui/src/main/java/org/polypheny/db/webui/WebSocket.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -32,18 +32,26 @@ import lombok.extern.slf4j.Slf4j; import org.eclipse.jetty.websocket.api.Session; import org.eclipse.jetty.websocket.api.annotations.OnWebSocketMessage; +import org.polypheny.db.algebra.AlgRoot; +import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.catalog.Catalog; import org.polypheny.db.catalog.entity.logical.LogicalNamespace; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.catalog.logistic.DataModel; +import org.polypheny.db.information.InformationPolyAlg.PlanType; import org.polypheny.db.languages.QueryLanguage; import org.polypheny.db.processing.QueryContext; +import org.polypheny.db.processing.QueryContext.PhysicalQueryContext; +import org.polypheny.db.processing.QueryContext.TranslatedQueryContext; import org.polypheny.db.transaction.PolyXid; +import org.polypheny.db.transaction.Statement; +import org.polypheny.db.transaction.Transaction; +import org.polypheny.db.type.entity.PolyValue; import org.polypheny.db.type.entity.graph.PolyGraph; import org.polypheny.db.util.Pair; import org.polypheny.db.webui.crud.LanguageCrud; -import org.polypheny.db.webui.models.requests.AlgRequest; import org.polypheny.db.webui.models.requests.GraphRequest; +import org.polypheny.db.webui.models.requests.PolyAlgRequest; import org.polypheny.db.webui.models.requests.QueryRequest; import org.polypheny.db.webui.models.requests.RegisterRequest; import org.polypheny.db.webui.models.requests.RequestModel; @@ -143,59 +151,97 @@ public void onMessage( final WsMessageContext ctx ) { ctx.send( results ); break; + case "PolyAlgRequest": + PolyAlgRequest polyAlgRequest = ctx.messageAsClass( PolyAlgRequest.class ); + Transaction transaction = crud.getTransactionManager().startTransaction( Catalog.defaultUserId, Catalog.defaultNamespaceId, true, POLYPHENY_UI ); + AlgRoot root; + Statement statement; + try { + statement = transaction.createStatement(); + root = PolyPlanBuilder.buildFromPolyAlg( polyAlgRequest.polyAlg, polyAlgRequest.planType, statement ); + } catch ( Exception e ) { + log.error( "Caught exception while building the plan builder tree", e 
); + ctx.send( RelationalResult.builder().error( e.getMessage() ).build() ); + break; + } + + QueryLanguage ql = switch ( polyAlgRequest.model ) { + case RELATIONAL -> QueryLanguage.from( "sql" ); + case DOCUMENT -> QueryLanguage.from( "mongo" ); + case GRAPH -> QueryLanguage.from( "cypher" ); + }; + QueryContext qc = QueryContext.builder() + .query( polyAlgRequest.polyAlg ) + .language( ql ) + .isAnalysed( true ) + .usesCache( true ) + .origin( POLYPHENY_UI ) + .batch( polyAlgRequest.noLimit ? -1 : crud.getPageSize() ) + .transactionManager( crud.getTransactionManager() ) + .transactions( List.of( transaction ) ) + .statement( statement ) + .informationTarget( i -> i.setSession( ctx.session ) ).build(); + + TranslatedQueryContext translated; + if ( polyAlgRequest.planType == PlanType.PHYSICAL ) { + Pair, List> dynamicParams; + try { + dynamicParams = PolyPlanBuilder.translateDynamicParams( polyAlgRequest.dynamicValues, polyAlgRequest.dynamicTypes ); + } catch ( Exception e ) { + log.error( "Caught exception while translating dynamic parameters:", e ); + ctx.send( RelationalResult.builder().error( e.getMessage() ).build() ); + break; + } + translated = PhysicalQueryContext.fromQuery( polyAlgRequest.polyAlg, root, dynamicParams.left, dynamicParams.right, qc ); + } else { + translated = TranslatedQueryContext.fromQuery( polyAlgRequest.polyAlg, root, polyAlgRequest.planType == PlanType.ALLOCATION, qc ); + } + List> polyAlgResults = LanguageCrud.anyQueryResult( translated, polyAlgRequest ); + ctx.send( polyAlgResults.get( 0 ) ); + break; + case "RegisterRequest": RegisterRequest registerRequest = ctx.messageAsClass( RegisterRequest.class ); crud.authCrud.register( registerRequest, ctx ); break; - case "RelAlgRequest": case "EntityRequest": Result result; - if ( request.type.equals( "RelAlgRequest" ) ) { - AlgRequest algRequest = ctx.messageAsClass( AlgRequest.class ); - try { - result = crud.executeAlg( algRequest, ctx.session ); - } catch ( Throwable t ) { - ctx.send( RelationalResult.builder().error( t.getMessage() ).build() ); - return; - } - } else {//TableRequest, is equal to UIRequest - UIRequest uiRequest = ctx.messageAsClass( UIRequest.class ); - try { - LogicalNamespace namespace = Catalog.snapshot().getNamespace( uiRequest.namespace ).orElse( null ); - result = switch ( namespace == null ? DataModel.RELATIONAL : namespace.dataModel ) { - case RELATIONAL -> crud.getTable( uiRequest ); - case DOCUMENT -> { - String entity = Catalog.snapshot().doc().getCollection( uiRequest.entityId ).map( c -> c.name ).orElse( "" ); - yield LanguageCrud.anyQueryResult( - QueryContext.builder() - .query( String.format( "db.%s.find({})", entity ) ) - .language( QueryLanguage.from( "mongo" ) ) - .origin( POLYPHENY_UI ) - .batch( uiRequest.noLimit ? -1 : crud.getPageSize() ) - .transactionManager( crud.getTransactionManager() ) - .informationTarget( i -> i.setSession( ctx.session ) ) - .namespaceId( namespace.id ) - .build(), uiRequest ).get( 0 ); - } - case GRAPH -> LanguageCrud.anyQueryResult( + UIRequest uiRequest = ctx.messageAsClass( UIRequest.class ); + try { + LogicalNamespace namespace = Catalog.getInstance().getSnapshot().getNamespace( uiRequest.namespace ).orElse( null ); + result = switch ( namespace == null ? 
DataModel.RELATIONAL : namespace.dataModel ) { + case RELATIONAL -> crud.getTable( uiRequest ); + case DOCUMENT -> { + String entity = Catalog.snapshot().doc().getCollection( uiRequest.entityId ).map( c -> c.name ).orElse( "" ); + yield LanguageCrud.anyQueryResult( QueryContext.builder() - .query( "MATCH (n) RETURN n" ) - .language( QueryLanguage.from( "cypher" ) ) + .query( String.format( "db.%s.find({})", entity ) ) + .language( QueryLanguage.from( "mongo" ) ) .origin( POLYPHENY_UI ) .batch( uiRequest.noLimit ? -1 : crud.getPageSize() ) - .namespaceId( namespace.id ) .transactionManager( crud.getTransactionManager() ) .informationTarget( i -> i.setSession( ctx.session ) ) + .namespaceId( namespace.id ) .build(), uiRequest ).get( 0 ); - }; - if ( result == null ) { - throw new GenericRuntimeException( "Could not load data." ); } - - } catch ( Throwable t ) { - ctx.send( RelationalResult.builder().error( t.getMessage() ).build() ); - return; + case GRAPH -> LanguageCrud.anyQueryResult( + QueryContext.builder() + .query( "MATCH (n) RETURN n" ) + .language( QueryLanguage.from( "cypher" ) ) + .origin( POLYPHENY_UI ) + .batch( uiRequest.noLimit ? -1 : crud.getPageSize() ) + .namespaceId( namespace.id ) + .transactionManager( crud.getTransactionManager() ) + .informationTarget( i -> i.setSession( ctx.session ) ) + .build(), uiRequest ).get( 0 ); + }; + if ( result == null ) { + throw new GenericRuntimeException( "Could not load data." ); } + + } catch ( Throwable t ) { + ctx.send( RelationalResult.builder().error( t.getMessage() ).build() ); + return; } if ( result.xid != null ) { xIds.add( result.xid ); diff --git a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java index 8c73af71bd..0486c2cfa1 100644 --- a/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java +++ b/webui/src/main/java/org/polypheny/db/webui/crud/LanguageCrud.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -148,7 +148,10 @@ public static long getNamespaceIdOrDefault( String namespace ) { transaction.setUseCache( context.isUsesCache() ); attachAnalyzerIfSpecified( context, crud, transaction ); - List executedContexts = LanguageManager.getINSTANCE().anyQuery( context.addTransaction( transaction ) ); + if ( context.getTransactions().isEmpty() ) { + context.addTransaction( transaction ); + } + List executedContexts = LanguageManager.getINSTANCE().anyQuery( context ); List> results = new ArrayList<>(); TriFunction> builder = REGISTER.get( context.getLanguage() ); diff --git a/webui/src/main/java/org/polypheny/db/webui/models/UIAlgNode.java b/webui/src/main/java/org/polypheny/db/webui/models/UIAlgNode.java deleted file mode 100644 index 1e831b51cd..0000000000 --- a/webui/src/main/java/org/polypheny/db/webui/models/UIAlgNode.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright 2019-2024 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
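For physical plans, the WebSocket handler above passes the request's dynamicValues and dynamicTypes to PolyPlanBuilder.translateDynamicParams, which parses each AlgDataType string (for example DECIMAL(10,2)) in convertType and converts the value string into the matching PolyValue. A hedged usage sketch; the sample values and the wrapping class are illustrative:

// Illustrative use of the translation helper added in PolyPlanBuilder above.
import java.util.List;
import org.polypheny.db.algebra.type.AlgDataType;
import org.polypheny.db.type.entity.PolyValue;
import org.polypheny.db.util.Pair;
import org.polypheny.db.webui.PolyPlanBuilder;

public class DynamicParamSketch {

    public static void main( String[] args ) {
        // Values arrive from the UI as strings; types are AlgDataType strings such as "DECIMAL(10,2)".
        List<String> values = List.of( "true", "42", "19.99" );
        List<String> types = List.of( "BOOLEAN", "INTEGER", "DECIMAL(10,2)" );

        Pair<List<PolyValue>, List<AlgDataType>> translated = PolyPlanBuilder.translateDynamicParams( values, types );

        // left holds PolyBoolean, PolyInteger and PolyBigDecimal instances,
        // right holds the AlgDataTypes created by convertType.
        System.out.println( translated.left );
        System.out.println( translated.right );
    }

}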
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.webui.models; - - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.polypheny.db.algebra.AlgNode; -import org.polypheny.db.algebra.core.JoinAlgType; - - -/** - * Model for a {@link AlgNode} coming from the Alg-Builder in the UI - */ -public class UIAlgNode { - - @JsonProperty("class") - public String clazz; - - /** - * ExpressionType of the AlgNode, e.g. Scan - */ - @JsonProperty - public String type; - - /** - * ExpressionType of Table, e.g. Table, View - */ - @JsonProperty - public String entityType; - - //tableScan - @JsonProperty - public String entityName; - - /** - * Children of this node in the tree - */ - @JsonProperty - public UIAlgNode[] children; - - /** - * Number of inputs of a node. - * Required by the AlgBuilder - */ - @JsonProperty - public int inputCount; - - - //join - @JsonProperty - public JoinAlgType join; - //join condition - @JsonProperty - public String operator; - @JsonProperty - public String col1; - @JsonProperty - public String col2; - - //filter - //(String operator) - @JsonProperty - public String field; - @JsonProperty - public String filter; - - //project - @JsonProperty - public String[] fields; - - //aggregate - @JsonProperty - public String groupBy; - @JsonProperty - public String aggregation; - @JsonProperty - public String alias; - //(String field) - - //sort - @JsonProperty - public SortState[] sortColumns; - - //union, minus - public boolean all; - -} diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/AlgRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/PolyAlgRequest.java similarity index 59% rename from webui/src/main/java/org/polypheny/db/webui/models/requests/AlgRequest.java rename to webui/src/main/java/org/polypheny/db/webui/models/requests/PolyAlgRequest.java index 6606ab2329..56bfbd3c19 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/requests/AlgRequest.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/PolyAlgRequest.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -16,37 +16,27 @@ package org.polypheny.db.webui.models.requests; - import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; import lombok.experimental.SuperBuilder; import lombok.extern.jackson.Jacksonized; -import org.polypheny.db.webui.models.UIAlgNode; +import org.polypheny.db.catalog.logistic.DataModel; +import org.polypheny.db.information.InformationPolyAlg.PlanType; -@Jacksonized @SuperBuilder -public class AlgRequest extends UIRequest { +@Jacksonized +public class PolyAlgRequest extends UIRequest { @JsonProperty - public UIAlgNode topNode; - @JsonProperty - public boolean useCache; - /** - * TRUE if information about the query execution should be added to the Query Analyzer (InformationManager) - */ - @JsonProperty - public boolean analyze; - @JsonProperty - public boolean createView; - @JsonProperty - public String viewName; + public String polyAlg; @JsonProperty - public String store; + public DataModel model; @JsonProperty - public String freshness; + public PlanType planType; @JsonProperty - public String interval; + public List<String> dynamicValues; // for physical plans @JsonProperty - public String timeUnit; + public List<String> dynamicTypes; // AlgDataType string (e.g. CHAR(4)) } diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/RequestModel.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/RequestModel.java index 355366f9fa..e950e27691 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/requests/RequestModel.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/RequestModel.java @@ -1,5 +1,5 @@ /* - * Copyright 2019-2024 The Polypheny Project + * Copyright 2019-2025 The Polypheny Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -24,7 +24,7 @@ public class RequestModel { /** - * ExpressionType of a request, e.g. QueryRequest or RelAlgRequest + * ExpressionType of a request, e.g. QueryRequest */ public String type;
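The renamed PolyAlgRequest above replaces the old UIAlgNode tree payload with a textual PolyAlgebra plan plus its data model and plan type and, for physical plans, the dynamic parameter values and their type strings. Since the class keeps @SuperBuilder and @Jacksonized, it can also be constructed programmatically, as sketched below with placeholder values:

// Hedged sketch: building the new request model through its Lombok-generated builder.
import java.util.List;
import org.polypheny.db.catalog.logistic.DataModel;
import org.polypheny.db.information.InformationPolyAlg.PlanType;
import org.polypheny.db.webui.models.requests.PolyAlgRequest;

public class PolyAlgRequestSketch {

    public static PolyAlgRequest physicalPlanRequest( String polyAlg ) {
        return PolyAlgRequest.builder()
                .polyAlg( polyAlg )                   // serialized PolyAlgebra plan
                .model( DataModel.RELATIONAL )        // RELATIONAL, DOCUMENT or GRAPH
                .planType( PlanType.PHYSICAL )        // dynamic parameters are only used for physical plans
                .dynamicValues( List.of( "42" ) )     // parameter values as strings
                .dynamicTypes( List.of( "INTEGER" ) ) // AlgDataType strings, e.g. CHAR(4)
                .build();
    }

}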