Initial phase of support for constraint optimization to the PrestoDB optimizer in response to issue #16413 (#16416)

Merged (3 commits) on Jun 13, 2022
Changes from all commits
@@ -156,6 +156,7 @@ public PlanNode visitFilter(FilterNode node, Void context)
tableHandle,
oldTableScanNode.getOutputVariables(),
oldTableScanNode.getAssignments(),
oldTableScanNode.getTableConstraints(),
oldTableScanNode.getCurrentConstraint(),
oldTableScanNode.getEnforcedConstraint());

@@ -171,6 +171,7 @@ private Optional<PlanNode> tryCreatingNewScanNode(PlanNode plan)
newTableHandle,
ImmutableList.copyOf(assignments.keySet()),
assignments.entrySet().stream().collect(toImmutableMap(Map.Entry::getKey, (e) -> (ColumnHandle) (e.getValue()))),
tableScanNode.getTableConstraints(),
tableScanNode.getCurrentConstraint(),
tableScanNode.getEnforcedConstraint()));
}
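
Both hunks above follow the same rule: whenever an optimizer pass rebuilds a TableScanNode, the new getTableConstraints() list is copied along with the other fields, so constraint metadata survives plan rewrites. To see why the planner wants this metadata at all, here is a minimal, self-contained sketch (illustrative only, not code from this PR) of the kind of rewrite it enables: if some unique or primary key is contained in the grouping columns, a DISTINCT over the scan output is a no-op.

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;

import java.util.List;
import java.util.Set;

public class RedundantDistinctSketch
{
    // Each unique key is modeled as the set of columns it covers, standing in
    // for the TableConstraint objects now carried by TableScanNode.
    static boolean distinctIsRedundant(List<Set<String>> uniqueKeys, Set<String> groupingColumns)
    {
        // If the grouping columns include every column of some unique key,
        // every input row is already its own group, so DISTINCT changes nothing.
        return uniqueKeys.stream().anyMatch(groupingColumns::containsAll);
    }

    public static void main(String[] args)
    {
        List<Set<String>> uniqueKeys = ImmutableList.of(ImmutableSet.of("orderkey"));
        System.out.println(distinctIsRedundant(uniqueKeys, ImmutableSet.of("orderkey", "custkey"))); // true
        System.out.println(distinctIsRedundant(uniqueKeys, ImmutableSet.of("custkey")));             // false
    }
}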
@@ -108,4 +108,14 @@ public void testMismatchSchemaTable()

super.testMismatchSchemaTable();
}

@Override
public void testTableConstraints()
{
if (getHiveVersionMajor() < 3) {
throw new SkipException("Table constraints support is in Hive 3 and above. Disabling it for lower versions");
}

super.testTableConstraints();
}
}
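
The override above relies on TestNG's SkipException: throwing it from a test method reports the test as skipped rather than failed. A stripped-down illustration of that mechanism (the version check here is a stand-in for how the real test obtains the Hive version):

import org.testng.SkipException;
import org.testng.annotations.Test;

public class TableConstraintsSkipSketch
{
    private int getHiveVersionMajor()
    {
        return 2; // stand-in; the real test reads the Hive version from the test environment
    }

    @Test
    public void testTableConstraints()
    {
        if (getHiveVersionMajor() < 3) {
            // TestNG reports the test as skipped, not failed.
            throw new SkipException("Table constraints support is in Hive 3 and above");
        }
        // ... constraint assertions would run here against Hive 3+
    }
}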
@@ -19,6 +19,7 @@
import com.facebook.presto.hive.HiveType;
import com.facebook.presto.hive.MetastoreClientConfig;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.constraints.TableConstraint;
import com.facebook.presto.spi.security.PrestoPrincipal;
import com.facebook.presto.spi.security.RoleGrant;
import com.facebook.presto.spi.statistics.ColumnStatisticType;
@@ -85,11 +86,13 @@ public enum MetastoreCacheScope
}

protected final ExtendedHiveMetastore delegate;

private final LoadingCache<KeyAndContext<String>, Optional<Database>> databaseCache;
private final LoadingCache<KeyAndContext<String>, List<String>> databaseNamesCache;
private final LoadingCache<KeyAndContext<HiveTableName>, Optional<Table>> tableCache;
private final LoadingCache<KeyAndContext<String>, Optional<List<String>>> tableNamesCache;
private final LoadingCache<KeyAndContext<HiveTableName>, PartitionStatistics> tableStatisticsCache;
private final LoadingCache<KeyAndContext<HiveTableName>, List<TableConstraint<String>>> tableConstraintsCache;
private final LoadingCache<KeyAndContext<HivePartitionName>, PartitionStatistics> partitionStatisticsCache;
private final LoadingCache<KeyAndContext<String>, Optional<List<String>>> viewNamesCache;
private final LoadingCache<KeyAndContext<HivePartitionName>, Optional<Partition>> partitionCache;
@@ -259,6 +262,9 @@ public Map<KeyAndContext<HivePartitionName>, PartitionStatistics> loadAll(Iterab
tableCache = newCacheBuilder(cacheExpiresAfterWriteMillis, cacheRefreshMills, cacheMaxSize)
.build(asyncReloading(CacheLoader.from(this::loadTable), executor));

tableConstraintsCache = newCacheBuilder(cacheExpiresAfterWriteMillis, cacheRefreshMills, cacheMaxSize)
.build(asyncReloading(CacheLoader.from(this::loadTableConstraints), executor));

viewNamesCache = newCacheBuilder(cacheExpiresAfterWriteMillis, cacheRefreshMills, cacheMaxSize)
.build(asyncReloading(CacheLoader.from(this::loadAllViews), executor));

@@ -304,6 +310,7 @@ public void flushCache()
partitionNamesCache.invalidateAll();
databaseCache.invalidateAll();
tableCache.invalidateAll();
tableConstraintsCache.invalidateAll();
partitionCache.invalidateAll();
partitionFilterCache.invalidateAll();
tablePrivilegesCache.invalidateAll();
@@ -363,6 +370,12 @@ public Optional<Table> getTable(MetastoreContext metastoreContext, String databa
return get(tableCache, getCachingKey(metastoreContext, hiveTableName(databaseName, tableName)));
}

@Override
public List<TableConstraint<String>> getTableConstraints(MetastoreContext metastoreContext, String databaseName, String tableName)
{
return get(tableConstraintsCache, getCachingKey(metastoreContext, hiveTableName(databaseName, tableName)));
}

@Override
public Set<ColumnStatisticType> getSupportedColumnStatistics(MetastoreContext metastoreContext, Type type)
{
@@ -374,6 +387,11 @@ private Optional<Table> loadTable(KeyAndContext<HiveTableName> hiveTableName)
return delegate.getTable(hiveTableName.getContext(), hiveTableName.getKey().getDatabaseName(), hiveTableName.getKey().getTableName());
}

private List<TableConstraint<String>> loadTableConstraints(KeyAndContext<HiveTableName> hiveTableName)
{
return delegate.getTableConstraints(hiveTableName.getContext(), hiveTableName.getKey().getDatabaseName(), hiveTableName.getKey().getTableName());
}

@Override
public PartitionStatistics getTableStatistics(MetastoreContext metastoreContext, String databaseName, String tableName)
{
@@ -608,6 +626,10 @@ protected void invalidateTable(String databaseName, String tableName)
.filter(hiveTableNameKey -> hiveTableNameKey.getKey().equals(hiveTableName))
.forEach(tableCache::invalidate);

tableConstraintsCache.asMap().keySet().stream()
.filter(hiveTableNameKey -> hiveTableNameKey.getKey().equals(hiveTableName))
.forEach(tableConstraintsCache::invalidate);

tableNamesCache.asMap().keySet().stream()
.filter(tableNameKey -> tableNameKey.getKey().equals(databaseName))
.forEach(tableNamesCache::invalidate);
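
Taken together, these hunks give tableConstraintsCache the same lifecycle as the existing tableCache: built with the shared expiry/refresh/size settings, loaded through the delegate metastore, reloaded asynchronously, and invalidated both wholesale by flushCache() and per-table by invalidateTable(). A self-contained sketch of that Guava LoadingCache recipe (the settings and constraint values here are illustrative, not the PR's configuration):

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;

import java.util.List;
import java.util.concurrent.TimeUnit;

import static com.google.common.cache.CacheLoader.asyncReloading;
import static java.util.concurrent.Executors.newSingleThreadExecutor;

public class ConstraintCacheSketch
{
    // Stand-in for delegate.getTableConstraints(...); prints so the demo below
    // makes cache hits and misses visible.
    static List<String> loadTableConstraints(String table)
    {
        System.out.println("metastore hit for " + table);
        return ImmutableList.of("PRIMARY KEY (orderkey)");
    }

    public static void main(String[] args)
            throws Exception
    {
        LoadingCache<String, List<String>> tableConstraintsCache = CacheBuilder.newBuilder()
                .expireAfterWrite(10, TimeUnit.MINUTES)
                .refreshAfterWrite(1, TimeUnit.MINUTES)
                .maximumSize(10_000)
                .build(asyncReloading(CacheLoader.from(ConstraintCacheSketch::loadTableConstraints), newSingleThreadExecutor()));

        tableConstraintsCache.get("orders");        // misses; loads from the "metastore"
        tableConstraintsCache.get("orders");        // served from cache, no second hit
        tableConstraintsCache.invalidate("orders"); // what invalidateTable() does above
        tableConstraintsCache.get("orders");        // misses again and reloads
    }
}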
@@ -17,9 +17,11 @@
import com.facebook.presto.common.predicate.Domain;
import com.facebook.presto.common.type.Type;
import com.facebook.presto.hive.HiveType;
import com.facebook.presto.spi.constraints.TableConstraint;
import com.facebook.presto.spi.security.PrestoPrincipal;
import com.facebook.presto.spi.security.RoleGrant;
import com.facebook.presto.spi.statistics.ColumnStatisticType;
import com.google.common.collect.ImmutableList;
import io.airlift.units.Duration;

import java.util.List;
@@ -128,4 +130,9 @@ default void unlock(MetastoreContext metastoreContext, long lockId)
{
throw new NotSupportedException("Unlock is not supported by default");
}

default List<TableConstraint<String>> getTableConstraints(MetastoreContext metastoreContext, String schemaName, String tableName)
{
return ImmutableList.of();
}
}
@@ -20,6 +20,7 @@
import com.facebook.presto.hive.HiveType;
import com.facebook.presto.hive.MetastoreClientConfig;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.constraints.TableConstraint;
import com.facebook.presto.spi.security.PrestoPrincipal;
import com.facebook.presto.spi.security.RoleGrant;
import com.facebook.presto.spi.statistics.ColumnStatisticType;
@@ -65,6 +66,7 @@ public class RecordingHiveMetastore

private final Cache<String, Optional<Database>> databaseCache;
private final Cache<HiveTableName, Optional<Table>> tableCache;
private final Cache<HiveTableName, List<TableConstraint<String>>> tableConstraintsCache;
private final Cache<String, Set<ColumnStatisticType>> supportedColumnStatisticsCache;
private final Cache<HiveTableName, PartitionStatistics> tableStatisticsCache;
private final Cache<Set<HivePartitionName>, Map<String, PartitionStatistics>> partitionStatisticsCache;
@@ -88,6 +90,7 @@ public RecordingHiveMetastore(@ForRecordingHiveMetastore ExtendedHiveMetastore d

databaseCache = createCache(metastoreClientConfig);
tableCache = createCache(metastoreClientConfig);
tableConstraintsCache = createCache(metastoreClientConfig);
supportedColumnStatisticsCache = createCache(metastoreClientConfig);
tableStatisticsCache = createCache(metastoreClientConfig);
partitionStatisticsCache = createCache(metastoreClientConfig);
@@ -115,6 +118,7 @@ void loadRecording()
allRoles = recording.getAllRoles();
databaseCache.putAll(toMap(recording.getDatabases()));
tableCache.putAll(toMap(recording.getTables()));
tableConstraintsCache.putAll(toMap(recording.getTableConstraints()));
supportedColumnStatisticsCache.putAll(toMap(recording.getSupportedColumnStatistics()));
tableStatisticsCache.putAll(toMap(recording.getTableStatistics()));
partitionStatisticsCache.putAll(toMap(recording.getPartitionStatistics()));
@@ -153,6 +157,7 @@ public void writeRecording()
allRoles,
toPairs(databaseCache),
toPairs(tableCache),
toPairs(tableConstraintsCache),
toPairs(supportedColumnStatisticsCache),
toPairs(tableStatisticsCache),
toPairs(partitionStatisticsCache),
@@ -206,6 +211,11 @@ public Optional<Table> getTable(MetastoreContext metastoreContext, String databa
return loadValue(tableCache, hiveTableName(databaseName, tableName), () -> delegate.getTable(metastoreContext, databaseName, tableName));
}

@Override
public List<TableConstraint<String>> getTableConstraints(MetastoreContext metastoreContext, String databaseName, String tableName)
{
return loadValue(tableConstraintsCache, hiveTableName(databaseName, tableName), () -> delegate.getTableConstraints(metastoreContext, databaseName, tableName));
}

@Override
public Set<ColumnStatisticType> getSupportedColumnStatistics(MetastoreContext metastoreContext, Type type)
{
@@ -508,6 +518,7 @@ public static class Recording
private final Optional<Set<String>> allRoles;
private final List<Pair<String, Optional<Database>>> databases;
private final List<Pair<HiveTableName, Optional<Table>>> tables;
private final List<Pair<HiveTableName, List<TableConstraint<String>>>> tableConstraints;
private final List<Pair<String, Set<ColumnStatisticType>>> supportedColumnStatistics;
private final List<Pair<HiveTableName, PartitionStatistics>> tableStatistics;
private final List<Pair<Set<HivePartitionName>, Map<String, PartitionStatistics>>> partitionStatistics;
@@ -526,6 +537,7 @@ public Recording(
@JsonProperty("allRoles") Optional<Set<String>> allRoles,
@JsonProperty("databases") List<Pair<String, Optional<Database>>> databases,
@JsonProperty("tables") List<Pair<HiveTableName, Optional<Table>>> tables,
@JsonProperty("tableConstraints") List<Pair<HiveTableName, List<TableConstraint<String>>>> tableConstraints,
@JsonProperty("supportedColumnStatistics") List<Pair<String, Set<ColumnStatisticType>>> supportedColumnStatistics,
@JsonProperty("tableStatistics") List<Pair<HiveTableName, PartitionStatistics>> tableStatistics,
@JsonProperty("partitionStatistics") List<Pair<Set<HivePartitionName>, Map<String, PartitionStatistics>>> partitionStatistics,
@@ -542,6 +554,7 @@ public Recording(
this.allRoles = allRoles;
this.databases = databases;
this.tables = tables;
this.tableConstraints = tableConstraints;
this.supportedColumnStatistics = supportedColumnStatistics;
this.tableStatistics = tableStatistics;
this.partitionStatistics = partitionStatistics;
@@ -579,6 +592,12 @@ public List<Pair<HiveTableName, Optional<Table>>> getTables()
return tables;
}

@JsonProperty
public List<Pair<HiveTableName, List<TableConstraint<String>>>> getTableConstraints()
{
return tableConstraints;
}

@JsonProperty
public List<Pair<String, Set<ColumnStatisticType>>> getSupportedColumnStatistics()
{
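
As elsewhere in RecordingHiveMetastore, table constraints get their own record/replay cache: live runs capture delegate answers into tableConstraintsCache, writeRecording() serializes them as JSON pairs, and loadRecording() restores them so replayed tests never touch a real metastore. The loadValue pattern behind this, reduced to a self-contained sketch (types and error handling simplified, not the PR's code):

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;

import java.util.concurrent.Callable;

public class RecordingSketch
{
    private final Cache<String, String> cache = CacheBuilder.newBuilder().build();
    private final boolean replay;

    public RecordingSketch(boolean replay)
    {
        this.replay = replay;
    }

    public String loadValue(String key, Callable<String> delegateLoader)
            throws Exception
    {
        if (replay) {
            // Replay mode: serve only what the recording contains.
            String recorded = cache.getIfPresent(key);
            if (recorded == null) {
                throw new IllegalStateException("Missing entry in recording: " + key);
            }
            return recorded;
        }
        // Recording mode: read through to the delegate and capture the answer
        // so a later writeRecording() can serialize it.
        String value = delegateLoader.call();
        cache.put(key, value);
        return value;
    }

    public static void main(String[] args)
            throws Exception
    {
        RecordingSketch live = new RecordingSketch(false);
        System.out.println(live.loadValue("db.orders", () -> "PRIMARY KEY (orderkey)"));
    }
}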
@@ -31,6 +31,7 @@
import com.facebook.presto.spi.StandardErrorCode;
import com.facebook.presto.spi.TableNotFoundException;
import com.facebook.presto.spi.connector.ConnectorCommitHandle;
import com.facebook.presto.spi.constraints.TableConstraint;
import com.facebook.presto.spi.security.PrestoPrincipal;
import com.facebook.presto.spi.security.PrincipalType;
import com.facebook.presto.spi.security.RoleGrant;
@@ -56,6 +57,7 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -198,6 +200,16 @@ public synchronized Optional<Table> getTable(MetastoreContext metastoreContext,
}
}

public synchronized List<TableConstraint<String>> getTableConstraints(MetastoreContext metastoreContext, String databaseName, String tableName)
{
checkReadable();
Action<TableAndMore> tableAction = tableActions.get(new SchemaTableName(databaseName, tableName));
if (tableAction == null) {
return delegate.getTableConstraints(metastoreContext, databaseName, tableName);
}
return Collections.emptyList();
}

public synchronized Set<ColumnStatisticType> getSupportedColumnStatistics(MetastoreContext metastoreContext, Type type)
{
return delegate.getSupportedColumnStatistics(metastoreContext, type);
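
Note the conservative read path in the new getTableConstraints(): constraints are fetched from the delegate only when the table has no uncommitted action in the current transaction; once a pending action exists, the method reports no constraints rather than possibly stale ones. A self-contained model of that logic (the string-keyed maps stand in for tableActions and the delegate metastore):

import com.google.common.collect.ImmutableList;

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class TransactionalConstraintReadSketch
{
    // Stand-ins for tableActions and the delegate metastore in the diff.
    private final Map<String, String> tableActions = new HashMap<>();
    private final Map<String, List<String>> metastore = new HashMap<>();

    public List<String> getTableConstraints(String table)
    {
        if (tableActions.containsKey(table)) {
            // The table was created, dropped, or altered inside this
            // transaction, so the metastore's constraint metadata may no
            // longer apply; report none rather than a possibly wrong answer.
            return Collections.emptyList();
        }
        return metastore.getOrDefault(table, Collections.emptyList());
    }

    public static void main(String[] args)
    {
        TransactionalConstraintReadSketch tx = new TransactionalConstraintReadSketch();
        tx.metastore.put("orders", ImmutableList.of("PRIMARY KEY (orderkey)"));
        System.out.println(tx.getTableConstraints("orders")); // [PRIMARY KEY (orderkey)]
        tx.tableActions.put("orders", "ALTER");
        System.out.println(tx.getTableConstraints("orders")); // []
    }
}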
@@ -34,9 +34,12 @@
import com.facebook.presto.spi.SchemaNotFoundException;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.TableNotFoundException;
import com.facebook.presto.spi.constraints.PrimaryKeyConstraint;
import com.facebook.presto.spi.constraints.TableConstraint;
import com.facebook.presto.spi.security.PrestoPrincipal;
import com.facebook.presto.spi.security.RoleGrant;
import com.facebook.presto.spi.statistics.ColumnStatisticType;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.airlift.units.Duration;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -99,6 +102,18 @@ public Optional<Table> getTable(MetastoreContext metastoreContext, String databa
});
}

@Override
public List<TableConstraint<String>> getTableConstraints(MetastoreContext metastoreContext, String databaseName, String tableName)
{
ImmutableList.Builder<TableConstraint<String>> constraints = ImmutableList.builder();
Optional<PrimaryKeyConstraint<String>> primaryKey = delegate.getPrimaryKey(metastoreContext, databaseName, tableName);
if (primaryKey.isPresent()) {
constraints.add(primaryKey.get());
}
constraints.addAll(delegate.getUniqueConstraints(metastoreContext, databaseName, tableName));
return constraints.build();
}

@Override
public Set<ColumnStatisticType> getSupportedColumnStatistics(MetastoreContext metastoreContext, Type type)
{
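
The bridge assembles one constraint list from two Thrift-facing calls: an optional primary key first, then any unique constraints. The same assembly with plain strings in place of the SPI types, using Optional.ifPresent as a slightly tighter equivalent of the isPresent()/get() pair in the diff:

import com.google.common.collect.ImmutableList;

import java.util.List;
import java.util.Optional;

public class CombineConstraintsSketch
{
    static List<String> getTableConstraints(Optional<String> primaryKey, List<String> uniqueConstraints)
    {
        ImmutableList.Builder<String> constraints = ImmutableList.builder();
        // Equivalent to the isPresent()/get() pair in the diff above.
        primaryKey.ifPresent(constraints::add);
        constraints.addAll(uniqueConstraints);
        return constraints.build();
    }

    public static void main(String[] args)
    {
        System.out.println(getTableConstraints(Optional.of("PRIMARY KEY (orderkey)"), ImmutableList.of("UNIQUE (clerk)")));
        // [PRIMARY KEY (orderkey), UNIQUE (clerk)]
        System.out.println(getTableConstraints(Optional.empty(), ImmutableList.of()));
        // []
    }
}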
@@ -25,9 +25,12 @@
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.TableNotFoundException;
import com.facebook.presto.spi.constraints.PrimaryKeyConstraint;
import com.facebook.presto.spi.constraints.UniqueConstraint;
import com.facebook.presto.spi.security.PrestoPrincipal;
import com.facebook.presto.spi.security.RoleGrant;
import com.facebook.presto.spi.statistics.ColumnStatisticType;
import com.google.common.collect.ImmutableList;
import io.airlift.units.Duration;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -169,4 +172,14 @@ default Optional<List<FieldSchema>> getFields(MetastoreContext metastoreContext,

return Optional.of(table.get().getSd().getCols());
}

default Optional<PrimaryKeyConstraint<String>> getPrimaryKey(MetastoreContext metastoreContext, String databaseName, String tableName)
{
return Optional.empty();
}

default List<UniqueConstraint<String>> getUniqueConstraints(MetastoreContext metastoreContext, String databaseName, String tableName)
{
return ImmutableList.of();
}
}
@@ -22,17 +22,20 @@
import org.apache.hadoop.hive.metastore.api.LockRequest;
import org.apache.hadoop.hive.metastore.api.LockResponse;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.PrimaryKeysResponse;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
import org.apache.hadoop.hive.metastore.api.Role;
import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.UniqueConstraintsResponse;
import org.apache.hadoop.hive.metastore.api.UnlockRequest;
import org.apache.thrift.TException;

import java.io.Closeable;
import java.util.List;
import java.util.Map;
import java.util.Optional;

public interface HiveMetastoreClient
extends Closeable
@@ -159,4 +162,10 @@ LockResponse lock(LockRequest request)

void unlock(UnlockRequest request)
throws TException;

Optional<PrimaryKeysResponse> getPrimaryKey(String dbName, String tableName)
throws TException;

Optional<UniqueConstraintsResponse> getUniqueConstraints(String catName, String dbName, String tableName)
throws TException;
}
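
The two new client methods correspond to the Hive 3 Thrift calls get_primary_keys and get_unique_constraints; returning Optional leaves room for clients of older metastores to answer Optional.empty(). A hedged sketch of what an implementation might look like; whether this PR's Thrift client wraps the calls exactly this way is an assumption:

import org.apache.hadoop.hive.metastore.api.PrimaryKeysRequest;
import org.apache.hadoop.hive.metastore.api.PrimaryKeysResponse;
import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
import org.apache.hadoop.hive.metastore.api.UniqueConstraintsRequest;
import org.apache.hadoop.hive.metastore.api.UniqueConstraintsResponse;
import org.apache.thrift.TException;

import java.util.Optional;

public class ThriftConstraintCallsSketch
{
    private final ThriftHiveMetastore.Client client;

    public ThriftConstraintCallsSketch(ThriftHiveMetastore.Client client)
    {
        this.client = client;
    }

    public Optional<PrimaryKeysResponse> getPrimaryKey(String dbName, String tableName)
            throws TException
    {
        // Hive 3 Thrift API; a client for an older metastore would return
        // Optional.empty() instead of issuing the call.
        return Optional.of(client.get_primary_keys(new PrimaryKeysRequest(dbName, tableName)));
    }

    public Optional<UniqueConstraintsResponse> getUniqueConstraints(String catName, String dbName, String tableName)
            throws TException
    {
        return Optional.of(client.get_unique_constraints(new UniqueConstraintsRequest(catName, dbName, tableName)));
    }
}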