Use builder pattern for IcebergQueryRunner
Additionally, add support for tracking catalog properties added to the
Iceberg connector. This can be useful for tests that may require their
own catalogs.
ZacBlanco committed Feb 7, 2025
1 parent 8789cd9 commit bdd370e
Showing 27 changed files with 308 additions and 313 deletions.
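
The hunks below replace the positional createIcebergQueryRunner(...) overloads with a fluent builder. As a rough sketch of the new call pattern, using only the builder methods that appear in this diff (setCatalogType, setNodeCount, setExtraConnectorProperties, build, getQueryRunner) — the class name and the connector property key are illustrative, and the accessor for the newly tracked catalog properties is not shown in these hunks, so it is omitted:

package com.facebook.presto.iceberg;

import com.facebook.presto.testing.QueryRunner;
import com.google.common.collect.ImmutableMap;

import java.util.OptionalInt;

import static com.facebook.presto.iceberg.CatalogType.HADOOP;

public final class IcebergBuilderUsageExample
{
    private IcebergBuilderUsageExample() {}

    // Sketch only: a single-node runner against a Hadoop catalog, with one extra
    // connector property (the property key is illustrative, not taken from this commit).
    public static QueryRunner createExampleQueryRunner()
            throws Exception
    {
        return IcebergQueryRunner.builder()
                .setCatalogType(HADOOP)
                .setNodeCount(OptionalInt.of(1))
                .setExtraConnectorProperties(ImmutableMap.of("iceberg.file-format", "PARQUET"))
                .build()
                .getQueryRunner();
    }
}

Compared with the old overloads, each option is named at the call site, and tests that do not need a particular setting can simply skip that setter (assuming the builder supplies defaults, which this diff does not show).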
@@ -14,7 +14,6 @@
 package com.facebook.presto.iceberg;
 
 import com.facebook.presto.tests.DistributedQueryRunner;
-import com.google.common.collect.ImmutableMap;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
 import org.openjdk.jmh.annotations.Measurement;
@@ -27,12 +26,9 @@
 import org.openjdk.jmh.runner.options.OptionsBuilder;
 import org.openjdk.jmh.runner.options.VerboseMode;
 
-import java.util.Optional;
 import java.util.OptionalInt;
 
 import static com.facebook.presto.iceberg.CatalogType.HADOOP;
-import static com.facebook.presto.iceberg.FileFormat.PARQUET;
-import static com.facebook.presto.iceberg.IcebergQueryRunner.createIcebergQueryRunner;
 import static java.util.concurrent.TimeUnit.MILLISECONDS;
 import static org.openjdk.jmh.annotations.Mode.AverageTime;
 import static org.openjdk.jmh.annotations.Scope.Benchmark;
@@ -50,14 +46,11 @@ public class BenchmarkIcebergHadoopCatalog
     public DistributedQueryRunner getQueryRunner()
     {
         try {
-            return createIcebergQueryRunner(
-                    ImmutableMap.of(),
-                    ImmutableMap.of("iceberg.catalog.type", HADOOP.name()),
-                    PARQUET,
-                    false,
-                    true,
-                    OptionalInt.of(1),
-                    Optional.empty());
+            return IcebergQueryRunner.builder()
+                    .setCatalogType(HADOOP)
+                    .setNodeCount(OptionalInt.of(4))
+                    .build()
+                    .getQueryRunner();
         }
         catch (Exception e) {
             e.printStackTrace();
@@ -14,7 +14,6 @@
 package com.facebook.presto.iceberg;
 
 import com.facebook.presto.tests.DistributedQueryRunner;
-import com.google.common.collect.ImmutableMap;
 import org.openjdk.jmh.annotations.BenchmarkMode;
 import org.openjdk.jmh.annotations.Fork;
 import org.openjdk.jmh.annotations.Measurement;
@@ -27,11 +26,9 @@
 import org.openjdk.jmh.runner.options.OptionsBuilder;
 import org.openjdk.jmh.runner.options.VerboseMode;
 
-import java.util.Optional;
 import java.util.OptionalInt;
 
-import static com.facebook.presto.iceberg.FileFormat.PARQUET;
-import static com.facebook.presto.iceberg.IcebergQueryRunner.createIcebergQueryRunner;
+import static com.facebook.presto.iceberg.CatalogType.HIVE;
 import static java.util.concurrent.TimeUnit.MILLISECONDS;
 import static org.openjdk.jmh.annotations.Mode.AverageTime;
 import static org.openjdk.jmh.annotations.Scope.Benchmark;
@@ -49,14 +46,10 @@ public class BenchmarkIcebergHiveCatalog
     public DistributedQueryRunner getQueryRunner()
     {
         try {
-            return createIcebergQueryRunner(
-                    ImmutableMap.of(),
-                    ImmutableMap.of(),
-                    PARQUET,
-                    false,
-                    true,
-                    OptionalInt.of(1),
-                    Optional.empty());
+            return IcebergQueryRunner.builder()
+                    .setCatalogType(HIVE)
+                    .setNodeCount(OptionalInt.of(1))
+                    .build().getQueryRunner();
         }
         catch (Exception e) {
             e.printStackTrace();
@@ -23,7 +23,6 @@
 import com.facebook.presto.testing.QueryRunner;
 import com.facebook.presto.testing.assertions.Assert;
 import com.facebook.presto.tests.AbstractTestIntegrationSmokeTest;
-import com.google.common.collect.ImmutableMap;
 import org.apache.iceberg.Table;
 import org.apache.iceberg.UpdateProperties;
 import org.intellij.lang.annotations.Language;
@@ -74,7 +73,7 @@ protected IcebergDistributedSmokeTestBase(CatalogType catalogType)
     protected QueryRunner createQueryRunner()
             throws Exception
     {
-        return IcebergQueryRunner.createIcebergQueryRunner(ImmutableMap.of(), catalogType);
+        return IcebergQueryRunner.builder().setCatalogType(catalogType).build().getQueryRunner();
     }
 
     @Test
@@ -123,6 +122,7 @@ public void testDescribeTable()
         MaterializedResult actualColumns = computeActual("DESCRIBE orders");
         Assert.assertEquals(actualColumns, expectedColumns);
     }
+
     @Test
     public void testShowCreateTable()
     {
@@ -771,15 +771,15 @@ private void testCreateTableLike()
 
         assertUpdate(session, "CREATE TABLE test_create_table_like_copy4 (LIKE test_create_table_like_original INCLUDING PROPERTIES) WITH (format = 'ORC')");
         assertEquals(getTablePropertiesString("test_create_table_like_copy4"), format("WITH (\n" +
-                " delete_mode = 'merge-on-read',\n" +
-                " format = 'ORC',\n" +
-                " format_version = '2',\n" +
-                " location = '%s',\n" +
-                " metadata_delete_after_commit = false,\n" +
-                " metadata_previous_versions_max = 100,\n" +
-                " metrics_max_inferred_column = 100,\n" +
-                " partitioning = ARRAY['adate']\n" +
-                ")",
+                " delete_mode = 'merge-on-read',\n" +
+                " format = 'ORC',\n" +
+                " format_version = '2',\n" +
+                " location = '%s',\n" +
+                " metadata_delete_after_commit = false,\n" +
+                " metadata_previous_versions_max = 100,\n" +
+                " metrics_max_inferred_column = 100,\n" +
+                " partitioning = ARRAY['adate']\n" +
+                ")",
                 getLocation(schemaName, "test_create_table_like_original")));
@@ -186,7 +186,10 @@ protected IcebergDistributedTestBase(CatalogType catalogType)
     protected QueryRunner createQueryRunner()
             throws Exception
     {
-        return IcebergQueryRunner.createIcebergQueryRunner(ImmutableMap.of(), catalogType, extraConnectorProperties);
+        return IcebergQueryRunner.builder()
+                .setCatalogType(catalogType)
+                .setExtraConnectorProperties(extraConnectorProperties)
+                .build().getQueryRunner();
     }
 
     @Test