diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Acl.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Acl.java
index df6beef865ca..2a042c108e00 100644
--- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Acl.java
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/Acl.java
@@ -163,8 +163,8 @@ Access toPb() {
}
/**
- * Class for a BigQuery Group entity. Objects of this class represent a group to grante access to.
- * A Group entity can be created given the group's email or can be a special group:
+ * Class for a BigQuery Group entity. Objects of this class represent a group to grant access
+ * to. A Group entity can be created given the group's email or can be a special group:
* {@link #ofProjectOwners()}, {@link #ofProjectReaders()}, {@link #ofProjectWriters()} or
* {@link #ofAllAuthenticatedUsers()}.
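+ *
+ * <p>For instance, a special group entity might be obtained as follows (a minimal sketch,
+ * assuming these factory methods return {@code Group} instances):
+ * <pre> {@code
+ * Group allAuthenticatedUsers = Group.ofAllAuthenticatedUsers();
+ * } </pre>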
*/
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobStatus.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobStatus.java
index 9d780e5dc003..738a644a5dde 100644
--- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobStatus.java
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/JobStatus.java
@@ -56,7 +56,7 @@ public enum State {
/**
* Returns the state of the job. A {@link State#PENDING} job is waiting to be executed. A
 * {@link State#RUNNING} job is being executed. A {@link State#DONE} job has completed either
- * suceeding or failing. If failed {@link #error()} will be non-null.
+ * succeeding or failing. If the job failed, {@link #error()} will be non-null.
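+ *
+ * <p>A caller might poll for completion along these lines (an illustrative sketch; {@code job}
+ * and {@code bigquery} stand for a previously obtained {@code JobInfo} and {@code BigQuery}
+ * service):
+ * <pre> {@code
+ * while (job.status().state() != JobStatus.State.DONE) {
+ *   Thread.sleep(1000);
+ *   job = bigquery.getJob(job.jobId());
+ * }
+ * } </pre>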
*/
public State state() {
return state;
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobInfo.java
index 9b6becdb75c3..dd09d7010a50 100644
--- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobInfo.java
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/QueryJobInfo.java
@@ -239,7 +239,7 @@ public Builder priority(Priority priority) {
* the query is allowed to create large results at a slight cost in performance. If {@code true}
* {@link Builder#destinationTable(TableId)} must be provided.
*
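+ * <p>For example, a sketch of a builder chain using this option ({@code query} and the table
+ * and dataset names are illustrative):
+ * <pre> {@code
+ * QueryJobInfo job = QueryJobInfo.builder(query)
+ *     .destinationTable(TableId.of("my_dataset", "my_table"))
+ *     .allowLargeResults(true)
+ *     .build();
+ * } </pre>
+ *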
- * @see
* Returning Large Query Results
*/
public Builder allowLargeResults(Boolean allowLargeResults) {
@@ -309,7 +309,7 @@ private QueryJobInfo(Builder builder) {
 * the query is allowed to create large results at a slight cost in performance.
*
- * @see
* Returning Large Query Results
*/
public Boolean allowLargeResults() {
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java
index 7b47f4df8f19..05fb6908a51b 100644
--- a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/TableInfo.java
@@ -194,7 +194,7 @@ public static Builder builder(TableId tableId, Schema schema) {
* @param tableId table id
* @param schema the schema of the table
*/
- public static BaseTableInfo of(TableId tableId, Schema schema) {
+ public static TableInfo of(TableId tableId, Schema schema) {
return builder(tableId, schema).build();
}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/RemoteBigQueryHelper.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/RemoteBigQueryHelper.java
new file mode 100644
index 000000000000..22fa62a7b86e
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/RemoteBigQueryHelper.java
@@ -0,0 +1,140 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery.testing;
+
+import com.google.gcloud.AuthCredentials;
+import com.google.gcloud.RetryParams;
+import com.google.gcloud.bigquery.BigQuery;
+import com.google.gcloud.bigquery.BigQueryException;
+import com.google.gcloud.bigquery.BigQueryOptions;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.UUID;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+/**
+ * Utility to create a remote BigQuery configuration for testing. BigQuery options can be obtained
+ * via the {@link #options()} method. Returned options have custom
+ * {@link BigQueryOptions#retryParams()}: {@link RetryParams#retryMaxAttempts()} is {@code 10},
+ * {@link RetryParams#retryMinAttempts()} is {@code 6}, {@link RetryParams#maxRetryDelayMillis()} is
+ * {@code 30000}, {@link RetryParams#totalRetryPeriodMillis()} is {@code 120000} and
+ * {@link RetryParams#initialRetryDelayMillis()} is {@code 250}.
+ * {@link BigQueryOptions#connectTimeout()} and {@link BigQueryOptions#readTimeout()} are both set
+ * to {@code 60000}.
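+ *
+ * <p>A minimal usage sketch (variable names are illustrative):
+ * <pre> {@code
+ * RemoteBigQueryHelper helper = RemoteBigQueryHelper.create();
+ * BigQuery bigquery = helper.options().service();
+ * String dataset = RemoteBigQueryHelper.generateDatasetName();
+ * } </pre>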
+ */
+public class RemoteBigQueryHelper {
+
+ private static final Logger log = Logger.getLogger(RemoteBigQueryHelper.class.getName());
+ private static final String DATASET_NAME_PREFIX = "gcloud_test_dataset_temp_";
+ private final BigQueryOptions options;
+
+ private RemoteBigQueryHelper(BigQueryOptions options) {
+ this.options = options;
+ }
+
+ /**
+ * Returns a {@link BigQueryOptions} object to be used for testing.
+ */
+ public BigQueryOptions options() {
+ return options;
+ }
+
+ /**
+ * Deletes a dataset, even if non-empty.
+ *
+ * @param bigquery the BigQuery service to be used to issue the delete request
+ * @param dataset the dataset to be deleted
+ * @return {@code true} if deletion succeeded, {@code false} if the dataset was not found.
+ * @throws BigQueryException upon failure
+ */
+ public static boolean forceDelete(BigQuery bigquery, String dataset) {
+ return bigquery.delete(dataset, BigQuery.DatasetDeleteOption.deleteContents());
+ }
+
+ /**
+ * Returns a dataset name generated using a random UUID.
+ */
+ public static String generateDatasetName() {
+ return DATASET_NAME_PREFIX + UUID.randomUUID().toString().replace('-', '_');
+ }
+
+ /**
+ * Creates a {@code RemoteBigQueryHelper} object for the given project id and JSON key input
+ * stream.
+ *
+ * @param projectId id of the project to be used for running the tests
+ * @param keyStream input stream for a JSON key
+ * @return A {@code RemoteBigQueryHelper} object for the provided options.
+ * @throws BigQueryHelperException if {@code keyStream} is not a valid JSON key stream
+ */
+ public static RemoteBigQueryHelper create(String projectId, InputStream keyStream)
+ throws BigQueryHelperException {
+ try {
+ BigQueryOptions bigqueryOptions = BigQueryOptions.builder()
+ .authCredentials(AuthCredentials.createForJson(keyStream))
+ .projectId(projectId)
+ .retryParams(retryParams())
+ .connectTimeout(60000)
+ .readTimeout(60000)
+ .build();
+ return new RemoteBigQueryHelper(bigqueryOptions);
+ } catch (IOException ex) {
+ if (log.isLoggable(Level.WARNING)) {
+ log.log(Level.WARNING, ex.getMessage());
+ }
+ throw BigQueryHelperException.translate(ex);
+ }
+ }
+
+ /**
+ * Creates a {@code RemoteBigQueryHelper} object using default project id and authentication
+ * credentials.
+ */
+ public static RemoteBigQueryHelper create() {
+ BigQueryOptions bigqueryOptions = BigQueryOptions.builder()
+ .retryParams(retryParams())
+ .connectTimeout(60000)
+ .readTimeout(60000)
+ .build();
+ return new RemoteBigQueryHelper(bigqueryOptions);
+ }
+
+ private static RetryParams retryParams() {
+ return RetryParams.builder()
+ .retryMaxAttempts(10)
+ .retryMinAttempts(6)
+ .maxRetryDelayMillis(30000)
+ .totalRetryPeriodMillis(120000)
+ .initialRetryDelayMillis(250)
+ .build();
+ }
+
+ public static class BigQueryHelperException extends RuntimeException {
+
+ private static final long serialVersionUID = 3984993496060055562L;
+
+ public BigQueryHelperException(String message, Throwable cause) {
+ super(message, cause);
+ }
+
+ public static BigQueryHelperException translate(Exception ex) {
+ return new BigQueryHelperException(ex.getMessage(), ex);
+ }
+ }
+}
diff --git a/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/package-info.java b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/package-info.java
new file mode 100644
index 000000000000..9ca792ecd77d
--- /dev/null
+++ b/gcloud-java-bigquery/src/main/java/com/google/gcloud/bigquery/testing/package-info.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * A testing helper for Google BigQuery.
+ *
+ * <p>A simple usage example:
+ *
+ *
+ * <p>Before the test:
+ *
+ * <pre> {@code
+ * RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create();
+ * BigQuery bigquery = bigqueryHelper.options().service();
+ * String dataset = RemoteBigQueryHelper.generateDatasetName();
+ * bigquery.create(DatasetInfo.builder(dataset).build());
+ * } </pre>
+ *
+ * <p>After the test:
+ *
+ * <pre> {@code
+ * RemoteBigQueryHelper.forceDelete(bigquery, DATASET);
+ * } </pre>
+ *
+ * @see
+ * gcloud-java tools for testing
+ */
+package com.google.gcloud.bigquery.testing;
diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java
index 85c67b36b1c4..ecd9b23c1eb1 100644
--- a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java
+++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/BigQueryImplTest.java
@@ -39,7 +39,6 @@
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
-import com.google.gcloud.AuthCredentials;
import com.google.gcloud.Page;
import com.google.gcloud.RetryParams;
import com.google.gcloud.bigquery.InsertAllRequest.RowToInsert;
@@ -860,7 +859,6 @@ public Job apply(JobInfo jobInfo) {
assertEquals(cursor, page.nextPageCursor());
assertArrayEquals(jobList.toArray(), Iterables.toArray(page.values(), JobInfo.class));
String selector = (String) capturedOptions.getValue().get(JOB_OPTION_FIELDS.rpcOption());
- System.out.println(selector);
assertTrue(selector.contains("etag,jobs("));
assertTrue(selector.contains("configuration"));
assertTrue(selector.contains("jobReference"));
diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java
new file mode 100644
index 000000000000..caff9dd510d1
--- /dev/null
+++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/ITBigQueryTest.java
@@ -0,0 +1,786 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.gcloud.Page;
+import com.google.gcloud.bigquery.BigQuery.DatasetOption;
+import com.google.gcloud.bigquery.BigQuery.JobListOption;
+import com.google.gcloud.bigquery.BigQuery.JobOption;
+import com.google.gcloud.bigquery.BigQuery.TableOption;
+import com.google.gcloud.bigquery.testing.RemoteBigQueryHelper;
+import com.google.gcloud.storage.BlobInfo;
+import com.google.gcloud.storage.BucketInfo;
+import com.google.gcloud.storage.Storage;
+import com.google.gcloud.storage.testing.RemoteGcsHelper;
+
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.Timeout;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.Iterator;
+import java.util.List;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+public class ITBigQueryTest {
+
+ private static final Logger log = Logger.getLogger(ITBigQueryTest.class.getName());
+ private static final String DATASET = RemoteBigQueryHelper.generateDatasetName();
+ private static final String DESCRIPTION = "Test dataset";
+ private static final String OTHER_DATASET = RemoteBigQueryHelper.generateDatasetName();
+ private static final Field TIMESTAMP_FIELD_SCHEMA =
+ Field.builder("TimestampField", Field.Type.timestamp())
+ .mode(Field.Mode.NULLABLE)
+ .description("TimestampDescription")
+ .build();
+ private static final Field STRING_FIELD_SCHEMA =
+ Field.builder("StringField", Field.Type.string())
+ .mode(Field.Mode.NULLABLE)
+ .description("StringDescription")
+ .build();
+ private static final Field INTEGER_FIELD_SCHEMA =
+ Field.builder("IntegerField", Field.Type.integer())
+ .mode(Field.Mode.REPEATED)
+ .description("IntegerDescription")
+ .build();
+ private static final Field BOOLEAN_FIELD_SCHEMA =
+ Field.builder("BooleanField", Field.Type.bool())
+ .mode(Field.Mode.NULLABLE)
+ .description("BooleanDescription")
+ .build();
+ private static final Field RECORD_FIELD_SCHEMA =
+ Field.builder("RecordField", Field.Type.record(TIMESTAMP_FIELD_SCHEMA,
+ STRING_FIELD_SCHEMA, INTEGER_FIELD_SCHEMA, BOOLEAN_FIELD_SCHEMA))
+ .mode(Field.Mode.REQUIRED)
+ .description("RecordDescription")
+ .build();
+ private static final Schema TABLE_SCHEMA = Schema.of(TIMESTAMP_FIELD_SCHEMA, STRING_FIELD_SCHEMA,
+ INTEGER_FIELD_SCHEMA, BOOLEAN_FIELD_SCHEMA, RECORD_FIELD_SCHEMA);
+ private static final Schema SIMPLE_SCHEMA = Schema.of(STRING_FIELD_SCHEMA);
+ private static final Schema QUERY_RESULT_SCHEMA = Schema.builder()
+ .addField(Field.builder("TimestampField", Field.Type.timestamp())
+ .mode(Field.Mode.NULLABLE)
+ .build())
+ .addField(Field.builder("StringField", Field.Type.string())
+ .mode(Field.Mode.NULLABLE)
+ .build())
+ .addField(Field.builder("BooleanField", Field.Type.bool())
+ .mode(Field.Mode.NULLABLE)
+ .build())
+ .build();
+ private static final String LOAD_FILE = "load.csv";
+ private static final String JSON_LOAD_FILE = "load.json";
+ private static final String EXTRACT_FILE = "extract.csv";
+ private static final String BUCKET = RemoteGcsHelper.generateBucketName();
+ private static final TableId TABLE_ID = TableId.of(DATASET, "testing_table");
+ private static final String CSV_CONTENT = "StringValue1\nStringValue2\n";
+ private static final String JSON_CONTENT = "{"
+ + "\"TimestampField\": \"2014-08-19 07:41:35.220 -05:00\","
+ + "\"StringField\": \"stringValue\","
+ + "\"IntegerField\": [\"0\", \"1\"],"
+ + "\"BooleanField\": \"false\","
+ + "\"RecordField\": {"
+ + "\"TimestampField\": \"1969-07-20 20:18:04 UTC\","
+ + "\"StringField\": null,"
+ + "\"IntegerField\": [\"1\",\"0\"],"
+ + "\"BooleanField\": \"true\""
+ + "}"
+ + "}\n"
+ + "{"
+ + "\"TimestampField\": \"2014-08-19 07:41:35.220 -05:00\","
+ + "\"StringField\": \"stringValue\","
+ + "\"IntegerField\": [\"0\", \"1\"],"
+ + "\"BooleanField\": \"false\","
+ + "\"RecordField\": {"
+ + "\"TimestampField\": \"1969-07-20 20:18:04 UTC\","
+ + "\"StringField\": null,"
+ + "\"IntegerField\": [\"1\",\"0\"],"
+ + "\"BooleanField\": \"true\""
+ + "}"
+ + "}";
+
+ private static BigQuery bigquery;
+ private static Storage storage;
+
+ @Rule
+ public Timeout globalTimeout = Timeout.seconds(300);
+
+ @BeforeClass
+ public static void beforeClass() throws IOException, InterruptedException {
+ RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create();
+ RemoteGcsHelper gcsHelper = RemoteGcsHelper.create();
+ bigquery = bigqueryHelper.options().service();
+ storage = gcsHelper.options().service();
+ storage.create(BucketInfo.of(BUCKET));
+ storage.create(BlobInfo.builder(BUCKET, LOAD_FILE).contentType("text/plain").build(),
+ CSV_CONTENT.getBytes(StandardCharsets.UTF_8));
+ storage.create(BlobInfo.builder(BUCKET, JSON_LOAD_FILE).contentType("application/json").build(),
+ JSON_CONTENT.getBytes(StandardCharsets.UTF_8));
+ DatasetInfo info = DatasetInfo.builder(DATASET).description(DESCRIPTION).build();
+ bigquery.create(info);
+ JobInfo job = LoadJobInfo.builder(TABLE_ID, "gs://" + BUCKET + "/" + JSON_LOAD_FILE)
+ .createDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED)
+ .schema(TABLE_SCHEMA)
+ .formatOptions(FormatOptions.json())
+ .build();
+ job = bigquery.create(job);
+ while (job.status().state() != JobStatus.State.DONE) {
+ Thread.sleep(1000);
+ job = bigquery.getJob(job.jobId());
+ }
+ assertNull(job.status().error());
+ }
+
+ @AfterClass
+ public static void afterClass() throws ExecutionException, InterruptedException {
+ if (bigquery != null) {
+ RemoteBigQueryHelper.forceDelete(bigquery, DATASET);
+ }
+ if (storage != null && !RemoteGcsHelper.forceDelete(storage, BUCKET, 10, TimeUnit.SECONDS)) {
+ if (log.isLoggable(Level.WARNING)) {
+ log.log(Level.WARNING, "Deletion of bucket {0} timed out, bucket is not empty", BUCKET);
+ }
+ }
+ }
+
+ @Test
+ public void testGetDataset() {
+ DatasetInfo dataset = bigquery.getDataset(DATASET);
+ assertEquals(bigquery.options().projectId(), dataset.datasetId().project());
+ assertEquals(DATASET, dataset.datasetId().dataset());
+ assertEquals(DESCRIPTION, dataset.description());
+ assertNotNull(dataset.acl());
+ assertNotNull(dataset.etag());
+ assertNotNull(dataset.id());
+ assertNotNull(dataset.lastModified());
+ assertNotNull(dataset.selfLink());
+ }
+
+ @Test
+ public void testGetDatasetWithSelectedFields() {
+ DatasetInfo dataset = bigquery.getDataset(DATASET,
+ DatasetOption.fields(BigQuery.DatasetField.CREATION_TIME));
+ assertEquals(bigquery.options().projectId(), dataset.datasetId().project());
+ assertEquals(DATASET, dataset.datasetId().dataset());
+ assertNotNull(dataset.creationTime());
+ assertNull(dataset.description());
+ assertNull(dataset.defaultTableLifetime());
+ assertNull(dataset.acl());
+ assertNull(dataset.etag());
+ assertNull(dataset.friendlyName());
+ assertNull(dataset.id());
+ assertNull(dataset.lastModified());
+ assertNull(dataset.location());
+ assertNull(dataset.selfLink());
+ }
+
+ @Test
+ public void testUpdateDataset() {
+ DatasetInfo dataset = bigquery.create(DatasetInfo.builder(OTHER_DATASET)
+ .description("Some Description")
+ .build());
+ assertNotNull(dataset);
+ assertEquals(bigquery.options().projectId(), dataset.datasetId().project());
+ assertEquals(OTHER_DATASET, dataset.datasetId().dataset());
+ assertEquals("Some Description", dataset.description());
+ DatasetInfo updatedDataset =
+ bigquery.update(dataset.toBuilder().description("Updated Description").build());
+ assertEquals("Updated Description", updatedDataset.description());
+ assertTrue(bigquery.delete(OTHER_DATASET));
+ }
+
+ @Test
+ public void testUpdateDatasetWithSelectedFields() {
+ DatasetInfo dataset = bigquery.create(DatasetInfo.builder(OTHER_DATASET)
+ .description("Some Description")
+ .build());
+ assertNotNull(dataset);
+ assertEquals(bigquery.options().projectId(), dataset.datasetId().project());
+ assertEquals(OTHER_DATASET, dataset.datasetId().dataset());
+ assertEquals("Some Description", dataset.description());
+ DatasetInfo updatedDataset =
+ bigquery.update(dataset.toBuilder().description("Updated Description").build(),
+ DatasetOption.fields(BigQuery.DatasetField.DESCRIPTION));
+ assertEquals("Updated Description", updatedDataset.description());
+ assertNull(updatedDataset.creationTime());
+ assertNull(updatedDataset.defaultTableLifetime());
+ assertNull(updatedDataset.acl());
+ assertNull(updatedDataset.etag());
+ assertNull(updatedDataset.friendlyName());
+ assertNull(updatedDataset.id());
+ assertNull(updatedDataset.lastModified());
+ assertNull(updatedDataset.location());
+ assertNull(updatedDataset.selfLink());
+ assertTrue(bigquery.delete(OTHER_DATASET));
+ }
+
+ @Test
+ public void testCreateAndGetTable() {
+ String tableName = "test_create_and_get_table";
+ TableId tableId = TableId.of(DATASET, tableName);
+ BaseTableInfo createdTableInfo = bigquery.create(TableInfo.of(tableId, TABLE_SCHEMA));
+ assertNotNull(createdTableInfo);
+ assertEquals(DATASET, createdTableInfo.tableId().dataset());
+ assertEquals(tableName, createdTableInfo.tableId().table());
+ BaseTableInfo remoteTableInfo = bigquery.getTable(DATASET, tableName);
+ assertNotNull(remoteTableInfo);
+ assertTrue(remoteTableInfo instanceof TableInfo);
+ assertEquals(createdTableInfo.tableId(), remoteTableInfo.tableId());
+ assertEquals(BaseTableInfo.Type.TABLE, remoteTableInfo.type());
+ assertEquals(TABLE_SCHEMA, remoteTableInfo.schema());
+ assertNotNull(remoteTableInfo.creationTime());
+ assertNotNull(remoteTableInfo.lastModifiedTime());
+ assertNotNull(remoteTableInfo.numBytes());
+ assertNotNull(remoteTableInfo.numRows());
+ assertTrue(bigquery.delete(DATASET, tableName));
+ }
+
+ @Test
+ public void testCreateAndGetTableWithSelectedField() {
+ String tableName = "test_create_and_get_selected_fields_table";
+ TableId tableId = TableId.of(DATASET, tableName);
+ BaseTableInfo createdTableInfo = bigquery.create(TableInfo.of(tableId, TABLE_SCHEMA));
+ assertNotNull(createdTableInfo);
+ assertEquals(DATASET, createdTableInfo.tableId().dataset());
+ assertEquals(tableName, createdTableInfo.tableId().table());
+ BaseTableInfo remoteTableInfo = bigquery.getTable(DATASET, tableName,
+ TableOption.fields(BigQuery.TableField.CREATION_TIME));
+ assertNotNull(remoteTableInfo);
+ assertTrue(remoteTableInfo instanceof TableInfo);
+ assertEquals(createdTableInfo.tableId(), remoteTableInfo.tableId());
+ assertEquals(BaseTableInfo.Type.TABLE, remoteTableInfo.type());
+ assertNotNull(remoteTableInfo.creationTime());
+ assertNull(remoteTableInfo.schema());
+ assertNull(remoteTableInfo.lastModifiedTime());
+ assertNull(remoteTableInfo.numBytes());
+ assertNull(remoteTableInfo.numRows());
+ assertTrue(bigquery.delete(DATASET, tableName));
+ }
+
+ @Test
+ public void testCreateExternalTable() throws InterruptedException {
+ String tableName = "test_create_external_table";
+ TableId tableId = TableId.of(DATASET, tableName);
+ ExternalDataConfiguration externalDataConfiguration = ExternalDataConfiguration.of(
+ "gs://" + BUCKET + "/" + JSON_LOAD_FILE, TABLE_SCHEMA, FormatOptions.json());
+ BaseTableInfo tableInfo = ExternalTableInfo.of(tableId, externalDataConfiguration);
+ BaseTableInfo createdTableInfo = bigquery.create(tableInfo);
+ assertNotNull(createdTableInfo);
+ assertEquals(DATASET, createdTableInfo.tableId().dataset());
+ assertEquals(tableName, createdTableInfo.tableId().table());
+ BaseTableInfo remoteTableInfo = bigquery.getTable(DATASET, tableName);
+ assertNotNull(remoteTableInfo);
+ assertTrue(remoteTableInfo instanceof ExternalTableInfo);
+ assertEquals(createdTableInfo.tableId(), remoteTableInfo.tableId());
+ assertEquals(TABLE_SCHEMA, remoteTableInfo.schema());
+ QueryRequest request = QueryRequest.builder(
+ "SELECT TimestampField, StringField, IntegerField, BooleanField FROM " + DATASET + "." +
+ tableName)
+ .defaultDataset(DatasetId.of(DATASET))
+ .maxWaitTime(60000L)
+ .maxResults(1000L)
+ .build();
+ QueryResponse response = bigquery.query(request);
+ while (!response.jobComplete()) {
+ response = bigquery.getQueryResults(response.jobId());
+ Thread.sleep(1000);
+ }
+ long integerValue = 0;
+ int rowCount = 0;
+ for (List<FieldValue> row : response.result().values()) {
+ FieldValue timestampCell = row.get(0);
+ FieldValue stringCell = row.get(1);
+ FieldValue integerCell = row.get(2);
+ FieldValue booleanCell = row.get(3);
+ assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute());
+ assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute());
+ assertEquals(FieldValue.Attribute.PRIMITIVE, integerCell.attribute());
+ assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute());
+ assertEquals(1408452095220000L, timestampCell.timestampValue());
+ assertEquals("stringValue", stringCell.stringValue());
+ assertEquals(integerValue, integerCell.longValue());
+ assertEquals(false, booleanCell.booleanValue());
+ integerValue = ~integerValue & 0x1;
+ rowCount++;
+ }
+ assertEquals(4, rowCount);
+ assertTrue(bigquery.delete(DATASET, tableName));
+ }
+
+ @Test
+ public void testCreateViewTable() throws InterruptedException {
+ String tableName = "test_create_view_table";
+ TableId tableId = TableId.of(DATASET, tableName);
+ BaseTableInfo tableInfo = ViewInfo.of(tableId,
+ "SELECT TimestampField, StringField, BooleanField FROM " + DATASET + "."
+ + TABLE_ID.table());
+ BaseTableInfo createdTableInfo = bigquery.create(tableInfo);
+ assertNotNull(createdTableInfo);
+ assertEquals(DATASET, createdTableInfo.tableId().dataset());
+ assertEquals(tableName, createdTableInfo.tableId().table());
+ BaseTableInfo remoteTableInfo = bigquery.getTable(DATASET, tableName);
+ assertNotNull(remoteTableInfo);
+ assertEquals(createdTableInfo.tableId(), remoteTableInfo.tableId());
+ assertTrue(remoteTableInfo instanceof ViewInfo);
+ Schema expectedSchema = Schema.builder()
+ .addField(
+ Field.builder("TimestampField", Field.Type.timestamp())
+ .mode(Field.Mode.NULLABLE)
+ .build())
+ .addField(
+ Field.builder("StringField", Field.Type.string())
+ .mode(Field.Mode.NULLABLE)
+ .build())
+ .addField(
+ Field.builder("BooleanField", Field.Type.bool())
+ .mode(Field.Mode.NULLABLE)
+ .build())
+ .build();
+ assertEquals(expectedSchema, remoteTableInfo.schema());
+ QueryRequest request = QueryRequest.builder("SELECT * FROM " + tableName)
+ .defaultDataset(DatasetId.of(DATASET))
+ .maxWaitTime(60000L)
+ .maxResults(1000L)
+ .build();
+ QueryResponse response = bigquery.query(request);
+ while (!response.jobComplete()) {
+ response = bigquery.getQueryResults(response.jobId());
+ Thread.sleep(1000);
+ }
+ int rowCount = 0;
+ for (List<FieldValue> row : response.result().values()) {
+ FieldValue timestampCell = row.get(0);
+ FieldValue stringCell = row.get(1);
+ FieldValue booleanCell = row.get(2);
+ assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute());
+ assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute());
+ assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute());
+ assertEquals(1408452095220000L, timestampCell.timestampValue());
+ assertEquals("stringValue", stringCell.stringValue());
+ assertEquals(false, booleanCell.booleanValue());
+ rowCount++;
+ }
+ assertEquals(2, rowCount);
+ assertTrue(bigquery.delete(DATASET, tableName));
+ }
+
+ @Test
+ public void testListTables() {
+ String tableName = "test_list_tables";
+ BaseTableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), TABLE_SCHEMA);
+ BaseTableInfo createdTableInfo = bigquery.create(tableInfo);
+ assertNotNull(createdTableInfo);
+ Page<BaseTableInfo> tables = bigquery.listTables(DATASET);
+ boolean found = false;
+ Iterator<BaseTableInfo> tableIterator = tables.values().iterator();
+ while (tableIterator.hasNext() && !found) {
+ if (tableIterator.next().tableId().equals(createdTableInfo.tableId())) {
+ found = true;
+ }
+ }
+ assertTrue(found);
+ assertTrue(bigquery.delete(DATASET, tableName));
+ }
+
+ @Test
+ public void testUpdateTable() {
+ String tableName = "test_update_table";
+ BaseTableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), TABLE_SCHEMA);
+ BaseTableInfo createdTableInfo = bigquery.create(tableInfo);
+ assertNotNull(createdTableInfo);
+ BaseTableInfo updatedTableInfo = bigquery.update(tableInfo.toBuilder()
+ .description("newDescription").build());
+ assertEquals(DATASET, updatedTableInfo.tableId().dataset());
+ assertEquals(tableName, updatedTableInfo.tableId().table());
+ assertEquals(TABLE_SCHEMA, updatedTableInfo.schema());
+ assertEquals("newDescription", updatedTableInfo.description());
+ assertTrue(bigquery.delete(DATASET, tableName));
+ }
+
+ @Test
+ public void testUpdateTableWithSelectedFields() {
+ String tableName = "test_update_with_selected_fields_table";
+ BaseTableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), TABLE_SCHEMA);
+ BaseTableInfo createdTableInfo = bigquery.create(tableInfo);
+ assertNotNull(createdTableInfo);
+ BaseTableInfo updatedTableInfo = bigquery.update(tableInfo.toBuilder().description("newDescr")
+ .build(), TableOption.fields(BigQuery.TableField.DESCRIPTION));
+ assertTrue(updatedTableInfo instanceof TableInfo);
+ assertEquals(DATASET, updatedTableInfo.tableId().dataset());
+ assertEquals(tableName, updatedTableInfo.tableId().table());
+ assertEquals("newDescr", updatedTableInfo.description());
+ assertNull(updatedTableInfo.schema());
+ assertNull(updatedTableInfo.lastModifiedTime());
+ assertNull(updatedTableInfo.numBytes());
+ assertNull(updatedTableInfo.numRows());
+ assertTrue(bigquery.delete(DATASET, tableName));
+ }
+
+ @Test
+ public void testInsertAll() {
+ String tableName = "test_insert_all_table";
+ BaseTableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), TABLE_SCHEMA);
+ assertNotNull(bigquery.create(tableInfo));
+ InsertAllRequest request = InsertAllRequest.builder(tableInfo.tableId())
+ .addRow(ImmutableMap.of(
+ "TimestampField", "2014-08-19 07:41:35.220 -05:00",
+ "StringField", "stringValue",
+ "IntegerField", ImmutableList.of(0, 1),
+ "BooleanField", false,
+ "RecordField", ImmutableMap.of(
+ "TimestampField", "1969-07-20 20:18:04 UTC",
+ "IntegerField", ImmutableList.of(1, 0),
+ "BooleanField", true)))
+ .addRow(ImmutableMap.of(
+ "TimestampField", "2014-08-19 07:41:35.220 -05:00",
+ "StringField", "stringValue",
+ "IntegerField", ImmutableList.of(0, 1),
+ "BooleanField", false,
+ "RecordField", ImmutableMap.of(
+ "TimestampField", "1969-07-20 20:18:04 UTC",
+ "IntegerField", ImmutableList.of(1, 0),
+ "BooleanField", true)))
+ .build();
+ InsertAllResponse response = bigquery.insertAll(request);
+ assertFalse(response.hasErrors());
+ assertEquals(0, response.insertErrors().size());
+ assertTrue(bigquery.delete(TableId.of(DATASET, tableName)));
+ }
+
+ @Test
+ public void testInsertAllWithErrors() {
+ String tableName = "test_insert_all_with_errors_table";
+ BaseTableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), TABLE_SCHEMA);
+ assertNotNull(bigquery.create(tableInfo));
+ InsertAllRequest request = InsertAllRequest.builder(tableInfo.tableId())
+ .addRow(ImmutableMap.of(
+ "TimestampField", "2014-08-19 07:41:35.220 -05:00",
+ "StringField", "stringValue",
+ "IntegerField", ImmutableList.of(0, 1),
+ "BooleanField", false,
+ "RecordField", ImmutableMap.of(
+ "TimestampField", "1969-07-20 20:18:04 UTC",
+ "IntegerField", ImmutableList.of(1, 0),
+ "BooleanField", true)))
+ .addRow(ImmutableMap.of(
+ "TimestampField", "invalidDate",
+ "StringField", "stringValue",
+ "IntegerField", ImmutableList.of(0, 1),
+ "BooleanField", false,
+ "RecordField", ImmutableMap.of(
+ "TimestampField", "1969-07-20 20:18:04 UTC",
+ "IntegerField", ImmutableList.of(1, 0),
+ "BooleanField", true)))
+ .addRow(ImmutableMap.of(
+ "TimestampField", "1969-07-20 20:18:04 UTC",
+ "StringField", "stringValue",
+ "IntegerField", ImmutableList.of(0, 1),
+ "BooleanField", false))
+ .skipInvalidRows(true)
+ .build();
+ InsertAllResponse response = bigquery.insertAll(request);
+ assertTrue(response.hasErrors());
+ assertEquals(2, response.insertErrors().size());
+ assertNotNull(response.errorsFor(1L));
+ assertNotNull(response.errorsFor(2L));
+ assertTrue(bigquery.delete(TableId.of(DATASET, tableName)));
+ }
+
+ @Test
+ public void testListAllTableData() {
+ Page<List<FieldValue>> rows = bigquery.listTableData(TABLE_ID);
+ int rowCount = 0;
+ for (List<FieldValue> row : rows.values()) {
+ FieldValue timestampCell = row.get(0);
+ FieldValue stringCell = row.get(1);
+ FieldValue integerCell = row.get(2);
+ FieldValue booleanCell = row.get(3);
+ FieldValue recordCell = row.get(4);
+ assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute());
+ assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute());
+ assertEquals(FieldValue.Attribute.REPEATED, integerCell.attribute());
+ assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute());
+ assertEquals(FieldValue.Attribute.RECORD, recordCell.attribute());
+ assertEquals(1408452095220000L, timestampCell.timestampValue());
+ assertEquals("stringValue", stringCell.stringValue());
+ assertEquals(0, integerCell.repeatedValue().get(0).longValue());
+ assertEquals(1, integerCell.repeatedValue().get(1).longValue());
+ assertEquals(false, booleanCell.booleanValue());
+ assertEquals(-14182916000000L, recordCell.recordValue().get(0).timestampValue());
+ assertTrue(recordCell.recordValue().get(1).isNull());
+ assertEquals(1, recordCell.recordValue().get(2).repeatedValue().get(0).longValue());
+ assertEquals(0, recordCell.recordValue().get(2).repeatedValue().get(1).longValue());
+ assertEquals(true, recordCell.recordValue().get(3).booleanValue());
+ rowCount++;
+ }
+ assertEquals(2, rowCount);
+ }
+
+ @Test
+ public void testQuery() throws InterruptedException {
+ String query = new StringBuilder()
+ .append("SELECT TimestampField, StringField, BooleanField FROM ")
+ .append(TABLE_ID.table())
+ .toString();
+ QueryRequest request = QueryRequest.builder(query)
+ .defaultDataset(DatasetId.of(DATASET))
+ .maxWaitTime(60000L)
+ .maxResults(1000L)
+ .build();
+ QueryResponse response = bigquery.query(request);
+ while (!response.jobComplete()) {
+ Thread.sleep(1000);
+ response = bigquery.getQueryResults(response.jobId());
+ }
+ assertEquals(QUERY_RESULT_SCHEMA, response.result().schema());
+ int rowCount = 0;
+ for (List<FieldValue> row : response.result().values()) {
+ FieldValue timestampCell = row.get(0);
+ FieldValue stringCell = row.get(1);
+ FieldValue booleanCell = row.get(2);
+ assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute());
+ assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute());
+ assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute());
+ assertEquals(1408452095220000L, timestampCell.timestampValue());
+ assertEquals("stringValue", stringCell.stringValue());
+ assertEquals(false, booleanCell.booleanValue());
+ rowCount++;
+ }
+ assertEquals(2, rowCount);
+ }
+
+ @Test
+ public void testListJobs() {
+ Page<JobInfo> jobs = bigquery.listJobs();
+ for (JobInfo job : jobs.values()) {
+ assertNotNull(job.jobId());
+ assertNotNull(job.statistics());
+ assertNotNull(job.status());
+ assertNotNull(job.userEmail());
+ assertNotNull(job.id());
+ }
+ }
+
+ @Test
+ public void testListJobsWithSelectedFields() {
+ Page<JobInfo> jobs = bigquery.listJobs(JobListOption.fields(BigQuery.JobField.USER_EMAIL));
+ for (JobInfo job : jobs.values()) {
+ assertNotNull(job.jobId());
+ assertNotNull(job.status());
+ assertNotNull(job.userEmail());
+ assertNull(job.statistics());
+ assertNull(job.id());
+ }
+ }
+
+ @Test
+ public void testCreateAndGetJob() throws InterruptedException {
+ String sourceTableName = "test_create_and_get_job_source_table";
+ String destinationTableName = "test_create_and_get_job_destination_table";
+ TableId sourceTable = TableId.of(DATASET, sourceTableName);
+ BaseTableInfo tableInfo = TableInfo.of(sourceTable, SIMPLE_SCHEMA);
+ BaseTableInfo createdTableInfo = bigquery.create(tableInfo);
+ assertNotNull(createdTableInfo);
+ assertEquals(DATASET, createdTableInfo.tableId().dataset());
+ assertEquals(sourceTableName, createdTableInfo.tableId().table());
+ TableId destinationTable = TableId.of(DATASET, destinationTableName);
+ JobInfo job = CopyJobInfo.of(destinationTable, sourceTable);
+ CopyJobInfo createdJob = (CopyJobInfo) bigquery.create(job);
+ CopyJobInfo remoteJob = (CopyJobInfo) bigquery.getJob(createdJob.jobId());
+ assertEquals(createdJob.jobId(), remoteJob.jobId());
+ assertEquals(createdJob.sourceTables(), remoteJob.sourceTables());
+ assertEquals(createdJob.destinationTable(), remoteJob.destinationTable());
+ assertEquals(createdJob.createDisposition(), remoteJob.createDisposition());
+ assertEquals(createdJob.writeDisposition(), remoteJob.writeDisposition());
+ assertNotNull(remoteJob.etag());
+ assertNotNull(remoteJob.statistics());
+ assertNotNull(remoteJob.status());
+ assertEquals(createdJob.selfLink(), remoteJob.selfLink());
+ assertEquals(createdJob.userEmail(), remoteJob.userEmail());
+ assertTrue(bigquery.delete(DATASET, sourceTableName));
+ assertTrue(bigquery.delete(DATASET, destinationTableName));
+ }
+
+ @Test
+ public void testCreateAndGetJobWithSelectedFields() throws InterruptedException {
+ String sourceTableName = "test_create_and_get_job_with_selected_fields_source_table";
+ String destinationTableName = "test_create_and_get_job_with_selected_fields_destination_table";
+ TableId sourceTable = TableId.of(DATASET, sourceTableName);
+ BaseTableInfo tableInfo = TableInfo.of(sourceTable, SIMPLE_SCHEMA);
+ BaseTableInfo createdTableInfo = bigquery.create(tableInfo);
+ assertNotNull(createdTableInfo);
+ assertEquals(DATASET, createdTableInfo.tableId().dataset());
+ assertEquals(sourceTableName, createdTableInfo.tableId().table());
+ TableId destinationTable = TableId.of(DATASET, destinationTableName);
+ JobInfo job = CopyJobInfo.of(destinationTable, sourceTable);
+ CopyJobInfo createdJob = (CopyJobInfo) bigquery.create(job,
+ JobOption.fields(BigQuery.JobField.ETAG));
+ assertNotNull(createdJob.jobId());
+ assertNotNull(createdJob.sourceTables());
+ assertNotNull(createdJob.destinationTable());
+ assertNotNull(createdJob.etag());
+ assertNull(createdJob.statistics());
+ assertNull(createdJob.status());
+ assertNull(createdJob.selfLink());
+ assertNull(createdJob.userEmail());
+ CopyJobInfo remoteJob = (CopyJobInfo) bigquery.getJob(createdJob.jobId(),
+ JobOption.fields(BigQuery.JobField.ETAG));
+ assertEquals(createdJob.jobId(), remoteJob.jobId());
+ assertEquals(createdJob.sourceTables(), remoteJob.sourceTables());
+ assertEquals(createdJob.destinationTable(), remoteJob.destinationTable());
+ assertEquals(createdJob.createDisposition(), remoteJob.createDisposition());
+ assertEquals(createdJob.writeDisposition(), remoteJob.writeDisposition());
+ assertNotNull(remoteJob.etag());
+ assertNull(remoteJob.statistics());
+ assertNull(remoteJob.status());
+ assertNull(remoteJob.selfLink());
+ assertNull(remoteJob.userEmail());
+ assertTrue(bigquery.delete(DATASET, sourceTableName));
+ assertTrue(bigquery.delete(DATASET, destinationTableName));
+ }
+
+ @Test
+ public void testCopyJob() throws InterruptedException {
+ String sourceTableName = "test_copy_job_source_table";
+ String destinationTableName = "test_copy_job_destination_table";
+ TableId sourceTable = TableId.of(DATASET, sourceTableName);
+ BaseTableInfo tableInfo = TableInfo.of(sourceTable, SIMPLE_SCHEMA);
+ BaseTableInfo createdTableInfo = bigquery.create(tableInfo);
+ assertNotNull(createdTableInfo);
+ assertEquals(DATASET, createdTableInfo.tableId().dataset());
+ assertEquals(sourceTableName, createdTableInfo.tableId().table());
+ TableId destinationTable = TableId.of(DATASET, destinationTableName);
+ JobInfo job = CopyJobInfo.of(destinationTable, sourceTable);
+ JobInfo remoteJob = bigquery.create(job);
+ while (remoteJob.status().state() != JobStatus.State.DONE) {
+ Thread.sleep(1000);
+ remoteJob = bigquery.getJob(remoteJob.jobId());
+ }
+ assertNull(remoteJob.status().error());
+ BaseTableInfo remoteTableInfo = bigquery.getTable(DATASET, destinationTableName);
+ assertNotNull(remoteTableInfo);
+ assertEquals(destinationTable.dataset(), remoteTableInfo.tableId().dataset());
+ assertEquals(destinationTableName, remoteTableInfo.tableId().table());
+ assertEquals(SIMPLE_SCHEMA, remoteTableInfo.schema());
+ assertTrue(bigquery.delete(DATASET, sourceTableName));
+ assertTrue(bigquery.delete(DATASET, destinationTableName));
+ }
+
+ @Test
+ public void testQueryJob() throws InterruptedException {
+ String tableName = "test_query_job_table";
+ String query = new StringBuilder()
+ .append("SELECT TimestampField, StringField, BooleanField FROM ")
+ .append(TABLE_ID.table())
+ .toString();
+ TableId destinationTable = TableId.of(DATASET, tableName);
+ QueryJobInfo job = QueryJobInfo.builder(query)
+ .defaultDataset(DatasetId.of(DATASET))
+ .destinationTable(destinationTable)
+ .build();
+ JobInfo remoteJob = bigquery.create(job);
+ while (remoteJob.status().state() != JobStatus.State.DONE) {
+ Thread.sleep(1000);
+ remoteJob = bigquery.getJob(remoteJob.jobId());
+ }
+ assertNull(remoteJob.status().error());
+
+ QueryResponse response = bigquery.getQueryResults(remoteJob.jobId());
+ while (!response.jobComplete()) {
+ Thread.sleep(1000);
+ response = bigquery.getQueryResults(response.jobId());
+ }
+ assertFalse(response.hasErrors());
+ assertEquals(QUERY_RESULT_SCHEMA, response.result().schema());
+ int rowCount = 0;
+ for (List<FieldValue> row : response.result().values()) {
+ FieldValue timestampCell = row.get(0);
+ FieldValue stringCell = row.get(1);
+ FieldValue booleanCell = row.get(2);
+ assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.attribute());
+ assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.attribute());
+ assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.attribute());
+ assertEquals(1408452095220000L, timestampCell.timestampValue());
+ assertEquals("stringValue", stringCell.stringValue());
+ assertEquals(false, booleanCell.booleanValue());
+ rowCount++;
+ }
+ assertEquals(2, rowCount);
+ assertTrue(bigquery.delete(DATASET, tableName));
+ }
+
+ @Test
+ public void testExtract() throws InterruptedException {
+ String tableName = "test_export_job_table";
+ TableId destinationTable = TableId.of(DATASET, tableName);
+ JobInfo remoteJob = bigquery.create(
+ LoadJobInfo.builder(destinationTable, "gs://" + BUCKET + "/" + LOAD_FILE)
+ .schema(SIMPLE_SCHEMA)
+ .build());
+ while (remoteJob.status().state() != JobStatus.State.DONE) {
+ Thread.sleep(1000);
+ remoteJob = bigquery.getJob(remoteJob.jobId());
+ }
+ assertNull(remoteJob.status().error());
+
+ ExtractJobInfo extractJob =
+ ExtractJobInfo.builder(destinationTable, "gs://" + BUCKET + "/" + EXTRACT_FILE)
+ .printHeader(false)
+ .build();
+ remoteJob = bigquery.create(extractJob);
+ while (remoteJob.status().state() != JobStatus.State.DONE) {
+ Thread.sleep(1000);
+ remoteJob = bigquery.getJob(remoteJob.jobId());
+ }
+ assertNull(remoteJob.status().error());
+ assertEquals(CSV_CONTENT,
+ new String(storage.readAllBytes(BUCKET, EXTRACT_FILE), StandardCharsets.UTF_8));
+ assertTrue(bigquery.delete(DATASET, tableName));
+ }
+
+ @Test
+ public void testCancelJob() throws InterruptedException {
+ String destinationTableName = "test_cancel_query_job_table";
+ String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.table();
+ TableId destinationTable = TableId.of(DATASET, destinationTableName);
+ QueryJobInfo job = QueryJobInfo.builder(query)
+ .defaultDataset(DatasetId.of(DATASET))
+ .destinationTable(destinationTable)
+ .build();
+ JobInfo remoteJob = bigquery.create(job);
+ assertTrue(bigquery.cancel(remoteJob.jobId()));
+ while (remoteJob.status().state() != JobStatus.State.DONE) {
+ Thread.sleep(1000);
+ remoteJob = bigquery.getJob(remoteJob.jobId());
+ }
+ assertNull(remoteJob.status().error());
+ }
+}
diff --git a/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/RemoteBigQueryHelperTest.java b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/RemoteBigQueryHelperTest.java
new file mode 100644
index 000000000000..62a88c1860cd
--- /dev/null
+++ b/gcloud-java-bigquery/src/test/java/com/google/gcloud/bigquery/RemoteBigQueryHelperTest.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2015 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.gcloud.bigquery;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import com.google.gcloud.bigquery.BigQuery.DatasetDeleteOption;
+import com.google.gcloud.bigquery.testing.RemoteBigQueryHelper;
+
+import org.easymock.EasyMock;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.ByteArrayInputStream;
+import java.io.InputStream;
+import java.util.concurrent.ExecutionException;
+
+public class RemoteBigQueryHelperTest {
+
+ private static final String DATASET_NAME = "dataset-name";
+ private static final String PROJECT_ID = "project-id";
+ private static final String JSON_KEY = "{\n"
+ + " \"private_key_id\": \"somekeyid\",\n"
+ + " \"private_key\": \"-----BEGIN PRIVATE KEY-----\\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggS"
+ + "kAgEAAoIBAQC+K2hSuFpAdrJI\\nnCgcDz2M7t7bjdlsadsasad+fvRSW6TjNQZ3p5LLQY1kSZRqBqylRkzteMOyHg"
+ + "aR\\n0Pmxh3ILCND5men43j3h4eDbrhQBuxfEMalkG92sL+PNQSETY2tnvXryOvmBRwa/\\nQP/9dJfIkIDJ9Fw9N4"
+ + "Bhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nknddadwkwewcVxHFhcZJO+XWf6ofLUXpRwiTZakGMn8EE1uVa2"
+ + "LgczOjwWHGi99MFjxSer5m9\\n1tCa3/KEGKiS/YL71JvjwX3mb+cewlkcmweBKZHM2JPTk0ZednFSpVZMtycjkbLa"
+ + "\\ndYOS8V85AgMBewECggEBAKksaldajfDZDV6nGqbFjMiizAKJolr/M3OQw16K6o3/\\n0S31xIe3sSlgW0+UbYlF"
+ + "4U8KifhManD1apVSC3csafaspP4RZUHFhtBywLO9pR5c\\nr6S5aLp+gPWFyIp1pfXbWGvc5VY/v9x7ya1VEa6rXvL"
+ + "sKupSeWAW4tMj3eo/64ge\\nsdaceaLYw52KeBYiT6+vpsnYrEkAHO1fF/LavbLLOFJmFTMxmsNaG0tuiJHgjshB\\"
+ + "n82DpMCbXG9YcCgI/DbzuIjsdj2JC1cascSP//3PmefWysucBQe7Jryb6NQtASmnv\\nCdDw/0jmZTEjpe4S1lxfHp"
+ + "lAhHFtdgYTvyYtaLZiVVkCgYEA8eVpof2rceecw/I6\\n5ng1q3Hl2usdWV/4mZMvR0fOemacLLfocX6IYxT1zA1FF"
+ + "JlbXSRsJMf/Qq39mOR2\\nSpW+hr4jCoHeRVYLgsbggtrevGmILAlNoqCMpGZ6vDmJpq6ECV9olliDvpPgWOP+\\nm"
+ + "YPDreFBGxWvQrADNbRt2dmGsrsCgYEAyUHqB2wvJHFqdmeBsaacewzV8x9WgmeX\\ngUIi9REwXlGDW0Mz50dxpxcK"
+ + "CAYn65+7TCnY5O/jmL0VRxU1J2mSWyWTo1C+17L0\\n3fUqjxL1pkefwecxwecvC+gFFYdJ4CQ/MHHXU81Lwl1iWdF"
+ + "Cd2UoGddYaOF+KNeM\\nHC7cmqra+JsCgYEAlUNywzq8nUg7282E+uICfCB0LfwejuymR93CtsFgb7cRd6ak\\nECR"
+ + "8FGfCpH8ruWJINllbQfcHVCX47ndLZwqv3oVFKh6pAS/vVI4dpOepP8++7y1u\\ncoOvtreXCX6XqfrWDtKIvv0vjl"
+ + "HBhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nkndj5uNl5SiuVxHFhcZJO+XWf6ofLUregtevZakGMn8EE1uVa"
+ + "2AY7eafmoU/nZPT\\n00YB0TBATdCbn/nBSuKDESkhSg9s2GEKQZG5hBmL5uCMfo09z3SfxZIhJdlerreP\\nJ7gSi"
+ + "dI12N+EZxYd4xIJh/HFDgp7RRO87f+WJkofMQKBgGTnClK1VMaCRbJZPriw\\nEfeFCoOX75MxKwXs6xgrw4W//AYG"
+ + "GUjDt83lD6AZP6tws7gJ2IwY/qP7+lyhjEqN\\nHtfPZRGFkGZsdaksdlaksd323423d+15/UvrlRSFPNj1tWQmNKk"
+ + "XyRDW4IG1Oa2p\\nrALStNBx5Y9t0/LQnFI4w3aG\\n-----END PRIVATE KEY-----\\n\",\n"
+ + " \"client_email\": \"someclientid@developer.gserviceaccount.com\",\n"
+ + " \"client_id\": \"someclientid.apps.googleusercontent.com\",\n"
+ + " \"type\": \"service_account\"\n"
+ + "}";
+ private static final InputStream JSON_KEY_STREAM = new ByteArrayInputStream(JSON_KEY.getBytes());
+
+ @Rule
+ public ExpectedException thrown = ExpectedException.none();
+
+ @Test
+ public void testForceDelete() throws InterruptedException, ExecutionException {
+ BigQuery bigqueryMock = EasyMock.createMock(BigQuery.class);
+ EasyMock.expect(bigqueryMock.delete(DATASET_NAME, DatasetDeleteOption.deleteContents()))
+ .andReturn(true);
+ EasyMock.replay(bigqueryMock);
+ assertTrue(RemoteBigQueryHelper.forceDelete(bigqueryMock, DATASET_NAME));
+ EasyMock.verify(bigqueryMock);
+ }
+
+ @Test
+ public void testCreateFromStream() {
+ RemoteBigQueryHelper helper = RemoteBigQueryHelper.create(PROJECT_ID, JSON_KEY_STREAM);
+ BigQueryOptions options = helper.options();
+ assertEquals(PROJECT_ID, options.projectId());
+ assertEquals(60000, options.connectTimeout());
+ assertEquals(60000, options.readTimeout());
+ assertEquals(10, options.retryParams().retryMaxAttempts());
+ assertEquals(6, options.retryParams().retryMinAttempts());
+ assertEquals(30000, options.retryParams().maxRetryDelayMillis());
+ assertEquals(120000, options.retryParams().totalRetryPeriodMillis());
+ assertEquals(250, options.retryParams().initialRetryDelayMillis());
+ }
+}
diff --git a/pom.xml b/pom.xml
index 5b11a09fb382..8aedae4ec3b3 100644
--- a/pom.xml
+++ b/pom.xml
@@ -71,6 +71,7 @@
<module>gcloud-java-storage</module>
<module>gcloud-java</module>
<module>gcloud-java-examples</module>
+ <module>gcloud-java-bigquery</module>