Remove unused BigQuery append samples (#6100)
alixhami authored Sep 25, 2018
1 parent 48ae436 commit 7222def
Showing 1 changed file with 0 additions and 79 deletions: bigquery/docs/snippets.py
@@ -1512,85 +1512,6 @@ def test_load_table_from_uri_autodetect(client, to_delete, capsys):
    assert 'Loaded 50 rows.' in out


def test_load_table_from_uri_append(client, to_delete, capsys):
    """Appends data to a table from a GCS URI using various formats.

    Each file format has its own tested load-from-URI sample. Because most of
    the code is common for autodetect, append, and truncate, this sample
    includes snippets for all supported formats but only calls a single load
    job.

    This code snippet is made up of shared code, then format-specific code,
    followed by more shared code. Note that only the last format in the
    format-specific code section will be tested in this test.
    """
    dataset_id = 'load_table_dataset_{}'.format(_millis())
    dataset = bigquery.Dataset(client.dataset(dataset_id))
    client.create_dataset(dataset)
    to_delete.append(dataset)

    job_config = bigquery.LoadJobConfig()
    job_config.schema = [
        bigquery.SchemaField('name', 'STRING'),
        bigquery.SchemaField('post_abbr', 'STRING')
    ]
    table_ref = dataset.table('us_states')
    body = six.BytesIO(b'Washington,WA')
    client.load_table_from_file(
        body, table_ref, job_config=job_config).result()

    # Shared code
    # [START bigquery_load_table_gcs_csv_append]
    # [START bigquery_load_table_gcs_json_append]
    # from google.cloud import bigquery
    # client = bigquery.Client()
    # table_ref = client.dataset('my_dataset').table('existing_table')

    previous_rows = client.get_table(table_ref).num_rows
    assert previous_rows > 0

    job_config = bigquery.LoadJobConfig()
    job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND
    # [END bigquery_load_table_gcs_csv_append]
    # [END bigquery_load_table_gcs_json_append]

    # Format-specific code
    # [START bigquery_load_table_gcs_csv_append]
    job_config.skip_leading_rows = 1
    # The source format defaults to CSV, so the line below is optional.
    job_config.source_format = bigquery.SourceFormat.CSV
    uri = 'gs://cloud-samples-data/bigquery/us-states/us-states.csv'
    # [END bigquery_load_table_gcs_csv_append]
    # Unset the CSV-specific attribute so it does not apply to the JSON load;
    # the test reaches into the private _properties dict to clear it.
    del job_config._properties['load']['skipLeadingRows']

    # [START bigquery_load_table_gcs_json_append]
    job_config.source_format = bigquery.SourceFormat.NEWLINE_DELIMITED_JSON
    uri = 'gs://cloud-samples-data/bigquery/us-states/us-states.json'
    # [END bigquery_load_table_gcs_json_append]

    # Shared code
    # [START bigquery_load_table_gcs_csv_append]
    # [START bigquery_load_table_gcs_json_append]
    load_job = client.load_table_from_uri(
        uri,
        table_ref,
        job_config=job_config)  # API request
    print('Starting job {}'.format(load_job.job_id))

    load_job.result()  # Waits for table load to complete.
    print('Job finished.')

    destination_table = client.get_table(table_ref)
    print('Loaded {} rows.'.format(destination_table.num_rows - previous_rows))
    # [END bigquery_load_table_gcs_csv_append]
    # [END bigquery_load_table_gcs_json_append]

    out, _ = capsys.readouterr()
    assert previous_rows == 1
    assert 'Loaded 50 rows.' in out


def test_load_table_from_uri_truncate(client, to_delete, capsys):
"""Replaces table data with data from a GCS URI using various formats
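For reference, the standalone per-format samples that supersede this consolidated one are essentially the concatenation of the matching [START]/[END] regions above. Below is a minimal sketch of the CSV append variant, assuming a dataset named my_dataset that already contains a table named existing_table (both names are illustrative placeholders, not names from this repository):

# Minimal sketch of the CSV append pattern removed above; 'my_dataset' and
# 'existing_table' are illustrative placeholders.
from google.cloud import bigquery

client = bigquery.Client()
table_ref = client.dataset('my_dataset').table('existing_table')
previous_rows = client.get_table(table_ref).num_rows

job_config = bigquery.LoadJobConfig()
# Append to the existing table rather than replacing its contents.
job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND
job_config.skip_leading_rows = 1  # Skip the CSV header row.
job_config.source_format = bigquery.SourceFormat.CSV
uri = 'gs://cloud-samples-data/bigquery/us-states/us-states.csv'

load_job = client.load_table_from_uri(
    uri, table_ref, job_config=job_config)  # API request
load_job.result()  # Waits for the load job to complete.

destination_table = client.get_table(table_ref)
print('Loaded {} rows.'.format(destination_table.num_rows - previous_rows))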
