Bigtable: fix rendering of instance admin snippets. (#7797)
Closes #7522.
sangramql authored and tseaver committed May 1, 2019
1 parent f953c55 commit 8a0486e
Showing 2 changed files with 80 additions and 104 deletions.
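Context for the diff below: these snippet files mark the lines that get extracted into the published documentation with # [START <region_tag>] and # [END <region_tag>] comments. Before this commit, several tests opened and closed the same region tag more than once, with test setup and assertions interleaved between the fragments, which appears to be what broke the rendered snippets (#7522). The fix keeps setup and verification outside the markers so each tag encloses a single, self-contained block. A minimal sketch of the resulting pattern, modelled on the test_bigtable_delete_table hunk further down (Config and INSTANCE_ID are module-level fixtures in these snippet files):

def test_bigtable_delete_table():
    # Test setup stays outside the region tags, so it never shows up
    # in the rendered documentation.
    table_del = Config.INSTANCE.table("table_id_del")
    table_del.create()
    assert table_del.exists()

    # [START bigtable_delete_table]
    from google.cloud.bigtable import Client

    client = Client(admin=True)
    instance = client.instance(INSTANCE_ID)
    table = instance.table("table_id_del")
    table.delete()
    # [END bigtable_delete_table]

    # Verification also stays outside the region.
    assert not table.exists()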
69 changes: 25 additions & 44 deletions bigtable/docs/snippets.py
@@ -216,20 +216,18 @@ def test_bigtable_list_clusters_in_project():


def test_bigtable_list_app_profiles():
# [START bigtable_list_app_profiles]
from google.cloud.bigtable import Client

client = Client(admin=True)
instance = client.instance(INSTANCE_ID)
# [END bigtable_list_app_profiles]

app_profile = instance.app_profile(
app_profile = Config.INSTANCE.app_profile(
app_profile_id="app-prof-" + unique_resource_id("-"),
routing_policy_type=enums.RoutingPolicyType.ANY,
)
app_profile = app_profile.create(ignore_warnings=True)

# [START bigtable_list_app_profiles]
from google.cloud.bigtable import Client

client = Client(admin=True)
instance = client.instance(INSTANCE_ID)

app_profiles_list = instance.list_app_profiles()
# [END bigtable_list_app_profiles]
assert len(app_profiles_list) > 0
@@ -363,22 +361,15 @@ def test_bigtable_delete_cluster():


def test_bigtable_delete_instance():
# [START bigtable_delete_instance]
from google.cloud.bigtable import Client

client = Client(admin=True)
instance_id_to_delete = "inst-my-" + unique_resource_id("-")
# [END bigtable_delete_instance]

cluster_id = "clus-my-" + unique_resource_id("-")

instance = client.instance(
instance_id_to_delete, instance_type=PRODUCTION, labels=LABELS
)
instance = client.instance("inst-my-123", instance_type=PRODUCTION, labels=LABELS)
cluster = instance.cluster(
cluster_id,
"clus-my-123",
location_id=ALT_LOCATION_ID,
serve_nodes=SERVER_NODES,
serve_nodes=1,
default_storage_type=STORAGE_TYPE,
)
operation = instance.create(clusters=[cluster])
@@ -390,7 +381,12 @@ def test_bigtable_delete_instance():
operation.result(timeout=100)

# [START bigtable_delete_instance]
instance_to_delete = client.instance(instance_id_to_delete)
from google.cloud.bigtable import Client

client = Client(admin=True)

instance_id = "inst-my-123"
instance_to_delete = client.instance(instance_id)
instance_to_delete.delete()
# [END bigtable_delete_instance]

@@ -412,16 +408,13 @@ def test_bigtable_test_iam_permissions():


def test_bigtable_set_iam_policy_then_get_iam_policy():
service_account_email = Config.CLIENT._credentials.service_account_email

# [START bigtable_set_iam_policy]
from google.cloud.bigtable import Client
from google.cloud.bigtable.policy import Policy
from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE

# [END bigtable_set_iam_policy]

service_account_email = Config.CLIENT._credentials.service_account_email

# [START bigtable_set_iam_policy]
client = Client(admin=True)
instance = client.instance(INSTANCE_ID)
instance.reload()
@@ -490,16 +483,13 @@ def test_bigtable_instance_admin_client():


def test_bigtable_admins_policy():
service_account_email = Config.CLIENT._credentials.service_account_email

# [START bigtable_admins_policy]
from google.cloud.bigtable import Client
from google.cloud.bigtable.policy import Policy
from google.cloud.bigtable.policy import BIGTABLE_ADMIN_ROLE

# [END bigtable_admins_policy]

service_account_email = Config.CLIENT._credentials.service_account_email

# [START bigtable_admins_policy]
client = Client(admin=True)
instance = client.instance(INSTANCE_ID)
instance.reload()
@@ -514,16 +504,13 @@ def test_bigtable_admins_policy():


def test_bigtable_readers_policy():
service_account_email = Config.CLIENT._credentials.service_account_email

# [START bigtable_readers_policy]
from google.cloud.bigtable import Client
from google.cloud.bigtable.policy import Policy
from google.cloud.bigtable.policy import BIGTABLE_READER_ROLE

# [END bigtable_readers_policy]

service_account_email = Config.CLIENT._credentials.service_account_email

# [START bigtable_readers_policy]
client = Client(admin=True)
instance = client.instance(INSTANCE_ID)
instance.reload()
@@ -538,16 +525,13 @@ def test_bigtable_readers_policy():


def test_bigtable_users_policy():
service_account_email = Config.CLIENT._credentials.service_account_email

# [START bigtable_users_policy]
from google.cloud.bigtable import Client
from google.cloud.bigtable.policy import Policy
from google.cloud.bigtable.policy import BIGTABLE_USER_ROLE

# [END bigtable_users_policy]

service_account_email = Config.CLIENT._credentials.service_account_email

# [START bigtable_users_policy]
client = Client(admin=True)
instance = client.instance(INSTANCE_ID)
instance.reload()
@@ -562,16 +546,13 @@ def test_bigtable_users_policy():


def test_bigtable_viewers_policy():
service_account_email = Config.CLIENT._credentials.service_account_email

# [START bigtable_viewers_policy]
from google.cloud.bigtable import Client
from google.cloud.bigtable.policy import Policy
from google.cloud.bigtable.policy import BIGTABLE_VIEWER_ROLE

# [END bigtable_viewers_policy]

service_account_email = Config.CLIENT._credentials.service_account_email

# [START bigtable_viewers_policy]
client = Client(admin=True)
instance = client.instance(INSTANCE_ID)
instance.reload()
115 changes: 55 additions & 60 deletions bigtable/docs/snippets_table.py
@@ -131,17 +131,18 @@ def test_bigtable_create_table():


def test_bigtable_sample_row_keys():
table_sample = Config.INSTANCE.table("table_id1_samplerow")
initial_split_keys = [b"split_key_1", b"split_key_10", b"split_key_20"]
table_sample.create(initial_split_keys=initial_split_keys)
assert table_sample.exists()

# [START bigtable_sample_row_keys]
from google.cloud.bigtable import Client

client = Client(admin=True)
instance = client.instance(INSTANCE_ID)

table = instance.table("table_id1_samplerow")
# [END bigtable_sample_row_keys]
initial_split_keys = [b"split_key_1", b"split_key_10", b"split_key_20"]
table.create(initial_split_keys=initial_split_keys)
# [START bigtable_sample_row_keys]
data = table.sample_row_keys()
actual_keys, offset = zip(*[(rk.row_key, rk.offset_bytes) for rk in data])
# [END bigtable_sample_row_keys]
@@ -178,7 +179,7 @@ def test_bigtable_write_read_drop_truncate():
response = table.mutate_rows(rows)
# validate that all rows written successfully
for i, status in enumerate(response):
if status.code is not 0:
if status.code != 0:
print("Row number {} failed to write".format(i))
# [END bigtable_mutate_rows]
assert len(response) == len(rows)
@@ -304,7 +305,7 @@ def test_bigtable_list_tables():
instance = client.instance(INSTANCE_ID)
tables_list = instance.list_tables()
# [END bigtable_list_tables]
assert len(tables_list) is not 0
assert len(tables_list) != 0


def test_bigtable_table_name():
@@ -368,18 +369,17 @@ def test_bigtable_table_exists():


def test_bigtable_delete_table():
table_del = Config.INSTANCE.table("table_id_del")
table_del.create()
assert table_del.exists()

# [START bigtable_delete_table]
from google.cloud.bigtable import Client

client = Client(admin=True)
instance = client.instance(INSTANCE_ID)
table = instance.table("table_id_del")
# [END bigtable_delete_table]

table.create()
assert table.exists()

# [START bigtable_delete_table]
table.delete()
# [END bigtable_delete_table]
assert not table.exists()
@@ -906,6 +906,15 @@ def test_bigtable_row_setcell_rowkey():


def test_bigtable_row_delete():
table_row_del = Config.INSTANCE.table(TABLE_ID)
row_obj = table_row_del.row(b"row_key_1")
row_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, b"cell-val")
row_obj.commit()
actual_rows_keys = []
for row in table_row_del.read_rows():
actual_rows_keys.append(row.row_key)
assert actual_rows_keys == [b"row_key_1"]

# [START bigtable_row_delete]
from google.cloud.bigtable import Client

@@ -915,16 +924,7 @@ def test_bigtable_row_delete():

row_key = b"row_key_1"
row_obj = table.row(row_key)
# [END bigtable_row_delete]

row_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, b"cell-val")
row_obj.commit()
actual_rows_keys = []
for row in table.read_rows():
actual_rows_keys.append(row.row_key)
assert actual_rows_keys == [row_key]

# [START bigtable_row_delete]
row_obj.delete()
row_obj.commit()
# [END bigtable_row_delete]
@@ -936,76 +936,76 @@ def test_bigtable_row_delete_cell():


def test_bigtable_row_delete_cell():
# [START bigtable_row_delete_cell]
from google.cloud.bigtable import Client

client = Client(admin=True)
instance = client.instance(INSTANCE_ID)
table = instance.table(TABLE_ID)

table_row_del_cell = Config.INSTANCE.table(TABLE_ID)
row_key1 = b"row_key_1"
row_obj = table.row(row_key1)
# [END bigtable_row_delete_cell]

row_obj = table_row_del_cell.row(row_key1)
row_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, CELL_VAL1)
row_obj.commit()

row_key2 = b"row_key_2"
row_obj = table.row(row_key2)
row_obj.set_cell(COLUMN_FAMILY_ID2, COL_NAME2, CELL_VAL2)
row_obj.commit()

actual_rows_keys = []
for row in table.read_rows():
for row in table_row_del_cell.read_rows():
actual_rows_keys.append(row.row_key)
assert actual_rows_keys == [row_key1, row_key2]
assert actual_rows_keys == [row_key1]

# [START bigtable_row_delete_cell]
row_obj.delete_cell(COLUMN_FAMILY_ID2, COL_NAME2)
from google.cloud.bigtable import Client

client = Client(admin=True)
instance = client.instance(INSTANCE_ID)
table = instance.table(TABLE_ID)

row_key = b"row_key_1"
row_obj = table.row(row_key)

row_obj.delete_cell(COLUMN_FAMILY_ID, COL_NAME1)
row_obj.commit()
# [END bigtable_row_delete_cell]

actual_rows_keys = []
for row in table.read_rows():
actual_rows_keys.append(row.row_key)
assert actual_rows_keys == [row_key1]
table.truncate(timeout=300)
assert not row.row_key


def test_bigtable_row_delete_cells():
# [START bigtable_row_delete_cells]
from google.cloud.bigtable import Client

client = Client(admin=True)
instance = client.instance(INSTANCE_ID)
table = instance.table(TABLE_ID)

table_row_del_cells = Config.INSTANCE.table(TABLE_ID)
row_key1 = b"row_key_1"
row_obj = table.row(row_key1)
# [END bigtable_row_delete_cells]
row_obj = table_row_del_cells.row(row_key1)

row_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, CELL_VAL1)
row_obj.commit()
row_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME2, CELL_VAL2)
row_obj.commit()

actual_rows_keys = []
for row in table.read_rows():
for row in table_row_del_cells.read_rows():
actual_rows_keys.append(row.row_key)
assert actual_rows_keys == [row_key1]

# [START bigtable_row_delete_cells]
from google.cloud.bigtable import Client

client = Client(admin=True)
instance = client.instance(INSTANCE_ID)
table = instance.table(TABLE_ID)

row_key = b"row_key_1"
row_obj = table.row(row_key)

row_obj.delete_cells(COLUMN_FAMILY_ID, [COL_NAME1, COL_NAME2])
row_obj.commit()
# [END bigtable_row_delete_cells]

actual_rows_keys = []
for row in table.read_rows():
actual_rows_keys.append(row.row_key)
assert actual_rows_keys == []
assert not row.row_key


def test_bigtable_row_clear():
table_row_clear = Config.INSTANCE.table(TABLE_ID)
row_obj = table_row_clear.row(b"row_key_1")
row_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, b"cell-val")

mutation_size = row_obj.get_mutations_size()
assert mutation_size > 0

# [START bigtable_row_clear]
from google.cloud.bigtable import Client

@@ -1016,12 +1016,7 @@ def test_bigtable_row_clear():
row_key = b"row_key_1"
row_obj = table.row(row_key)
row_obj.set_cell(COLUMN_FAMILY_ID, COL_NAME1, b"cell-val")
# [END bigtable_row_clear]

mutation_size = row_obj.get_mutations_size()
assert mutation_size > 0

# [START bigtable_row_clear]
row_obj.clear()
# [END bigtable_row_clear]

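Not part of this commit, but a quick way to guard against the pattern it fixes: a hypothetical checker (the helper name and behaviour are illustrative assumptions, not an existing tool in this repository) that flags any region tag which is opened or closed more than once, or left unbalanced, in a snippets file — i.e. anything that violates the one-contiguous-block-per-tag convention this commit enforces.

import re
from collections import Counter

# Hypothetical helper, not part of this commit: verify that each
# documentation region tag appears as one contiguous block, meaning
# every tag has exactly one START and one END marker in the file.
_TAG_RE = re.compile(r"#\s*\[(START|END)\s+([\w-]+)\]")

def check_region_tags(path):
    starts = Counter()
    ends = Counter()
    with open(path) as fh:
        for line in fh:
            match = _TAG_RE.search(line)
            if match:
                kind, tag = match.groups()
                (starts if kind == "START" else ends)[tag] += 1
    # Report every tag whose marker counts deviate from exactly 1/1.
    return [
        (tag, starts[tag], ends[tag])
        for tag in sorted(set(starts) | set(ends))
        if starts[tag] != 1 or ends[tag] != 1
    ]

# Example usage:
#   for tag, n_start, n_end in check_region_tags("bigtable/docs/snippets.py"):
#       print("{}: {} START / {} END markers".format(tag, n_start, n_end))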
