From 8ba5fd15e0c22463b20573257f734e98815e1410 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 19 Jul 2019 13:53:40 -0700 Subject: [PATCH 001/105] Add generated samples (via synth). (#8710) --- .../v1beta1/datacatalog_lookup_entry.py | 71 +++++++++++++++++++ .../datacatalog_lookup_entry_sql_resource.py | 70 ++++++++++++++++++ 2 files changed, 141 insertions(+) create mode 100644 datacatalog/v1beta1/datacatalog_lookup_entry.py create mode 100644 datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py diff --git a/datacatalog/v1beta1/datacatalog_lookup_entry.py b/datacatalog/v1beta1/datacatalog_lookup_entry.py new file mode 100644 index 000000000000..189b4cb8edca --- /dev/null +++ b/datacatalog/v1beta1/datacatalog_lookup_entry.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# DO NOT EDIT! 
This is a generated sample ("Request", "datacatalog_lookup_entry") + +# To install the latest published package dependency, execute the following: +# pip install google-cloud-datacatalog + +# sample-metadata +# title: +# description: Lookup Entry +# usage: python3 samples/v1beta1/datacatalog_lookup_entry.py [--resource_name "[Full Resource Name]"] +import sys + +# [START datacatalog_lookup_entry] + +from google.cloud import datacatalog_v1beta1 +from google.cloud.datacatalog_v1beta1 import enums + + +def sample_lookup_entry(resource_name): + """ + Lookup Entry + + Args: + resource_name The full name of the Google Cloud Platform resource the Data + Catalog entry represents. + See: https://cloud.google.com/apis/design/resource_names#full_resource_name + Examples: + //bigquery.googleapis.com/projects/bigquery-public-data/datasets/new_york_taxi_trips/tables/taxi_zone_geom + //pubsub.googleapis.com/projects/pubsub-public-data/topics/taxirides-realtime + """ + + client = datacatalog_v1beta1.DataCatalogClient() + + # resource_name = '[Full Resource Name]' + response = client.lookup_entry(linked_resource=resource_name) + entry = response + print(u"Entry name: {}".format(entry.name)) + print(u"Entry type: {}".format(enums.EntryType(entry.type).name)) + print(u"Linked resource: {}".format(entry.linked_resource)) + + +# [END datacatalog_lookup_entry] + + +def main(): + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("--resource_name", type=str, default="[Full Resource Name]") + args = parser.parse_args() + + sample_lookup_entry(args.resource_name) + + +if __name__ == "__main__": + main() diff --git a/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py b/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py new file mode 100644 index 000000000000..f2928aafcaf8 --- /dev/null +++ b/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under 
the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# DO NOT EDIT! This is a generated sample ("Request", "datacatalog_lookup_entry_sql_resource") + +# To install the latest published package dependency, execute the following: +# pip install google-cloud-datacatalog + +# sample-metadata +# title: +# description: Lookup Entry using SQL resource +# usage: python3 samples/v1beta1/datacatalog_lookup_entry_sql_resource.py [--sql_name "[SQL Resource Name]"] +import sys + +# [START datacatalog_lookup_entry_sql_resource] + +from google.cloud import datacatalog_v1beta1 +from google.cloud.datacatalog_v1beta1 import enums + + +def sample_lookup_entry(sql_name): + """ + Lookup Entry using SQL resource + + Args: + sql_name The SQL name of the Google Cloud Platform resource the Data Catalog + entry represents. 
+ Examples: + bigquery.table.`bigquery-public-data`.new_york_taxi_trips.taxi_zone_geom + pubsub.topic.`pubsub-public-data`.`taxirides-realtime` + """ + + client = datacatalog_v1beta1.DataCatalogClient() + + # sql_name = '[SQL Resource Name]' + response = client.lookup_entry(sql_resource=sql_name) + entry = response + print(u"Entry name: {}".format(entry.name)) + print(u"Entry type: {}".format(enums.EntryType(entry.type).name)) + print(u"Linked resource: {}".format(entry.linked_resource)) + + +# [END datacatalog_lookup_entry_sql_resource] + + +def main(): + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("--sql_name", type=str, default="[SQL Resource Name]") + args = parser.parse_args() + + sample_lookup_entry(args.sql_name) + + +if __name__ == "__main__": + main() From cc0bec4012c1c0c912eb6086754a1edddf05a212 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 22 Jul 2019 10:32:52 -0700 Subject: [PATCH 002/105] Add get_entry sample (via synth). (#8725) --- datacatalog/v1beta1/datacatalog_get_entry.py | 79 ++++++++++++++++++++ 1 file changed, 79 insertions(+) create mode 100644 datacatalog/v1beta1/datacatalog_get_entry.py diff --git a/datacatalog/v1beta1/datacatalog_get_entry.py b/datacatalog/v1beta1/datacatalog_get_entry.py new file mode 100644 index 000000000000..5b5775bd7a96 --- /dev/null +++ b/datacatalog/v1beta1/datacatalog_get_entry.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# DO NOT EDIT! This is a generated sample ("Request", "datacatalog_get_entry") + +# To install the latest published package dependency, execute the following: +# pip install google-cloud-datacatalog + +# sample-metadata +# title: +# description: Get Entry +# usage: python3 samples/v1beta1/datacatalog_get_entry.py [--project_id "[Google Cloud Project ID]"] [--location_id "[Google Cloud Location ID]"] [--entry_group_id "[Entry Group ID]"] [--entry_id "[Entry ID]"] +import sys + +# [START datacatalog_get_entry] + +from google.cloud import datacatalog_v1beta1 +from google.cloud.datacatalog_v1beta1 import enums + + +def sample_get_entry(project_id, location_id, entry_group_id, entry_id): + """ + Get Entry + + Args: + project_id Your Google Cloud project ID + location_id Google Cloud region, e.g. us-central1 + entry_group_id ID of the Entry Group, e.g. @bigquery, @pubsub, my_entry_group + entry_id ID of the Entry + """ + + client = datacatalog_v1beta1.DataCatalogClient() + + # project_id = '[Google Cloud Project ID]' + # location_id = '[Google Cloud Location ID]' + # entry_group_id = '[Entry Group ID]' + # entry_id = '[Entry ID]' + name = client.entry_path(project_id, location_id, entry_group_id, entry_id) + + response = client.get_entry(name) + entry = response + print(u"Entry name: {}".format(entry.name)) + print(u"Entry type: {}".format(enums.EntryType(entry.type).name)) + print(u"Linked resource: {}".format(entry.linked_resource)) + + +# [END datacatalog_get_entry] + + +def main(): + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", type=str, default="[Google Cloud Project ID]") + parser.add_argument("--location_id", type=str, default="[Google Cloud Location ID]") + parser.add_argument("--entry_group_id", type=str, default="[Entry Group ID]") + parser.add_argument("--entry_id", type=str, default="[Entry ID]") + args = 
parser.parse_args() + + sample_get_entry( + args.project_id, args.location_id, args.entry_group_id, args.entry_id + ) + + +if __name__ == "__main__": + main() From f195ada0a6520a5e4b9b5c2437b1ab2c0af57bc1 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 26 Jul 2019 09:59:15 -0700 Subject: [PATCH 003/105] Add 'search' sample (via synth). (#8793) --- datacatalog/v1beta1/datacatalog_search.py | 92 +++++++++++++++++++++++ 1 file changed, 92 insertions(+) create mode 100644 datacatalog/v1beta1/datacatalog_search.py diff --git a/datacatalog/v1beta1/datacatalog_search.py b/datacatalog/v1beta1/datacatalog_search.py new file mode 100644 index 000000000000..928b69f52fc3 --- /dev/null +++ b/datacatalog/v1beta1/datacatalog_search.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# DO NOT EDIT! 
This is a generated sample ("RequestPagedAll", "datacatalog_search") + +# To install the latest published package dependency, execute the following: +# pip install google-cloud-datacatalog + +# sample-metadata +# title: +# description: Search Catalog +# usage: python3 samples/v1beta1/datacatalog_search.py [--include_project_id "[Google Cloud Project ID]"] [--include_gcp_public_datasets false] [--query "[String in search query syntax]"] +import sys + +# [START datacatalog_search] + +from google.cloud import datacatalog_v1beta1 +from google.cloud.datacatalog_v1beta1 import enums + + +def sample_search_catalog(include_project_id, include_gcp_public_datasets, query): + """ + Search Catalog + + Args: + include_project_id Your Google Cloud project ID. + include_gcp_public_datasets If true, include Google Cloud Platform (GCP) public + datasets in the search results. + query Your query string. + See: https://cloud.google.com/data-catalog/docs/how-to/search-reference + Example: system=bigquery type=dataset + """ + + client = datacatalog_v1beta1.DataCatalogClient() + + # include_project_id = '[Google Cloud Project ID]' + # include_gcp_public_datasets = False + # query = '[String in search query syntax]' + include_project_ids = [include_project_id] + scope = { + "include_project_ids": include_project_ids, + "include_gcp_public_datasets": include_gcp_public_datasets, + } + + # Iterate over all results + for response_item in client.search_catalog(scope, query): + print( + u"Result type: {}".format( + enums.SearchResultType(response_item.search_result_type).name + ) + ) + print(u"Result subtype: {}".format(response_item.search_result_subtype)) + print( + u"Relative resource name: {}".format(response_item.relative_resource_name) + ) + print(u"Linked resource: {}\n".format(response_item.linked_resource)) + + +# [END datacatalog_search] + + +def main(): + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument( + "--include_project_id", type=str, 
default="[Google Cloud Project ID]" + ) + parser.add_argument("--include_gcp_public_datasets", type=bool, default=False) + parser.add_argument("--query", type=str, default="[String in search query syntax]") + args = parser.parse_args() + + sample_search_catalog( + args.include_project_id, args.include_gcp_public_datasets, args.query + ) + + +if __name__ == "__main__": + main() From 6d5b3cf9585a890e86341050b1ac52084c61f6d4 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 27 Aug 2019 09:26:37 -0700 Subject: [PATCH 004/105] Remove unused import from samples (via synth). (#9110) --- datacatalog/v1beta1/datacatalog_get_entry.py | 2 -- datacatalog/v1beta1/datacatalog_lookup_entry.py | 2 -- datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py | 2 -- datacatalog/v1beta1/datacatalog_search.py | 2 -- 4 files changed, 8 deletions(-) diff --git a/datacatalog/v1beta1/datacatalog_get_entry.py b/datacatalog/v1beta1/datacatalog_get_entry.py index 5b5775bd7a96..97a4a92c8106 100644 --- a/datacatalog/v1beta1/datacatalog_get_entry.py +++ b/datacatalog/v1beta1/datacatalog_get_entry.py @@ -23,10 +23,8 @@ # title: # description: Get Entry # usage: python3 samples/v1beta1/datacatalog_get_entry.py [--project_id "[Google Cloud Project ID]"] [--location_id "[Google Cloud Location ID]"] [--entry_group_id "[Entry Group ID]"] [--entry_id "[Entry ID]"] -import sys # [START datacatalog_get_entry] - from google.cloud import datacatalog_v1beta1 from google.cloud.datacatalog_v1beta1 import enums diff --git a/datacatalog/v1beta1/datacatalog_lookup_entry.py b/datacatalog/v1beta1/datacatalog_lookup_entry.py index 189b4cb8edca..42c3c6ff4ce8 100644 --- a/datacatalog/v1beta1/datacatalog_lookup_entry.py +++ b/datacatalog/v1beta1/datacatalog_lookup_entry.py @@ -23,10 +23,8 @@ # title: # description: Lookup Entry # usage: python3 samples/v1beta1/datacatalog_lookup_entry.py [--resource_name "[Full Resource Name]"] -import sys # [START datacatalog_lookup_entry] - from google.cloud import 
datacatalog_v1beta1 from google.cloud.datacatalog_v1beta1 import enums diff --git a/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py b/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py index f2928aafcaf8..1b19a57f8189 100644 --- a/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py +++ b/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py @@ -23,10 +23,8 @@ # title: # description: Lookup Entry using SQL resource # usage: python3 samples/v1beta1/datacatalog_lookup_entry_sql_resource.py [--sql_name "[SQL Resource Name]"] -import sys # [START datacatalog_lookup_entry_sql_resource] - from google.cloud import datacatalog_v1beta1 from google.cloud.datacatalog_v1beta1 import enums diff --git a/datacatalog/v1beta1/datacatalog_search.py b/datacatalog/v1beta1/datacatalog_search.py index 928b69f52fc3..a49107de520f 100644 --- a/datacatalog/v1beta1/datacatalog_search.py +++ b/datacatalog/v1beta1/datacatalog_search.py @@ -23,10 +23,8 @@ # title: # description: Search Catalog # usage: python3 samples/v1beta1/datacatalog_search.py [--include_project_id "[Google Cloud Project ID]"] [--include_gcp_public_datasets false] [--query "[String in search query syntax]"] -import sys # [START datacatalog_search] - from google.cloud import datacatalog_v1beta1 from google.cloud.datacatalog_v1beta1 import enums From 810a09b904bb7d35ea2cb5f053bfef7110a8bb30 Mon Sep 17 00:00:00 2001 From: Ricardo Mendes Date: Fri, 1 Nov 2019 20:09:44 -0300 Subject: [PATCH 005/105] feat(datacatalog): add sample to create an entry group (#9584) * feat(datacatalog): add sample to create an entry group Closes #9583 * feat(datacatalog): add sample to create an entry group Fix pytest fixtures and synth.py. 
--- datacatalog/__init__.py | 0 datacatalog/tests/__init__.py | 0 datacatalog/tests/conftest.py | 55 ++++++++++++++++++++ datacatalog/tests/test_create_entry_group.py | 29 +++++++++++ datacatalog/v1beta1/__init__.py | 0 datacatalog/v1beta1/create_entry_group.py | 54 +++++++++++++++++++ 6 files changed, 138 insertions(+) create mode 100644 datacatalog/__init__.py create mode 100644 datacatalog/tests/__init__.py create mode 100644 datacatalog/tests/conftest.py create mode 100644 datacatalog/tests/test_create_entry_group.py create mode 100644 datacatalog/v1beta1/__init__.py create mode 100644 datacatalog/v1beta1/create_entry_group.py diff --git a/datacatalog/__init__.py b/datacatalog/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/datacatalog/tests/__init__.py b/datacatalog/tests/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/datacatalog/tests/conftest.py b/datacatalog/tests/conftest.py new file mode 100644 index 000000000000..b147413db588 --- /dev/null +++ b/datacatalog/tests/conftest.py @@ -0,0 +1,55 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import datetime +import uuid + +import pytest + +import google.auth +from google.cloud import datacatalog_v1beta1 + + +@pytest.fixture(scope="session") +def client(credentials): + return datacatalog_v1beta1.DataCatalogClient(credentials=credentials) + + +@pytest.fixture(scope="session") +def default_credentials(): + return google.auth.default() + + +@pytest.fixture(scope="session") +def credentials(default_credentials): + return default_credentials[0] + + +@pytest.fixture(scope="session") +def project_id(default_credentials): + return default_credentials[1] + + +@pytest.fixture +def random_entry_group_id(client, project_id): + now = datetime.datetime.now() + random_entry_group_id = "example_entry_group_{}_{}".format( + now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] + ) + yield random_entry_group_id + entry_group_name = datacatalog_v1beta1.DataCatalogClient.entry_group_path( + project_id, "us-central1", random_entry_group_id + ) + client.delete_entry_group(entry_group_name) diff --git a/datacatalog/tests/test_create_entry_group.py b/datacatalog/tests/test_create_entry_group.py new file mode 100644 index 000000000000..9c8c33b8cd64 --- /dev/null +++ b/datacatalog/tests/test_create_entry_group.py @@ -0,0 +1,29 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from ..v1beta1 import create_entry_group + + +def test_create_entry_group(capsys, client, project_id, random_entry_group_id): + + create_entry_group.create_entry_group(client, project_id, random_entry_group_id) + out, err = capsys.readouterr() + assert ( + "Created entry group" + " projects/{}/locations/{}/entryGroups/{}".format( + project_id, "us-central1", random_entry_group_id + ) + in out + ) diff --git a/datacatalog/v1beta1/__init__.py b/datacatalog/v1beta1/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/datacatalog/v1beta1/create_entry_group.py b/datacatalog/v1beta1/create_entry_group.py new file mode 100644 index 000000000000..24a856d8739c --- /dev/null +++ b/datacatalog/v1beta1/create_entry_group.py @@ -0,0 +1,54 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def create_entry_group(client, project_id, entry_group_id): + + # [START datacatalog_create_entry_group_tag] + from google.cloud import datacatalog_v1beta1 + + # TODO(developer): Construct a Data Catalog client object. + # client = datacatalog_v1beta1.DataCatalogClient() + + # TODO(developer): Set entry_group_id to the ID of the + # entry group to create. + # project_id = "your-project-id" + + # TODO(developer): Specify the geographic location where the + # entry group should reside. + # Currently, Data Catalog stores metadata in the us-central1 region. 
+ location_id = "us-central1" + + # TODO(developer): Set entry_group_id to the ID of the + # entry group to create. + # entry_group_id = "your_entry_group_id" + + # Construct a full location path to be the parent of the entry group. + parent = datacatalog_v1beta1.DataCatalogClient.location_path( + project_id, location_id + ) + + # Construct a full EntryGroup object to send to the API. + entry_group = datacatalog_v1beta1.types.EntryGroup() + entry_group.display_name = "My Entry Group" + entry_group.description = "This Entry Group consists of ..." + + # Send the entry group to the API for creation. + # Raises google.api_core.exceptions.AlreadyExists if the Entry Group + # already exists within the project. + entry_group = client.create_entry_group( + parent, entry_group_id, entry_group + ) # Make an API request. + print("Created entry group {}".format(entry_group.name)) + # [END datacatalog_create_entry_group_tag] From c2ab241880231cb5829ea19eee3ee85466f58f16 Mon Sep 17 00:00:00 2001 From: Ricardo Mendes Date: Mon, 4 Nov 2019 21:35:51 -0300 Subject: [PATCH 006/105] feat(datacatalog): add sample to create a fileset entry (#9590) Fixes #9589 --- datacatalog/tests/conftest.py | 26 ++++++ .../tests/test_create_fileset_entry.py | 30 +++++++ datacatalog/v1beta1/create_fileset_entry.py | 86 +++++++++++++++++++ 3 files changed, 142 insertions(+) create mode 100644 datacatalog/tests/test_create_fileset_entry.py create mode 100644 datacatalog/v1beta1/create_fileset_entry.py diff --git a/datacatalog/tests/conftest.py b/datacatalog/tests/conftest.py index b147413db588..b0669fa0df28 100644 --- a/datacatalog/tests/conftest.py +++ b/datacatalog/tests/conftest.py @@ -53,3 +53,29 @@ def random_entry_group_id(client, project_id): project_id, "us-central1", random_entry_group_id ) client.delete_entry_group(entry_group_name) + + +@pytest.fixture +def random_entry_name(client, entry_group_name): + now = datetime.datetime.now() + random_entry_id = "example_entry_{}_{}".format( + 
now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] + ) + random_entry_name = "{}/entries/{}".format(entry_group_name, random_entry_id) + yield random_entry_name + client.delete_entry(random_entry_name) + + +@pytest.fixture +def entry_group_name(client, project_id): + now = datetime.datetime.now() + entry_group_id = "python_entry_group_sample_{}_{}".format( + now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] + ) + entry_group = client.create_entry_group( + datacatalog_v1beta1.DataCatalogClient.location_path(project_id, "us-central1"), + entry_group_id, + {}, + ) + yield entry_group.name + client.delete_entry_group(entry_group.name) diff --git a/datacatalog/tests/test_create_fileset_entry.py b/datacatalog/tests/test_create_fileset_entry.py new file mode 100644 index 000000000000..8d0bc28fd07f --- /dev/null +++ b/datacatalog/tests/test_create_fileset_entry.py @@ -0,0 +1,30 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import re + +from ..v1beta1 import create_fileset_entry + + +def test_create_fileset_entry(capsys, client, random_entry_name): + + entry_name_pattern = "(?P.+?)/entries/(?P.+?$)" + entry_name_matches = re.match(entry_name_pattern, random_entry_name) + entry_group_name = entry_name_matches.group("entry_group_name") + entry_id = entry_name_matches.group("entry_id") + + create_fileset_entry.create_fileset_entry(client, entry_group_name, entry_id) + out, err = capsys.readouterr() + assert "Created entry {}".format(random_entry_name) in out diff --git a/datacatalog/v1beta1/create_fileset_entry.py b/datacatalog/v1beta1/create_fileset_entry.py new file mode 100644 index 000000000000..6cc275655988 --- /dev/null +++ b/datacatalog/v1beta1/create_fileset_entry.py @@ -0,0 +1,86 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def create_fileset_entry(client, entry_group_name, entry_id): + + # [START datacatalog_create_fileset_tag] + from google.cloud import datacatalog_v1beta1 + + # TODO(developer): Construct a Data Catalog client object. + # client = datacatalog_v1beta1.DataCatalogClient() + + # TODO(developer): Set entry_group_name to the Name of the entry group + # the entry will belong. + # entry_group_name = "your_entry_group_name" + + # TODO(developer): Set entry_id to the ID of the entry to create. + # entry_id = "your_entry_id" + + # Construct a full Entry object to send to the API. 
+ entry = datacatalog_v1beta1.types.Entry() + entry.display_name = "My Fileset" + entry.description = "This Fileset consists of ..." + entry.gcs_fileset_spec.file_patterns.append("gs://my_bucket/*") + entry.type = datacatalog_v1beta1.enums.EntryType.FILESET + + # Create the Schema, for example when you have a csv file. + columns = [] + columns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="first_name", + description="First name", + mode="REQUIRED", + type="STRING", + ) + ) + + columns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="last_name", description="Last name", mode="REQUIRED", type="STRING" + ) + ) + + # Create sub columns for the addresses parent column + subcolumns = [] + subcolumns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="city", description="City", mode="NULLABLE", type="STRING" + ) + ) + + subcolumns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="state", description="State", mode="NULLABLE", type="STRING" + ) + ) + + columns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="addresses", + description="Addresses", + mode="REPEATED", + subcolumns=subcolumns, + type="RECORD", + ) + ) + + entry.schema.columns.extend(columns) + + # Send the entry to the API for creation. + # Raises google.api_core.exceptions.AlreadyExists if the Entry already + # exists within the project. 
+ entry = client.create_entry(entry_group_name, entry_id, entry) + print("Created entry {}".format(entry.name)) + # [END datacatalog_create_fileset_tag] From b99022ba28a805ea7a37332361fdd589245b35b3 Mon Sep 17 00:00:00 2001 From: Marcelo Costa Date: Wed, 18 Dec 2019 15:17:34 -0300 Subject: [PATCH 007/105] feat(datacatalog): add sample for create a fileset entry quickstart (#9977) --- datacatalog/quickstart/__init__.py | 0 .../create_fileset_entry_quickstart.py | 115 ++++++++++++++++++ datacatalog/tests/conftest.py | 13 ++ datacatalog/tests/quickstart/__init__.py | 0 .../test_create_fileset_entry_quickstart.py | 40 ++++++ 5 files changed, 168 insertions(+) create mode 100644 datacatalog/quickstart/__init__.py create mode 100644 datacatalog/quickstart/create_fileset_entry_quickstart.py create mode 100644 datacatalog/tests/quickstart/__init__.py create mode 100644 datacatalog/tests/quickstart/test_create_fileset_entry_quickstart.py diff --git a/datacatalog/quickstart/__init__.py b/datacatalog/quickstart/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/datacatalog/quickstart/create_fileset_entry_quickstart.py b/datacatalog/quickstart/create_fileset_entry_quickstart.py new file mode 100644 index 000000000000..55b0af59e689 --- /dev/null +++ b/datacatalog/quickstart/create_fileset_entry_quickstart.py @@ -0,0 +1,115 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +def create_fileset_entry_quickstart(client, project_id, entry_group_id, entry_id): + + # [START datacatalog_create_fileset_quickstart_tag] + # Import required modules. + from google.cloud import datacatalog_v1beta1 + + # TODO(developer): Construct a Data Catalog client object. + # client = datacatalog_v1beta1.DataCatalogClient() + + # TODO(developer): Set project_id to your + # Google Cloud Platform project ID the entry will belong. + # project_id = "your-project-id" + + # TODO(developer): Specify the geographic location where the + # entry should reside. + # Currently, Data Catalog stores metadata in the us-central1 region. + location_id = "us-central1" + + # TODO(developer): Set entry_group_id to the ID of the entry group + # the entry will belong. + # entry_group_id = "your_entry_group_id" + + # TODO(developer): Set entry_id to the ID of the entry to create. + # entry_id = "your_entry_id" + + # Create an Entry Group. + # Construct a full Entry Group object to send to the API. + entry_group_obj = datacatalog_v1beta1.types.EntryGroup() + entry_group_obj.display_name = "My Fileset Entry Group" + entry_group_obj.description = "This Entry Group consists of ...." + + # Send the Entry Group to the API for creation. + # Raises google.api_core.exceptions.AlreadyExists if the Entry Group + # already exists within the project. + entry_group = client.create_entry_group( + parent=datacatalog_v1beta1.DataCatalogClient.location_path( + project_id, location_id + ), + entry_group_id=entry_group_id, + entry_group=entry_group_obj, + ) + print("Created entry group {}".format(entry_group.name)) + + # Create a Fileset Entry. + # Construct a full Entry object to send to the API. + entry = datacatalog_v1beta1.types.Entry() + entry.display_name = "My Fileset" + entry.description = "This Fileset consists of ..." 
+ entry.gcs_fileset_spec.file_patterns.append("gs://cloud-samples-data/*") + entry.type = datacatalog_v1beta1.enums.EntryType.FILESET + + # Create the Schema, for example when you have a csv file. + columns = [] + columns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="first_name", + description="First name", + mode="REQUIRED", + type="STRING", + ) + ) + + columns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="last_name", description="Last name", mode="REQUIRED", type="STRING" + ) + ) + + # Create sub columns for the addresses parent column + subcolumns = [] + subcolumns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="city", description="City", mode="NULLABLE", type="STRING" + ) + ) + + subcolumns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="state", description="State", mode="NULLABLE", type="STRING" + ) + ) + + columns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="addresses", + description="Addresses", + mode="REPEATED", + subcolumns=subcolumns, + type="RECORD", + ) + ) + + entry.schema.columns.extend(columns) + + # Send the entry to the API for creation. + # Raises google.api_core.exceptions.AlreadyExists if the Entry already + # exists within the project. 
+ entry = client.create_entry(entry_group.name, entry_id, entry) + print("Created entry {}".format(entry.name)) + # [END datacatalog_create_fileset_quickstart_tag] diff --git a/datacatalog/tests/conftest.py b/datacatalog/tests/conftest.py index b0669fa0df28..75e6753ff446 100644 --- a/datacatalog/tests/conftest.py +++ b/datacatalog/tests/conftest.py @@ -42,6 +42,19 @@ def project_id(default_credentials): return default_credentials[1] +@pytest.fixture +def random_entry_id(client, project_id, random_entry_group_id): + now = datetime.datetime.now() + random_entry_id = "example_entry_{}_{}".format( + now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] + ) + yield random_entry_id + entry_name = datacatalog_v1beta1.DataCatalogClient.entry_path( + project_id, "us-central1", random_entry_group_id, random_entry_id + ) + client.delete_entry(entry_name) + + @pytest.fixture def random_entry_group_id(client, project_id): now = datetime.datetime.now() diff --git a/datacatalog/tests/quickstart/__init__.py b/datacatalog/tests/quickstart/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/datacatalog/tests/quickstart/test_create_fileset_entry_quickstart.py b/datacatalog/tests/quickstart/test_create_fileset_entry_quickstart.py new file mode 100644 index 000000000000..769d034fac4a --- /dev/null +++ b/datacatalog/tests/quickstart/test_create_fileset_entry_quickstart.py @@ -0,0 +1,40 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from google.cloud import datacatalog_v1beta1 + +from ...quickstart import create_fileset_entry_quickstart + + +def test_create_fileset_entry_quickstart( + capsys, client, project_id, random_entry_group_id, random_entry_id +): + + create_fileset_entry_quickstart.create_fileset_entry_quickstart( + client, project_id, random_entry_group_id, random_entry_id + ) + out, err = capsys.readouterr() + assert ( + "Created entry group" + " projects/{}/locations/{}/entryGroups/{}".format( + project_id, "us-central1", random_entry_group_id + ) + in out + ) + + expected_entry_name = datacatalog_v1beta1.DataCatalogClient.entry_path( + project_id, "us-central1", random_entry_group_id, random_entry_id + ) + + assert "Created entry {}".format(expected_entry_name) in out From 305260a38167cd7ad6b94efb661b5bc6d17fe4bc Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 29 Jan 2020 11:22:06 -0800 Subject: [PATCH 008/105] feat(datacatalog): undeprecate resource name helper methods, bump copyright year to 2020, tweak docstring formatting (via synth) (#10228) --- datacatalog/v1beta1/datacatalog_get_entry.py | 2 +- datacatalog/v1beta1/datacatalog_lookup_entry.py | 2 +- datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py | 2 +- datacatalog/v1beta1/datacatalog_search.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/datacatalog/v1beta1/datacatalog_get_entry.py b/datacatalog/v1beta1/datacatalog_get_entry.py index 97a4a92c8106..fcd8b2096c7e 100644 --- a/datacatalog/v1beta1/datacatalog_get_entry.py +++ b/datacatalog/v1beta1/datacatalog_get_entry.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/datacatalog/v1beta1/datacatalog_lookup_entry.py b/datacatalog/v1beta1/datacatalog_lookup_entry.py index 42c3c6ff4ce8..7920df16bf2f 100644 --- a/datacatalog/v1beta1/datacatalog_lookup_entry.py +++ b/datacatalog/v1beta1/datacatalog_lookup_entry.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py b/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py index 1b19a57f8189..9656759ef4bd 100644 --- a/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py +++ b/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/datacatalog/v1beta1/datacatalog_search.py b/datacatalog/v1beta1/datacatalog_search.py index a49107de520f..c4c1798c1cc9 100644 --- a/datacatalog/v1beta1/datacatalog_search.py +++ b/datacatalog/v1beta1/datacatalog_search.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
From 0092efcc9ffc5b8ecffd33c5c23e4cb860bab354 Mon Sep 17 00:00:00 2001 From: Ricardo Mendes <50331050+ricardosm-cit@users.noreply.github.com> Date: Fri, 10 May 2019 19:26:19 -0300 Subject: [PATCH 009/105] Add samples for Data Catalog lookup_entry [(#2148)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2148) * Add samples for Data Catalog lookup_entry * Add tests for Data Catalog lookup_entry * Add samples for lookup_entry by SQL Resource * Add README.rst * Improve command line interface * Removed the "lookup-" prefix from commands * Handle the --sql-resource optional argument by subparsers * Refer to GCP public assets in tests --- datacatalog/snippets/README.rst | 121 +++++++++++++++++ datacatalog/snippets/README.rst.in | 23 ++++ datacatalog/snippets/lookup_entry.py | 150 ++++++++++++++++++++++ datacatalog/snippets/lookup_entry_test.py | 53 ++++++++ datacatalog/snippets/requirements.txt | 1 + 5 files changed, 348 insertions(+) create mode 100644 datacatalog/snippets/README.rst create mode 100644 datacatalog/snippets/README.rst.in create mode 100644 datacatalog/snippets/lookup_entry.py create mode 100644 datacatalog/snippets/lookup_entry_test.py create mode 100644 datacatalog/snippets/requirements.txt diff --git a/datacatalog/snippets/README.rst b/datacatalog/snippets/README.rst new file mode 100644 index 000000000000..ba214203558c --- /dev/null +++ b/datacatalog/snippets/README.rst @@ -0,0 +1,121 @@ +.. This file is automatically generated. Do not edit this file directly. + +Google Cloud Data Catalog Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=datacatalog/cloud-client/README.rst + + +This directory contains samples for Google Cloud Data Catalog. 
`Google Cloud Data Catalog`_ is a fully managed and scalable metadata management service that empowers organizations to quickly discover, manage, and understand all their data in Google Cloud. + + + + +.. _Google Cloud Data Catalog: https://cloud.google.com/data-catalog/docs + +Setup +------------------------------------------------------------------------------- + + +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started + +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ + +Samples +------------------------------------------------------------------------------- + +Lookup entry ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=datacatalog/cloud-client/lookup_entry.py,datacatalog/cloud-client/README.rst + + + + +To run this sample: + +.. code-block:: bash + + $ python lookup_entry.py + + usage: lookup_entry.py [-h] + project_id + {bigquery-dataset,bigquery-table,pubsub-topic} ... + + This application demonstrates how to perform basic operations on entries + with the Cloud Data Catalog API. + + For more information, see the README.md under /datacatalog and the + documentation at https://cloud.google.com/data-catalog/docs. + + positional arguments: + project_id Your Google Cloud project ID + {bigquery-dataset,bigquery-table,pubsub-topic} + bigquery-dataset Retrieves Data Catalog entry for the given BigQuery + Dataset. + bigquery-table Retrieves Data Catalog entry for the given BigQuery + Table. + pubsub-topic Retrieves Data Catalog entry for the given Pub/Sub + Topic. + + optional arguments: + -h, --help show this help message and exit + + + + + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + + +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/datacatalog/snippets/README.rst.in b/datacatalog/snippets/README.rst.in new file mode 100644 index 000000000000..704d55a5f9f0 --- /dev/null +++ b/datacatalog/snippets/README.rst.in @@ -0,0 +1,23 @@ +# This file is used to generate README.rst + +product: + name: Google Cloud Data Catalog + short_name: Data Catalog + url: https://cloud.google.com/data-catalog/docs + description: > + `Google Cloud Data Catalog`_ is a fully managed and scalable metadata + management service that empowers organizations to quickly discover, manage, + and understand all their data in Google Cloud. + +setup: +- auth +- install_deps + +samples: +- name: Lookup entry + file: lookup_entry.py + show_help: true + +cloud_client_library: true + +folder: datacatalog/cloud-client diff --git a/datacatalog/snippets/lookup_entry.py b/datacatalog/snippets/lookup_entry.py new file mode 100644 index 000000000000..a186408fe560 --- /dev/null +++ b/datacatalog/snippets/lookup_entry.py @@ -0,0 +1,150 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to perform basic operations on entries +with the Cloud Data Catalog API. + +For more information, see the README.md under /datacatalog and the +documentation at https://cloud.google.com/data-catalog/docs. 
+""" + +import argparse + + +def lookup_bigquery_dataset(project_id, dataset_id): + """Retrieves Data Catalog entry for the given BigQuery Dataset.""" + from google.cloud import datacatalog_v1beta1 + + datacatalog = datacatalog_v1beta1.DataCatalogClient() + + resource_name = '//bigquery.googleapis.com/projects/{}/datasets/{}'\ + .format(project_id, dataset_id) + + return datacatalog.lookup_entry(linked_resource=resource_name) + + +def lookup_bigquery_dataset_sql_resource(project_id, dataset_id): + """Retrieves Data Catalog entry for the given BigQuery Dataset by + sql_resource. + """ + from google.cloud import datacatalog_v1beta1 + + datacatalog = datacatalog_v1beta1.DataCatalogClient() + + sql_resource = 'bigquery.dataset.`{}`.`{}`'.format(project_id, dataset_id) + + return datacatalog.lookup_entry(sql_resource=sql_resource) + + +def lookup_bigquery_table(project_id, dataset_id, table_id): + """Retrieves Data Catalog entry for the given BigQuery Table.""" + from google.cloud import datacatalog_v1beta1 + + datacatalog = datacatalog_v1beta1.DataCatalogClient() + + resource_name = '//bigquery.googleapis.com/projects/{}/datasets/{}' \ + '/tables/{}'\ + .format(project_id, dataset_id, table_id) + + return datacatalog.lookup_entry(linked_resource=resource_name) + + +def lookup_bigquery_table_sql_resource(project_id, dataset_id, table_id): + """Retrieves Data Catalog entry for the given BigQuery Table by + sql_resource. 
+ """ + from google.cloud import datacatalog_v1beta1 + + datacatalog = datacatalog_v1beta1.DataCatalogClient() + + sql_resource = 'bigquery.table.`{}`.`{}`.`{}`'.format( + project_id, dataset_id, table_id) + + return datacatalog.lookup_entry(sql_resource=sql_resource) + + +def lookup_pubsub_topic(project_id, topic_id): + """Retrieves Data Catalog entry for the given Pub/Sub Topic.""" + from google.cloud import datacatalog_v1beta1 + + datacatalog = datacatalog_v1beta1.DataCatalogClient() + + resource_name = '//pubsub.googleapis.com/projects/{}/topics/{}'\ + .format(project_id, topic_id) + + return datacatalog.lookup_entry(linked_resource=resource_name) + + +def lookup_pubsub_topic_sql_resource(project_id, topic_id): + """Retrieves Data Catalog entry for the given Pub/Sub Topic by + sql_resource. + """ + from google.cloud import datacatalog_v1beta1 + + datacatalog = datacatalog_v1beta1.DataCatalogClient() + + sql_resource = 'pubsub.topic.`{}`.`{}`'.format(project_id, topic_id) + + return datacatalog.lookup_entry(sql_resource=sql_resource) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter + ) + + parser.add_argument('project_id', help='Your Google Cloud project ID') + + subparsers = parser.add_subparsers(dest='command') + + bigquery_dataset_parser = subparsers.add_parser( + 'bigquery-dataset', help=lookup_bigquery_dataset.__doc__) + bigquery_dataset_parser.add_argument('dataset_id') + bigquery_dataset_parser.add_argument('--sql-resource', action='store_true', + help='Perform lookup by SQL Resource') + + bigquery_table_parser = subparsers.add_parser( + 'bigquery-table', help=lookup_bigquery_table.__doc__) + bigquery_table_parser.add_argument('dataset_id') + bigquery_table_parser.add_argument('table_id') + bigquery_table_parser.add_argument('--sql-resource', action='store_true', + help='Perform lookup by SQL Resource') + + pubsub_topic_parser = subparsers.add_parser( + 
'pubsub-topic', help=lookup_pubsub_topic.__doc__) + pubsub_topic_parser.add_argument('topic_id') + pubsub_topic_parser.add_argument('--sql-resource', action='store_true', + help='Perform lookup by SQL Resource') + + args = parser.parse_args() + + entry = None + + if args.command == 'bigquery-dataset': + lookup_method = lookup_bigquery_dataset_sql_resource \ + if args.sql_resource else lookup_bigquery_dataset + entry = lookup_method(args.project_id, args.dataset_id) + elif args.command == 'bigquery-table': + lookup_method = lookup_bigquery_table_sql_resource \ + if args.sql_resource else lookup_bigquery_table + entry = lookup_method(args.project_id, args.dataset_id, args.table_id) + elif args.command == 'pubsub-topic': + lookup_method = lookup_pubsub_topic_sql_resource \ + if args.sql_resource else lookup_pubsub_topic + entry = lookup_method(args.project_id, args.topic_id) + + print(entry.name) diff --git a/datacatalog/snippets/lookup_entry_test.py b/datacatalog/snippets/lookup_entry_test.py new file mode 100644 index 000000000000..2030cb072197 --- /dev/null +++ b/datacatalog/snippets/lookup_entry_test.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import lookup_entry + +BIGQUERY_PROJECT = 'bigquery-public-data' +BIGQUERY_DATASET = 'new_york_taxi_trips' +BIGQUERY_TABLE = 'taxi_zone_geom' + +PUBSUB_PROJECT = 'pubsub-public-data' +PUBSUB_TOPIC = 'taxirides-realtime' + + +def test_lookup_bigquery_dataset(): + assert lookup_entry.lookup_bigquery_dataset( + BIGQUERY_PROJECT, BIGQUERY_DATASET) + + +def test_lookup_bigquery_dataset_sql_resource(): + assert lookup_entry.lookup_bigquery_dataset_sql_resource( + BIGQUERY_PROJECT, BIGQUERY_DATASET) + + +def test_lookup_bigquery_table(): + assert lookup_entry.lookup_bigquery_table( + BIGQUERY_PROJECT, BIGQUERY_DATASET, BIGQUERY_TABLE) + + +def test_lookup_bigquery_table_sql_resource(): + assert lookup_entry.lookup_bigquery_table_sql_resource( + BIGQUERY_PROJECT, BIGQUERY_DATASET, BIGQUERY_TABLE) + + +def test_lookup_pubsub_topic(): + assert lookup_entry.lookup_pubsub_topic(PUBSUB_PROJECT, PUBSUB_TOPIC) + + +def test_lookup_pubsub_topic_sql_resource(): + assert lookup_entry.lookup_pubsub_topic_sql_resource( + PUBSUB_PROJECT, PUBSUB_TOPIC) diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt new file mode 100644 index 000000000000..7882537c470c --- /dev/null +++ b/datacatalog/snippets/requirements.txt @@ -0,0 +1 @@ +google-cloud-datacatalog==0.1.0 From 5c4fdea6e56b4de133c46172d177d7ff085514bf Mon Sep 17 00:00:00 2001 From: Ricardo Mendes <50331050+ricardosm-cit@users.noreply.github.com> Date: Tue, 21 May 2019 13:24:43 -0300 Subject: [PATCH 010/105] Add region tags to support Data Catalog docs [(#2169)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2169) --- datacatalog/snippets/lookup_entry.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/datacatalog/snippets/lookup_entry.py b/datacatalog/snippets/lookup_entry.py index a186408fe560..b6c1e321f7f4 100644 --- a/datacatalog/snippets/lookup_entry.py +++ b/datacatalog/snippets/lookup_entry.py @@ -25,6 +25,7 @@ def lookup_bigquery_dataset(project_id, 
dataset_id): + # [START datacatalog_lookup_dataset] """Retrieves Data Catalog entry for the given BigQuery Dataset.""" from google.cloud import datacatalog_v1beta1 @@ -34,6 +35,7 @@ def lookup_bigquery_dataset(project_id, dataset_id): .format(project_id, dataset_id) return datacatalog.lookup_entry(linked_resource=resource_name) + # [END datacatalog_lookup_dataset] def lookup_bigquery_dataset_sql_resource(project_id, dataset_id): From 12f0dcdae4f70b7c967eb26c67d4986fdfc32351 Mon Sep 17 00:00:00 2001 From: Gus Class Date: Wed, 23 Oct 2019 16:27:00 -0700 Subject: [PATCH 011/105] Adds updates including compute [(#2436)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2436) * Adds updates including compute * Python 2 compat pytest * Fixing weird \r\n issue from GH merge * Put asset tests back in * Re-add pod operator test * Hack parameter for k8s pod operator --- datacatalog/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 7882537c470c..dbd7eccd9666 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==0.1.0 +google-cloud-datacatalog==0.3.0 From 540fd59f3cef5b22a78980b42f181683b19d10a5 Mon Sep 17 00:00:00 2001 From: DPEBot Date: Fri, 20 Dec 2019 17:41:38 -0800 Subject: [PATCH 012/105] Auto-update dependencies. [(#2005)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2005) * Auto-update dependencies. * Revert update of appengine/flexible/datastore. 
* revert update of appengine/flexible/scipy * revert update of bigquery/bqml * revert update of bigquery/cloud-client * revert update of bigquery/datalab-migration * revert update of bigtable/quickstart * revert update of compute/api * revert update of container_registry/container_analysis * revert update of dataflow/run_template * revert update of datastore/cloud-ndb * revert update of dialogflow/cloud-client * revert update of dlp * revert update of functions/imagemagick * revert update of functions/ocr/app * revert update of healthcare/api-client/fhir * revert update of iam/api-client * revert update of iot/api-client/gcs_file_to_device * revert update of iot/api-client/mqtt_example * revert update of language/automl * revert update of run/image-processing * revert update of vision/automl * revert update testing/requirements.txt * revert update of vision/cloud-client/detect * revert update of vision/cloud-client/product_search * revert update of jobs/v2/api_client * revert update of jobs/v3/api_client * revert update of opencensus * revert update of translate/cloud-client * revert update to speech/cloud-client Co-authored-by: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Co-authored-by: Doug Mahugh --- datacatalog/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index dbd7eccd9666..8a41223bc3ff 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==0.3.0 +google-cloud-datacatalog==0.5.0 From e3f508e4c0edd84212a15728a7eb4969abc2d000 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 10 Mar 2020 20:20:03 +0100 Subject: [PATCH 013/105] chore(deps): update dependency google-cloud-datacatalog to v0.6.0 [(#3069)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3069) This PR contains the following updates: | Package | 
Update | Change | |---|---|---| | [google-cloud-datacatalog](https://togithub.com/googleapis/python-datacatalog) | minor | `==0.5.0` -> `==0.6.0` | --- ### Release Notes
googleapis/python-datacatalog ### [`v0.6.0`](https://togithub.com/googleapis/python-datacatalog/blob/master/CHANGELOG.md#​060httpswwwgithubcomgoogleapispython-datacatalogcomparev050v060-2020-02-24) [Compare Source](https://togithub.com/googleapis/python-datacatalog/compare/v0.5.0...v0.6.0) ##### Features - **datacatalog:** add sample for create a fileset entry quickstart ([#​9977](https://www.github.com/googleapis/python-datacatalog/issues/9977)) ([16eaf4b](https://www.github.com/googleapis/python-datacatalog/commit/16eaf4b16cdc0ce7361afb1d8dac666cea2a9db0)) - **datacatalog:** undeprecate resource name helper methods, bump copyright year to 2020, tweak docstring formatting (via synth) ([#​10228](https://www.github.com/googleapis/python-datacatalog/issues/10228)) ([84e5e7c](https://www.github.com/googleapis/python-datacatalog/commit/84e5e7c340fa189ce4cffca4fdee82cc7ded9f70)) - add `list_entry_groups`, `list_entries`, `update_entry_group` methods to v1beta1 (via synth) ([#​6](https://www.github.com/googleapis/python-datacatalog/issues/6)) ([b51902e](https://www.github.com/googleapis/python-datacatalog/commit/b51902e26d590f52c9412756a178265850b7d516)) ##### Bug Fixes - **datacatalog:** deprecate resource name helper methods (via synth) ([#​9831](https://www.github.com/googleapis/python-datacatalog/issues/9831)) ([22db3f0](https://www.github.com/googleapis/python-datacatalog/commit/22db3f0683b8aca544cd96c0063dcc8157ad7335))
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#GoogleCloudPlatform/python-docs-samples). --- datacatalog/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 8a41223bc3ff..35a49a56cd06 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==0.5.0 +google-cloud-datacatalog==0.6.0 From 377744ecb7482d390fdbea630626c483fad83ad0 Mon Sep 17 00:00:00 2001 From: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Date: Wed, 1 Apr 2020 19:11:50 -0700 Subject: [PATCH 014/105] Simplify noxfile setup. [(#2806)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2806) * chore(deps): update dependency requests to v2.23.0 * Simplify noxfile and add version control. * Configure appengine/standard to only test Python 2.7. * Update Kokokro configs to match noxfile. * Add requirements-test to each folder. * Remove Py2 versions from everything execept appengine/standard. * Remove conftest.py. * Remove appengine/standard/conftest.py * Remove 'no-sucess-flaky-report' from pytest.ini. * Add GAE SDK back to appengine/standard tests. * Fix typo. * Roll pytest to python 2 version. * Add a bunch of testing requirements. * Remove typo. * Add appengine lib directory back in. * Add some additional requirements. 
* Fix issue with flake8 args. * Even more requirements. * Readd appengine conftest.py. * Add a few more requirements. * Even more Appengine requirements. * Add webtest for appengine/standard/mailgun. * Add some additional requirements. * Add workaround for issue with mailjet-rest. * Add responses for appengine/standard/mailjet. Co-authored-by: Renovate Bot --- datacatalog/snippets/requirements-test.txt | 1 + 1 file changed, 1 insertion(+) create mode 100644 datacatalog/snippets/requirements-test.txt diff --git a/datacatalog/snippets/requirements-test.txt b/datacatalog/snippets/requirements-test.txt new file mode 100644 index 000000000000..781d4326c947 --- /dev/null +++ b/datacatalog/snippets/requirements-test.txt @@ -0,0 +1 @@ +pytest==5.3.2 From ab8f43a028e86969603d7a79208aa99e3a22a8c9 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 10 Apr 2020 01:38:56 +0200 Subject: [PATCH 015/105] Update dependency google-cloud-datacatalog to v0.7.0 [(#3320)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3320) Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> --- datacatalog/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 35a49a56cd06..11fe8151a0e8 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==0.6.0 +google-cloud-datacatalog==0.7.0 From 270481fe06e9cc589f53cb5f6428ae17226005f9 Mon Sep 17 00:00:00 2001 From: Marcelo Costa Date: Thu, 23 Apr 2020 01:39:40 -0300 Subject: [PATCH 016/105] Update Data Catalog samples to V1 [(#3382)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3382) Co-authored-by: Takashi Matsuo --- datacatalog/snippets/lookup_entry.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/datacatalog/snippets/lookup_entry.py 
b/datacatalog/snippets/lookup_entry.py index b6c1e321f7f4..4b6e8c5873fb 100644 --- a/datacatalog/snippets/lookup_entry.py +++ b/datacatalog/snippets/lookup_entry.py @@ -27,9 +27,9 @@ def lookup_bigquery_dataset(project_id, dataset_id): # [START datacatalog_lookup_dataset] """Retrieves Data Catalog entry for the given BigQuery Dataset.""" - from google.cloud import datacatalog_v1beta1 + from google.cloud import datacatalog_v1 - datacatalog = datacatalog_v1beta1.DataCatalogClient() + datacatalog = datacatalog_v1.DataCatalogClient() resource_name = '//bigquery.googleapis.com/projects/{}/datasets/{}'\ .format(project_id, dataset_id) @@ -42,9 +42,9 @@ def lookup_bigquery_dataset_sql_resource(project_id, dataset_id): """Retrieves Data Catalog entry for the given BigQuery Dataset by sql_resource. """ - from google.cloud import datacatalog_v1beta1 + from google.cloud import datacatalog_v1 - datacatalog = datacatalog_v1beta1.DataCatalogClient() + datacatalog = datacatalog_v1.DataCatalogClient() sql_resource = 'bigquery.dataset.`{}`.`{}`'.format(project_id, dataset_id) @@ -53,9 +53,9 @@ def lookup_bigquery_dataset_sql_resource(project_id, dataset_id): def lookup_bigquery_table(project_id, dataset_id, table_id): """Retrieves Data Catalog entry for the given BigQuery Table.""" - from google.cloud import datacatalog_v1beta1 + from google.cloud import datacatalog_v1 - datacatalog = datacatalog_v1beta1.DataCatalogClient() + datacatalog = datacatalog_v1.DataCatalogClient() resource_name = '//bigquery.googleapis.com/projects/{}/datasets/{}' \ '/tables/{}'\ @@ -68,9 +68,9 @@ def lookup_bigquery_table_sql_resource(project_id, dataset_id, table_id): """Retrieves Data Catalog entry for the given BigQuery Table by sql_resource. 
""" - from google.cloud import datacatalog_v1beta1 + from google.cloud import datacatalog_v1 - datacatalog = datacatalog_v1beta1.DataCatalogClient() + datacatalog = datacatalog_v1.DataCatalogClient() sql_resource = 'bigquery.table.`{}`.`{}`.`{}`'.format( project_id, dataset_id, table_id) @@ -80,9 +80,9 @@ def lookup_bigquery_table_sql_resource(project_id, dataset_id, table_id): def lookup_pubsub_topic(project_id, topic_id): """Retrieves Data Catalog entry for the given Pub/Sub Topic.""" - from google.cloud import datacatalog_v1beta1 + from google.cloud import datacatalog_v1 - datacatalog = datacatalog_v1beta1.DataCatalogClient() + datacatalog = datacatalog_v1.DataCatalogClient() resource_name = '//pubsub.googleapis.com/projects/{}/topics/{}'\ .format(project_id, topic_id) @@ -94,9 +94,9 @@ def lookup_pubsub_topic_sql_resource(project_id, topic_id): """Retrieves Data Catalog entry for the given Pub/Sub Topic by sql_resource. """ - from google.cloud import datacatalog_v1beta1 + from google.cloud import datacatalog_v1 - datacatalog = datacatalog_v1beta1.DataCatalogClient() + datacatalog = datacatalog_v1.DataCatalogClient() sql_resource = 'pubsub.topic.`{}`.`{}`'.format(project_id, topic_id) From 23dbd9bfbddec1fa61ff21bcd25f5ca5d895de87 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 21 May 2020 01:31:44 +0200 Subject: [PATCH 017/105] chore(deps): update dependency google-cloud-datacatalog to v0.8.0 [(#3850)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3850) --- datacatalog/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 11fe8151a0e8..152bb285909d 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==0.7.0 +google-cloud-datacatalog==0.8.0 From baa8c1c1482638d666703fe35c7d3528c39cdb9f Mon Sep 17 00:00:00 2001 From: 
WhiteSource Renovate Date: Fri, 19 Jun 2020 05:37:02 +0200 Subject: [PATCH 018/105] Update dependency google-cloud-datacatalog to v1 [(#4115)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4115) --- datacatalog/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 152bb285909d..bf6cb06eb859 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==0.8.0 +google-cloud-datacatalog==1.0.0 From b4fb8229f351d553288bff4a8996bd04faf982a8 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 13 Jul 2020 00:46:30 +0200 Subject: [PATCH 019/105] chore(deps): update dependency pytest to v5.4.3 [(#4279)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4279) * chore(deps): update dependency pytest to v5.4.3 * specify pytest for python 2 in appengine Co-authored-by: Leah Cole --- datacatalog/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements-test.txt b/datacatalog/snippets/requirements-test.txt index 781d4326c947..79738af5f268 100644 --- a/datacatalog/snippets/requirements-test.txt +++ b/datacatalog/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==5.3.2 +pytest==5.4.3 From 99d4feec00af0b1639ee575f1bf2a4385cafc202 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 1 Aug 2020 21:51:00 +0200 Subject: [PATCH 020/105] Update dependency pytest to v6 [(#4390)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4390) --- datacatalog/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements-test.txt b/datacatalog/snippets/requirements-test.txt index 79738af5f268..7e460c8c866e 100644 --- a/datacatalog/snippets/requirements-test.txt +++ 
b/datacatalog/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==5.4.3 +pytest==6.0.1 From 8222b6d679d32d5c3edd90875b0cbfd9c08df87a Mon Sep 17 00:00:00 2001 From: Harikumar Devandla Date: Wed, 12 Aug 2020 15:53:12 -0700 Subject: [PATCH 021/105] chore: update templates --- datacatalog/snippets/README.rst | 22 +++- datacatalog/snippets/noxfile.py | 224 ++++++++++++++++++++++++++++++++ 2 files changed, 244 insertions(+), 2 deletions(-) create mode 100644 datacatalog/snippets/noxfile.py diff --git a/datacatalog/snippets/README.rst b/datacatalog/snippets/README.rst index ba214203558c..3476cceaf360 100644 --- a/datacatalog/snippets/README.rst +++ b/datacatalog/snippets/README.rst @@ -1,3 +1,4 @@ + .. This file is automatically generated. Do not edit this file directly. Google Cloud Data Catalog Python Samples @@ -14,10 +15,12 @@ This directory contains samples for Google Cloud Data Catalog. `Google Cloud Dat .. _Google Cloud Data Catalog: https://cloud.google.com/data-catalog/docs + Setup ------------------------------------------------------------------------------- + Authentication ++++++++++++++ @@ -28,6 +31,9 @@ credentials for applications. .. _Authentication Getting Started Guide: https://cloud.google.com/docs/authentication/getting-started + + + Install Dependencies ++++++++++++++++++++ @@ -42,7 +48,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. +#. Create a virtualenv. Samples are compatible with Python 3.6+. .. code-block:: bash @@ -58,9 +64,15 @@ Install Dependencies .. _pip: https://pip.pypa.io/ .. 
_virtualenv: https://virtualenv.pypa.io/ + + + + + Samples ------------------------------------------------------------------------------- + Lookup entry +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -76,6 +88,7 @@ To run this sample: $ python lookup_entry.py + usage: lookup_entry.py [-h] project_id {bigquery-dataset,bigquery-table,pubsub-topic} ... @@ -103,6 +116,10 @@ To run this sample: + + + + The client library ------------------------------------------------------------------------------- @@ -118,4 +135,5 @@ to `browse the source`_ and `report issues`_. https://github.com/GoogleCloudPlatform/google-cloud-python/issues -.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py new file mode 100644 index 000000000000..ba55d7ce53ca --- /dev/null +++ b/datacatalog/snippets/noxfile.py @@ -0,0 +1,224 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import os +from pathlib import Path +import sys + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! 
+# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +# Copy `noxfile_config.py` to your directory and modify it instead. + + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + 'ignored_versions': ["2.7"], + + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + 'envs': {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append('.') + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars(): + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG['gcloud_project_env'] + # This should error out if not set. + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG['envs']) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to tested samples. +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] + +# Any default versions that should be ignored. 
+IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +# +# Style Checks +# + + +def _determine_local_import_names(start_dir): + """Determines all import names that should be considered "local". + + This is used when running the linter to insure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. +# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session): + session.install("flake8", "flake8-import-order") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + "." 
+ ] + session.run("flake8", *args) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests(session, post_install=None): + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars() + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session): + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) + + +# +# Readmegen +# + + +def _get_repo_root(): + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
+ p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session, path): + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) From f24d507ab7a23e46098f777147f286a949c0ed98 Mon Sep 17 00:00:00 2001 From: hkdevandla <60490673+hkdevandla@users.noreply.github.com> Date: Thu, 20 Aug 2020 10:28:46 -0700 Subject: [PATCH 022/105] feat: Migrate API client to Microgenerator (#54) * Add samples for Data Catalog lookup_entry [(#2148)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2148) * Add samples for Data Catalog lookup_entry * Add tests for Data Catalog lookup_entry * Add samples for lookup_entry by SQL Resource * Add README.rst * Improve command line interface * Removed the "lookup-" prefix from commands * Handle the --sql-resource optional argument by subparsers * Refer to GCP public assets in tests * Add region tags to support Data Catalog docs [(#2169)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2169) * Adds updates including compute [(#2436)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2436) * Adds updates including compute * Python 2 compat pytest * Fixing weird \r\n issue from GH merge * Put asset tests back in * Re-add pod operator test * Hack parameter for k8s pod operator * Auto-update dependencies. 
[(#2005)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2005) * Auto-update dependencies. * Revert update of appengine/flexible/datastore. * revert update of appengine/flexible/scipy * revert update of bigquery/bqml * revert update of bigquery/cloud-client * revert update of bigquery/datalab-migration * revert update of bigtable/quickstart * revert update of compute/api * revert update of container_registry/container_analysis * revert update of dataflow/run_template * revert update of datastore/cloud-ndb * revert update of dialogflow/cloud-client * revert update of dlp * revert update of functions/imagemagick * revert update of functions/ocr/app * revert update of healthcare/api-client/fhir * revert update of iam/api-client * revert update of iot/api-client/gcs_file_to_device * revert update of iot/api-client/mqtt_example * revert update of language/automl * revert update of run/image-processing * revert update of vision/automl * revert update testing/requirements.txt * revert update of vision/cloud-client/detect * revert update of vision/cloud-client/product_search * revert update of jobs/v2/api_client * revert update of jobs/v3/api_client * revert update of opencensus * revert update of translate/cloud-client * revert update to speech/cloud-client Co-authored-by: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Co-authored-by: Doug Mahugh * chore(deps): update dependency google-cloud-datacatalog to v0.6.0 [(#3069)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3069) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-cloud-datacatalog](https://togithub.com/googleapis/python-datacatalog) | minor | `==0.5.0` -> `==0.6.0` | --- ### Release Notes
googleapis/python-datacatalog ### [`v0.6.0`](https://togithub.com/googleapis/python-datacatalog/blob/master/CHANGELOG.md#​060httpswwwgithubcomgoogleapispython-datacatalogcomparev050v060-2020-02-24) [Compare Source](https://togithub.com/googleapis/python-datacatalog/compare/v0.5.0...v0.6.0) ##### Features - **datacatalog:** add sample for create a fileset entry quickstart ([#​9977](https://www.github.com/googleapis/python-datacatalog/issues/9977)) ([16eaf4b](https://www.github.com/googleapis/python-datacatalog/commit/16eaf4b16cdc0ce7361afb1d8dac666cea2a9db0)) - **datacatalog:** undeprecate resource name helper methods, bump copyright year to 2020, tweak docstring formatting (via synth) ([#​10228](https://www.github.com/googleapis/python-datacatalog/issues/10228)) ([84e5e7c](https://www.github.com/googleapis/python-datacatalog/commit/84e5e7c340fa189ce4cffca4fdee82cc7ded9f70)) - add `list_entry_groups`, `list_entries`, `update_entry_group` methods to v1beta1 (via synth) ([#​6](https://www.github.com/googleapis/python-datacatalog/issues/6)) ([b51902e](https://www.github.com/googleapis/python-datacatalog/commit/b51902e26d590f52c9412756a178265850b7d516)) ##### Bug Fixes - **datacatalog:** deprecate resource name helper methods (via synth) ([#​9831](https://www.github.com/googleapis/python-datacatalog/issues/9831)) ([22db3f0](https://www.github.com/googleapis/python-datacatalog/commit/22db3f0683b8aca544cd96c0063dcc8157ad7335))
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#GoogleCloudPlatform/python-docs-samples). * Simplify noxfile setup. [(#2806)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2806) * chore(deps): update dependency requests to v2.23.0 * Simplify noxfile and add version control. * Configure appengine/standard to only test Python 2.7. * Update Kokokro configs to match noxfile. * Add requirements-test to each folder. * Remove Py2 versions from everything execept appengine/standard. * Remove conftest.py. * Remove appengine/standard/conftest.py * Remove 'no-sucess-flaky-report' from pytest.ini. * Add GAE SDK back to appengine/standard tests. * Fix typo. * Roll pytest to python 2 version. * Add a bunch of testing requirements. * Remove typo. * Add appengine lib directory back in. * Add some additional requirements. * Fix issue with flake8 args. * Even more requirements. * Readd appengine conftest.py. * Add a few more requirements. * Even more Appengine requirements. * Add webtest for appengine/standard/mailgun. * Add some additional requirements. * Add workaround for issue with mailjet-rest. * Add responses for appengine/standard/mailjet. 
Co-authored-by: Renovate Bot * Update dependency google-cloud-datacatalog to v0.7.0 [(#3320)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3320) Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> * Update Data Catalog samples to V1 [(#3382)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3382) Co-authored-by: Takashi Matsuo * chore(deps): update dependency google-cloud-datacatalog to v0.8.0 [(#3850)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3850) * Update dependency google-cloud-datacatalog to v1 [(#4115)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4115) * chore(deps): update dependency pytest to v5.4.3 [(#4279)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4279) * chore(deps): update dependency pytest to v5.4.3 * specify pytest for python 2 in appengine Co-authored-by: Leah Cole * Update dependency pytest to v6 [(#4390)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4390) * chore: update templates * chore: update templates * feat: Migrate to use Microgenerator * feat: Migrate to use Microgenerator * feat: Migrate to use Microgenerator * Migrate API to microgenerator * Migrate API to microgenerator * Samples tests * fix samples tests * fix lint errors and test coverage metrics * docs update * fix docs * fix docs * fix docs * remove .python-version file Co-authored-by: Ricardo Mendes <50331050+ricardosm-cit@users.noreply.github.com> Co-authored-by: Gus Class Co-authored-by: DPEBot Co-authored-by: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Co-authored-by: Doug Mahugh Co-authored-by: WhiteSource Renovate Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Co-authored-by: Marcelo Costa Co-authored-by: Takashi Matsuo Co-authored-by: Leah Cole --- .../create_fileset_entry_quickstart.py | 25 ++++++++----------- datacatalog/snippets/README.rst | 
24 ++++-------------- datacatalog/snippets/lookup_entry.py | 12 ++++----- datacatalog/tests/conftest.py | 13 ++++------ datacatalog/tests/test_create_entry_group.py | 2 +- datacatalog/v1beta1/create_entry_group.py | 5 ++-- datacatalog/v1beta1/create_fileset_entry.py | 2 +- datacatalog/v1beta1/datacatalog_get_entry.py | 6 ++--- .../v1beta1/datacatalog_lookup_entry.py | 5 ++-- .../datacatalog_lookup_entry_sql_resource.py | 5 ++-- datacatalog/v1beta1/datacatalog_search.py | 2 +- 11 files changed, 38 insertions(+), 63 deletions(-) diff --git a/datacatalog/quickstart/create_fileset_entry_quickstart.py b/datacatalog/quickstart/create_fileset_entry_quickstart.py index 55b0af59e689..5e1c99f0f3d0 100644 --- a/datacatalog/quickstart/create_fileset_entry_quickstart.py +++ b/datacatalog/quickstart/create_fileset_entry_quickstart.py @@ -40,7 +40,7 @@ def create_fileset_entry_quickstart(client, project_id, entry_group_id, entry_id # Create an Entry Group. # Construct a full Entry Group object to send to the API. - entry_group_obj = datacatalog_v1beta1.types.EntryGroup() + entry_group_obj = datacatalog_v1beta1.EntryGroup() entry_group_obj.display_name = "My Fileset Entry Group" entry_group_obj.description = "This Entry Group consists of ...." @@ -48,26 +48,23 @@ def create_fileset_entry_quickstart(client, project_id, entry_group_id, entry_id # Raises google.api_core.exceptions.AlreadyExists if the Entry Group # already exists within the project. entry_group = client.create_entry_group( - parent=datacatalog_v1beta1.DataCatalogClient.location_path( + request = {'parent': datacatalog_v1beta1.DataCatalogClient.location_path( project_id, location_id - ), - entry_group_id=entry_group_id, - entry_group=entry_group_obj, - ) + ), 'entry_group_id': entry_group_id, 'entry_group': entry_group_obj}) print("Created entry group {}".format(entry_group.name)) # Create a Fileset Entry. # Construct a full Entry object to send to the API. 
- entry = datacatalog_v1beta1.types.Entry() + entry = datacatalog_v1beta1.Entry() entry.display_name = "My Fileset" entry.description = "This Fileset consists of ..." entry.gcs_fileset_spec.file_patterns.append("gs://cloud-samples-data/*") - entry.type = datacatalog_v1beta1.enums.EntryType.FILESET + entry.type = datacatalog_v1beta1.EntryType.FILESET # Create the Schema, for example when you have a csv file. columns = [] columns.append( - datacatalog_v1beta1.types.ColumnSchema( + datacatalog_v1beta1.ColumnSchema( column="first_name", description="First name", mode="REQUIRED", @@ -76,7 +73,7 @@ def create_fileset_entry_quickstart(client, project_id, entry_group_id, entry_id ) columns.append( - datacatalog_v1beta1.types.ColumnSchema( + datacatalog_v1beta1.ColumnSchema( column="last_name", description="Last name", mode="REQUIRED", type="STRING" ) ) @@ -84,19 +81,19 @@ def create_fileset_entry_quickstart(client, project_id, entry_group_id, entry_id # Create sub columns for the addresses parent column subcolumns = [] subcolumns.append( - datacatalog_v1beta1.types.ColumnSchema( + datacatalog_v1beta1.ColumnSchema( column="city", description="City", mode="NULLABLE", type="STRING" ) ) subcolumns.append( - datacatalog_v1beta1.types.ColumnSchema( + datacatalog_v1beta1.ColumnSchema( column="state", description="State", mode="NULLABLE", type="STRING" ) ) columns.append( - datacatalog_v1beta1.types.ColumnSchema( + datacatalog_v1beta1.ColumnSchema( column="addresses", description="Addresses", mode="REPEATED", @@ -110,6 +107,6 @@ def create_fileset_entry_quickstart(client, project_id, entry_group_id, entry_id # Send the entry to the API for creation. # Raises google.api_core.exceptions.AlreadyExists if the Entry already # exists within the project. 
- entry = client.create_entry(entry_group.name, entry_id, entry) + entry = client.create_entry(request = {'parent': entry_group.name, 'entry_id': entry_id, 'entry': entry}) print("Created entry {}".format(entry.name)) # [END datacatalog_create_fileset_quickstart_tag] diff --git a/datacatalog/snippets/README.rst b/datacatalog/snippets/README.rst index 3476cceaf360..343431d91532 100644 --- a/datacatalog/snippets/README.rst +++ b/datacatalog/snippets/README.rst @@ -1,4 +1,3 @@ - .. This file is automatically generated. Do not edit this file directly. Google Cloud Data Catalog Python Samples @@ -16,11 +15,13 @@ This directory contains samples for Google Cloud Data Catalog. `Google Cloud Dat .. _Google Cloud Data Catalog: https://cloud.google.com/data-catalog/docs + + + Setup ------------------------------------------------------------------------------- - Authentication ++++++++++++++ @@ -31,9 +32,6 @@ credentials for applications. .. _Authentication Getting Started Guide: https://cloud.google.com/docs/authentication/getting-started - - - Install Dependencies ++++++++++++++++++++ @@ -48,7 +46,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 3.6+. +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. .. code-block:: bash @@ -64,15 +62,9 @@ Install Dependencies .. _pip: https://pip.pypa.io/ .. _virtualenv: https://virtualenv.pypa.io/ - - - - - Samples ------------------------------------------------------------------------------- - Lookup entry +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -88,7 +80,6 @@ To run this sample: $ python lookup_entry.py - usage: lookup_entry.py [-h] project_id {bigquery-dataset,bigquery-table,pubsub-topic} ... 
@@ -116,10 +107,6 @@ To run this sample: - - - - The client library ------------------------------------------------------------------------------- @@ -135,5 +122,4 @@ to `browse the source`_ and `report issues`_. https://github.com/GoogleCloudPlatform/google-cloud-python/issues - -.. _Google Cloud SDK: https://cloud.google.com/sdk/ +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/datacatalog/snippets/lookup_entry.py b/datacatalog/snippets/lookup_entry.py index 4b6e8c5873fb..656cb97e6452 100644 --- a/datacatalog/snippets/lookup_entry.py +++ b/datacatalog/snippets/lookup_entry.py @@ -34,7 +34,7 @@ def lookup_bigquery_dataset(project_id, dataset_id): resource_name = '//bigquery.googleapis.com/projects/{}/datasets/{}'\ .format(project_id, dataset_id) - return datacatalog.lookup_entry(linked_resource=resource_name) + return datacatalog.lookup_entry(request={'linked_resource': resource_name}) # [END datacatalog_lookup_dataset] @@ -48,7 +48,7 @@ def lookup_bigquery_dataset_sql_resource(project_id, dataset_id): sql_resource = 'bigquery.dataset.`{}`.`{}`'.format(project_id, dataset_id) - return datacatalog.lookup_entry(sql_resource=sql_resource) + return datacatalog.lookup_entry(request={'sql_resource': sql_resource}) def lookup_bigquery_table(project_id, dataset_id, table_id): @@ -61,7 +61,7 @@ def lookup_bigquery_table(project_id, dataset_id, table_id): '/tables/{}'\ .format(project_id, dataset_id, table_id) - return datacatalog.lookup_entry(linked_resource=resource_name) + return datacatalog.lookup_entry(request={'linked_resource': resource_name}) def lookup_bigquery_table_sql_resource(project_id, dataset_id, table_id): @@ -75,7 +75,7 @@ def lookup_bigquery_table_sql_resource(project_id, dataset_id, table_id): sql_resource = 'bigquery.table.`{}`.`{}`.`{}`'.format( project_id, dataset_id, table_id) - return datacatalog.lookup_entry(sql_resource=sql_resource) + return datacatalog.lookup_entry(request={'sql_resource': 
sql_resource}) def lookup_pubsub_topic(project_id, topic_id): @@ -87,7 +87,7 @@ def lookup_pubsub_topic(project_id, topic_id): resource_name = '//pubsub.googleapis.com/projects/{}/topics/{}'\ .format(project_id, topic_id) - return datacatalog.lookup_entry(linked_resource=resource_name) + return datacatalog.lookup_entry(request={'linked_resource': resource_name}) def lookup_pubsub_topic_sql_resource(project_id, topic_id): @@ -100,7 +100,7 @@ def lookup_pubsub_topic_sql_resource(project_id, topic_id): sql_resource = 'pubsub.topic.`{}`.`{}`'.format(project_id, topic_id) - return datacatalog.lookup_entry(sql_resource=sql_resource) + return datacatalog.lookup_entry(request={'sql_resource': sql_resource}) if __name__ == '__main__': diff --git a/datacatalog/tests/conftest.py b/datacatalog/tests/conftest.py index 75e6753ff446..6ee1fcb621ea 100644 --- a/datacatalog/tests/conftest.py +++ b/datacatalog/tests/conftest.py @@ -52,7 +52,7 @@ def random_entry_id(client, project_id, random_entry_group_id): entry_name = datacatalog_v1beta1.DataCatalogClient.entry_path( project_id, "us-central1", random_entry_group_id, random_entry_id ) - client.delete_entry(entry_name) + client.delete_entry(request = {'name': entry_name}) @pytest.fixture @@ -65,7 +65,7 @@ def random_entry_group_id(client, project_id): entry_group_name = datacatalog_v1beta1.DataCatalogClient.entry_group_path( project_id, "us-central1", random_entry_group_id ) - client.delete_entry_group(entry_group_name) + client.delete_entry_group(request = {'name': entry_group_name}) @pytest.fixture @@ -76,7 +76,7 @@ def random_entry_name(client, entry_group_name): ) random_entry_name = "{}/entries/{}".format(entry_group_name, random_entry_id) yield random_entry_name - client.delete_entry(random_entry_name) + client.delete_entry(request = {'name': random_entry_name}) @pytest.fixture @@ -86,9 +86,6 @@ def entry_group_name(client, project_id): now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] ) entry_group = 
client.create_entry_group( - datacatalog_v1beta1.DataCatalogClient.location_path(project_id, "us-central1"), - entry_group_id, - {}, - ) + request = {'parent': datacatalog_v1beta1.DataCatalogClient.location_path(project_id, "us-central1"), 'entry_group_id': entry_group_id, 'entry_group': {}}) yield entry_group.name - client.delete_entry_group(entry_group.name) + client.delete_entry_group(request = {'name': entry_group.name}) diff --git a/datacatalog/tests/test_create_entry_group.py b/datacatalog/tests/test_create_entry_group.py index 9c8c33b8cd64..443c97f92921 100644 --- a/datacatalog/tests/test_create_entry_group.py +++ b/datacatalog/tests/test_create_entry_group.py @@ -18,7 +18,7 @@ def test_create_entry_group(capsys, client, project_id, random_entry_group_id): - create_entry_group.create_entry_group(client, project_id, random_entry_group_id) + create_entry_group.create_entry_group(request = {'parent': client, 'entry_group_id': project_id, 'entry_group': random_entry_group_id}) out, err = capsys.readouterr() assert ( "Created entry group" diff --git a/datacatalog/v1beta1/create_entry_group.py b/datacatalog/v1beta1/create_entry_group.py index 24a856d8739c..d2056ec63d2c 100644 --- a/datacatalog/v1beta1/create_entry_group.py +++ b/datacatalog/v1beta1/create_entry_group.py @@ -40,7 +40,7 @@ def create_entry_group(client, project_id, entry_group_id): ) # Construct a full EntryGroup object to send to the API. - entry_group = datacatalog_v1beta1.types.EntryGroup() + entry_group = datacatalog_v1beta1.EntryGroup() entry_group.display_name = "My Entry Group" entry_group.description = "This Entry Group consists of ..." @@ -48,7 +48,6 @@ def create_entry_group(client, project_id, entry_group_id): # Raises google.api_core.exceptions.AlreadyExists if the Entry Group # already exists within the project. entry_group = client.create_entry_group( - parent, entry_group_id, entry_group - ) # Make an API request. 
+ request = {'parent': parent, 'entry_group_id': entry_group_id, 'entry_group': entry_group}) # Make an API request. print("Created entry group {}".format(entry_group.name)) # [END datacatalog_create_entry_group_tag] diff --git a/datacatalog/v1beta1/create_fileset_entry.py b/datacatalog/v1beta1/create_fileset_entry.py index 6cc275655988..f96255b2bcd8 100644 --- a/datacatalog/v1beta1/create_fileset_entry.py +++ b/datacatalog/v1beta1/create_fileset_entry.py @@ -81,6 +81,6 @@ def create_fileset_entry(client, entry_group_name, entry_id): # Send the entry to the API for creation. # Raises google.api_core.exceptions.AlreadyExists if the Entry already # exists within the project. - entry = client.create_entry(entry_group_name, entry_id, entry) + entry = client.create_entry(request = {'parent': entry_group_name, 'entry_id': entry_id, 'entry': entry}) print("Created entry {}".format(entry.name)) # [END datacatalog_create_fileset_tag] diff --git a/datacatalog/v1beta1/datacatalog_get_entry.py b/datacatalog/v1beta1/datacatalog_get_entry.py index fcd8b2096c7e..05bc0dd52aa3 100644 --- a/datacatalog/v1beta1/datacatalog_get_entry.py +++ b/datacatalog/v1beta1/datacatalog_get_entry.py @@ -26,8 +26,6 @@ # [START datacatalog_get_entry] from google.cloud import datacatalog_v1beta1 -from google.cloud.datacatalog_v1beta1 import enums - def sample_get_entry(project_id, location_id, entry_group_id, entry_id): """ @@ -48,10 +46,10 @@ def sample_get_entry(project_id, location_id, entry_group_id, entry_id): # entry_id = '[Entry ID]' name = client.entry_path(project_id, location_id, entry_group_id, entry_id) - response = client.get_entry(name) + response = client.get_entry(request = {'name': name}) entry = response print(u"Entry name: {}".format(entry.name)) - print(u"Entry type: {}".format(enums.EntryType(entry.type).name)) + print(u"Entry type: {}".format(datacatalog_v1beta1.EntryType(entry.type).name)) print(u"Linked resource: {}".format(entry.linked_resource)) diff --git 
a/datacatalog/v1beta1/datacatalog_lookup_entry.py b/datacatalog/v1beta1/datacatalog_lookup_entry.py index 7920df16bf2f..176d080db766 100644 --- a/datacatalog/v1beta1/datacatalog_lookup_entry.py +++ b/datacatalog/v1beta1/datacatalog_lookup_entry.py @@ -26,7 +26,6 @@ # [START datacatalog_lookup_entry] from google.cloud import datacatalog_v1beta1 -from google.cloud.datacatalog_v1beta1 import enums def sample_lookup_entry(resource_name): @@ -45,10 +44,10 @@ def sample_lookup_entry(resource_name): client = datacatalog_v1beta1.DataCatalogClient() # resource_name = '[Full Resource Name]' - response = client.lookup_entry(linked_resource=resource_name) + response = client.lookup_entry(request = {'linked_resource': resource_name}) entry = response print(u"Entry name: {}".format(entry.name)) - print(u"Entry type: {}".format(enums.EntryType(entry.type).name)) + print(u"Entry type: {}".format(datacatalog_v1beta1.EntryType(entry.type).name)) print(u"Linked resource: {}".format(entry.linked_resource)) diff --git a/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py b/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py index 9656759ef4bd..f46af3698080 100644 --- a/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py +++ b/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py @@ -26,7 +26,6 @@ # [START datacatalog_lookup_entry_sql_resource] from google.cloud import datacatalog_v1beta1 -from google.cloud.datacatalog_v1beta1 import enums def sample_lookup_entry(sql_name): @@ -44,10 +43,10 @@ def sample_lookup_entry(sql_name): client = datacatalog_v1beta1.DataCatalogClient() # sql_name = '[SQL Resource Name]' - response = client.lookup_entry(sql_resource=sql_name) + response = client.lookup_entry(request = {'sql_resource': sql_name}) entry = response print(u"Entry name: {}".format(entry.name)) - print(u"Entry type: {}".format(enums.EntryType(entry.type).name)) + print(u"Entry type: {}".format(datacatalog_v1beta1.EntryType(entry.type).name)) 
print(u"Linked resource: {}".format(entry.linked_resource)) diff --git a/datacatalog/v1beta1/datacatalog_search.py b/datacatalog/v1beta1/datacatalog_search.py index c4c1798c1cc9..ad10276698a4 100644 --- a/datacatalog/v1beta1/datacatalog_search.py +++ b/datacatalog/v1beta1/datacatalog_search.py @@ -54,7 +54,7 @@ def sample_search_catalog(include_project_id, include_gcp_public_datasets, query } # Iterate over all results - for response_item in client.search_catalog(scope, query): + for response_item in client.search_catalog(request = {'scope': scope, 'query': query}): print( u"Result type: {}".format( enums.SearchResultType(response_item.search_result_type).name From e2afbda669feb8e0b02b0e055971b06fa7fdec60 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 16 Oct 2020 21:58:42 +0200 Subject: [PATCH 023/105] chore(deps): update dependency google-cloud-datacatalog to v2 (#61) --- datacatalog/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index bf6cb06eb859..af94dd904b0b 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==1.0.0 +google-cloud-datacatalog==2.0.0 From a9f9df6a841ac6724043b515e460d0c28af9c12c Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 16 Nov 2020 18:16:49 -0800 Subject: [PATCH 024/105] feat!: add common resource paths; expose client transport; rename ``type`` attributes to ``type_`` to avoid name collisions (#64) Renamed attributes: * TagTemplateField.type -> TagTemplatedField.type_ * ColumnSchema.type -> ColumnSchema.type_ * Entry.type -> Entry.type_ --- datacatalog/snippets/README.rst | 24 +++++++++++++++++++----- datacatalog/snippets/noxfile.py | 24 +++++++++++++++++++++++- 2 files changed, 42 insertions(+), 6 deletions(-) diff --git a/datacatalog/snippets/README.rst b/datacatalog/snippets/README.rst index 
343431d91532..3476cceaf360 100644 --- a/datacatalog/snippets/README.rst +++ b/datacatalog/snippets/README.rst @@ -1,3 +1,4 @@ + .. This file is automatically generated. Do not edit this file directly. Google Cloud Data Catalog Python Samples @@ -15,13 +16,11 @@ This directory contains samples for Google Cloud Data Catalog. `Google Cloud Dat .. _Google Cloud Data Catalog: https://cloud.google.com/data-catalog/docs - - - Setup ------------------------------------------------------------------------------- + Authentication ++++++++++++++ @@ -32,6 +31,9 @@ credentials for applications. .. _Authentication Getting Started Guide: https://cloud.google.com/docs/authentication/getting-started + + + Install Dependencies ++++++++++++++++++++ @@ -46,7 +48,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. +#. Create a virtualenv. Samples are compatible with Python 3.6+. .. code-block:: bash @@ -62,9 +64,15 @@ Install Dependencies .. _pip: https://pip.pypa.io/ .. _virtualenv: https://virtualenv.pypa.io/ + + + + + Samples ------------------------------------------------------------------------------- + Lookup entry +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -80,6 +88,7 @@ To run this sample: $ python lookup_entry.py + usage: lookup_entry.py [-h] project_id {bigquery-dataset,bigquery-table,pubsub-topic} ... @@ -107,6 +116,10 @@ To run this sample: + + + + The client library ------------------------------------------------------------------------------- @@ -122,4 +135,5 @@ to `browse the source`_ and `report issues`_. https://github.com/GoogleCloudPlatform/google-cloud-python/issues -.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file + +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py index ba55d7ce53ca..b90eef00f2d9 100644 --- a/datacatalog/snippets/noxfile.py +++ b/datacatalog/snippets/noxfile.py @@ -39,6 +39,10 @@ # You can opt out from the test for specific Python versions. 'ignored_versions': ["2.7"], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + 'enforce_type_hints': False, + # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string @@ -132,7 +136,10 @@ def _determine_local_import_names(start_dir): @nox.session def lint(session): - session.install("flake8", "flake8-import-order") + if not TEST_CONFIG['enforce_type_hints']: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ @@ -141,8 +148,18 @@ def lint(session): "." 
] session.run("flake8", *args) +# +# Black +# +@nox.session +def blacken(session): + session.install("black") + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + # # Sample Tests # @@ -201,6 +218,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") From 963c929681009ed1fce18c10ce6e150bc4be8bdd Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 25 Nov 2020 20:31:51 +0100 Subject: [PATCH 025/105] chore(deps): update dependency google-cloud-datacatalog to v3 (#73) --- datacatalog/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index af94dd904b0b..62a69aee9937 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==2.0.0 +google-cloud-datacatalog==3.0.0 From 904fdeeceb20e37f0b55ad5f05956446d7e576f7 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 29 Dec 2020 09:24:11 -0800 Subject: [PATCH 026/105] chore: update templates (#83) --- datacatalog/snippets/noxfile.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py index b90eef00f2d9..bca0522ec4d9 100644 --- a/datacatalog/snippets/noxfile.py +++ b/datacatalog/snippets/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -68,7 +69,7 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() 
-> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} @@ -97,7 +98,7 @@ def get_pytest_env_vars(): # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -135,7 +136,7 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): +def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG['enforce_type_hints']: session.install("flake8", "flake8-import-order") else: @@ -154,7 +155,7 @@ def lint(session): @nox.session -def blacken(session): +def blacken(session: nox.sessions.Session) -> None: session.install("black") python_files = [path for path in os.listdir(".") if path.endswith(".py")] @@ -168,7 +169,7 @@ def blacken(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): session.install("-r", "requirements.txt") @@ -194,7 +195,7 @@ def _session_tests(session, post_install=None): @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) @@ -209,7 +210,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) @@ -232,7 +233,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) From 1228f5fcd91d643631ead8a8484fb390cd55d60b Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 21 Jan 2021 11:58:02 -0800 Subject: [PATCH 027/105] chore: add 3.9 to noxfile template (#101) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/29194dd0-d137-4c19-b14a-efe9aaef350f/targets - [ ] To automatically regenerate this PR, check this box. Source-Link: https://github.com/googleapis/synthtool/commit/56ddc68f36b32341e9f22c2c59b4ce6aa3ba635f --- datacatalog/snippets/noxfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py index bca0522ec4d9..97bf7da80e39 100644 --- a/datacatalog/snippets/noxfile.py +++ b/datacatalog/snippets/noxfile.py @@ -85,7 +85,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. 
IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] From 870535c38a62350ec1892824cb7472916f7d98a8 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 22 Mar 2021 19:35:22 +0100 Subject: [PATCH 028/105] chore(deps): update dependency google-cloud-datacatalog to v3.1.0 (#123) --- datacatalog/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 62a69aee9937..e6fae367c290 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.0.0 +google-cloud-datacatalog==3.1.0 From 08f702cd05c37a550fab7ec36bda9970b3532327 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 1 Apr 2021 19:28:10 +0200 Subject: [PATCH 029/105] chore(deps): update dependency google-cloud-datacatalog to v3.1.1 (#130) --- datacatalog/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index e6fae367c290..3a507e77cbdd 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.1.0 +google-cloud-datacatalog==3.1.1 From 6f3428dc603bd5f0d2342c6aced16b6afb23a7cc Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 13 Apr 2021 07:55:50 -0700 Subject: [PATCH 030/105] chore: add constraints file check for python samples (#137) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. 
* chore: add constraints file check for python samples This is the sibling PR to https://github.com/GoogleCloudPlatform/python-docs-samples/pull/5611 and this is the issue opened for it https://github.com/GoogleCloudPlatform/python-docs-samples/issues/5549 If you look at the files in [this example repo](https://github.com/leahecole/testrepo-githubapp/pull/31/files), you'll see that renovate successfully opened a PR on three constraints files in `samples` directories and subdirectories, and properly ignored `constraints` files at the root level cc @tswast TODO: - [x] update renovate to check for samples/constraints.txt dependency updates - [x] run lint locally to double check that I'm not introducing lint error Source-Author: Leah E. Cole <6719667+leahecole@users.noreply.github.com> Source-Date: Fri Apr 9 22:50:04 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: 0a071b3460344886297a304253bf924aa68ddb7e Source-Link: https://github.com/googleapis/synthtool/commit/0a071b3460344886297a304253bf924aa68ddb7e --- datacatalog/snippets/noxfile.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py index 97bf7da80e39..956cdf4f9250 100644 --- a/datacatalog/snippets/noxfile.py +++ b/datacatalog/snippets/noxfile.py @@ -172,10 +172,16 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", 
"constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) From ff932137c0eb87596e3303d6c1e1ee08e07442cd Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 13 Apr 2021 17:06:04 +0200 Subject: [PATCH 031/105] chore(deps): update dependency pytest to v6.2.3 (#138) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [pytest](https://docs.pytest.org/en/latest/) ([source](https://togithub.com/pytest-dev/pytest), [changelog](https://docs.pytest.org/en/stable/changelog.html)) | `==6.0.1` -> `==6.2.3` | [![age](https://badges.renovateapi.com/packages/pypi/pytest/6.2.3/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/pytest/6.2.3/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/pytest/6.2.3/compatibility-slim/6.0.1)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/pytest/6.2.3/confidence-slim/6.0.1)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
pytest-dev/pytest ### [`v6.2.3`](https://togithub.com/pytest-dev/pytest/releases/6.2.3) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.2.2...6.2.3) # pytest 6.2.3 (2021-04-03) ## Bug Fixes - [#​8414](https://togithub.com/pytest-dev/pytest/issues/8414): pytest used to create directories under `/tmp` with world-readable permissions. This means that any user in the system was able to read information written by tests in temporary directories (such as those created by the `tmp_path`/`tmpdir` fixture). Now the directories are created with private permissions. pytest used silenty use a pre-existing `/tmp/pytest-of-` directory, even if owned by another user. This means another user could pre-create such a directory and gain control of another user\\'s temporary directory. Now such a condition results in an error. ### [`v6.2.2`](https://togithub.com/pytest-dev/pytest/releases/6.2.2) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.2.1...6.2.2) # pytest 6.2.2 (2021-01-25) ## Bug Fixes - [#​8152](https://togithub.com/pytest-dev/pytest/issues/8152): Fixed "(<Skipped instance>)" being shown as a skip reason in the verbose test summary line when the reason is empty. - [#​8249](https://togithub.com/pytest-dev/pytest/issues/8249): Fix the `faulthandler` plugin for occasions when running with `twisted.logger` and using `pytest --capture=no`. ### [`v6.2.1`](https://togithub.com/pytest-dev/pytest/releases/6.2.1) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.2.0...6.2.1) # pytest 6.2.1 (2020-12-15) ## Bug Fixes - [#​7678](https://togithub.com/pytest-dev/pytest/issues/7678): Fixed bug where `ImportPathMismatchError` would be raised for files compiled in the host and loaded later from an UNC mounted path (Windows). - [#​8132](https://togithub.com/pytest-dev/pytest/issues/8132): Fixed regression in `approx`: in 6.2.0 `approx` no longer raises `TypeError` when dealing with non-numeric types, falling back to normal comparison. 
Before 6.2.0, array types like tf.DeviceArray fell through to the scalar case, and happened to compare correctly to a scalar if they had only one element. After 6.2.0, these types began failing, because they inherited neither from standard Python number hierarchy nor from `numpy.ndarray`. `approx` now converts arguments to `numpy.ndarray` if they expose the array protocol and are not scalars. This treats array-like objects like numpy arrays, regardless of size. ### [`v6.2.0`](https://togithub.com/pytest-dev/pytest/releases/6.2.0) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.1.2...6.2.0) # pytest 6.2.0 (2020-12-12) ## Breaking Changes - [#​7808](https://togithub.com/pytest-dev/pytest/issues/7808): pytest now supports python3.6+ only. ## Deprecations - [#​7469](https://togithub.com/pytest-dev/pytest/issues/7469): Directly constructing/calling the following classes/functions is now deprecated: - `_pytest.cacheprovider.Cache` - `_pytest.cacheprovider.Cache.for_config()` - `_pytest.cacheprovider.Cache.clear_cache()` - `_pytest.cacheprovider.Cache.cache_dir_from_config()` - `_pytest.capture.CaptureFixture` - `_pytest.fixtures.FixtureRequest` - `_pytest.fixtures.SubRequest` - `_pytest.logging.LogCaptureFixture` - `_pytest.pytester.Pytester` - `_pytest.pytester.Testdir` - `_pytest.recwarn.WarningsRecorder` - `_pytest.recwarn.WarningsChecker` - `_pytest.tmpdir.TempPathFactory` - `_pytest.tmpdir.TempdirFactory` These have always been considered private, but now issue a deprecation warning, which may become a hard error in pytest 7.0.0. - [#​7530](https://togithub.com/pytest-dev/pytest/issues/7530): The `--strict` command-line option has been deprecated, use `--strict-markers` instead. We have plans to maybe in the future to reintroduce `--strict` and make it an encompassing flag for all strictness related options (`--strict-markers` and `--strict-config` at the moment, more might be introduced in the future). 
- [#​7988](https://togithub.com/pytest-dev/pytest/issues/7988): The `@pytest.yield_fixture` decorator/function is now deprecated. Use pytest.fixture instead. `yield_fixture` has been an alias for `fixture` for a very long time, so can be search/replaced safely. ## Features - [#​5299](https://togithub.com/pytest-dev/pytest/issues/5299): pytest now warns about unraisable exceptions and unhandled thread exceptions that occur in tests on Python>=3.8. See unraisable for more information. - [#​7425](https://togithub.com/pytest-dev/pytest/issues/7425): New pytester fixture, which is identical to testdir but its methods return pathlib.Path when appropriate instead of `py.path.local`. This is part of the movement to use pathlib.Path objects internally, in order to remove the dependency to `py` in the future. Internally, the old Testdir <\_pytest.pytester.Testdir> is now a thin wrapper around Pytester <\_pytest.pytester.Pytester>, preserving the old interface. - [#​7695](https://togithub.com/pytest-dev/pytest/issues/7695): A new hook was added, pytest_markeval_namespace which should return a dictionary. This dictionary will be used to augment the "global" variables available to evaluate skipif/xfail/xpass markers. Pseudo example `conftest.py`: ```{.sourceCode .python} def pytest_markeval_namespace(): return {"color": "red"} ``` `test_func.py`: ```{.sourceCode .python} @​pytest.mark.skipif("color == 'blue'", reason="Color is not red") def test_func(): assert False ``` - [#​8006](https://togithub.com/pytest-dev/pytest/issues/8006): It is now possible to construct a ~pytest.MonkeyPatch object directly as `pytest.MonkeyPatch()`, in cases when the monkeypatch fixture cannot be used. Previously some users imported it from the private \_pytest.monkeypatch.MonkeyPatch namespace. Additionally, MonkeyPatch.context <pytest.MonkeyPatch.context> is now a classmethod, and can be used as `with MonkeyPatch.context() as mp: ...`. 
This is the recommended way to use `MonkeyPatch` directly, since unlike the `monkeypatch` fixture, an instance created directly is not `undo()`-ed automatically. ## Improvements - [#​1265](https://togithub.com/pytest-dev/pytest/issues/1265): Added an `__str__` implementation to the ~pytest.pytester.LineMatcher class which is returned from `pytester.run_pytest().stdout` and similar. It returns the entire output, like the existing `str()` method. - [#​2044](https://togithub.com/pytest-dev/pytest/issues/2044): Verbose mode now shows the reason that a test was skipped in the test's terminal line after the "SKIPPED", "XFAIL" or "XPASS". - [#​7469](https://togithub.com/pytest-dev/pytest/issues/7469) The types of builtin pytest fixtures are now exported so they may be used in type annotations of test functions. The newly-exported types are: - `pytest.FixtureRequest` for the request fixture. - `pytest.Cache` for the cache fixture. - `pytest.CaptureFixture[str]` for the capfd and capsys fixtures. - `pytest.CaptureFixture[bytes]` for the capfdbinary and capsysbinary fixtures. - `pytest.LogCaptureFixture` for the caplog fixture. - `pytest.Pytester` for the pytester fixture. - `pytest.Testdir` for the testdir fixture. - `pytest.TempdirFactory` for the tmpdir_factory fixture. - `pytest.TempPathFactory` for the tmp_path_factory fixture. - `pytest.MonkeyPatch` for the monkeypatch fixture. - `pytest.WarningsRecorder` for the recwarn fixture. Constructing them is not supported (except for MonkeyPatch); they are only meant for use in type annotations. Doing so will emit a deprecation warning, and may become a hard-error in pytest 7.0. Subclassing them is also not supported. This is not currently enforced at runtime, but is detected by type-checkers such as mypy. 
- [#​7527](https://togithub.com/pytest-dev/pytest/issues/7527): When a comparison between namedtuple <collections.namedtuple> instances of the same type fails, pytest now shows the differing field names (possibly nested) instead of their indexes. - [#​7615](https://togithub.com/pytest-dev/pytest/issues/7615): Node.warn <\_pytest.nodes.Node.warn> now permits any subclass of Warning, not just PytestWarning <pytest.PytestWarning>. - [#​7701](https://togithub.com/pytest-dev/pytest/issues/7701): Improved reporting when using `--collected-only`. It will now show the number of collected tests in the summary stats. - [#​7710](https://togithub.com/pytest-dev/pytest/issues/7710): Use strict equality comparison for non-numeric types in pytest.approx instead of raising TypeError. This was the undocumented behavior before 3.7, but is now officially a supported feature. - [#​7938](https://togithub.com/pytest-dev/pytest/issues/7938): New `--sw-skip` argument which is a shorthand for `--stepwise-skip`. - [#​8023](https://togithub.com/pytest-dev/pytest/issues/8023): Added `'node_modules'` to default value for norecursedirs. - [#​8032](https://togithub.com/pytest-dev/pytest/issues/8032): doClassCleanups <unittest.TestCase.doClassCleanups> (introduced in unittest in Python and 3.8) is now called appropriately. ## Bug Fixes - [#​4824](https://togithub.com/pytest-dev/pytest/issues/4824): Fixed quadratic behavior and improved performance of collection of items using autouse fixtures and xunit fixtures. - [#​7758](https://togithub.com/pytest-dev/pytest/issues/7758): Fixed an issue where some files in packages are getting lost from `--lf` even though they contain tests that failed. Regressed in pytest 5.4.0. - [#​7911](https://togithub.com/pytest-dev/pytest/issues/7911): Directories created by by tmp_path and tmpdir are now considered stale after 3 days without modification (previous value was 3 hours) to avoid deleting directories still in use in long running test suites. 
- [#​7913](https://togithub.com/pytest-dev/pytest/issues/7913): Fixed a crash or hang in pytester.spawn <\_pytest.pytester.Pytester.spawn> when the readline module is involved. - [#​7951](https://togithub.com/pytest-dev/pytest/issues/7951): Fixed handling of recursive symlinks when collecting tests. - [#​7981](https://togithub.com/pytest-dev/pytest/issues/7981): Fixed symlinked directories not being followed during collection. Regressed in pytest 6.1.0. - [#​8016](https://togithub.com/pytest-dev/pytest/issues/8016): Fixed only one doctest being collected when using `pytest --doctest-modules path/to/an/__init__.py`. ## Improved Documentation - [#​7429](https://togithub.com/pytest-dev/pytest/issues/7429): Add more information and use cases about skipping doctests. - [#​7780](https://togithub.com/pytest-dev/pytest/issues/7780): Classes which should not be inherited from are now marked `final class` in the API reference. - [#​7872](https://togithub.com/pytest-dev/pytest/issues/7872): `_pytest.config.argparsing.Parser.addini()` accepts explicit `None` and `"string"`. - [#​7878](https://togithub.com/pytest-dev/pytest/issues/7878): In pull request section, ask to commit after editing changelog and authors file. ## Trivial/Internal Changes - [#​7802](https://togithub.com/pytest-dev/pytest/issues/7802): The `attrs` dependency requirement is now >=19.2.0 instead of >=17.4.0. - [#​8014](https://togithub.com/pytest-dev/pytest/issues/8014): .pyc files created by pytest's assertion rewriting now conform to the newer PEP-552 format on Python>=3.7. (These files are internal and only interpreted by pytest itself.) ### [`v6.1.2`](https://togithub.com/pytest-dev/pytest/releases/6.1.2) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.1.1...6.1.2) # pytest 6.1.2 (2020-10-28) ## Bug Fixes - [#​7758](https://togithub.com/pytest-dev/pytest/issues/7758): Fixed an issue where some files in packages are getting lost from `--lf` even though they contain tests that failed. 
Regressed in pytest 5.4.0. - [#​7911](https://togithub.com/pytest-dev/pytest/issues/7911): Directories created by tmpdir are now considered stale after 3 days without modification (previous value was 3 hours) to avoid deleting directories still in use in long running test suites. ## Improved Documentation - [#​7815](https://togithub.com/pytest-dev/pytest/issues/7815): Improve deprecation warning message for `pytest._fillfuncargs()`. ### [`v6.1.1`](https://togithub.com/pytest-dev/pytest/releases/6.1.1) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.1.0...6.1.1) # pytest 6.1.1 (2020-10-03) ## Bug Fixes - [#​7807](https://togithub.com/pytest-dev/pytest/issues/7807): Fixed regression in pytest 6.1.0 causing incorrect rootdir to be determined in some non-trivial cases where parent directories have config files as well. - [#​7814](https://togithub.com/pytest-dev/pytest/issues/7814): Fixed crash in header reporting when testpaths is used and contains absolute paths (regression in 6.1.0). ### [`v6.1.0`](https://togithub.com/pytest-dev/pytest/releases/6.1.0) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.0.2...6.1.0) # pytest 6.1.0 (2020-09-26) ## Breaking Changes - [#​5585](https://togithub.com/pytest-dev/pytest/issues/5585): As per our policy, the following features which have been deprecated in the 5.X series are now removed: - The `funcargnames` read-only property of `FixtureRequest`, `Metafunc`, and `Function` classes. Use `fixturenames` attribute. - `@pytest.fixture` no longer supports positional arguments, pass all arguments by keyword instead. - Direct construction of `Node` subclasses now raise an error, use `from_parent` instead. - The default value for `junit_family` has changed to `xunit2`. If you require the old format, add `junit_family=xunit1` to your configuration file. - The `TerminalReporter` no longer has a `writer` attribute. 
Plugin authors may use the public functions of the `TerminalReporter` instead of accessing the `TerminalWriter` object directly. - The `--result-log` option has been removed. Users are recommended to use the [pytest-reportlog](https://togithub.com/pytest-dev/pytest-reportlog) plugin instead. For more information consult [Deprecations and Removals](https://docs.pytest.org/en/stable/deprecations.html) in the docs. ## Deprecations - [#​6981](https://togithub.com/pytest-dev/pytest/issues/6981): The `pytest.collect` module is deprecated: all its names can be imported from `pytest` directly. - [#​7097](https://togithub.com/pytest-dev/pytest/issues/7097): The `pytest._fillfuncargs` function is deprecated. This function was kept for backward compatibility with an older plugin. It's functionality is not meant to be used directly, but if you must replace it, use function.\_request.\_fillfixtures() instead, though note this is not a public API and may break in the future. - [#​7210](https://togithub.com/pytest-dev/pytest/issues/7210): The special `-k '-expr'` syntax to `-k` is deprecated. Use `-k 'not expr'` instead. The special `-k 'expr:'` syntax to `-k` is deprecated. Please open an issue if you use this and want a replacement. - [#​7255](https://togithub.com/pytest-dev/pytest/issues/7255): The pytest_warning_captured <\_pytest.hookspec.pytest_warning_captured> hook is deprecated in favor of pytest_warning_recorded <\_pytest.hookspec.pytest_warning_recorded>, and will be removed in a future version. - [#​7648](https://togithub.com/pytest-dev/pytest/issues/7648): The `gethookproxy()` and `isinitpath()` methods of `FSCollector` and `Package` are deprecated; use `self.session.gethookproxy()` and `self.session.isinitpath()` instead. This should work on all pytest versions. 
## Features - [#​7667](https://togithub.com/pytest-dev/pytest/issues/7667): New `--durations-min` command-line flag controls the minimal duration for inclusion in the slowest list of tests shown by `--durations`. Previously this was hard-coded to `0.005s`. ## Improvements - [#​6681](https://togithub.com/pytest-dev/pytest/issues/6681): Internal pytest warnings issued during the early stages of initialization are now properly handled and can filtered through filterwarnings or `--pythonwarnings/-W`. This also fixes a number of long standing issues: [#​2891](https://togithub.com/pytest-dev/pytest/issues/2891), [#​7620](https://togithub.com/pytest-dev/pytest/issues/7620), [#​7426](https://togithub.com/pytest-dev/pytest/issues/7426). - [#​7572](https://togithub.com/pytest-dev/pytest/issues/7572): When a plugin listed in `required_plugins` is missing or an unknown config key is used with `--strict-config`, a simple error message is now shown instead of a stacktrace. - [#​7685](https://togithub.com/pytest-dev/pytest/issues/7685): Added two new attributes rootpath <\_pytest.config.Config.rootpath> and inipath <\_pytest.config.Config.inipath> to Config <\_pytest.config.Config>. These attributes are pathlib.Path versions of the existing rootdir <\_pytest.config.Config.rootdir> and inifile <\_pytest.config.Config.inifile> attributes, and should be preferred over them when possible. - [#​7780](https://togithub.com/pytest-dev/pytest/issues/7780): Public classes which are not designed to be inherited from are now marked [@​final](https://docs.python.org/3/library/typing.html#typing.final). Code which inherits from these classes will trigger a type-checking (e.g. mypy) error, but will still work in runtime. Currently the `final` designation does not appear in the API Reference but hopefully will in the future. ## Bug Fixes - [#​1953](https://togithub.com/pytest-dev/pytest/issues/1953): Fixed error when overwriting a parametrized fixture, while also reusing the super fixture value. 
```{.sourceCode .python} ``` ### conftest.py import pytest @​pytest.fixture(params=[1, 2]) def foo(request): return request.param ### test_foo.py import pytest @​pytest.fixture def foo(foo): return foo * 2 ``` - [#​4984](https://togithub.com/pytest-dev/pytest/issues/4984): Fixed an internal error crash with `IndexError: list index out of range` when collecting a module which starts with a decorated function, the decorator raises, and assertion rewriting is enabled. - [#​7591](https://togithub.com/pytest-dev/pytest/issues/7591): pylint shouldn't complain anymore about unimplemented abstract methods when inheriting from File <non-python tests>. - [#​7628](https://togithub.com/pytest-dev/pytest/issues/7628): Fixed test collection when a full path without a drive letter was passed to pytest on Windows (for example `\projects\tests\test.py` instead of `c:\projects\tests\pytest.py`). - [#​7638](https://togithub.com/pytest-dev/pytest/issues/7638): Fix handling of command-line options that appear as paths but trigger an OS-level syntax error on Windows, such as the options used internally by `pytest-xdist`. - [#​7742](https://togithub.com/pytest-dev/pytest/issues/7742): Fixed INTERNALERROR when accessing locals / globals with faulty `exec`. ## Improved Documentation - [#​1477](https://togithub.com/pytest-dev/pytest/issues/1477): Removed faq.rst and its reference in contents.rst. ## Trivial/Internal Changes - [#​7536](https://togithub.com/pytest-dev/pytest/issues/7536): The internal `junitxml` plugin has rewritten to use `xml.etree.ElementTree`. The order of attributes in XML elements might differ. Some unneeded escaping is no longer performed. - [#​7587](https://togithub.com/pytest-dev/pytest/issues/7587): The dependency on the `more-itertools` package has been removed. 
- [#​7631](https://togithub.com/pytest-dev/pytest/issues/7631): The result type of capfd.readouterr() <\_pytest.capture.CaptureFixture.readouterr> (and similar) is no longer a namedtuple, but should behave like one in all respects. This was done for technical reasons. - [#​7671](https://togithub.com/pytest-dev/pytest/issues/7671): When collecting tests, pytest finds test classes and functions by examining the attributes of python objects (modules, classes and instances). To speed up this process, pytest now ignores builtin attributes (like `__class__`, `__delattr__` and `__new__`) without consulting the python_classes and python_functions configuration options and without passing them to plugins using the pytest_pycollect_makeitem <\_pytest.hookspec.pytest_pycollect_makeitem> hook. ### [`v6.0.2`](https://togithub.com/pytest-dev/pytest/releases/6.0.2) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.0.1...6.0.2) # pytest 6.0.2 (2020-09-04) ## Bug Fixes - [#​7148](https://togithub.com/pytest-dev/pytest/issues/7148): Fixed `--log-cli` potentially causing unrelated `print` output to be swallowed. - [#​7672](https://togithub.com/pytest-dev/pytest/issues/7672): Fixed log-capturing level restored incorrectly if `caplog.set_level` is called more than once. - [#​7686](https://togithub.com/pytest-dev/pytest/issues/7686): Fixed NotSetType.token being used as the parameter ID when the parametrization list is empty. Regressed in pytest 6.0.0. - [#​7707](https://togithub.com/pytest-dev/pytest/issues/7707): Fix internal error when handling some exceptions that contain multiple lines or the style uses multiple lines (`--tb=line` for example).
--- ### Configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-datacatalog). --- datacatalog/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements-test.txt b/datacatalog/snippets/requirements-test.txt index 7e460c8c866e..f7e3ec09da60 100644 --- a/datacatalog/snippets/requirements-test.txt +++ b/datacatalog/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==6.0.1 +pytest==6.2.3 From be6ef73ade1e9bc6ef1cfcc926397a6f8a7f3eba Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 6 May 2021 20:15:27 +0200 Subject: [PATCH 032/105] chore(deps): update dependency pytest to v6.2.4 (#152) --- datacatalog/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements-test.txt b/datacatalog/snippets/requirements-test.txt index f7e3ec09da60..95ea1e6a02b0 100644 --- a/datacatalog/snippets/requirements-test.txt +++ b/datacatalog/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==6.2.3 +pytest==6.2.4 From e72dab7d3f612491a973401ebdaebf9f3d266625 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 22 May 2021 09:18:09 +0000 Subject: [PATCH 033/105] chore: new owl bot post processor docker image (#165) 
gcr.io/repo-automation-bots/owlbot-python:latest@sha256:3c3a445b3ddc99ccd5d31edc4b4519729635d20693900db32c4f587ed51f7479 --- datacatalog/snippets/noxfile.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py index 956cdf4f9250..5ff9e1db5808 100644 --- a/datacatalog/snippets/noxfile.py +++ b/datacatalog/snippets/noxfile.py @@ -50,7 +50,10 @@ # to use your own Cloud project. 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. 'envs': {}, @@ -170,6 +173,9 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): if os.path.exists("constraints.txt"): From 69001c46ebb096c64bb5396e5a3c823152e59565 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 9 Jun 2021 07:13:34 +0200 Subject: [PATCH 034/105] chore(deps): update dependency google-cloud-datacatalog to v3.2.0 (#170) --- datacatalog/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 3a507e77cbdd..39ef14b2eae2 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.1.1 +google-cloud-datacatalog==3.2.0 From 7a2ec24a30aaa6e205bc9653cfad81d2a98c9f26 Mon Sep 17 
00:00:00 2001 From: WhiteSource Renovate Date: Fri, 18 Jun 2021 16:52:04 +0200 Subject: [PATCH 035/105] chore(deps): update dependency google-cloud-datacatalog to v3.2.1 (#172) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-datacatalog](https://togithub.com/googleapis/python-datacatalog) | `==3.2.0` -> `==3.2.1` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-datacatalog/3.2.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-datacatalog/3.2.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-datacatalog/3.2.1/compatibility-slim/3.2.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-datacatalog/3.2.1/confidence-slim/3.2.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-datacatalog ### [`v3.2.1`](https://togithub.com/googleapis/python-datacatalog/blob/master/CHANGELOG.md#​321-httpswwwgithubcomgoogleapispython-datacatalogcomparev320v321-2021-06-09) [Compare Source](https://togithub.com/googleapis/python-datacatalog/compare/v3.2.0...v3.2.1)
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-datacatalog). --- datacatalog/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 39ef14b2eae2..9ccb7f2aea40 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.2.0 +google-cloud-datacatalog==3.2.1 From d387a892ea5f9c5158f335521aeb50bbde688fee Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 13 Jul 2021 15:54:22 +0200 Subject: [PATCH 036/105] chore(deps): update dependency google-cloud-datacatalog to v3.3.0 (#183) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-datacatalog](https://togithub.com/googleapis/python-datacatalog) | `==3.2.1` -> `==3.3.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-datacatalog/3.3.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-datacatalog/3.3.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | 
[![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-datacatalog/3.3.0/compatibility-slim/3.2.1)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-datacatalog/3.3.0/confidence-slim/3.2.1)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-datacatalog ### [`v3.3.0`](https://togithub.com/googleapis/python-datacatalog/blob/master/CHANGELOG.md#​330-httpswwwgithubcomgoogleapispython-datacatalogcomparev321v330-2021-07-01) [Compare Source](https://togithub.com/googleapis/python-datacatalog/compare/v3.2.1...v3.3.0) ##### Features - add always_use_jwt_access ([#​178](https://www.github.com/googleapis/python-datacatalog/issues/178)) ([2cb3cc2](https://www.github.com/googleapis/python-datacatalog/commit/2cb3cc2e062045b4b1f602c6e2ed79b3dc6f0014)) ##### Bug Fixes - disable always_use_jwt_access ([#​182](https://www.github.com/googleapis/python-datacatalog/issues/182)) ([1bef446](https://www.github.com/googleapis/python-datacatalog/commit/1bef4465d7c0f7f4e84afb664ca5d9f55e92ea14)) ##### Documentation - omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#​1127](https://www.github.com/googleapis/python-datacatalog/issues/1127)) ([#​173](https://www.github.com/googleapis/python-datacatalog/issues/173)) ([a3d17d4](https://www.github.com/googleapis/python-datacatalog/commit/a3d17d4b485e757480040783259da234abec69a0)), closes [#​1126](https://www.github.com/googleapis/python-datacatalog/issues/1126) ##### [3.2.1](https://www.github.com/googleapis/python-datacatalog/compare/v3.2.0...v3.2.1) (2021-06-09) ##### Bug Fixes - **deps:** add packaging requirement ([#​163](https://www.github.com/googleapis/python-datacatalog/issues/163)) ([1cfdb5a](https://www.github.com/googleapis/python-datacatalog/commit/1cfdb5a444cd6c845546b060da2e0a0f7d533a0c))
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-datacatalog). --- datacatalog/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 9ccb7f2aea40..c5438d9f5fbd 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.2.1 +google-cloud-datacatalog==3.3.0 From f4f252f23e5fe0dc206227fd43ddc0b92c86cc8e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 22 Jul 2021 13:48:29 +0000 Subject: [PATCH 037/105] feat: add Samples section to CONTRIBUTING.rst (#187) Source-Link: https://github.com/googleapis/synthtool/commit/52e4e46eff2a0b70e3ff5506a02929d089d077d4 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:6186535cbdbf6b9fe61f00294929221d060634dae4a0795c1cefdbc995b2d605 --- datacatalog/snippets/noxfile.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py index 5ff9e1db5808..6a8ccdae22c9 100644 --- a/datacatalog/snippets/noxfile.py +++ b/datacatalog/snippets/noxfile.py @@ -28,8 +28,9 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. 
+BLACK_VERSION = "black==19.10b0" +# Copy `noxfile_config.py` to your directory and modify it instead. # `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. The values here should be in sync @@ -159,7 +160,7 @@ def lint(session: nox.sessions.Session) -> None: @nox.session def blacken(session: nox.sessions.Session) -> None: - session.install("black") + session.install(BLACK_VERSION) python_files = [path for path in os.listdir(".") if path.endswith(".py")] session.run("black", *python_files) From f17187221d8ff41440f18f2661913564b44938ce Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 22 Jul 2021 15:58:29 +0200 Subject: [PATCH 038/105] chore(deps): update dependency google-cloud-datacatalog to v3.3.1 (#189) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-datacatalog](https://togithub.com/googleapis/python-datacatalog) | `==3.3.0` -> `==3.3.1` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-datacatalog/3.3.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-datacatalog/3.3.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-datacatalog/3.3.1/compatibility-slim/3.3.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-datacatalog/3.3.1/confidence-slim/3.3.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-datacatalog ### [`v3.3.1`](https://togithub.com/googleapis/python-datacatalog/blob/master/CHANGELOG.md#​331-httpswwwgithubcomgoogleapispython-datacatalogcomparev330v331-2021-07-20) [Compare Source](https://togithub.com/googleapis/python-datacatalog/compare/v3.3.0...v3.3.1)
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-datacatalog). --- datacatalog/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index c5438d9f5fbd..4c7755b0f29d 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.3.0 +google-cloud-datacatalog==3.3.1 From 81cdee5f25a077b0d286d79e89a8b26c4523e4cc Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 28 Jul 2021 21:58:22 +0200 Subject: [PATCH 039/105] chore(deps): update dependency google-cloud-datacatalog to v3.3.2 (#195) --- datacatalog/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 4c7755b0f29d..4f7038555df6 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.3.1 +google-cloud-datacatalog==3.3.2 From c24681c4ee72f25037e88aa96404f6d412f2f868 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 29 Jul 2021 13:02:46 +0200 Subject: [PATCH 040/105] chore(deps): update dependency google-cloud-datacatalog to v3.4.0 (#198) --- datacatalog/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/snippets/requirements.txt 
b/datacatalog/snippets/requirements.txt index 4f7038555df6..5f898b7ae101 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.3.2 +google-cloud-datacatalog==3.4.0 From 59ff57fef3d8e072fadd705fb6a81e54f4a0f08a Mon Sep 17 00:00:00 2001 From: Steffany Brown <30247553+steffnay@users.noreply.github.com> Date: Wed, 11 Aug 2021 07:39:38 -0700 Subject: [PATCH 041/105] docs(samples): add samples from docs & reorganize all samples for testing (#78) * chore: re-organize samples to ensure they are all tested * chore: sprinkle in requirements files * fix: fix all the region tags * chore: blacken * test(samples): add tests for generated samples * test: more fixes * test: fix tests * test: fix conftest files * test: fix v1beta1 tests * docs(samples): adds samples from cloud.google.com documentation and tests * docs(samples): remove old quickstart * docs(samples): adds samples from docs * docs(samples): updates quickstart * update v1beta1 samples to pass tests * docs(samples): update * docs(samples): update * update search_assets & test * update * docs(samples): replace deleted region tags * docs(samples): replace deleted region tags * docs(samples): add region tag * docs(samples): update region tags * docs(samples): update samples * docs(samples): add link to all member values * docs(samples): unpin google-cloud-datacatalog, add quickstart link * chore: pinned google-cloud-datacatalog and updated pytest Co-authored-by: Bu Sun Kim --- datacatalog/__init__.py | 0 datacatalog/quickstart/__init__.py | 0 datacatalog/quickstart/conftest.py | 78 ++++++ .../create_fileset_entry_quickstart.py | 112 --------- datacatalog/quickstart/noxfile.py | 222 ++++++++++++++++++ datacatalog/quickstart/quickstart.py | 131 +++++++++++ datacatalog/quickstart/quickstart_test.py | 33 +++ datacatalog/quickstart/requirements-test.txt | 2 + datacatalog/quickstart/requirements.txt | 1 + datacatalog/snippets/README.rst | 139 
----------- datacatalog/snippets/README.rst.in | 23 -- datacatalog/snippets/conftest.py | 148 ++++++++++++ datacatalog/snippets/create_custom_entry.py | 73 ++++++ .../snippets/create_custom_entry_test.py | 39 +++ datacatalog/snippets/create_fileset.py | 105 +++++++++ datacatalog/snippets/create_fileset_test.py | 44 ++++ .../snippets/grant_tag_template_user_role.py | 69 ++++++ .../grant_tag_template_user_role_test.py | 29 +++ datacatalog/snippets/lookup_entry.py | 178 +++++--------- datacatalog/snippets/lookup_entry_test.py | 69 +++--- datacatalog/snippets/noxfile.py | 41 ++-- datacatalog/snippets/search_assets.py | 48 ++++ datacatalog/snippets/search_assets_test.py | 26 ++ datacatalog/tests/__init__.py | 0 datacatalog/tests/quickstart/__init__.py | 0 .../test_create_fileset_entry_quickstart.py | 40 ---- datacatalog/v1beta1/__init__.py | 0 datacatalog/{tests => v1beta1}/conftest.py | 33 ++- datacatalog/v1beta1/create_entry_group.py | 21 +- datacatalog/v1beta1/create_fileset_entry.py | 21 +- ...{datacatalog_get_entry.py => get_entry.py} | 40 ++-- ...atalog_lookup_entry.py => lookup_entry.py} | 29 +-- ...source.py => lookup_entry_sql_resource.py} | 25 +- datacatalog/v1beta1/noxfile.py | 222 ++++++++++++++++++ datacatalog/v1beta1/requirements-test.txt | 1 + datacatalog/v1beta1/requirements.txt | 1 + .../{datacatalog_search.py => search.py} | 38 ++- .../test_create_entry_group.py | 4 +- .../test_create_fileset_entry.py | 2 +- datacatalog/v1beta1/test_get_entry.py | 25 ++ datacatalog/v1beta1/test_lookup_entry.py | 27 +++ .../v1beta1/test_lookup_entry_sql_resource.py | 26 ++ datacatalog/v1beta1/test_search.py | 21 ++ 43 files changed, 1595 insertions(+), 591 deletions(-) delete mode 100644 datacatalog/__init__.py delete mode 100644 datacatalog/quickstart/__init__.py create mode 100644 datacatalog/quickstart/conftest.py delete mode 100644 datacatalog/quickstart/create_fileset_entry_quickstart.py create mode 100644 datacatalog/quickstart/noxfile.py create mode 100644 
datacatalog/quickstart/quickstart.py create mode 100644 datacatalog/quickstart/quickstart_test.py create mode 100644 datacatalog/quickstart/requirements-test.txt create mode 100644 datacatalog/quickstart/requirements.txt delete mode 100644 datacatalog/snippets/README.rst delete mode 100644 datacatalog/snippets/README.rst.in create mode 100644 datacatalog/snippets/conftest.py create mode 100644 datacatalog/snippets/create_custom_entry.py create mode 100644 datacatalog/snippets/create_custom_entry_test.py create mode 100644 datacatalog/snippets/create_fileset.py create mode 100644 datacatalog/snippets/create_fileset_test.py create mode 100644 datacatalog/snippets/grant_tag_template_user_role.py create mode 100644 datacatalog/snippets/grant_tag_template_user_role_test.py create mode 100644 datacatalog/snippets/search_assets.py create mode 100644 datacatalog/snippets/search_assets_test.py delete mode 100644 datacatalog/tests/__init__.py delete mode 100644 datacatalog/tests/quickstart/__init__.py delete mode 100644 datacatalog/tests/quickstart/test_create_fileset_entry_quickstart.py delete mode 100644 datacatalog/v1beta1/__init__.py rename datacatalog/{tests => v1beta1}/conftest.py (72%) rename datacatalog/v1beta1/{datacatalog_get_entry.py => get_entry.py} (59%) rename datacatalog/v1beta1/{datacatalog_lookup_entry.py => lookup_entry.py} (66%) rename datacatalog/v1beta1/{datacatalog_lookup_entry_sql_resource.py => lookup_entry_sql_resource.py} (70%) create mode 100644 datacatalog/v1beta1/noxfile.py create mode 100644 datacatalog/v1beta1/requirements-test.txt create mode 100644 datacatalog/v1beta1/requirements.txt rename datacatalog/v1beta1/{datacatalog_search.py => search.py} (68%) rename datacatalog/{tests => v1beta1}/test_create_entry_group.py (83%) rename datacatalog/{tests => v1beta1}/test_create_fileset_entry.py (96%) create mode 100644 datacatalog/v1beta1/test_get_entry.py create mode 100644 datacatalog/v1beta1/test_lookup_entry.py create mode 100644 
datacatalog/v1beta1/test_lookup_entry_sql_resource.py create mode 100644 datacatalog/v1beta1/test_search.py diff --git a/datacatalog/__init__.py b/datacatalog/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/datacatalog/quickstart/__init__.py b/datacatalog/quickstart/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/datacatalog/quickstart/conftest.py b/datacatalog/quickstart/conftest.py new file mode 100644 index 000000000000..c7657fb7b62a --- /dev/null +++ b/datacatalog/quickstart/conftest.py @@ -0,0 +1,78 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import datetime +import uuid + +import google.auth +from google.cloud import bigquery +from google.cloud import datacatalog_v1 + +import pytest + + +def temp_suffix(): + now = datetime.datetime.now() + return "{}_{}".format( + now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] + ) + + +@pytest.fixture(scope="session") +def client(credentials): + return datacatalog_v1.DataCatalogClient(credentials=credentials) + + +@pytest.fixture(scope="session") +def bigquery_client(credentials, project_id): + return bigquery.Client(project=project_id, credentials=credentials) + + +@pytest.fixture(scope="session") +def default_credentials(): + return google.auth.default(scopes=["https://www.googleapis.com/auth/cloud-platform"]) + + +@pytest.fixture(scope="session") +def credentials(default_credentials): + return default_credentials[0] + + +@pytest.fixture(scope="session") +def project_id(default_credentials): + return default_credentials[1] + + +@pytest.fixture +def dataset_id(bigquery_client): + dataset_id = f"python_data_catalog_sample_{temp_suffix()}" + dataset = bigquery_client.create_dataset(dataset_id) + yield dataset.dataset_id + bigquery_client.delete_dataset(dataset, delete_contents=True, not_found_ok=True) + + +@pytest.fixture +def table_id(bigquery_client, project_id, dataset_id): + table_id = f"python_data_catalog_sample_{temp_suffix()}" + table = bigquery.Table("{}.{}.{}".format(project_id, dataset_id, table_id)) + table = bigquery_client.create_table(table) + yield table.table_id + bigquery_client.delete_table(table, not_found_ok=True) + + +@pytest.fixture +def random_tag_template_id(): + random_tag_template_id = f"python_sample_{temp_suffix()}" + yield random_tag_template_id diff --git a/datacatalog/quickstart/create_fileset_entry_quickstart.py b/datacatalog/quickstart/create_fileset_entry_quickstart.py deleted file mode 100644 index 5e1c99f0f3d0..000000000000 --- a/datacatalog/quickstart/create_fileset_entry_quickstart.py +++ /dev/null @@ -1,112 +0,0 @@ -# 
Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def create_fileset_entry_quickstart(client, project_id, entry_group_id, entry_id): - - # [START datacatalog_create_fileset_quickstart_tag] - # Import required modules. - from google.cloud import datacatalog_v1beta1 - - # TODO(developer): Construct a Data Catalog client object. - # client = datacatalog_v1beta1.DataCatalogClient() - - # TODO(developer): Set project_id to your - # Google Cloud Platform project ID the entry will belong. - # project_id = "your-project-id" - - # TODO(developer): Specify the geographic location where the - # entry should reside. - # Currently, Data Catalog stores metadata in the us-central1 region. - location_id = "us-central1" - - # TODO(developer): Set entry_group_id to the ID of the entry group - # the entry will belong. - # entry_group_id = "your_entry_group_id" - - # TODO(developer): Set entry_id to the ID of the entry to create. - # entry_id = "your_entry_id" - - # Create an Entry Group. - # Construct a full Entry Group object to send to the API. - entry_group_obj = datacatalog_v1beta1.EntryGroup() - entry_group_obj.display_name = "My Fileset Entry Group" - entry_group_obj.description = "This Entry Group consists of ...." - - # Send the Entry Group to the API for creation. - # Raises google.api_core.exceptions.AlreadyExists if the Entry Group - # already exists within the project. 
- entry_group = client.create_entry_group( - request = {'parent': datacatalog_v1beta1.DataCatalogClient.location_path( - project_id, location_id - ), 'entry_group_id': entry_group_id, 'entry_group': entry_group_obj}) - print("Created entry group {}".format(entry_group.name)) - - # Create a Fileset Entry. - # Construct a full Entry object to send to the API. - entry = datacatalog_v1beta1.Entry() - entry.display_name = "My Fileset" - entry.description = "This Fileset consists of ..." - entry.gcs_fileset_spec.file_patterns.append("gs://cloud-samples-data/*") - entry.type = datacatalog_v1beta1.EntryType.FILESET - - # Create the Schema, for example when you have a csv file. - columns = [] - columns.append( - datacatalog_v1beta1.ColumnSchema( - column="first_name", - description="First name", - mode="REQUIRED", - type="STRING", - ) - ) - - columns.append( - datacatalog_v1beta1.ColumnSchema( - column="last_name", description="Last name", mode="REQUIRED", type="STRING" - ) - ) - - # Create sub columns for the addresses parent column - subcolumns = [] - subcolumns.append( - datacatalog_v1beta1.ColumnSchema( - column="city", description="City", mode="NULLABLE", type="STRING" - ) - ) - - subcolumns.append( - datacatalog_v1beta1.ColumnSchema( - column="state", description="State", mode="NULLABLE", type="STRING" - ) - ) - - columns.append( - datacatalog_v1beta1.ColumnSchema( - column="addresses", - description="Addresses", - mode="REPEATED", - subcolumns=subcolumns, - type="RECORD", - ) - ) - - entry.schema.columns.extend(columns) - - # Send the entry to the API for creation. - # Raises google.api_core.exceptions.AlreadyExists if the Entry already - # exists within the project. 
- entry = client.create_entry(request = {'parent': entry_group.name, 'entry_id': entry_id, 'entry': entry}) - print("Created entry {}".format(entry.name)) - # [END datacatalog_create_fileset_quickstart_tag] diff --git a/datacatalog/quickstart/noxfile.py b/datacatalog/quickstart/noxfile.py new file mode 100644 index 000000000000..5660f08be441 --- /dev/null +++ b/datacatalog/quickstart/noxfile.py @@ -0,0 +1,222 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import os +from pathlib import Path +import sys + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +# Copy `noxfile_config.py` to your directory and modify it instead. + + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + "ignored_versions": ["2.7"], + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. 
You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append(".") + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars(): + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG["gcloud_project_env"] + # This should error out if not set. + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG["envs"]) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to tested samples. +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +# +# Style Checks +# + + +def _determine_local_import_names(start_dir): + """Determines all import names that should be considered "local". + + This is used when running the linter to insure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. 
+# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session): + session.install("flake8", "flake8-import-order") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + ".", + ] + session.run("flake8", *args) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests(session, post_install=None): + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars() + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session): + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) + + +# +# Readmegen +# + + +def _get_repo_root(): + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. + p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session, path): + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/datacatalog/quickstart/quickstart.py b/datacatalog/quickstart/quickstart.py new file mode 100644 index 000000000000..f6579e53690f --- /dev/null +++ b/datacatalog/quickstart/quickstart.py @@ -0,0 +1,131 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def quickstart(override_values): + """Creates a tag template and attach a tag to a BigQuery table.""" + # [START data_catalog_quickstart] + # Import required modules. + from google.cloud import datacatalog_v1 + + # TODO: Set these values before running the sample. + # Google Cloud Platform project. + project_id = "my_project" + # Set dataset_id to the ID of existing dataset. + dataset_id = "demo_dataset" + # Set table_id to the ID of existing table. + table_id = "trips" + # Tag template to create. + tag_template_id = "example_tag_template" + + # [END data_catalog_quickstart] + + # To facilitate testing, we replace values with alternatives + # provided by the testing harness. + project_id = override_values.get("project_id", project_id) + dataset_id = override_values.get("dataset_id", dataset_id) + table_id = override_values.get("table_id", table_id) + tag_template_id = override_values.get("tag_template_id", tag_template_id) + + # [START data_catalog_quickstart] + # For all regions available, see: + # https://cloud.google.com/data-catalog/docs/concepts/regions + location = "us-central1" + + # Use Application Default Credentials to create a new + # Data Catalog client. GOOGLE_APPLICATION_CREDENTIALS + # environment variable must be set with the location + # of a service account key file. + datacatalog_client = datacatalog_v1.DataCatalogClient() + + # Create a Tag Template. 
+ tag_template = datacatalog_v1.types.TagTemplate() + + tag_template.display_name = "Demo Tag Template" + + tag_template.fields["source"] = datacatalog_v1.types.TagTemplateField() + tag_template.fields["source"].display_name = "Source of data asset" + tag_template.fields[ + "source" + ].type_.primitive_type = datacatalog_v1.types.FieldType.PrimitiveType.STRING + + tag_template.fields["num_rows"] = datacatalog_v1.types.TagTemplateField() + tag_template.fields["num_rows"].display_name = "Number of rows in data asset" + tag_template.fields[ + "num_rows" + ].type_.primitive_type = datacatalog_v1.types.FieldType.PrimitiveType.DOUBLE + + tag_template.fields["has_pii"] = datacatalog_v1.types.TagTemplateField() + tag_template.fields["has_pii"].display_name = "Has PII" + tag_template.fields[ + "has_pii" + ].type_.primitive_type = datacatalog_v1.types.FieldType.PrimitiveType.BOOL + + tag_template.fields["pii_type"] = datacatalog_v1.types.TagTemplateField() + tag_template.fields["pii_type"].display_name = "PII type" + + for display_name in ["EMAIL", "SOCIAL SECURITY NUMBER", "NONE"]: + enum_value = datacatalog_v1.types.FieldType.EnumType.EnumValue( + display_name=display_name + ) + tag_template.fields["pii_type"].type_.enum_type.allowed_values.append( + enum_value + ) + + expected_template_name = datacatalog_v1.DataCatalogClient.tag_template_path( + project_id, location, tag_template_id + ) + + # Create the Tag Template. + try: + tag_template = datacatalog_client.create_tag_template( + parent=f"projects/{project_id}/locations/{location}", + tag_template_id=tag_template_id, + tag_template=tag_template, + ) + print(f"Created template: {tag_template.name}") + except OSError as e: + print(f"Cannot create template: {expected_template_name}") + print(f"{e}") + + # Lookup Data Catalog's Entry referring to the table. 
+ resource_name = ( + f"//bigquery.googleapis.com/projects/{project_id}" + f"/datasets/{dataset_id}/tables/{table_id}" + ) + table_entry = datacatalog_client.lookup_entry( + request={"linked_resource": resource_name} + ) + + # Attach a Tag to the table. + tag = datacatalog_v1.types.Tag() + + tag.template = tag_template.name + tag.name = "my_super_cool_tag" + + tag.fields["source"] = datacatalog_v1.types.TagField() + tag.fields["source"].string_value = "Copied from tlc_yellow_trips_2018" + + tag.fields["num_rows"] = datacatalog_v1.types.TagField() + tag.fields["num_rows"].double_value = 113496874 + + tag.fields["has_pii"] = datacatalog_v1.types.TagField() + tag.fields["has_pii"].bool_value = False + + tag.fields["pii_type"] = datacatalog_v1.types.TagField() + tag.fields["pii_type"].enum_value.display_name = "NONE" + + tag = datacatalog_client.create_tag(parent=table_entry.name, tag=tag) + print(f"Created tag: {tag.name}") + # [END data_catalog_quickstart] diff --git a/datacatalog/quickstart/quickstart_test.py b/datacatalog/quickstart/quickstart_test.py new file mode 100644 index 000000000000..a63efee6cbf3 --- /dev/null +++ b/datacatalog/quickstart/quickstart_test.py @@ -0,0 +1,33 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import quickstart + + +def test_quickstart(capsys, client, project_id, dataset_id, table_id, random_tag_template_id): + location = "us-central1" + override_values = { + "project_id": project_id, + "dataset_id": dataset_id, + "table_id": table_id, + "tag_template_id": random_tag_template_id + } + tag_template_name = client.tag_template_path( + project_id, location, random_tag_template_id + ) + quickstart.quickstart(override_values) + out, err = capsys.readouterr() + assert "Created template: {}".format(tag_template_name) in out + assert "Created tag:" in out + client.delete_tag_template(name=tag_template_name, force=True) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt new file mode 100644 index 000000000000..55a7ce4c6db8 --- /dev/null +++ b/datacatalog/quickstart/requirements-test.txt @@ -0,0 +1,2 @@ +pytest==6.2.4 +google-cloud-bigquery==2.4.0 \ No newline at end of file diff --git a/datacatalog/quickstart/requirements.txt b/datacatalog/quickstart/requirements.txt new file mode 100644 index 000000000000..5f898b7ae101 --- /dev/null +++ b/datacatalog/quickstart/requirements.txt @@ -0,0 +1 @@ +google-cloud-datacatalog==3.4.0 diff --git a/datacatalog/snippets/README.rst b/datacatalog/snippets/README.rst deleted file mode 100644 index 3476cceaf360..000000000000 --- a/datacatalog/snippets/README.rst +++ /dev/null @@ -1,139 +0,0 @@ - -.. This file is automatically generated. Do not edit this file directly. - -Google Cloud Data Catalog Python Samples -=============================================================================== - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=datacatalog/cloud-client/README.rst - - -This directory contains samples for Google Cloud Data Catalog. 
`Google Cloud Data Catalog`_ is a fully managed and scalable metadata management service that empowers organizations to quickly discover, manage, and understand all their data in Google Cloud. - - - - -.. _Google Cloud Data Catalog: https://cloud.google.com/data-catalog/docs - - -Setup -------------------------------------------------------------------------------- - - - -Authentication -++++++++++++++ - -This sample requires you to have authentication setup. Refer to the -`Authentication Getting Started Guide`_ for instructions on setting up -credentials for applications. - -.. _Authentication Getting Started Guide: - https://cloud.google.com/docs/authentication/getting-started - - - - -Install Dependencies -++++++++++++++++++++ - -#. Clone python-docs-samples and change directory to the sample directory you want to use. - - .. code-block:: bash - - $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git - -#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. - - .. _Python Development Environment Setup Guide: - https://cloud.google.com/python/setup - -#. Create a virtualenv. Samples are compatible with Python 3.6+. - - .. code-block:: bash - - $ virtualenv env - $ source env/bin/activate - -#. Install the dependencies needed to run the samples. - - .. code-block:: bash - - $ pip install -r requirements.txt - -.. _pip: https://pip.pypa.io/ -.. _virtualenv: https://virtualenv.pypa.io/ - - - - - - -Samples -------------------------------------------------------------------------------- - - -Lookup entry -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=datacatalog/cloud-client/lookup_entry.py,datacatalog/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python lookup_entry.py - - - usage: lookup_entry.py [-h] - project_id - {bigquery-dataset,bigquery-table,pubsub-topic} ... - - This application demonstrates how to perform basic operations on entries - with the Cloud Data Catalog API. - - For more information, see the README.md under /datacatalog and the - documentation at https://cloud.google.com/data-catalog/docs. - - positional arguments: - project_id Your Google Cloud project ID - {bigquery-dataset,bigquery-table,pubsub-topic} - bigquery-dataset Retrieves Data Catalog entry for the given BigQuery - Dataset. - bigquery-table Retrieves Data Catalog entry for the given BigQuery - Table. - pubsub-topic Retrieves Data Catalog entry for the given Pub/Sub - Topic. - - optional arguments: - -h, --help show this help message and exit - - - - - - - - - -The client library -------------------------------------------------------------------------------- - -This sample uses the `Google Cloud Client Library for Python`_. -You can read the documentation for more details on API usage and use GitHub -to `browse the source`_ and `report issues`_. - -.. _Google Cloud Client Library for Python: - https://googlecloudplatform.github.io/google-cloud-python/ -.. _browse the source: - https://github.com/GoogleCloudPlatform/google-cloud-python -.. _report issues: - https://github.com/GoogleCloudPlatform/google-cloud-python/issues - - - -.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/datacatalog/snippets/README.rst.in b/datacatalog/snippets/README.rst.in deleted file mode 100644 index 704d55a5f9f0..000000000000 --- a/datacatalog/snippets/README.rst.in +++ /dev/null @@ -1,23 +0,0 @@ -# This file is used to generate README.rst - -product: - name: Google Cloud Data Catalog - short_name: Data Catalog - url: https://cloud.google.com/data-catalog/docs - description: > - `Google Cloud Data Catalog`_ is a fully managed and scalable metadata - management service that empowers organizations to quickly discover, manage, - and understand all their data in Google Cloud. - -setup: -- auth -- install_deps - -samples: -- name: Lookup entry - file: lookup_entry.py - show_help: true - -cloud_client_library: true - -folder: datacatalog/cloud-client diff --git a/datacatalog/snippets/conftest.py b/datacatalog/snippets/conftest.py new file mode 100644 index 000000000000..089190d23042 --- /dev/null +++ b/datacatalog/snippets/conftest.py @@ -0,0 +1,148 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import datetime +import uuid + +from google.api_core.exceptions import NotFound, PermissionDenied +import google.auth +from google.cloud import datacatalog_v1 + +import pytest + +datacatalog = datacatalog_v1.DataCatalogClient() + + +LOCATION = "us-central1" + + +def temp_suffix(): + now = datetime.datetime.now() + return "{}_{}".format(now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8]) + + +@pytest.fixture(scope="session") +def client(credentials): + return datacatalog_v1.DataCatalogClient(credentials=credentials) + + +@pytest.fixture(scope="session") +def default_credentials(): + return google.auth.default( + scopes=["https://www.googleapis.com/auth/cloud-platform"] + ) + + +@pytest.fixture(scope="session") +def credentials(default_credentials): + return default_credentials[0] + + +@pytest.fixture(scope="session") +def project_id(default_credentials): + return default_credentials[1] + + +@pytest.fixture +def valid_member_id(client, project_id, random_existing_tag_template_id): + template_name = datacatalog_v1.DataCatalogClient.tag_template_path( + project_id, LOCATION, random_existing_tag_template_id + ) + + # Retrieve Template's current IAM Policy. 
+ policy = datacatalog.get_iam_policy(resource=template_name) + yield policy.bindings[0].members[0] + + +@pytest.fixture +def resources_to_delete(client, project_id): + doomed = { + "entries": [], + "entry_groups": [], + "templates": [], + } + yield doomed + + for entry_name in doomed["entries"]: + try: + client.delete_entry(name=entry_name) + except (NotFound, PermissionDenied): + pass + for group_name in doomed["entry_groups"]: + try: + client.delete_entry_group(name=group_name) + except (NotFound, PermissionDenied): + pass + for template_name in doomed["templates"]: + try: + client.delete_tag_template(name=template_name, force=True) + except (NotFound, PermissionDenied): + pass + + +@pytest.fixture +def random_entry_id(): + random_entry_id = f"python_sample_entry_{temp_suffix()}" + yield random_entry_id + + +@pytest.fixture +def random_entry_group_id(): + random_entry_group_id = f"python_sample_group_{temp_suffix()}" + yield random_entry_group_id + + +@pytest.fixture +def random_tag_template_id(): + random_tag_template_id = f"python_sample_{temp_suffix()}" + yield random_tag_template_id + + +@pytest.fixture +def random_existing_tag_template_id(client, project_id, resources_to_delete): + random_tag_template_id = f"python_sample_{temp_suffix()}" + random_tag_template = datacatalog_v1.types.TagTemplate() + random_tag_template.fields["source"] = datacatalog_v1.types.TagTemplateField() + random_tag_template.fields[ + "source" + ].type_.primitive_type = datacatalog_v1.FieldType.PrimitiveType.STRING.value + random_tag_template = client.create_tag_template( + parent=datacatalog_v1.DataCatalogClient.common_location_path( + project_id, LOCATION + ), + tag_template_id=random_tag_template_id, + tag_template=random_tag_template, + ) + yield random_tag_template_id + resources_to_delete["templates"].append(random_tag_template.name) + + +@pytest.fixture +def random_existing_entry_group( + client, project_id, random_entry_group_id, resources_to_delete +): + entry_group_obj = 
datacatalog_v1.types.EntryGroup() + entry_group_obj.display_name = f"python_sample_{temp_suffix()}" + entry_group_obj.description = "Data Catalog samples test entry group." + + entry_group = datacatalog.create_entry_group( + parent=datacatalog_v1.DataCatalogClient.common_location_path( + project_id, LOCATION + ), + entry_group_id=random_entry_group_id, + entry_group=entry_group_obj, + ) + yield entry_group + resources_to_delete["entry_groups"].append(entry_group.name) diff --git a/datacatalog/snippets/create_custom_entry.py b/datacatalog/snippets/create_custom_entry.py new file mode 100644 index 000000000000..43a2dfac3c13 --- /dev/null +++ b/datacatalog/snippets/create_custom_entry.py @@ -0,0 +1,73 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def create_custom_entry(override_values): + """Creates a custom entry within an entry group.""" + # [START data_catalog_create_custom_entry] + # Import required modules. + from google.cloud import datacatalog_v1 + + # Google Cloud Platform project. + project_id = "my-project" + # Entry group to be created. + # For sample code demonstrating entry group creation, see quickstart: + # https://cloud.google.com/data-catalog/docs/quickstart-tagging + entry_group_name = "my_existing_entry_group" + # Entry to be created. + entry_id = "my_new_entry_id" + + # [END data_catalog_create_custom_entry] + + # To facilitate testing, we replace values with alternatives + # provided by the testing harness. 
+ project_id = override_values.get("project_id", project_id) + entry_group_name = override_values.get("entry_group_name", entry_group_name) + entry_id = override_values.get("entry_id", entry_id) + + # [START data_catalog_create_custom_entry] + datacatalog = datacatalog_v1.DataCatalogClient() + + # Create an Entry. + entry = datacatalog_v1.types.Entry() + entry.user_specified_system = "onprem_data_system" + entry.user_specified_type = "onprem_data_asset" + entry.display_name = "My awesome data asset" + entry.description = "This data asset is managed by an external system." + entry.linked_resource = "//my-onprem-server.com/dataAssets/my-awesome-data-asset" + + # Create the Schema, this is optional. + entry.schema.columns.append( + datacatalog_v1.types.ColumnSchema( + column="first_column", + type_="STRING", + description="This columns consists of ....", + mode=None, + ) + ) + + entry.schema.columns.append( + datacatalog_v1.types.ColumnSchema( + column="second_column", + type_="DOUBLE", + description="This columns consists of ....", + mode=None, + ) + ) + + entry = datacatalog.create_entry( + parent=entry_group_name, entry_id=entry_id, entry=entry + ) + print("Created entry: {}".format(entry.name)) + # [END data_catalog_create_custom_entry] diff --git a/datacatalog/snippets/create_custom_entry_test.py b/datacatalog/snippets/create_custom_entry_test.py new file mode 100644 index 000000000000..742993eaa534 --- /dev/null +++ b/datacatalog/snippets/create_custom_entry_test.py @@ -0,0 +1,39 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import create_custom_entry + + +def test_create_custom_entry( + capsys, + client, + project_id, + random_existing_entry_group, + random_entry_group_id, + random_entry_id, + resources_to_delete, +): + location = "us-central1" + override_values = { + "project_id": project_id, + "entry_group_name": random_existing_entry_group.name, + "entry_id": random_entry_id, + } + expected_entry = client.entry_path( + project_id, location, random_entry_group_id, random_entry_id + ) + create_custom_entry.create_custom_entry(override_values) + out, err = capsys.readouterr() + assert f"Created entry: {expected_entry}" in out + resources_to_delete["entries"].append(expected_entry) diff --git a/datacatalog/snippets/create_fileset.py b/datacatalog/snippets/create_fileset.py new file mode 100644 index 000000000000..b76d96a611b1 --- /dev/null +++ b/datacatalog/snippets/create_fileset.py @@ -0,0 +1,105 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def create_fileset(override_values): + """Creates a fileset within an entry group.""" + # [START data_catalog_create_fileset] + # Import required modules. + from google.cloud import datacatalog_v1 + + # TODO: Set these values before running the sample. 
+ project_id = "project_id" + fileset_entry_group_id = "entry_group_id" + fileset_entry_id = "entry_id" + + # [END data_catalog_create_fileset] + + # To facilitate testing, we replace values with alternatives + # provided by the testing harness. + project_id = override_values.get("project_id", project_id) + fileset_entry_group_id = override_values.get( + "fileset_entry_group_id", fileset_entry_group_id + ) + fileset_entry_id = override_values.get("fileset_entry_id", fileset_entry_id) + + # [START data_catalog_create_fileset] + # For all regions available, see: + # https://cloud.google.com/data-catalog/docs/concepts/regions + location = "us-central1" + + datacatalog = datacatalog_v1.DataCatalogClient() + + # Create an Entry Group. + entry_group_obj = datacatalog_v1.types.EntryGroup() + entry_group_obj.display_name = "My Fileset Entry Group" + entry_group_obj.description = "This Entry Group consists of ...." + + entry_group = datacatalog.create_entry_group( + parent=datacatalog_v1.DataCatalogClient.common_location_path( + project_id, location + ), + entry_group_id=fileset_entry_group_id, + entry_group=entry_group_obj, + ) + print(f"Created entry group: {entry_group.name}") + + # Create a Fileset Entry. + entry = datacatalog_v1.types.Entry() + entry.display_name = "My Fileset" + entry.description = "This fileset consists of ...." + entry.gcs_fileset_spec.file_patterns.append("gs://my_bucket/*.csv") + entry.type_ = datacatalog_v1.EntryType.FILESET + + # Create the Schema, for example when you have a csv file. 
+ entry.schema.columns.append( + datacatalog_v1.types.ColumnSchema( + column="first_name", + description="First name", + mode="REQUIRED", + type_="STRING", + ) + ) + + entry.schema.columns.append( + datacatalog_v1.types.ColumnSchema( + column="last_name", description="Last name", mode="REQUIRED", type_="STRING" + ) + ) + + # Create the addresses parent column + addresses_column = datacatalog_v1.types.ColumnSchema( + column="addresses", description="Addresses", mode="REPEATED", type_="RECORD" + ) + + # Create sub columns for the addresses parent column + addresses_column.subcolumns.append( + datacatalog_v1.types.ColumnSchema( + column="city", description="City", mode="NULLABLE", type_="STRING" + ) + ) + + addresses_column.subcolumns.append( + datacatalog_v1.types.ColumnSchema( + column="state", description="State", mode="NULLABLE", type_="STRING" + ) + ) + + entry.schema.columns.append(addresses_column) + + entry = datacatalog.create_entry( + parent=entry_group.name, entry_id=fileset_entry_id, entry=entry + ) + print(f"Created fileset entry: {entry.name}") + # [END data_catalog_create_fileset] diff --git a/datacatalog/snippets/create_fileset_test.py b/datacatalog/snippets/create_fileset_test.py new file mode 100644 index 000000000000..d4f928550c21 --- /dev/null +++ b/datacatalog/snippets/create_fileset_test.py @@ -0,0 +1,44 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import create_fileset + + +def test_create_fileset( + capsys, + client, + project_id, + random_entry_group_id, + random_entry_id, + resources_to_delete, +): + location = "us-central1" + override_values = { + "project_id": project_id, + "fileset_entry_group_id": random_entry_group_id, + "fileset_entry_id": random_entry_id, + } + expected_group_name = client.entry_group_path( + project_id, location, random_entry_group_id + ) + expected_entry_name = client.entry_path( + project_id, location, random_entry_group_id, random_entry_id + ) + create_fileset.create_fileset(override_values) + out, err = capsys.readouterr() + assert f"Created entry group: {expected_group_name}" in out + assert f"Created fileset entry: {expected_entry_name}" in out + resources_to_delete["entry_groups"].append(expected_group_name) + resources_to_delete["entries"].append(expected_entry_name) diff --git a/datacatalog/snippets/grant_tag_template_user_role.py b/datacatalog/snippets/grant_tag_template_user_role.py new file mode 100644 index 000000000000..71afec5c361f --- /dev/null +++ b/datacatalog/snippets/grant_tag_template_user_role.py @@ -0,0 +1,69 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +def grant_tag_template_user_role(override_values): + """Grants a user the Tag Template User role for a given template.""" + # [START data_catalog_grant_tag_template_user_role] + from google.cloud import datacatalog_v1 + from google.iam.v1 import iam_policy_pb2 as iam_policy + from google.iam.v1 import policy_pb2 + + datacatalog = datacatalog_v1.DataCatalogClient() + + # TODO: Set these values before running the sample. + project_id = "project_id" + tag_template_id = "existing_tag_template_id" + # For a full list of values a member can have, see: + # https://cloud.google.com/iam/docs/reference/rest/v1/Policy?hl=en#binding + member_id = "user:super-cool.test-user@gmail.com" + + # [END data_catalog_grant_tag_template_user_role] + + # To facilitate testing, we replace values with alternatives + # provided by the testing harness. + project_id = override_values.get("project_id", project_id) + tag_template_id = override_values.get("tag_template_id", tag_template_id) + member_id = override_values.get("member_id", member_id) + + # [START data_catalog_grant_tag_template_user_role] + # For all regions available, see: + # https://cloud.google.com/data-catalog/docs/concepts/regions + location = "us-central1" + + # Format the Template name. + template_name = datacatalog_v1.DataCatalogClient.tag_template_path( + project_id, location, tag_template_id + ) + + # Retrieve Template's current IAM Policy. + policy = datacatalog.get_iam_policy(resource=template_name) + + # Add Tag Template User role and member to the policy. + binding = policy_pb2.Binding() + binding.role = "roles/datacatalog.tagTemplateUser" + binding.members.append(member_id) + policy.bindings.append(binding) + + set_policy_request = iam_policy.SetIamPolicyRequest( + resource=template_name, policy=policy + ) + + # Update Template's policy. 
+ policy = datacatalog.set_iam_policy(set_policy_request) + + for binding in policy.bindings: + for member in binding.members: + print(f"Member: {member}, Role: {binding.role}") + # [END data_catalog_grant_tag_template_user_role] diff --git a/datacatalog/snippets/grant_tag_template_user_role_test.py b/datacatalog/snippets/grant_tag_template_user_role_test.py new file mode 100644 index 000000000000..005638dd3fca --- /dev/null +++ b/datacatalog/snippets/grant_tag_template_user_role_test.py @@ -0,0 +1,29 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import grant_tag_template_user_role + + +def test_grant_tag_template_user_role( + capsys, project_id, random_existing_tag_template_id, valid_member_id +): + override_values = { + "project_id": project_id, + "tag_template_id": random_existing_tag_template_id, + "member_id": valid_member_id, + } + grant_tag_template_user_role.grant_tag_template_user_role(override_values) + out, err = capsys.readouterr() + assert f"Member: {valid_member_id}, Role: roles/datacatalog.tagTemplateUser" in out diff --git a/datacatalog/snippets/lookup_entry.py b/datacatalog/snippets/lookup_entry.py index 656cb97e6452..110001836b94 100644 --- a/datacatalog/snippets/lookup_entry.py +++ b/datacatalog/snippets/lookup_entry.py @@ -14,139 +14,83 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"""This application demonstrates how to perform basic operations on entries -with the Cloud Data Catalog API. -For more information, see the README.md under /datacatalog and the -documentation at https://cloud.google.com/data-catalog/docs. -""" - -import argparse - - -def lookup_bigquery_dataset(project_id, dataset_id): +def lookup_entry(override_values): + """Retrieves Data Catalog entry for the given Google Cloud Platform resource.""" # [START datacatalog_lookup_dataset] - """Retrieves Data Catalog entry for the given BigQuery Dataset.""" - from google.cloud import datacatalog_v1 - - datacatalog = datacatalog_v1.DataCatalogClient() - - resource_name = '//bigquery.googleapis.com/projects/{}/datasets/{}'\ - .format(project_id, dataset_id) - - return datacatalog.lookup_entry(request={'linked_resource': resource_name}) - # [END datacatalog_lookup_dataset] - - -def lookup_bigquery_dataset_sql_resource(project_id, dataset_id): - """Retrieves Data Catalog entry for the given BigQuery Dataset by - sql_resource. 
- """ - from google.cloud import datacatalog_v1 - - datacatalog = datacatalog_v1.DataCatalogClient() - - sql_resource = 'bigquery.dataset.`{}`.`{}`'.format(project_id, dataset_id) - - return datacatalog.lookup_entry(request={'sql_resource': sql_resource}) - - -def lookup_bigquery_table(project_id, dataset_id, table_id): - """Retrieves Data Catalog entry for the given BigQuery Table.""" + # [START data_catalog_lookup_entry] from google.cloud import datacatalog_v1 datacatalog = datacatalog_v1.DataCatalogClient() - resource_name = '//bigquery.googleapis.com/projects/{}/datasets/{}' \ - '/tables/{}'\ - .format(project_id, dataset_id, table_id) + bigquery_project_id = "my_bigquery_project" + dataset_id = "my_dataset" + table_id = "my_table" + pubsub_project_id = "my_pubsub_project" + topic_id = "my_topic" - return datacatalog.lookup_entry(request={'linked_resource': resource_name}) + # [END data_catalog_lookup_entry] + # To facilitate testing, we replace values with alternatives + # provided by the testing harness. + bigquery_project_id = override_values.get( + "bigquery_project_id", bigquery_project_id + ) + dataset_id = override_values.get("dataset_id", dataset_id) + table_id = override_values.get("table_id", table_id) + pubsub_project_id = override_values.get("pubsub_project_id", pubsub_project_id) + topic_id = override_values.get("topic_id", topic_id) + + # [START data_catalog_lookup_entry] + # BigQuery Dataset via linked_resource + resource_name = f"//bigquery.googleapis.com/projects/{bigquery_project_id}/datasets/{dataset_id}" + + entry = datacatalog.lookup_entry(request={"linked_resource": resource_name}) + print( + f"Retrieved entry {entry.name} for BigQuery Dataset resource {entry.linked_resource}" + ) -def lookup_bigquery_table_sql_resource(project_id, dataset_id, table_id): - """Retrieves Data Catalog entry for the given BigQuery Table by - sql_resource. 
- """ - from google.cloud import datacatalog_v1 - - datacatalog = datacatalog_v1.DataCatalogClient() - - sql_resource = 'bigquery.table.`{}`.`{}`.`{}`'.format( - project_id, dataset_id, table_id) - - return datacatalog.lookup_entry(request={'sql_resource': sql_resource}) - - -def lookup_pubsub_topic(project_id, topic_id): - """Retrieves Data Catalog entry for the given Pub/Sub Topic.""" - from google.cloud import datacatalog_v1 - - datacatalog = datacatalog_v1.DataCatalogClient() + # BigQuery Dataset via sql_resource + sql_resource = f"bigquery.dataset.`{bigquery_project_id}`.`{dataset_id}`" - resource_name = '//pubsub.googleapis.com/projects/{}/topics/{}'\ - .format(project_id, topic_id) + entry = datacatalog.lookup_entry(request={"sql_resource": sql_resource}) + print( + f"Retrieved entry {entry.name} for BigQuery Dataset resource {entry.linked_resource}" + ) - return datacatalog.lookup_entry(request={'linked_resource': resource_name}) + # BigQuery Table via linked_resource + resource_name = ( + f"//bigquery.googleapis.com/projects/{bigquery_project_id}/datasets/{dataset_id}" + f"/tables/{table_id}" + ) + entry = datacatalog.lookup_entry(request={"linked_resource": resource_name}) + print(f"Retrieved entry {entry.name} for BigQuery Table {entry.linked_resource}") -def lookup_pubsub_topic_sql_resource(project_id, topic_id): - """Retrieves Data Catalog entry for the given Pub/Sub Topic by - sql_resource. 
- """ - from google.cloud import datacatalog_v1 + # BigQuery Table via sql_resource + sql_resource = f"bigquery.table.`{bigquery_project_id}`.`{dataset_id}`.`{table_id}`" - datacatalog = datacatalog_v1.DataCatalogClient() + entry = datacatalog.lookup_entry(request={"sql_resource": sql_resource}) + print( + f"Retrieved entry {entry.name} for BigQuery Table resource {entry.linked_resource}" + ) - sql_resource = 'pubsub.topic.`{}`.`{}`'.format(project_id, topic_id) + # Pub/Sub Topic via linked_resource + resource_name = ( + f"//pubsub.googleapis.com/projects/{pubsub_project_id}/topics/{topic_id}" + ) - return datacatalog.lookup_entry(request={'sql_resource': sql_resource}) + entry = datacatalog.lookup_entry(request={"linked_resource": resource_name}) + print( + f"Retrieved entry {entry.name} for Pub/Sub Topic resource {entry.linked_resource}" + ) + # Pub/Sub Topic via sql_resource + sql_resource = f"pubsub.topic.`{pubsub_project_id}`.`{topic_id}`" -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter + entry = datacatalog.lookup_entry(request={"sql_resource": sql_resource}) + print( + f"Retrieved entry {entry.name} for Pub/Sub Topic resource {entry.linked_resource}" ) - - parser.add_argument('project_id', help='Your Google Cloud project ID') - - subparsers = parser.add_subparsers(dest='command') - - bigquery_dataset_parser = subparsers.add_parser( - 'bigquery-dataset', help=lookup_bigquery_dataset.__doc__) - bigquery_dataset_parser.add_argument('dataset_id') - bigquery_dataset_parser.add_argument('--sql-resource', action='store_true', - help='Perform lookup by SQL Resource') - - bigquery_table_parser = subparsers.add_parser( - 'bigquery-table', help=lookup_bigquery_table.__doc__) - bigquery_table_parser.add_argument('dataset_id') - bigquery_table_parser.add_argument('table_id') - bigquery_table_parser.add_argument('--sql-resource', action='store_true', - help='Perform lookup by SQL 
Resource') - - pubsub_topic_parser = subparsers.add_parser( - 'pubsub-topic', help=lookup_pubsub_topic.__doc__) - pubsub_topic_parser.add_argument('topic_id') - pubsub_topic_parser.add_argument('--sql-resource', action='store_true', - help='Perform lookup by SQL Resource') - - args = parser.parse_args() - - entry = None - - if args.command == 'bigquery-dataset': - lookup_method = lookup_bigquery_dataset_sql_resource \ - if args.sql_resource else lookup_bigquery_dataset - entry = lookup_method(args.project_id, args.dataset_id) - elif args.command == 'bigquery-table': - lookup_method = lookup_bigquery_table_sql_resource \ - if args.sql_resource else lookup_bigquery_table - entry = lookup_method(args.project_id, args.dataset_id, args.table_id) - elif args.command == 'pubsub-topic': - lookup_method = lookup_pubsub_topic_sql_resource \ - if args.sql_resource else lookup_pubsub_topic - entry = lookup_method(args.project_id, args.topic_id) - - print(entry.name) + # [END data_catalog_lookup_entry] + # [END datacatalog_lookup_dataset] diff --git a/datacatalog/snippets/lookup_entry_test.py b/datacatalog/snippets/lookup_entry_test.py index 2030cb072197..55245a93f6c9 100644 --- a/datacatalog/snippets/lookup_entry_test.py +++ b/datacatalog/snippets/lookup_entry_test.py @@ -14,40 +14,39 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import lookup_entry - -BIGQUERY_PROJECT = 'bigquery-public-data' -BIGQUERY_DATASET = 'new_york_taxi_trips' -BIGQUERY_TABLE = 'taxi_zone_geom' - -PUBSUB_PROJECT = 'pubsub-public-data' -PUBSUB_TOPIC = 'taxirides-realtime' - - -def test_lookup_bigquery_dataset(): - assert lookup_entry.lookup_bigquery_dataset( - BIGQUERY_PROJECT, BIGQUERY_DATASET) - - -def test_lookup_bigquery_dataset_sql_resource(): - assert lookup_entry.lookup_bigquery_dataset_sql_resource( - BIGQUERY_PROJECT, BIGQUERY_DATASET) - - -def test_lookup_bigquery_table(): - assert lookup_entry.lookup_bigquery_table( - BIGQUERY_PROJECT, BIGQUERY_DATASET, BIGQUERY_TABLE) - - -def test_lookup_bigquery_table_sql_resource(): - assert lookup_entry.lookup_bigquery_table_sql_resource( - BIGQUERY_PROJECT, BIGQUERY_DATASET, BIGQUERY_TABLE) - - -def test_lookup_pubsub_topic(): - assert lookup_entry.lookup_pubsub_topic(PUBSUB_PROJECT, PUBSUB_TOPIC) +import re +import lookup_entry -def test_lookup_pubsub_topic_sql_resource(): - assert lookup_entry.lookup_pubsub_topic_sql_resource( - PUBSUB_PROJECT, PUBSUB_TOPIC) +BIGQUERY_PROJECT = "bigquery-public-data" +BIGQUERY_DATASET = "new_york_taxi_trips" +BIGQUERY_TABLE = "taxi_zone_geom" + +PUBSUB_PROJECT = "pubsub-public-data" +PUBSUB_TOPIC = "taxirides-realtime" + + +def test_lookup_entry(capsys): + override_values = { + "bigquery_project_id": BIGQUERY_PROJECT, + "dataset_id": BIGQUERY_DATASET, + "table_id": BIGQUERY_TABLE, + "pubsub_project_id": PUBSUB_PROJECT, + "topic_id": PUBSUB_TOPIC, + } + dataset_resource = f"//bigquery.googleapis.com/projects/{BIGQUERY_PROJECT}/datasets/{BIGQUERY_DATASET}" + table_resource = f"//bigquery.googleapis.com/projects/{BIGQUERY_PROJECT}/datasets/{BIGQUERY_DATASET}/tables/{BIGQUERY_TABLE}" + topic_resource = ( + f"//pubsub.googleapis.com/projects/{PUBSUB_PROJECT}/topics/{PUBSUB_TOPIC}" + ) + lookup_entry.lookup_entry(override_values) + out, err = capsys.readouterr() + assert re.search( + f"(Retrieved entry .+ for BigQuery Dataset resource 
{dataset_resource})", out + ) + assert re.search( + f"(Retrieved entry .+ for BigQuery Table resource {table_resource})", out + ) + assert re.search( + f"(Retrieved entry .+ for Pub/Sub Topic resource {topic_resource})", out + ) diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py index 6a8ccdae22c9..1b0d6c900250 100644 --- a/datacatalog/snippets/noxfile.py +++ b/datacatalog/snippets/noxfile.py @@ -39,17 +39,15 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": ["2.7"], # Old samples are opted out of enforcing Python type hints # All new samples should feature them - 'enforce_type_hints': False, - + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', # If you need to use a specific version of pip, # change pip_version_override to the string representation @@ -57,13 +55,13 @@ "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -78,12 +76,12 @@ def get_pytest_env_vars() -> Dict[str, str]: ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. 
- ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret @@ -92,7 +90,7 @@ def get_pytest_env_vars() -> Dict[str, str]: ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -141,7 +139,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG['enforce_type_hints']: + if not TEST_CONFIG["enforce_type_hints"]: session.install("flake8", "flake8-import-order") else: session.install("flake8", "flake8-import-order", "flake8-annotations") @@ -150,9 +148,11 @@ def lint(session: nox.sessions.Session) -> None: args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." + ".", ] session.run("flake8", *args) + + # # Black # @@ -165,6 +165,7 @@ def blacken(session: nox.sessions.Session) -> None: session.run("black", *python_files) + # # Sample Tests # @@ -173,7 +174,9 @@ def blacken(session: nox.sessions.Session) -> None: PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: if TEST_CONFIG["pip_version_override"]: pip_version = TEST_CONFIG["pip_version_override"] session.install(f"pip=={pip_version}") @@ -203,7 +206,7 @@ def _session_tests(session: nox.sessions.Session, post_install: Callable = None) # on travis where slow and flaky tests are excluded. 
# See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @@ -213,9 +216,9 @@ def py(session: nox.sessions.Session) -> None: if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # @@ -224,7 +227,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) for i in range(10): diff --git a/datacatalog/snippets/search_assets.py b/datacatalog/snippets/search_assets.py new file mode 100644 index 000000000000..113acbd2eeb4 --- /dev/null +++ b/datacatalog/snippets/search_assets.py @@ -0,0 +1,48 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def search_assets(override_values): + """Searches Data Catalog entries for a given project.""" + # [START data_catalog_search_assets] + from google.cloud import datacatalog_v1 + + datacatalog = datacatalog_v1.DataCatalogClient() + + # TODO: Set these values before running the sample. + project_id = "project_id" + + # Set custom query. 
+ search_string = "type=dataset" + # [END data_catalog_search_assets] + + # To facilitate testing, we replace values with alternatives + # provided by the testing harness. + project_id = override_values.get("project_id", project_id) + tag_template_id = override_values.get("tag_template_id", search_string) + search_string = f"name:{tag_template_id}" + + # [START data_catalog_search_assets] + scope = datacatalog_v1.types.SearchCatalogRequest.Scope() + scope.include_project_ids.append(project_id) + + # Alternatively, search using organization scopes. + # scope.include_org_ids.append("my_organization_id") + + search_results = datacatalog.search_catalog(scope=scope, query=search_string) + + print("Results in project:") + for result in search_results: + print(result) + # [END data_catalog_search_assets] diff --git a/datacatalog/snippets/search_assets_test.py b/datacatalog/snippets/search_assets_test.py new file mode 100644 index 000000000000..84c266d3397d --- /dev/null +++ b/datacatalog/snippets/search_assets_test.py @@ -0,0 +1,26 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import search_assets + + +def test_search_assets(capsys, project_id, random_existing_tag_template_id): + override_values = { + "project_id": project_id, + "tag_template_id": random_existing_tag_template_id, + } + search_assets.search_assets(override_values) + out, err = capsys.readouterr() + assert "Results in project:" in out + assert random_existing_tag_template_id in out diff --git a/datacatalog/tests/__init__.py b/datacatalog/tests/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/datacatalog/tests/quickstart/__init__.py b/datacatalog/tests/quickstart/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/datacatalog/tests/quickstart/test_create_fileset_entry_quickstart.py b/datacatalog/tests/quickstart/test_create_fileset_entry_quickstart.py deleted file mode 100644 index 769d034fac4a..000000000000 --- a/datacatalog/tests/quickstart/test_create_fileset_entry_quickstart.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from google.cloud import datacatalog_v1beta1 - -from ...quickstart import create_fileset_entry_quickstart - - -def test_create_fileset_entry_quickstart( - capsys, client, project_id, random_entry_group_id, random_entry_id -): - - create_fileset_entry_quickstart.create_fileset_entry_quickstart( - client, project_id, random_entry_group_id, random_entry_id - ) - out, err = capsys.readouterr() - assert ( - "Created entry group" - " projects/{}/locations/{}/entryGroups/{}".format( - project_id, "us-central1", random_entry_group_id - ) - in out - ) - - expected_entry_name = datacatalog_v1beta1.DataCatalogClient.entry_path( - project_id, "us-central1", random_entry_group_id, random_entry_id - ) - - assert "Created entry {}".format(expected_entry_name) in out diff --git a/datacatalog/v1beta1/__init__.py b/datacatalog/v1beta1/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/datacatalog/tests/conftest.py b/datacatalog/v1beta1/conftest.py similarity index 72% rename from datacatalog/tests/conftest.py rename to datacatalog/v1beta1/conftest.py index 6ee1fcb621ea..d641d1359132 100644 --- a/datacatalog/tests/conftest.py +++ b/datacatalog/v1beta1/conftest.py @@ -16,11 +16,11 @@ import datetime import uuid -import pytest - import google.auth from google.cloud import datacatalog_v1beta1 +import pytest + @pytest.fixture(scope="session") def client(credentials): @@ -52,7 +52,7 @@ def random_entry_id(client, project_id, random_entry_group_id): entry_name = datacatalog_v1beta1.DataCatalogClient.entry_path( project_id, "us-central1", random_entry_group_id, random_entry_id ) - client.delete_entry(request = {'name': entry_name}) + client.delete_entry(request={"name": entry_name}) @pytest.fixture @@ -65,7 +65,7 @@ def random_entry_group_id(client, project_id): entry_group_name = datacatalog_v1beta1.DataCatalogClient.entry_group_path( project_id, "us-central1", random_entry_group_id ) - client.delete_entry_group(request = {'name': entry_group_name}) + 
client.delete_entry_group(request={"name": entry_group_name})


@@ -76,7 +76,21 @@ def random_entry_name(client, entry_group_name):
     )
     random_entry_name = "{}/entries/{}".format(entry_group_name, random_entry_id)
     yield random_entry_name
-    client.delete_entry(request = {'name': random_entry_name})
+    client.delete_entry(request={"name": random_entry_name})
+
+
+@pytest.fixture
+def entry(client, entry_group_name):
+    now = datetime.datetime.now()
+    random_entry_id = "example_entry_{}_{}".format(
+        now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8]
+    )
+    # The entry dict below is coerced to an Entry message by create_entry.
+    entry = client.create_entry(
+        request={"parent": entry_group_name, "entry_id": random_entry_id, "entry": {"type_": "DATA_STREAM", "name": "samples_test_entry"}}
+    )
+    yield entry.name
+    client.delete_entry(request={"name": entry.name})


@@ -86,6 +100,11 @@ def entry_group_name(client, project_id):
         now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8]
     )
     entry_group = client.create_entry_group(
-        request = {'parent': datacatalog_v1beta1.DataCatalogClient.location_path(project_id, "us-central1"), 'entry_group_id': entry_group_id, 'entry_group': {}})
+        request={
+            "parent": f"projects/{project_id}/locations/us-central1",
+            "entry_group_id": entry_group_id,
+            "entry_group": datacatalog_v1beta1.EntryGroup(),
+        }
+    )
     yield entry_group.name
-    client.delete_entry_group(request = {'name': entry_group.name})
+    client.delete_entry_group(request={"name": entry_group.name})
diff --git a/datacatalog/v1beta1/create_entry_group.py b/datacatalog/v1beta1/create_entry_group.py
index d2056ec63d2c..452f1ded681d 100644
--- a/datacatalog/v1beta1/create_entry_group.py
+++ b/datacatalog/v1beta1/create_entry_group.py
@@ -13,13 +13,11 @@
 # limitations under the License.

-def create_entry_group(client, project_id, entry_group_id): - - # [START datacatalog_create_entry_group_tag] +def create_entry_group(project_id, entry_group_id): + # [START data_catalog_create_entry_group_v1beta1] from google.cloud import datacatalog_v1beta1 - # TODO(developer): Construct a Data Catalog client object. - # client = datacatalog_v1beta1.DataCatalogClient() + client = datacatalog_v1beta1.DataCatalogClient() # TODO(developer): Set entry_group_id to the ID of the # entry group to create. @@ -35,9 +33,7 @@ def create_entry_group(client, project_id, entry_group_id): # entry_group_id = "your_entry_group_id" # Construct a full location path to be the parent of the entry group. - parent = datacatalog_v1beta1.DataCatalogClient.location_path( - project_id, location_id - ) + parent = f"projects/{project_id}/locations/{location_id}" # Construct a full EntryGroup object to send to the API. entry_group = datacatalog_v1beta1.EntryGroup() @@ -48,6 +44,11 @@ def create_entry_group(client, project_id, entry_group_id): # Raises google.api_core.exceptions.AlreadyExists if the Entry Group # already exists within the project. entry_group = client.create_entry_group( - request = {'parent': parent, 'entry_group_id': entry_group_id, 'entry_group': entry_group}) # Make an API request. + request={ + "parent": parent, + "entry_group_id": entry_group_id, + "entry_group": entry_group, + } + ) # Make an API request. 
print("Created entry group {}".format(entry_group.name)) - # [END datacatalog_create_entry_group_tag] + # [END data_catalog_create_entry_group_v1beta1] diff --git a/datacatalog/v1beta1/create_fileset_entry.py b/datacatalog/v1beta1/create_fileset_entry.py index f96255b2bcd8..f798bfb6810b 100644 --- a/datacatalog/v1beta1/create_fileset_entry.py +++ b/datacatalog/v1beta1/create_fileset_entry.py @@ -14,8 +14,7 @@ def create_fileset_entry(client, entry_group_name, entry_id): - - # [START datacatalog_create_fileset_tag] + # [START data_catalog_create_fileset_v1beta1] from google.cloud import datacatalog_v1beta1 # TODO(developer): Construct a Data Catalog client object. @@ -33,7 +32,7 @@ def create_fileset_entry(client, entry_group_name, entry_id): entry.display_name = "My Fileset" entry.description = "This Fileset consists of ..." entry.gcs_fileset_spec.file_patterns.append("gs://my_bucket/*") - entry.type = datacatalog_v1beta1.enums.EntryType.FILESET + entry.type_ = datacatalog_v1beta1.EntryType.FILESET # Create the Schema, for example when you have a csv file. 
columns = [] @@ -42,13 +41,13 @@ def create_fileset_entry(client, entry_group_name, entry_id): column="first_name", description="First name", mode="REQUIRED", - type="STRING", + type_="STRING", ) ) columns.append( datacatalog_v1beta1.types.ColumnSchema( - column="last_name", description="Last name", mode="REQUIRED", type="STRING" + column="last_name", description="Last name", mode="REQUIRED", type_="STRING" ) ) @@ -56,13 +55,13 @@ def create_fileset_entry(client, entry_group_name, entry_id): subcolumns = [] subcolumns.append( datacatalog_v1beta1.types.ColumnSchema( - column="city", description="City", mode="NULLABLE", type="STRING" + column="city", description="City", mode="NULLABLE", type_="STRING" ) ) subcolumns.append( datacatalog_v1beta1.types.ColumnSchema( - column="state", description="State", mode="NULLABLE", type="STRING" + column="state", description="State", mode="NULLABLE", type_="STRING" ) ) @@ -72,7 +71,7 @@ def create_fileset_entry(client, entry_group_name, entry_id): description="Addresses", mode="REPEATED", subcolumns=subcolumns, - type="RECORD", + type_="RECORD", ) ) @@ -81,6 +80,8 @@ def create_fileset_entry(client, entry_group_name, entry_id): # Send the entry to the API for creation. # Raises google.api_core.exceptions.AlreadyExists if the Entry already # exists within the project. 
- entry = client.create_entry(request = {'parent': entry_group_name, 'entry_id': entry_id, 'entry': entry}) + entry = client.create_entry( + request={"parent": entry_group_name, "entry_id": entry_id, "entry": entry} + ) print("Created entry {}".format(entry.name)) - # [END datacatalog_create_fileset_tag] + # [END data_catalog_create_fileset_v1beta1] diff --git a/datacatalog/v1beta1/datacatalog_get_entry.py b/datacatalog/v1beta1/get_entry.py similarity index 59% rename from datacatalog/v1beta1/datacatalog_get_entry.py rename to datacatalog/v1beta1/get_entry.py index 05bc0dd52aa3..30b13d0a5d7e 100644 --- a/datacatalog/v1beta1/datacatalog_get_entry.py +++ b/datacatalog/v1beta1/get_entry.py @@ -14,8 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# DO NOT EDIT! This is a generated sample ("Request", "datacatalog_get_entry") - # To install the latest published package dependency, execute the following: # pip install google-cloud-datacatalog @@ -24,18 +22,20 @@ # description: Get Entry # usage: python3 samples/v1beta1/datacatalog_get_entry.py [--project_id "[Google Cloud Project ID]"] [--location_id "[Google Cloud Location ID]"] [--entry_group_id "[Entry Group ID]"] [--entry_id "[Entry ID]"] -# [START datacatalog_get_entry] -from google.cloud import datacatalog_v1beta1 -def sample_get_entry(project_id, location_id, entry_group_id, entry_id): +def sample_get_entry( + project_id: str, location_id: str, entry_group_id: str, entry_id: str +): + # [START data_catalog_get_entry_v1beta1] + from google.cloud import datacatalog_v1beta1 """ Get Entry Args: - project_id Your Google Cloud project ID - location_id Google Cloud region, e.g. us-central1 - entry_group_id ID of the Entry Group, e.g. @bigquery, @pubsub, my_entry_group - entry_id ID of the Entry + project_id (str): Your Google Cloud project ID + location_id (str): Google Cloud region, e.g. us-central1 + entry_group_id (str): ID of the Entry Group, e.g. 
@bigquery, @pubsub, my_entry_group + entry_id (str): ID of the Entry """ client = datacatalog_v1beta1.DataCatalogClient() @@ -46,24 +46,22 @@ def sample_get_entry(project_id, location_id, entry_group_id, entry_id): # entry_id = '[Entry ID]' name = client.entry_path(project_id, location_id, entry_group_id, entry_id) - response = client.get_entry(request = {'name': name}) - entry = response - print(u"Entry name: {}".format(entry.name)) - print(u"Entry type: {}".format(datacatalog_v1beta1.EntryType(entry.type).name)) - print(u"Linked resource: {}".format(entry.linked_resource)) - - -# [END datacatalog_get_entry] + entry = client.get_entry(request={"name": name}) + print(f"Entry name: {entry.name}") + print(f"Entry type: {datacatalog_v1beta1.EntryType(entry.type_).name}") + print(f"Linked resource: {entry.linked_resource}") + # [END data_catalog_get_entry_v1beta1] + return entry def main(): import argparse parser = argparse.ArgumentParser() - parser.add_argument("--project_id", type=str, default="[Google Cloud Project ID]") - parser.add_argument("--location_id", type=str, default="[Google Cloud Location ID]") - parser.add_argument("--entry_group_id", type=str, default="[Entry Group ID]") - parser.add_argument("--entry_id", type=str, default="[Entry ID]") + parser.add_argument("--project_id", type=str, default="[Google Cloud Project ID]") + parser.add_argument("--location_id", type=str, default="[Google Cloud Location ID]") + parser.add_argument("--entry_group_id", type=str, default="[Entry Group ID]") + parser.add_argument("--entry_id", type=str, default="[Entry ID]") args = parser.parse_args() sample_get_entry( diff --git a/datacatalog/v1beta1/datacatalog_lookup_entry.py b/datacatalog/v1beta1/lookup_entry.py similarity index 66% rename from datacatalog/v1beta1/datacatalog_lookup_entry.py rename to datacatalog/v1beta1/lookup_entry.py index 176d080db766..a167789126d0 100644 --- a/datacatalog/v1beta1/datacatalog_lookup_entry.py +++ 
b/datacatalog/v1beta1/lookup_entry.py @@ -14,8 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# DO NOT EDIT! This is a generated sample ("Request", "datacatalog_lookup_entry") - # To install the latest published package dependency, execute the following: # pip install google-cloud-datacatalog @@ -24,16 +22,15 @@ # description: Lookup Entry # usage: python3 samples/v1beta1/datacatalog_lookup_entry.py [--resource_name "[Full Resource Name]"] -# [START datacatalog_lookup_entry] -from google.cloud import datacatalog_v1beta1 - -def sample_lookup_entry(resource_name): +def sample_lookup_entry(resource_name: str): + # [START data_catalog_lookup_entry_v1beta1] + from google.cloud import datacatalog_v1beta1 """ Lookup Entry Args: - resource_name The full name of the Google Cloud Platform resource the Data + resource_name (str): The full name of the Google Cloud Platform resource the Data Catalog entry represents. See: https://cloud.google.com/apis/design/resource_names#full_resource_name Examples: @@ -42,23 +39,19 @@ def sample_lookup_entry(resource_name): """ client = datacatalog_v1beta1.DataCatalogClient() - - # resource_name = '[Full Resource Name]' - response = client.lookup_entry(request = {'linked_resource': resource_name}) - entry = response - print(u"Entry name: {}".format(entry.name)) - print(u"Entry type: {}".format(datacatalog_v1beta1.EntryType(entry.type).name)) - print(u"Linked resource: {}".format(entry.linked_resource)) - - -# [END datacatalog_lookup_entry] + entry = client.lookup_entry(request={"linked_resource": resource_name}) + print(f"Entry name: {entry.name}") + print(f"Entry type: {datacatalog_v1beta1.EntryType(entry.type_).name}") + print(f"Linked resource: {entry.linked_resource}") + # [END data_catalog_lookup_entry_v1beta1] + return entry def main(): import argparse parser = argparse.ArgumentParser() - parser.add_argument("--resource_name", type=str, default="[Full Resource Name]") + 
parser.add_argument("--resource_name", type=str, default="[Full Resource Name]") args = parser.parse_args() sample_lookup_entry(args.resource_name) diff --git a/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py b/datacatalog/v1beta1/lookup_entry_sql_resource.py similarity index 70% rename from datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py rename to datacatalog/v1beta1/lookup_entry_sql_resource.py index f46af3698080..dd6de7867ae0 100644 --- a/datacatalog/v1beta1/datacatalog_lookup_entry_sql_resource.py +++ b/datacatalog/v1beta1/lookup_entry_sql_resource.py @@ -14,8 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# DO NOT EDIT! This is a generated sample ("Request", "datacatalog_lookup_entry_sql_resource") - # To install the latest published package dependency, execute the following: # pip install google-cloud-datacatalog @@ -24,16 +22,15 @@ # description: Lookup Entry using SQL resource # usage: python3 samples/v1beta1/datacatalog_lookup_entry_sql_resource.py [--sql_name "[SQL Resource Name]"] -# [START datacatalog_lookup_entry_sql_resource] -from google.cloud import datacatalog_v1beta1 - -def sample_lookup_entry(sql_name): +def sample_lookup_entry(sql_name: str): + # [START data_catalog_lookup_entry_sql_resource_v1beta1] + from google.cloud import datacatalog_v1beta1 """ Lookup Entry using SQL resource Args: - sql_name The SQL name of the Google Cloud Platform resource the Data Catalog + sql_name (str): The SQL name of the Google Cloud Platform resource the Data Catalog entry represents. 
Examples: bigquery.table.`bigquery-public-data`.new_york_taxi_trips.taxi_zone_geom @@ -43,14 +40,12 @@ def sample_lookup_entry(sql_name): client = datacatalog_v1beta1.DataCatalogClient() # sql_name = '[SQL Resource Name]' - response = client.lookup_entry(request = {'sql_resource': sql_name}) - entry = response - print(u"Entry name: {}".format(entry.name)) - print(u"Entry type: {}".format(datacatalog_v1beta1.EntryType(entry.type).name)) - print(u"Linked resource: {}".format(entry.linked_resource)) - - -# [END datacatalog_lookup_entry_sql_resource] + entry = client.lookup_entry(request={"sql_resource": sql_name}) + print(f"Entry name: {entry.name}") + print(f"Entry type: {datacatalog_v1beta1.EntryType(entry.type_).name}") + print(f"Linked resource: {entry.linked_resource}") + # [END data_catalog_lookup_entry_sql_resource_v1beta1] + return entry def main(): diff --git a/datacatalog/v1beta1/noxfile.py b/datacatalog/v1beta1/noxfile.py new file mode 100644 index 000000000000..5660f08be441 --- /dev/null +++ b/datacatalog/v1beta1/noxfile.py @@ -0,0 +1,222 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import os +from pathlib import Path +import sys + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! 
+# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +# Copy `noxfile_config.py` to your directory and modify it instead. + + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + "ignored_versions": ["2.7"], + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append(".") + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars(): + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG["gcloud_project_env"] + # This should error out if not set. + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG["envs"]) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to tested samples. +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] + +# Any default versions that should be ignored. 
+IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +# +# Style Checks +# + + +def _determine_local_import_names(start_dir): + """Determines all import names that should be considered "local". + + This is used when running the linter to insure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. +# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session): + session.install("flake8", "flake8-import-order") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + ".", + ] + session.run("flake8", *args) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests(session, post_install=None): + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + session.install("-r", 
"requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars() + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session): + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) + + +# +# Readmegen +# + + +def _get_repo_root(): + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. + p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session, path): + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/datacatalog/v1beta1/requirements-test.txt b/datacatalog/v1beta1/requirements-test.txt new file mode 100644 index 000000000000..95ea1e6a02b0 --- /dev/null +++ b/datacatalog/v1beta1/requirements-test.txt @@ -0,0 +1 @@ +pytest==6.2.4 diff --git 
a/datacatalog/v1beta1/requirements.txt b/datacatalog/v1beta1/requirements.txt new file mode 100644 index 000000000000..5f898b7ae101 --- /dev/null +++ b/datacatalog/v1beta1/requirements.txt @@ -0,0 +1 @@ +google-cloud-datacatalog==3.4.0 diff --git a/datacatalog/v1beta1/datacatalog_search.py b/datacatalog/v1beta1/search.py similarity index 68% rename from datacatalog/v1beta1/datacatalog_search.py rename to datacatalog/v1beta1/search.py index ad10276698a4..f4893083589c 100644 --- a/datacatalog/v1beta1/datacatalog_search.py +++ b/datacatalog/v1beta1/search.py @@ -14,8 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# DO NOT EDIT! This is a generated sample ("RequestPagedAll", "datacatalog_search") - # To install the latest published package dependency, execute the following: # pip install google-cloud-datacatalog @@ -24,20 +22,20 @@ # description: Search Catalog # usage: python3 samples/v1beta1/datacatalog_search.py [--include_project_id "[Google Cloud Project ID]"] [--include_gcp_public_datasets false] [--query "[String in search query syntax]"] -# [START datacatalog_search] -from google.cloud import datacatalog_v1beta1 -from google.cloud.datacatalog_v1beta1 import enums - -def sample_search_catalog(include_project_id, include_gcp_public_datasets, query): +def sample_search_catalog( + include_project_id: str, include_gcp_public_datasets: bool, query: str +): + # [START data_catalog_search_v1beta1] + from google.cloud import datacatalog_v1beta1 """ Search Catalog Args: - include_project_id Your Google Cloud project ID. - include_gcp_public_datasets If true, include Google Cloud Platform (GCP) public + include_project_id (str): Your Google Cloud project ID. + include_gcp_public_datasets (bool): If true, include Google Cloud Platform (GCP) public datasets in the search results. - query Your query string. + query (str): Your query string. 
See: https://cloud.google.com/data-catalog/docs/how-to/search-reference Example: system=bigquery type=dataset """ @@ -54,20 +52,16 @@ def sample_search_catalog(include_project_id, include_gcp_public_datasets, query } # Iterate over all results - for response_item in client.search_catalog(request = {'scope': scope, 'query': query}): - print( - u"Result type: {}".format( - enums.SearchResultType(response_item.search_result_type).name - ) - ) - print(u"Result subtype: {}".format(response_item.search_result_subtype)) + results = client.search_catalog(request={"scope": scope, "query": query}) + for response_item in results: print( - u"Relative resource name: {}".format(response_item.relative_resource_name) + f"Result type: {datacatalog_v1beta1.SearchResultType(response_item.search_result_type).name}" ) - print(u"Linked resource: {}\n".format(response_item.linked_resource)) - - -# [END datacatalog_search] + print(f"Result subtype: {response_item.search_result_subtype}") + print(f"Relative resource name: {response_item.relative_resource_name}") + print(f"Linked resource: {response_item.linked_resource}\n") + # [END data_catalog_search_v1beta1] + return results def main(): diff --git a/datacatalog/tests/test_create_entry_group.py b/datacatalog/v1beta1/test_create_entry_group.py similarity index 83% rename from datacatalog/tests/test_create_entry_group.py rename to datacatalog/v1beta1/test_create_entry_group.py index 443c97f92921..f7fe80cc025c 100644 --- a/datacatalog/tests/test_create_entry_group.py +++ b/datacatalog/v1beta1/test_create_entry_group.py @@ -13,12 +13,12 @@ # limitations under the License. 
-from ..v1beta1 import create_entry_group +import create_entry_group def test_create_entry_group(capsys, client, project_id, random_entry_group_id): - create_entry_group.create_entry_group(request = {'parent': client, 'entry_group_id': project_id, 'entry_group': random_entry_group_id}) + create_entry_group.create_entry_group(project_id, random_entry_group_id) out, err = capsys.readouterr() assert ( "Created entry group" diff --git a/datacatalog/tests/test_create_fileset_entry.py b/datacatalog/v1beta1/test_create_fileset_entry.py similarity index 96% rename from datacatalog/tests/test_create_fileset_entry.py rename to datacatalog/v1beta1/test_create_fileset_entry.py index 8d0bc28fd07f..b9af5d8c3706 100644 --- a/datacatalog/tests/test_create_fileset_entry.py +++ b/datacatalog/v1beta1/test_create_fileset_entry.py @@ -15,7 +15,7 @@ import re -from ..v1beta1 import create_fileset_entry +import create_fileset_entry def test_create_fileset_entry(capsys, client, random_entry_name): diff --git a/datacatalog/v1beta1/test_get_entry.py b/datacatalog/v1beta1/test_get_entry.py new file mode 100644 index 000000000000..70d703a52a86 --- /dev/null +++ b/datacatalog/v1beta1/test_get_entry.py @@ -0,0 +1,25 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import get_entry + + +def test_get_entry(client, entry): + # break entry name into parts + name = client.parse_entry_path(entry) + retrieved_entry = get_entry.sample_get_entry( + name["project"], name["location"], name["entry_group"], name["entry"] + ) + assert retrieved_entry.name == entry diff --git a/datacatalog/v1beta1/test_lookup_entry.py b/datacatalog/v1beta1/test_lookup_entry.py new file mode 100644 index 000000000000..5091cd2b0255 --- /dev/null +++ b/datacatalog/v1beta1/test_lookup_entry.py @@ -0,0 +1,27 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import lookup_entry + +BIGQUERY_PROJECT = "bigquery-public-data" +BIGQUERY_DATASET = "new_york_taxi_trips" + + +def test_lookup_entry(client, entry, project_id): + bigquery_dataset = f"projects/{BIGQUERY_PROJECT}/datasets/{BIGQUERY_DATASET}" + resource_name = f"//bigquery.googleapis.com/{bigquery_dataset}" + + found_entry = lookup_entry.sample_lookup_entry(resource_name) + assert found_entry.linked_resource == resource_name diff --git a/datacatalog/v1beta1/test_lookup_entry_sql_resource.py b/datacatalog/v1beta1/test_lookup_entry_sql_resource.py new file mode 100644 index 000000000000..daf45523a2a1 --- /dev/null +++ b/datacatalog/v1beta1/test_lookup_entry_sql_resource.py @@ -0,0 +1,26 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import lookup_entry_sql_resource + +BIGQUERY_PROJECT = "bigquery-public-data" +BIGQUERY_DATASET = "new_york_taxi_trips" + + +def test_lookup_entry(): + sql_name = f"bigquery.dataset.`{BIGQUERY_PROJECT}`.`{BIGQUERY_DATASET}`" + resource_name = f"//bigquery.googleapis.com/projects/{BIGQUERY_PROJECT}/datasets/{BIGQUERY_DATASET}" + entry = lookup_entry_sql_resource.sample_lookup_entry(sql_name) + assert entry.linked_resource == resource_name diff --git a/datacatalog/v1beta1/test_search.py b/datacatalog/v1beta1/test_search.py new file mode 100644 index 000000000000..48fe6cf46384 --- /dev/null +++ b/datacatalog/v1beta1/test_search.py @@ -0,0 +1,21 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import search + + +def test_search_catalog(client, project_id, entry_group_name): + results = search.sample_search_catalog(project_id, False, f"name:{entry_group_name}") + assert results is not None From bd1153e42f7c505d2bc28d371861e7ef3e5332be Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 11 Aug 2021 16:56:14 +0000 Subject: [PATCH 042/105] chore: fix INSTALL_LIBRARY_FROM_SOURCE in noxfile.py (#201) Source-Link: https://github.com/googleapis/synthtool/commit/6252f2cd074c38f37b44abe5e96d128733eb1b61 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:50e35228649c47b6ca82aa0be3ff9eb2afce51c82b66c4a03fe4afeb5ff6c0fc --- datacatalog/quickstart/noxfile.py | 88 ++++++++++++++++++++++--------- datacatalog/snippets/noxfile.py | 43 +++++++-------- datacatalog/v1beta1/noxfile.py | 88 ++++++++++++++++++++++--------- 3 files changed, 146 insertions(+), 73 deletions(-) diff --git a/datacatalog/quickstart/noxfile.py b/datacatalog/quickstart/noxfile.py index 5660f08be441..125bb619cc49 100644 --- a/datacatalog/quickstart/noxfile.py +++ b/datacatalog/quickstart/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -27,8 +28,9 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. +BLACK_VERSION = "black==19.10b0" +# Copy `noxfile_config.py` to your directory and modify it instead. # `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. The values here should be in sync @@ -37,22 +39,31 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. 
- "ignored_versions": ["2.7"], + 'ignored_versions': ["2.7"], + + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + 'enforce_type_hints': False, + # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - "envs": {}, + 'envs': {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") + sys.path.append('.') from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -62,36 +73,36 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] + env_key = TEST_CONFIG['gcloud_project_env'] # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) + ret.update(TEST_CONFIG['envs']) return ret # DO NOT EDIT - automatically generated. # All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. 
-IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ("True", "true") # # Style Checks # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -129,17 +140,30 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): - session.install("flake8", "flake8-import-order") +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG['enforce_type_hints']: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - ".", + "." 
] session.run("flake8", *args) +# +# Black +# + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) # # Sample Tests @@ -149,13 +173,22 @@ def lint(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) @@ -175,14 +208,14 @@ def _session_tests(session, post_install=None): @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) # @@ -190,7 +223,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. 
Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) @@ -199,6 +232,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") @@ -208,7 +246,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py index 1b0d6c900250..125bb619cc49 100644 --- a/datacatalog/snippets/noxfile.py +++ b/datacatalog/snippets/noxfile.py @@ -39,15 +39,17 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7"], + 'ignored_versions': ["2.7"], + # Old samples are opted out of enforcing Python type hints # All new samples should feature them - "enforce_type_hints": False, + 'enforce_type_hints': False, + # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', # If you need to use a specific version of pip, # change pip_version_override to the string representation @@ -55,13 +57,13 @@ "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. 
- "envs": {}, + 'envs': {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") + sys.path.append('.') from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -76,12 +78,12 @@ def get_pytest_env_vars() -> Dict[str, str]: ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] + env_key = TEST_CONFIG['gcloud_project_env'] # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) + ret.update(TEST_CONFIG['envs']) return ret @@ -90,11 +92,11 @@ def get_pytest_env_vars() -> Dict[str, str]: ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ("True", "true") # # Style Checks # @@ -139,7 +141,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG["enforce_type_hints"]: + if not TEST_CONFIG['enforce_type_hints']: session.install("flake8", "flake8-import-order") else: session.install("flake8", "flake8-import-order", "flake8-annotations") @@ -148,11 +150,9 @@ def lint(session: nox.sessions.Session) -> None: args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - ".", + "." 
] session.run("flake8", *args) - - # # Black # @@ -165,7 +165,6 @@ def blacken(session: nox.sessions.Session) -> None: session.run("black", *python_files) - # # Sample Tests # @@ -174,9 +173,7 @@ def blacken(session: nox.sessions.Session) -> None: PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests( - session: nox.sessions.Session, post_install: Callable = None -) -> None: +def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: if TEST_CONFIG["pip_version_override"]: pip_version = TEST_CONFIG["pip_version_override"] session.install(f"pip=={pip_version}") @@ -206,7 +203,7 @@ def _session_tests( # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars(), + env=get_pytest_env_vars() ) @@ -216,9 +213,9 @@ def py(session: nox.sessions.Session) -> None: if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) # @@ -227,7 +224,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """Returns the root folder of the project.""" + """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) for i in range(10): diff --git a/datacatalog/v1beta1/noxfile.py b/datacatalog/v1beta1/noxfile.py index 5660f08be441..125bb619cc49 100644 --- a/datacatalog/v1beta1/noxfile.py +++ b/datacatalog/v1beta1/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -27,8 +28,9 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. +BLACK_VERSION = "black==19.10b0" +# Copy `noxfile_config.py` to your directory and modify it instead. # `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. The values here should be in sync @@ -37,22 +39,31 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7"], + 'ignored_versions': ["2.7"], + + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + 'enforce_type_hints': False, + # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - "envs": {}, + 'envs': {}, } try: # Ensure we can import noxfile_config in the project's directory. 
- sys.path.append(".") + sys.path.append('.') from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -62,36 +73,36 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] + env_key = TEST_CONFIG['gcloud_project_env'] # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) + ret.update(TEST_CONFIG['envs']) return ret # DO NOT EDIT - automatically generated. # All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ("True", "true") # # Style Checks # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". 
This is used when running the linter to insure that import order is @@ -129,17 +140,30 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): - session.install("flake8", "flake8-import-order") +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG['enforce_type_hints']: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - ".", + "." ] session.run("flake8", *args) +# +# Black +# + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) # # Sample Tests @@ -149,13 +173,22 @@ def lint(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) @@ -175,14 +208,14 @@ def _session_tests(session, post_install=None): @nox.session(python=ALL_VERSIONS) -def py(session): 
+def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) # @@ -190,7 +223,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) @@ -199,6 +232,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") @@ -208,7 +246,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) From ed26248ca824ab8ddc792077aaa84c70e1725557 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 12 Aug 2021 21:02:14 +0200 Subject: [PATCH 043/105] chore(deps): update dependency google-cloud-bigquery to v2.23.3 (#203) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update dependency google-cloud-bigquery to v2.23.3 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- datacatalog/quickstart/requirements-test.txt | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 55a7ce4c6db8..6ea6000ac5f7 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==6.2.4 -google-cloud-bigquery==2.4.0 \ No newline at end of file +google-cloud-bigquery==2.23.3 \ No newline at end of file From d5978990f0c3ca60698e94acf6d1c3593b26b934 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 13 Aug 2021 11:31:15 -0400 Subject: [PATCH 044/105] chore: drop mention of Python 2.7 from templates (#207) Source-Link: https://github.com/googleapis/synthtool/commit/facee4cc1ea096cd8bcc008bb85929daa7c414c0 Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:9743664022bd63a8084be67f144898314c7ca12f0a03e422ac17c733c129d803 Co-authored-by: Owl Bot --- datacatalog/quickstart/noxfile.py | 6 +++--- datacatalog/snippets/noxfile.py | 6 +++--- datacatalog/v1beta1/noxfile.py | 6 +++--- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/datacatalog/quickstart/noxfile.py b/datacatalog/quickstart/noxfile.py index 125bb619cc49..e73436a15626 100644 --- a/datacatalog/quickstart/noxfile.py +++ b/datacatalog/quickstart/noxfile.py @@ -39,7 +39,7 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], + 'ignored_versions': [], # Old samples are opted out of enforcing Python type hints # All new samples should feature them @@ -88,8 +88,8 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. 
IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py index 125bb619cc49..e73436a15626 100644 --- a/datacatalog/snippets/noxfile.py +++ b/datacatalog/snippets/noxfile.py @@ -39,7 +39,7 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], + 'ignored_versions': [], # Old samples are opted out of enforcing Python type hints # All new samples should feature them @@ -88,8 +88,8 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] diff --git a/datacatalog/v1beta1/noxfile.py b/datacatalog/v1beta1/noxfile.py index 125bb619cc49..e73436a15626 100644 --- a/datacatalog/v1beta1/noxfile.py +++ b/datacatalog/v1beta1/noxfile.py @@ -39,7 +39,7 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], + 'ignored_versions': [], # Old samples are opted out of enforcing Python type hints # All new samples should feature them @@ -88,8 +88,8 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. 
IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] From e2053c53bf0fd6655247f59301c4c052af987085 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 13 Aug 2021 14:17:16 -0600 Subject: [PATCH 045/105] fix: make datacatalog == datacatalog_v1 (#206) Fixes #116 I have verified that v1beta1 -> v1 is additive, so this is *not* a breaking change. See internal changelist 390485345 for the proto level diff and successful run through the proto brekaing change detector --- datacatalog/v1beta1/conftest.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/datacatalog/v1beta1/conftest.py b/datacatalog/v1beta1/conftest.py index d641d1359132..61f2f64e9b97 100644 --- a/datacatalog/v1beta1/conftest.py +++ b/datacatalog/v1beta1/conftest.py @@ -85,10 +85,19 @@ def entry(client, entry_group_name): random_entry_id = "example_entry_{}_{}".format( now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] ) - entry = datacatalog_v1beta1.CreateEntryRequest - entry = client.create_entry( - request={"parent": entry_group_name, "entry_id": random_entry_id, "entry": {"type_": "DATA_STREAM", "name": "samples_test_entry"}} + + request = datacatalog_v1beta1.CreateEntryRequest( + parent=entry_group_name, + entry_id=random_entry_id, + entry=datacatalog_v1beta1.Entry( + type_=datacatalog_v1beta1.EntryType.DATA_STREAM, + name="samples_test_entry", + user_specified_system="sample_system", + ) ) + + entry = client.create_entry(request) + yield entry.name client.delete_entry(request={"name": entry.name}) From 9bc49753d5070aa05fffd7f1584f3412526c9e2d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 16 Aug 2021 20:15:30 +0200 Subject: [PATCH 046/105] chore(deps): update dependency google-cloud-bigquery to v2.24.0 (#205) Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> --- datacatalog/quickstart/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 6ea6000ac5f7..c97105b5977d 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==6.2.4 -google-cloud-bigquery==2.23.3 \ No newline at end of file +google-cloud-bigquery==2.24.0 \ No newline at end of file From 29a62e587cde16f0fbbf3012477a1d7858ba342e Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 19 Aug 2021 19:29:07 +0200 Subject: [PATCH 047/105] chore(deps): update dependency google-cloud-bigquery to v2.24.1 (#208) --- datacatalog/quickstart/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index c97105b5977d..8f47a940d988 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==6.2.4 -google-cloud-bigquery==2.24.0 \ No newline at end of file +google-cloud-bigquery==2.24.1 \ No newline at end of file From 7ba6be3b7bf6e8b921201d39dfd2b8225b0c33cc Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 30 Aug 2021 17:19:26 +0200 Subject: [PATCH 048/105] chore(deps): update dependency google-cloud-bigquery to v2.25.1 (#212) --- datacatalog/quickstart/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 8f47a940d988..72cec7427d94 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==6.2.4 -google-cloud-bigquery==2.24.1 \ No newline at end of file +google-cloud-bigquery==2.25.1 \ No newline at end of file From 85652b54f40790142bf6e69de36dd1be0c582f89 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 30 Aug 2021 22:52:52 +0200 Subject: [PATCH 
049/105] chore(deps): update dependency pytest to v6.2.5 (#216) --- datacatalog/quickstart/requirements-test.txt | 2 +- datacatalog/snippets/requirements-test.txt | 2 +- datacatalog/v1beta1/requirements-test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 72cec7427d94..ce283cd80eca 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==6.2.4 +pytest==6.2.5 google-cloud-bigquery==2.25.1 \ No newline at end of file diff --git a/datacatalog/snippets/requirements-test.txt b/datacatalog/snippets/requirements-test.txt index 95ea1e6a02b0..927094516e65 100644 --- a/datacatalog/snippets/requirements-test.txt +++ b/datacatalog/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==6.2.4 +pytest==6.2.5 diff --git a/datacatalog/v1beta1/requirements-test.txt b/datacatalog/v1beta1/requirements-test.txt index 95ea1e6a02b0..927094516e65 100644 --- a/datacatalog/v1beta1/requirements-test.txt +++ b/datacatalog/v1beta1/requirements-test.txt @@ -1 +1 @@ -pytest==6.2.4 +pytest==6.2.5 From 6cfd2ec036a529cc64b47101e19eb9049da01bfd Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 1 Sep 2021 15:57:18 +0200 Subject: [PATCH 050/105] chore(deps): update dependency google-cloud-bigquery to v2.25.2 (#217) --- datacatalog/quickstart/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index ce283cd80eca..03e6a854f562 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==6.2.5 -google-cloud-bigquery==2.25.1 \ No newline at end of file +google-cloud-bigquery==2.25.2 \ No newline at end of file From 0041c18ca3e27f840a8963dc641450c1f440d319 Mon Sep 17 00:00:00 2001 From: Steffany Brown 
<30247553+steffnay@users.noreply.github.com> Date: Thu, 2 Sep 2021 08:17:00 -0700 Subject: [PATCH 051/105] docs(samples): add entry group greation to custom entry sample (#215) Co-authored-by: Anthonios Partheniou --- datacatalog/snippets/conftest.py | 19 -------------- datacatalog/snippets/create_custom_entry.py | 25 +++++++++++++++---- .../snippets/create_custom_entry_test.py | 7 ++++-- 3 files changed, 25 insertions(+), 26 deletions(-) diff --git a/datacatalog/snippets/conftest.py b/datacatalog/snippets/conftest.py index 089190d23042..47bea8d99358 100644 --- a/datacatalog/snippets/conftest.py +++ b/datacatalog/snippets/conftest.py @@ -127,22 +127,3 @@ def random_existing_tag_template_id(client, project_id, resources_to_delete): ) yield random_tag_template_id resources_to_delete["templates"].append(random_tag_template.name) - - -@pytest.fixture -def random_existing_entry_group( - client, project_id, random_entry_group_id, resources_to_delete -): - entry_group_obj = datacatalog_v1.types.EntryGroup() - entry_group_obj.display_name = f"python_sample_{temp_suffix()}" - entry_group_obj.description = "Data Catalog samples test entry group." - - entry_group = datacatalog.create_entry_group( - parent=datacatalog_v1.DataCatalogClient.common_location_path( - project_id, LOCATION - ), - entry_group_id=random_entry_group_id, - entry_group=entry_group_obj, - ) - yield entry_group - resources_to_delete["entry_groups"].append(entry_group.name) diff --git a/datacatalog/snippets/create_custom_entry.py b/datacatalog/snippets/create_custom_entry.py index 43a2dfac3c13..1556a042fdf5 100644 --- a/datacatalog/snippets/create_custom_entry.py +++ b/datacatalog/snippets/create_custom_entry.py @@ -21,24 +21,39 @@ def create_custom_entry(override_values): # Google Cloud Platform project. project_id = "my-project" - # Entry group to be created. 
- # For sample code demonstrating entry group creation, see quickstart: - # https://cloud.google.com/data-catalog/docs/quickstart-tagging - entry_group_name = "my_existing_entry_group" + # Entry Group to be created. + entry_group_id = "my_new_entry_group_id" # Entry to be created. entry_id = "my_new_entry_id" + # Currently, Data Catalog stores metadata in the us-central1 region. + location = "us-central1" # [END data_catalog_create_custom_entry] # To facilitate testing, we replace values with alternatives # provided by the testing harness. project_id = override_values.get("project_id", project_id) - entry_group_name = override_values.get("entry_group_name", entry_group_name) entry_id = override_values.get("entry_id", entry_id) + entry_group_id = override_values.get("entry_group_id", entry_group_id) # [START data_catalog_create_custom_entry] datacatalog = datacatalog_v1.DataCatalogClient() + # Create an Entry Group. + entry_group_obj = datacatalog_v1.types.EntryGroup() + entry_group_obj.display_name = "My awesome Entry Group" + entry_group_obj.description = "This Entry Group represents an external system" + + entry_group = datacatalog.create_entry_group( + parent=datacatalog_v1.DataCatalogClient.common_location_path( + project_id, location + ), + entry_group_id=entry_group_id, + entry_group=entry_group_obj, + ) + entry_group_name = entry_group.name + print("Created entry group: {}".format(entry_group_name)) + # Create an Entry. 
entry = datacatalog_v1.types.Entry() entry.user_specified_system = "onprem_data_system" diff --git a/datacatalog/snippets/create_custom_entry_test.py b/datacatalog/snippets/create_custom_entry_test.py index 742993eaa534..597f80e9ce02 100644 --- a/datacatalog/snippets/create_custom_entry_test.py +++ b/datacatalog/snippets/create_custom_entry_test.py @@ -19,7 +19,6 @@ def test_create_custom_entry( capsys, client, project_id, - random_existing_entry_group, random_entry_group_id, random_entry_id, resources_to_delete, @@ -27,13 +26,17 @@ def test_create_custom_entry( location = "us-central1" override_values = { "project_id": project_id, - "entry_group_name": random_existing_entry_group.name, "entry_id": random_entry_id, + "entry_group_id": random_entry_group_id, } + expected_entry_group = client.entry_group_path( + project_id, location, random_entry_group_id + ) expected_entry = client.entry_path( project_id, location, random_entry_group_id, random_entry_id ) create_custom_entry.create_custom_entry(override_values) out, err = capsys.readouterr() + assert f"Created entry group: {expected_entry_group}" in out assert f"Created entry: {expected_entry}" in out resources_to_delete["entries"].append(expected_entry) From 5b3f35e93ce4dece98c1b9af9f9224f1904bf031 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 3 Sep 2021 11:32:07 +0200 Subject: [PATCH 052/105] chore(deps): update all dependencies (#220) Co-authored-by: Anthonios Partheniou --- datacatalog/quickstart/requirements-test.txt | 2 +- datacatalog/quickstart/requirements.txt | 2 +- datacatalog/snippets/requirements.txt | 2 +- datacatalog/v1beta1/requirements.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 03e6a854f562..a0c86f539c5a 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==6.2.5 
-google-cloud-bigquery==2.25.2 \ No newline at end of file +google-cloud-bigquery==2.26.0 \ No newline at end of file diff --git a/datacatalog/quickstart/requirements.txt b/datacatalog/quickstart/requirements.txt index 5f898b7ae101..5206861bb8ef 100644 --- a/datacatalog/quickstart/requirements.txt +++ b/datacatalog/quickstart/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.4.0 +google-cloud-datacatalog==3.4.1 diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 5f898b7ae101..5206861bb8ef 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.4.0 +google-cloud-datacatalog==3.4.1 diff --git a/datacatalog/v1beta1/requirements.txt b/datacatalog/v1beta1/requirements.txt index 5f898b7ae101..5206861bb8ef 100644 --- a/datacatalog/v1beta1/requirements.txt +++ b/datacatalog/v1beta1/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.4.0 +google-cloud-datacatalog==3.4.1 From 50adab5f9713e8bb214f66ab3e6169b51de3522e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 17 Sep 2021 15:37:01 +0000 Subject: [PATCH 053/105] chore: blacken samples noxfile template (#225) --- datacatalog/quickstart/noxfile.py | 44 ++++++++++++++++++------------- datacatalog/snippets/noxfile.py | 44 ++++++++++++++++++------------- datacatalog/v1beta1/noxfile.py | 44 ++++++++++++++++++------------- 3 files changed, 75 insertions(+), 57 deletions(-) diff --git a/datacatalog/quickstart/noxfile.py b/datacatalog/quickstart/noxfile.py index e73436a15626..b008613f03ff 100644 --- a/datacatalog/quickstart/noxfile.py +++ b/datacatalog/quickstart/noxfile.py @@ -39,17 +39,15 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. 
- 'ignored_versions': [], - + "ignored_versions": [], # Old samples are opted out of enforcing Python type hints # All new samples should feature them - 'enforce_type_hints': False, - + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', # If you need to use a specific version of pip, # change pip_version_override to the string representation @@ -57,13 +55,13 @@ "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -78,12 +76,12 @@ def get_pytest_env_vars() -> Dict[str, str]: ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret @@ -92,11 +90,14 @@ def get_pytest_env_vars() -> Dict[str, str]: ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. 
-IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ("True", "true") +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) # # Style Checks # @@ -141,7 +142,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG['enforce_type_hints']: + if not TEST_CONFIG["enforce_type_hints"]: session.install("flake8", "flake8-import-order") else: session.install("flake8", "flake8-import-order", "flake8-annotations") @@ -150,9 +151,11 @@ def lint(session: nox.sessions.Session) -> None: args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." + ".", ] session.run("flake8", *args) + + # # Black # @@ -165,6 +168,7 @@ def blacken(session: nox.sessions.Session) -> None: session.run("black", *python_files) + # # Sample Tests # @@ -173,7 +177,9 @@ def blacken(session: nox.sessions.Session) -> None: PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: if TEST_CONFIG["pip_version_override"]: pip_version = TEST_CONFIG["pip_version_override"] session.install(f"pip=={pip_version}") @@ -203,7 +209,7 @@ def _session_tests(session: nox.sessions.Session, post_install: Callable = None) # on travis where slow and flaky tests are excluded. 
# See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @@ -213,9 +219,9 @@ def py(session: nox.sessions.Session) -> None: if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py index e73436a15626..b008613f03ff 100644 --- a/datacatalog/snippets/noxfile.py +++ b/datacatalog/snippets/noxfile.py @@ -39,17 +39,15 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': [], - + "ignored_versions": [], # Old samples are opted out of enforcing Python type hints # All new samples should feature them - 'enforce_type_hints': False, - + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', # If you need to use a specific version of pip, # change pip_version_override to the string representation @@ -57,13 +55,13 @@ "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. 
- sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -78,12 +76,12 @@ def get_pytest_env_vars() -> Dict[str, str]: ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret @@ -92,11 +90,14 @@ def get_pytest_env_vars() -> Dict[str, str]: ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ("True", "true") +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) # # Style Checks # @@ -141,7 +142,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG['enforce_type_hints']: + if not TEST_CONFIG["enforce_type_hints"]: session.install("flake8", "flake8-import-order") else: session.install("flake8", "flake8-import-order", "flake8-annotations") @@ -150,9 +151,11 @@ def lint(session: nox.sessions.Session) -> None: args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." 
+ ".", ] session.run("flake8", *args) + + # # Black # @@ -165,6 +168,7 @@ def blacken(session: nox.sessions.Session) -> None: session.run("black", *python_files) + # # Sample Tests # @@ -173,7 +177,9 @@ def blacken(session: nox.sessions.Session) -> None: PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: if TEST_CONFIG["pip_version_override"]: pip_version = TEST_CONFIG["pip_version_override"] session.install(f"pip=={pip_version}") @@ -203,7 +209,7 @@ def _session_tests(session: nox.sessions.Session, post_install: Callable = None) # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @@ -213,9 +219,9 @@ def py(session: nox.sessions.Session) -> None: if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # diff --git a/datacatalog/v1beta1/noxfile.py b/datacatalog/v1beta1/noxfile.py index e73436a15626..b008613f03ff 100644 --- a/datacatalog/v1beta1/noxfile.py +++ b/datacatalog/v1beta1/noxfile.py @@ -39,17 +39,15 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': [], - + "ignored_versions": [], # Old samples are opted out of enforcing Python type hints # All new samples should feature them - 'enforce_type_hints': False, - + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. 
- 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', # If you need to use a specific version of pip, # change pip_version_override to the string representation @@ -57,13 +55,13 @@ "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -78,12 +76,12 @@ def get_pytest_env_vars() -> Dict[str, str]: ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret @@ -92,11 +90,14 @@ def get_pytest_env_vars() -> Dict[str, str]: ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. 
-IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ("True", "true") +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) # # Style Checks # @@ -141,7 +142,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG['enforce_type_hints']: + if not TEST_CONFIG["enforce_type_hints"]: session.install("flake8", "flake8-import-order") else: session.install("flake8", "flake8-import-order", "flake8-annotations") @@ -150,9 +151,11 @@ def lint(session: nox.sessions.Session) -> None: args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." + ".", ] session.run("flake8", *args) + + # # Black # @@ -165,6 +168,7 @@ def blacken(session: nox.sessions.Session) -> None: session.run("black", *python_files) + # # Sample Tests # @@ -173,7 +177,9 @@ def blacken(session: nox.sessions.Session) -> None: PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: if TEST_CONFIG["pip_version_override"]: pip_version = TEST_CONFIG["pip_version_override"] session.install(f"pip=={pip_version}") @@ -203,7 +209,7 @@ def _session_tests(session: nox.sessions.Session, post_install: Callable = None) # on travis where slow and flaky tests are excluded. 
# See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @@ -213,9 +219,9 @@ def py(session: nox.sessions.Session) -> None: if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # From 4f70fa008da7d0bdaea795b1202dfa6ef9a6aaf1 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 28 Sep 2021 16:54:10 +0200 Subject: [PATCH 054/105] chore(deps): update dependency google-cloud-bigquery to v2.27.0 (#229) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-bigquery](https://togithub.com/googleapis/python-bigquery) | `==2.26.0` -> `==2.27.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.27.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.27.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.27.0/compatibility-slim/2.26.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.27.0/confidence-slim/2.26.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-bigquery ### [`v2.27.0`](https://togithub.com/googleapis/python-bigquery/blob/master/CHANGELOG.md#​2270-httpswwwgithubcomgoogleapispython-bigquerycomparev2260v2270-2021-09-24) [Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.26.0...v2.27.0) ##### Features - Add py.typed for PEP 561 compliance ([#​976](https://www.togithub.com/googleapis/python-bigquery/issues/976)) ([96e6bee](https://www.github.com/googleapis/python-bigquery/commit/96e6beef3c63b663b7e5879b1458f4dd1a47a5b5)) - include key metadata in Job representation ([#​964](https://www.togithub.com/googleapis/python-bigquery/issues/964)) ([acca1cb](https://www.github.com/googleapis/python-bigquery/commit/acca1cb7baaa3b00508246c994ade40314d421c3)) ##### Bug Fixes - Arrow extension-type metadata was not set when calling the REST API or when there are no rows ([#​946](https://www.togithub.com/googleapis/python-bigquery/issues/946)) ([864383b](https://www.github.com/googleapis/python-bigquery/commit/864383bc01636b3774f7da194587b8b7edd0383d)) - disambiguate missing policy tags from explicitly unset policy tags ([#​983](https://www.togithub.com/googleapis/python-bigquery/issues/983)) ([f83c00a](https://www.github.com/googleapis/python-bigquery/commit/f83c00acead70fc0ce9959eefb133a672d816277)) - remove default timeout ([#​974](https://www.togithub.com/googleapis/python-bigquery/issues/974)) ([1cef0d4](https://www.github.com/googleapis/python-bigquery/commit/1cef0d4664bf448168b26487a71795144b7f4d6b)) ##### Documentation - simplify destination table sample with f-strings ([#​966](https://www.togithub.com/googleapis/python-bigquery/issues/966)) ([ab6e76f](https://www.github.com/googleapis/python-bigquery/commit/ab6e76f9489262fd9c1876a1c4f93d7e139aa999))
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-datacatalog). --- datacatalog/quickstart/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index a0c86f539c5a..ae4ee19b2f25 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==6.2.5 -google-cloud-bigquery==2.26.0 \ No newline at end of file +google-cloud-bigquery==2.27.0 \ No newline at end of file From 8f7183d03ccdffa736f8082c2bd8181a4c27c899 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 28 Sep 2021 17:18:23 +0200 Subject: [PATCH 055/105] chore(deps): update all dependencies (#231) --- datacatalog/quickstart/requirements-test.txt | 2 +- datacatalog/quickstart/requirements.txt | 2 +- datacatalog/snippets/requirements.txt | 2 +- datacatalog/v1beta1/requirements.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index ae4ee19b2f25..21cea846108f 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==6.2.5 -google-cloud-bigquery==2.27.0 \ No newline at end of file +google-cloud-bigquery==2.27.1 \ No newline at end of file diff --git 
a/datacatalog/quickstart/requirements.txt b/datacatalog/quickstart/requirements.txt index 5206861bb8ef..0fe7dc2d90b9 100644 --- a/datacatalog/quickstart/requirements.txt +++ b/datacatalog/quickstart/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.4.1 +google-cloud-datacatalog==3.4.2 diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 5206861bb8ef..0fe7dc2d90b9 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.4.1 +google-cloud-datacatalog==3.4.2 diff --git a/datacatalog/v1beta1/requirements.txt b/datacatalog/v1beta1/requirements.txt index 5206861bb8ef..0fe7dc2d90b9 100644 --- a/datacatalog/v1beta1/requirements.txt +++ b/datacatalog/v1beta1/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.4.1 +google-cloud-datacatalog==3.4.2 From e00a482dfcc9cfe04e355fa589e4e4e6affc8397 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Sep 2021 15:44:33 +0000 Subject: [PATCH 056/105] chore: fail samples nox session if python version is missing (#234) --- datacatalog/quickstart/noxfile.py | 4 ++++ datacatalog/snippets/noxfile.py | 4 ++++ datacatalog/v1beta1/noxfile.py | 4 ++++ 3 files changed, 12 insertions(+) diff --git a/datacatalog/quickstart/noxfile.py b/datacatalog/quickstart/noxfile.py index b008613f03ff..1fd8956fbf01 100644 --- a/datacatalog/quickstart/noxfile.py +++ b/datacatalog/quickstart/noxfile.py @@ -98,6 +98,10 @@ def get_pytest_env_vars() -> Dict[str, str]: "True", "true", ) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py index b008613f03ff..1fd8956fbf01 100644 --- a/datacatalog/snippets/noxfile.py +++ b/datacatalog/snippets/noxfile.py @@ -98,6 +98,10 @@ def get_pytest_env_vars() -> Dict[str, str]: "True", "true", 
) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # diff --git a/datacatalog/v1beta1/noxfile.py b/datacatalog/v1beta1/noxfile.py index b008613f03ff..1fd8956fbf01 100644 --- a/datacatalog/v1beta1/noxfile.py +++ b/datacatalog/v1beta1/noxfile.py @@ -98,6 +98,10 @@ def get_pytest_env_vars() -> Dict[str, str]: "True", "true", ) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # From 4023cbae1a55e153d47536e82bb52c8462388c8a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 30 Sep 2021 22:19:27 +0200 Subject: [PATCH 057/105] chore(deps): update dependency google-cloud-bigquery to v2.28.0 (#235) --- datacatalog/quickstart/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 21cea846108f..b2835b407ca6 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==6.2.5 -google-cloud-bigquery==2.27.1 \ No newline at end of file +google-cloud-bigquery==2.28.0 \ No newline at end of file From f33da0f06c6deb51a3a43e472fe5135aeb138f42 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 7 Oct 2021 14:13:46 +0200 Subject: [PATCH 058/105] chore(deps): update dependency google-cloud-datacatalog to v3.4.3 (#237) --- datacatalog/quickstart/requirements.txt | 2 +- datacatalog/snippets/requirements.txt | 2 +- datacatalog/v1beta1/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datacatalog/quickstart/requirements.txt b/datacatalog/quickstart/requirements.txt index 0fe7dc2d90b9..69b421541075 100644 --- a/datacatalog/quickstart/requirements.txt +++ b/datacatalog/quickstart/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.4.2 +google-cloud-datacatalog==3.4.3 diff --git 
a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 0fe7dc2d90b9..69b421541075 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.4.2 +google-cloud-datacatalog==3.4.3 diff --git a/datacatalog/v1beta1/requirements.txt b/datacatalog/v1beta1/requirements.txt index 0fe7dc2d90b9..69b421541075 100644 --- a/datacatalog/v1beta1/requirements.txt +++ b/datacatalog/v1beta1/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.4.2 +google-cloud-datacatalog==3.4.3 From 0fc544f02f9ad0e198adcd45af2e46808c0c16c2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 8 Oct 2021 17:16:28 +0000 Subject: [PATCH 059/105] chore(python): Add kokoro configs for python 3.10 samples testing (#241) --- datacatalog/quickstart/noxfile.py | 2 +- datacatalog/snippets/noxfile.py | 2 +- datacatalog/v1beta1/noxfile.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datacatalog/quickstart/noxfile.py b/datacatalog/quickstart/noxfile.py index 1fd8956fbf01..93a9122cc457 100644 --- a/datacatalog/quickstart/noxfile.py +++ b/datacatalog/quickstart/noxfile.py @@ -87,7 +87,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py index 1fd8956fbf01..93a9122cc457 100644 --- a/datacatalog/snippets/noxfile.py +++ b/datacatalog/snippets/noxfile.py @@ -87,7 +87,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. 
-ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/datacatalog/v1beta1/noxfile.py b/datacatalog/v1beta1/noxfile.py index 1fd8956fbf01..93a9122cc457 100644 --- a/datacatalog/v1beta1/noxfile.py +++ b/datacatalog/v1beta1/noxfile.py @@ -87,7 +87,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] From c908b79f37a2bfb0fd2921edcc6a3d3126de4848 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 11 Oct 2021 20:48:08 +0200 Subject: [PATCH 060/105] chore(deps): update dependency google-cloud-bigquery to v2.28.1 (#239) --- datacatalog/quickstart/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index b2835b407ca6..051ae4479ca5 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==6.2.5 -google-cloud-bigquery==2.28.0 \ No newline at end of file +google-cloud-bigquery==2.28.1 \ No newline at end of file From 5c5f500daefff419c4d01eef18e4f0894576c9ba Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 28 Oct 2021 16:57:11 +0200 Subject: [PATCH 061/105] chore(deps): update dependency google-cloud-bigquery to v2.29.0 (#247) --- datacatalog/quickstart/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 051ae4479ca5..8904111acc00 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ 
b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==6.2.5 -google-cloud-bigquery==2.28.1 \ No newline at end of file +google-cloud-bigquery==2.29.0 \ No newline at end of file From 3b536d99ba0ead05c587b3b197f9902deef4c9e7 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 3 Nov 2021 21:44:15 +0100 Subject: [PATCH 062/105] chore(deps): update dependency google-cloud-datacatalog to v3.5.0 (#250) Co-authored-by: Anthonios Partheniou --- datacatalog/quickstart/requirements.txt | 2 +- datacatalog/snippets/requirements.txt | 2 +- datacatalog/v1beta1/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datacatalog/quickstart/requirements.txt b/datacatalog/quickstart/requirements.txt index 69b421541075..3d7f13ea02dd 100644 --- a/datacatalog/quickstart/requirements.txt +++ b/datacatalog/quickstart/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.4.3 +google-cloud-datacatalog==3.5.0 diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 69b421541075..3d7f13ea02dd 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.4.3 +google-cloud-datacatalog==3.5.0 diff --git a/datacatalog/v1beta1/requirements.txt b/datacatalog/v1beta1/requirements.txt index 69b421541075..3d7f13ea02dd 100644 --- a/datacatalog/v1beta1/requirements.txt +++ b/datacatalog/v1beta1/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.4.3 +google-cloud-datacatalog==3.5.0 From c78c52a769736a33139851e351d1bc9ef0181bb8 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 4 Nov 2021 10:17:29 +0100 Subject: [PATCH 063/105] chore(deps): update dependency google-cloud-bigquery to v2.30.0 (#252) --- datacatalog/quickstart/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 
8904111acc00..8af1806dfb15 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==6.2.5 -google-cloud-bigquery==2.29.0 \ No newline at end of file +google-cloud-bigquery==2.30.0 \ No newline at end of file From 3f47e03721865929904a6ad5983c12d317f97208 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 5 Nov 2021 10:36:35 +0100 Subject: [PATCH 064/105] chore(deps): update all dependencies (#253) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- datacatalog/quickstart/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 8af1806dfb15..48325ce7c53e 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==6.2.5 -google-cloud-bigquery==2.30.0 \ No newline at end of file +google-cloud-bigquery==2.30.1 \ No newline at end of file From 57a4be9acc33186ccd972801350b3fb0e391e197 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 8 Nov 2021 19:30:10 +0100 Subject: [PATCH 065/105] chore(deps): update dependency google-cloud-datacatalog to v3.6.0 (#258) --- datacatalog/quickstart/requirements.txt | 2 +- datacatalog/snippets/requirements.txt | 2 +- datacatalog/v1beta1/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datacatalog/quickstart/requirements.txt b/datacatalog/quickstart/requirements.txt index 3d7f13ea02dd..97d8152bc340 100644 --- a/datacatalog/quickstart/requirements.txt +++ b/datacatalog/quickstart/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.5.0 +google-cloud-datacatalog==3.6.0 diff --git 
a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 3d7f13ea02dd..97d8152bc340 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.5.0 +google-cloud-datacatalog==3.6.0 diff --git a/datacatalog/v1beta1/requirements.txt b/datacatalog/v1beta1/requirements.txt index 3d7f13ea02dd..97d8152bc340 100644 --- a/datacatalog/v1beta1/requirements.txt +++ b/datacatalog/v1beta1/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.5.0 +google-cloud-datacatalog==3.6.0 From 2ca0fb167bed95b5799202137cc5f3fd545f4b51 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 10 Nov 2021 20:39:34 -0500 Subject: [PATCH 066/105] chore(python): run blacken session for all directories with a noxfile (#262) Source-Link: https://github.com/googleapis/synthtool/commit/bc0de6ee2489da6fb8eafd021a8c58b5cc30c947 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:39ad8c0570e4f5d2d3124a509de4fe975e799e2b97e0f58aed88f8880d5a8b60 Co-authored-by: Owl Bot --- datacatalog/quickstart/conftest.py | 8 ++++---- datacatalog/quickstart/quickstart_test.py | 6 ++++-- datacatalog/v1beta1/conftest.py | 2 +- datacatalog/v1beta1/get_entry.py | 5 ++++- datacatalog/v1beta1/lookup_entry.py | 1 + datacatalog/v1beta1/lookup_entry_sql_resource.py | 1 + datacatalog/v1beta1/search.py | 1 + datacatalog/v1beta1/test_search.py | 4 +++- 8 files changed, 19 insertions(+), 9 deletions(-) diff --git a/datacatalog/quickstart/conftest.py b/datacatalog/quickstart/conftest.py index c7657fb7b62a..71c7597c644d 100644 --- a/datacatalog/quickstart/conftest.py +++ b/datacatalog/quickstart/conftest.py @@ -25,9 +25,7 @@ def temp_suffix(): now = datetime.datetime.now() - return "{}_{}".format( - now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] - ) + return "{}_{}".format(now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8]) 
@pytest.fixture(scope="session") @@ -42,7 +40,9 @@ def bigquery_client(credentials, project_id): @pytest.fixture(scope="session") def default_credentials(): - return google.auth.default(scopes=["https://www.googleapis.com/auth/cloud-platform"]) + return google.auth.default( + scopes=["https://www.googleapis.com/auth/cloud-platform"] + ) @pytest.fixture(scope="session") diff --git a/datacatalog/quickstart/quickstart_test.py b/datacatalog/quickstart/quickstart_test.py index a63efee6cbf3..286259eaa9ab 100644 --- a/datacatalog/quickstart/quickstart_test.py +++ b/datacatalog/quickstart/quickstart_test.py @@ -15,13 +15,15 @@ import quickstart -def test_quickstart(capsys, client, project_id, dataset_id, table_id, random_tag_template_id): +def test_quickstart( + capsys, client, project_id, dataset_id, table_id, random_tag_template_id +): location = "us-central1" override_values = { "project_id": project_id, "dataset_id": dataset_id, "table_id": table_id, - "tag_template_id": random_tag_template_id + "tag_template_id": random_tag_template_id, } tag_template_name = client.tag_template_path( project_id, location, random_tag_template_id diff --git a/datacatalog/v1beta1/conftest.py b/datacatalog/v1beta1/conftest.py index 61f2f64e9b97..c9f42aca6543 100644 --- a/datacatalog/v1beta1/conftest.py +++ b/datacatalog/v1beta1/conftest.py @@ -93,7 +93,7 @@ def entry(client, entry_group_name): type_=datacatalog_v1beta1.EntryType.DATA_STREAM, name="samples_test_entry", user_specified_system="sample_system", - ) + ), ) entry = client.create_entry(request) diff --git a/datacatalog/v1beta1/get_entry.py b/datacatalog/v1beta1/get_entry.py index 30b13d0a5d7e..a797958ba44e 100644 --- a/datacatalog/v1beta1/get_entry.py +++ b/datacatalog/v1beta1/get_entry.py @@ -28,6 +28,7 @@ def sample_get_entry( ): # [START data_catalog_get_entry_v1beta1] from google.cloud import datacatalog_v1beta1 + """ Get Entry @@ -59,7 +60,9 @@ def main(): parser = argparse.ArgumentParser() 
parser.add_argument("--project_id", type_=str, default="[Google Cloud Project ID]") - parser.add_argument("--location_id", type_=str, default="[Google Cloud Location ID]") + parser.add_argument( + "--location_id", type_=str, default="[Google Cloud Location ID]" + ) parser.add_argument("--entry_group_id", type_=str, default="[Entry Group ID]") parser.add_argument("--entry_id", type_=str, default="[Entry ID]") args = parser.parse_args() diff --git a/datacatalog/v1beta1/lookup_entry.py b/datacatalog/v1beta1/lookup_entry.py index a167789126d0..2e0a74628432 100644 --- a/datacatalog/v1beta1/lookup_entry.py +++ b/datacatalog/v1beta1/lookup_entry.py @@ -26,6 +26,7 @@ def sample_lookup_entry(resource_name: str): # [START data_catalog_lookup_entry_v1beta1] from google.cloud import datacatalog_v1beta1 + """ Lookup Entry diff --git a/datacatalog/v1beta1/lookup_entry_sql_resource.py b/datacatalog/v1beta1/lookup_entry_sql_resource.py index dd6de7867ae0..f60ac3dcef14 100644 --- a/datacatalog/v1beta1/lookup_entry_sql_resource.py +++ b/datacatalog/v1beta1/lookup_entry_sql_resource.py @@ -26,6 +26,7 @@ def sample_lookup_entry(sql_name: str): # [START data_catalog_lookup_entry_sql_resource_v1beta1] from google.cloud import datacatalog_v1beta1 + """ Lookup Entry using SQL resource diff --git a/datacatalog/v1beta1/search.py b/datacatalog/v1beta1/search.py index f4893083589c..cc2d3b2c2aee 100644 --- a/datacatalog/v1beta1/search.py +++ b/datacatalog/v1beta1/search.py @@ -28,6 +28,7 @@ def sample_search_catalog( ): # [START data_catalog_search_v1beta1] from google.cloud import datacatalog_v1beta1 + """ Search Catalog diff --git a/datacatalog/v1beta1/test_search.py b/datacatalog/v1beta1/test_search.py index 48fe6cf46384..c0ba8eb5ad2b 100644 --- a/datacatalog/v1beta1/test_search.py +++ b/datacatalog/v1beta1/test_search.py @@ -17,5 +17,7 @@ def test_search_catalog(client, project_id, entry_group_name): - results = search.sample_search_catalog(project_id, False, f"name:{entry_group_name}") + 
results = search.sample_search_catalog( + project_id, False, f"name:{entry_group_name}" + ) assert results is not None From 28f0e677759957acc98d2f6e8d7f293fe79cc139 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 16 Nov 2021 17:23:31 +0100 Subject: [PATCH 067/105] chore(deps): update dependency google-cloud-datacatalog to v3.6.1 (#266) Co-authored-by: Anthonios Partheniou --- datacatalog/quickstart/requirements.txt | 2 +- datacatalog/snippets/requirements.txt | 2 +- datacatalog/v1beta1/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datacatalog/quickstart/requirements.txt b/datacatalog/quickstart/requirements.txt index 97d8152bc340..863a6d8a18d2 100644 --- a/datacatalog/quickstart/requirements.txt +++ b/datacatalog/quickstart/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.6.0 +google-cloud-datacatalog==3.6.1 diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 97d8152bc340..863a6d8a18d2 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.6.0 +google-cloud-datacatalog==3.6.1 diff --git a/datacatalog/v1beta1/requirements.txt b/datacatalog/v1beta1/requirements.txt index 97d8152bc340..863a6d8a18d2 100644 --- a/datacatalog/v1beta1/requirements.txt +++ b/datacatalog/v1beta1/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.6.0 +google-cloud-datacatalog==3.6.1 From 4d85fcd172843ea626e68363002b6c16eda2b7cd Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 3 Dec 2021 11:53:09 +0100 Subject: [PATCH 068/105] chore(deps): update dependency google-cloud-bigquery to v2.31.0 (#270) --- datacatalog/quickstart/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 48325ce7c53e..5c29d642d83a 100644 --- 
a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==6.2.5 -google-cloud-bigquery==2.30.1 \ No newline at end of file +google-cloud-bigquery==2.31.0 \ No newline at end of file From 817486e65959cb93223751d92166274c8f18fd8f Mon Sep 17 00:00:00 2001 From: Ricardo Mendes Date: Tue, 28 Dec 2021 17:31:25 -0300 Subject: [PATCH 069/105] docs(samples): Add sample for PolicyTagManagerClient.create_taxonomy (#37) * fix(samples): add sample for create_taxonomy * lint * typo in import * refactor sample to match expected pattern Co-authored-by: Anthonios Partheniou Co-authored-by: Tim Swast --- datacatalog/snippets/conftest.py | 28 +++++++++-- .../data_catalog_ptm_create_taxonomy.py | 50 +++++++++++++++++++ .../data_catalog_ptm_create_taxonomy_test.py | 29 +++++++++++ 3 files changed, 102 insertions(+), 5 deletions(-) create mode 100644 datacatalog/snippets/data_catalog_ptm_create_taxonomy.py create mode 100644 datacatalog/snippets/data_catalog_ptm_create_taxonomy_test.py diff --git a/datacatalog/snippets/conftest.py b/datacatalog/snippets/conftest.py index 47bea8d99358..cec3ed8c92be 100644 --- a/datacatalog/snippets/conftest.py +++ b/datacatalog/snippets/conftest.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. - import datetime import uuid @@ -22,9 +21,6 @@ import pytest -datacatalog = datacatalog_v1.DataCatalogClient() - - LOCATION = "us-central1" @@ -62,7 +58,7 @@ def valid_member_id(client, project_id, random_existing_tag_template_id): ) # Retrieve Template's current IAM Policy. 
- policy = datacatalog.get_iam_policy(resource=template_name) + policy = client.get_iam_policy(resource=template_name) yield policy.bindings[0].members[0] @@ -127,3 +123,25 @@ def random_existing_tag_template_id(client, project_id, resources_to_delete): ) yield random_tag_template_id resources_to_delete["templates"].append(random_tag_template.name) + + +@pytest.fixture(scope="session") +def policy_tag_manager_client(credentials): + return datacatalog_v1.PolicyTagManagerClient(credentials=credentials) + + +@pytest.fixture +def random_taxonomy_display_name(policy_tag_manager_client, project_id): + now = datetime.datetime.now() + random_display_name = f'example_taxonomy' \ + f'_{now.strftime("%Y%m%d%H%M%S")}' \ + f'_{uuid.uuid4().hex[:8]}' + yield random_display_name + parent = datacatalog_v1.PolicyTagManagerClient.common_location_path( + project_id, 'us' + ) + taxonomies = policy_tag_manager_client.list_taxonomies(parent=parent) + taxonomy = next( + (t for t in taxonomies if t.display_name == random_display_name), None) + if taxonomy: + policy_tag_manager_client.delete_taxonomy(name=taxonomy.name) diff --git a/datacatalog/snippets/data_catalog_ptm_create_taxonomy.py b/datacatalog/snippets/data_catalog_ptm_create_taxonomy.py new file mode 100644 index 000000000000..f6b49ba1c714 --- /dev/null +++ b/datacatalog/snippets/data_catalog_ptm_create_taxonomy.py @@ -0,0 +1,50 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# [START data_catalog_ptm_create_taxonomy] +from google.cloud import datacatalog_v1 + + +def create_taxonomy( + # TODO(developer): Set project_id to the ID of the project the + # taxonomy will belong to. + project_id: str = "your-project-id", + + # TODO(developer): Specify the geographic location where the + # taxonomy should reside. + location_id: str = "us", + + # TODO(developer): Set the display name of the taxonomy. + display_name: str = "example-taxonomy", +): + # TODO(developer): Construct a Policy Tag Manager client object. To avoid + # extra delays due to authentication, create a single client for your + # program and share it across operations. + client = datacatalog_v1.PolicyTagManagerClient() + + # Construct a full location path to be the parent of the taxonomy. + parent = datacatalog_v1.PolicyTagManagerClient.common_location_path( + project_id, location_id + ) + + # TODO(developer): Construct a full Taxonomy object to send to the API. + taxonomy = datacatalog_v1.Taxonomy() + taxonomy.display_name = display_name + taxonomy.description = 'This Taxonomy represents ...' + + # Send the taxonomy to the API for creation. + taxonomy = client.create_taxonomy(parent=parent, taxonomy=taxonomy) + print(f'Created taxonomy {taxonomy.name}') + +# [END data_catalog_ptm_create_taxonomy] diff --git a/datacatalog/snippets/data_catalog_ptm_create_taxonomy_test.py b/datacatalog/snippets/data_catalog_ptm_create_taxonomy_test.py new file mode 100644 index 000000000000..63a913c38e2f --- /dev/null +++ b/datacatalog/snippets/data_catalog_ptm_create_taxonomy_test.py @@ -0,0 +1,29 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import data_catalog_ptm_create_taxonomy + + +def test_create_taxonomy(capsys, + project_id: str, + random_taxonomy_display_name: str): + + data_catalog_ptm_create_taxonomy.create_taxonomy( + project_id=project_id, location_id="us", display_name=random_taxonomy_display_name) + out, _ = capsys.readouterr() + assert ( + f'Created taxonomy projects/{project_id}/locations/us/taxonomies/' + in out + ) From 3e5bae9aa919ba62d030e868f14395c2ce02e40f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 6 Jan 2022 16:06:17 +0000 Subject: [PATCH 070/105] chore: use python-samples-reviewers (#278) --- datacatalog/snippets/conftest.py | 13 ++++++++----- .../snippets/data_catalog_ptm_create_taxonomy.py | 7 +++---- .../data_catalog_ptm_create_taxonomy_test.py | 14 ++++++-------- 3 files changed, 17 insertions(+), 17 deletions(-) diff --git a/datacatalog/snippets/conftest.py b/datacatalog/snippets/conftest.py index cec3ed8c92be..6060ec87721b 100644 --- a/datacatalog/snippets/conftest.py +++ b/datacatalog/snippets/conftest.py @@ -133,15 +133,18 @@ def policy_tag_manager_client(credentials): @pytest.fixture def random_taxonomy_display_name(policy_tag_manager_client, project_id): now = datetime.datetime.now() - random_display_name = f'example_taxonomy' \ - f'_{now.strftime("%Y%m%d%H%M%S")}' \ - f'_{uuid.uuid4().hex[:8]}' + random_display_name = ( + f"example_taxonomy" + f'_{now.strftime("%Y%m%d%H%M%S")}' + f"_{uuid.uuid4().hex[:8]}" + ) yield random_display_name parent = 
datacatalog_v1.PolicyTagManagerClient.common_location_path( - project_id, 'us' + project_id, "us" ) taxonomies = policy_tag_manager_client.list_taxonomies(parent=parent) taxonomy = next( - (t for t in taxonomies if t.display_name == random_display_name), None) + (t for t in taxonomies if t.display_name == random_display_name), None + ) if taxonomy: policy_tag_manager_client.delete_taxonomy(name=taxonomy.name) diff --git a/datacatalog/snippets/data_catalog_ptm_create_taxonomy.py b/datacatalog/snippets/data_catalog_ptm_create_taxonomy.py index f6b49ba1c714..5a9d8be1a35b 100644 --- a/datacatalog/snippets/data_catalog_ptm_create_taxonomy.py +++ b/datacatalog/snippets/data_catalog_ptm_create_taxonomy.py @@ -20,11 +20,9 @@ def create_taxonomy( # TODO(developer): Set project_id to the ID of the project the # taxonomy will belong to. project_id: str = "your-project-id", - # TODO(developer): Specify the geographic location where the # taxonomy should reside. location_id: str = "us", - # TODO(developer): Set the display name of the taxonomy. display_name: str = "example-taxonomy", ): @@ -41,10 +39,11 @@ def create_taxonomy( # TODO(developer): Construct a full Taxonomy object to send to the API. taxonomy = datacatalog_v1.Taxonomy() taxonomy.display_name = display_name - taxonomy.description = 'This Taxonomy represents ...' + taxonomy.description = "This Taxonomy represents ..." # Send the taxonomy to the API for creation. 
taxonomy = client.create_taxonomy(parent=parent, taxonomy=taxonomy) - print(f'Created taxonomy {taxonomy.name}') + print(f"Created taxonomy {taxonomy.name}") + # [END data_catalog_ptm_create_taxonomy] diff --git a/datacatalog/snippets/data_catalog_ptm_create_taxonomy_test.py b/datacatalog/snippets/data_catalog_ptm_create_taxonomy_test.py index 63a913c38e2f..d45cddf074a0 100644 --- a/datacatalog/snippets/data_catalog_ptm_create_taxonomy_test.py +++ b/datacatalog/snippets/data_catalog_ptm_create_taxonomy_test.py @@ -16,14 +16,12 @@ import data_catalog_ptm_create_taxonomy -def test_create_taxonomy(capsys, - project_id: str, - random_taxonomy_display_name: str): +def test_create_taxonomy(capsys, project_id: str, random_taxonomy_display_name: str): data_catalog_ptm_create_taxonomy.create_taxonomy( - project_id=project_id, location_id="us", display_name=random_taxonomy_display_name) - out, _ = capsys.readouterr() - assert ( - f'Created taxonomy projects/{project_id}/locations/us/taxonomies/' - in out + project_id=project_id, + location_id="us", + display_name=random_taxonomy_display_name, ) + out, _ = capsys.readouterr() + assert f"Created taxonomy projects/{project_id}/locations/us/taxonomies/" in out From ef099d7dc0a9bc003db6e348bf741605a47fe85e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 11 Jan 2022 07:57:36 -0500 Subject: [PATCH 071/105] chore(samples): Add check for tests in directory (#280) Source-Link: https://github.com/googleapis/synthtool/commit/52aef91f8d25223d9dbdb4aebd94ba8eea2101f3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:36a95b8f494e4674dc9eee9af98961293b51b86b3649942aac800ae6c1f796d4 Co-authored-by: Owl Bot --- datacatalog/quickstart/noxfile.py | 70 +++++++++++++++++-------------- datacatalog/snippets/noxfile.py | 70 +++++++++++++++++-------------- datacatalog/v1beta1/noxfile.py | 70 +++++++++++++++++-------------- 3 files changed, 117 
insertions(+), 93 deletions(-) diff --git a/datacatalog/quickstart/noxfile.py b/datacatalog/quickstart/noxfile.py index 93a9122cc457..3bbef5d54f44 100644 --- a/datacatalog/quickstart/noxfile.py +++ b/datacatalog/quickstart/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,44 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py index 93a9122cc457..3bbef5d54f44 100644 --- a/datacatalog/snippets/noxfile.py +++ b/datacatalog/snippets/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,44 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/datacatalog/v1beta1/noxfile.py b/datacatalog/v1beta1/noxfile.py index 93a9122cc457..3bbef5d54f44 100644 --- a/datacatalog/v1beta1/noxfile.py +++ b/datacatalog/v1beta1/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,44 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) From aeca576686975c319a9dc7a74bb7028adf9552e7 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 13 Jan 2022 19:16:39 +0100 Subject: [PATCH 072/105] chore(deps): update dependency google-cloud-bigquery to v2.32.0 (#282) --- datacatalog/quickstart/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 5c29d642d83a..2ea228119414 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==6.2.5 -google-cloud-bigquery==2.31.0 \ No newline at end of file +google-cloud-bigquery==2.32.0 \ No newline at end of file From 7d8a3f8f378fcabf20f57190dec45a7fa40707ce Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 15 Jan 2022 18:25:48 +0100 Subject: [PATCH 073/105] chore(deps): update dependency google-cloud-datacatalog to v3.6.2 (#284) Co-authored-by: Anthonios Partheniou --- datacatalog/quickstart/requirements.txt | 2 +- datacatalog/snippets/requirements.txt | 2 +- datacatalog/v1beta1/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datacatalog/quickstart/requirements.txt b/datacatalog/quickstart/requirements.txt index 863a6d8a18d2..91eb87e64a1d 100644 --- a/datacatalog/quickstart/requirements.txt +++ b/datacatalog/quickstart/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.6.1 +google-cloud-datacatalog==3.6.2 diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 863a6d8a18d2..91eb87e64a1d 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.6.1 +google-cloud-datacatalog==3.6.2 diff --git a/datacatalog/v1beta1/requirements.txt 
b/datacatalog/v1beta1/requirements.txt index 863a6d8a18d2..91eb87e64a1d 100644 --- a/datacatalog/v1beta1/requirements.txt +++ b/datacatalog/v1beta1/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.6.1 +google-cloud-datacatalog==3.6.2 From e34e9b98eac2e224b1e80aafd20deff7d67cc30e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 21 Jan 2022 19:42:52 -0500 Subject: [PATCH 074/105] chore(python): Noxfile recognizes that tests can live in a folder (#287) * chore(python): Noxfile recognizes that tests can live in a folder Source-Link: https://github.com/googleapis/synthtool/commit/4760d8dce1351d93658cb11d02a1b7ceb23ae5d7 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f0e4b51deef56bed74d3e2359c583fc104a8d6367da3984fc5c66938db738828 * add commit to trigger ci Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- datacatalog/quickstart/noxfile.py | 1 + datacatalog/snippets/noxfile.py | 1 + datacatalog/v1beta1/noxfile.py | 1 + 3 files changed, 3 insertions(+) diff --git a/datacatalog/quickstart/noxfile.py b/datacatalog/quickstart/noxfile.py index 3bbef5d54f44..20cdfc620138 100644 --- a/datacatalog/quickstart/noxfile.py +++ b/datacatalog/quickstart/noxfile.py @@ -187,6 +187,7 @@ def _session_tests( ) -> None: # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) if len(test_list) == 0: print("No tests found, skipping directory.") else: diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py index 3bbef5d54f44..20cdfc620138 100644 --- a/datacatalog/snippets/noxfile.py +++ b/datacatalog/snippets/noxfile.py @@ -187,6 +187,7 @@ def _session_tests( ) -> None: # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) if len(test_list) == 0: print("No tests found, skipping directory.") else: 
diff --git a/datacatalog/v1beta1/noxfile.py b/datacatalog/v1beta1/noxfile.py index 3bbef5d54f44..20cdfc620138 100644 --- a/datacatalog/v1beta1/noxfile.py +++ b/datacatalog/v1beta1/noxfile.py @@ -187,6 +187,7 @@ def _session_tests( ) -> None: # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) if len(test_list) == 0: print("No tests found, skipping directory.") else: From 8062842c74d03ad89ddd64465886f57399e7a5c2 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 8 Feb 2022 17:36:58 +0100 Subject: [PATCH 075/105] chore(deps): update dependency pytest to v7 (#296) Co-authored-by: Anthonios Partheniou --- datacatalog/quickstart/requirements-test.txt | 2 +- datacatalog/snippets/requirements-test.txt | 2 +- datacatalog/v1beta1/requirements-test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 2ea228119414..f926cb9ba70f 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==6.2.5 +pytest==7.0.0 google-cloud-bigquery==2.32.0 \ No newline at end of file diff --git a/datacatalog/snippets/requirements-test.txt b/datacatalog/snippets/requirements-test.txt index 927094516e65..4a46ff600804 100644 --- a/datacatalog/snippets/requirements-test.txt +++ b/datacatalog/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==6.2.5 +pytest==7.0.0 diff --git a/datacatalog/v1beta1/requirements-test.txt b/datacatalog/v1beta1/requirements-test.txt index 927094516e65..4a46ff600804 100644 --- a/datacatalog/v1beta1/requirements-test.txt +++ b/datacatalog/v1beta1/requirements-test.txt @@ -1 +1 @@ -pytest==6.2.5 +pytest==7.0.0 From edb2fd0c8fd730c27d254aa45189d1027d782b74 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 28 Feb 2022 23:27:19 +0100 Subject: [PATCH 076/105] chore(deps): update all 
dependencies (#299) Co-authored-by: Anthonios Partheniou --- datacatalog/quickstart/requirements-test.txt | 4 ++-- datacatalog/snippets/requirements-test.txt | 2 +- datacatalog/v1beta1/requirements-test.txt | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index f926cb9ba70f..08bda2ca58e3 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==7.0.0 -google-cloud-bigquery==2.32.0 \ No newline at end of file +pytest==7.0.1 +google-cloud-bigquery==2.34.0 \ No newline at end of file diff --git a/datacatalog/snippets/requirements-test.txt b/datacatalog/snippets/requirements-test.txt index 4a46ff600804..c2845bffbe89 100644 --- a/datacatalog/snippets/requirements-test.txt +++ b/datacatalog/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==7.0.0 +pytest==7.0.1 diff --git a/datacatalog/v1beta1/requirements-test.txt b/datacatalog/v1beta1/requirements-test.txt index 4a46ff600804..c2845bffbe89 100644 --- a/datacatalog/v1beta1/requirements-test.txt +++ b/datacatalog/v1beta1/requirements-test.txt @@ -1 +1 @@ -pytest==7.0.0 +pytest==7.0.1 From 135540d89d4af8a5524bf5d7d8ed18b6cdcfb854 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 1 Mar 2022 00:17:28 +0100 Subject: [PATCH 077/105] chore(deps): update all dependencies (#304) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- datacatalog/quickstart/requirements.txt | 2 +- datacatalog/snippets/requirements.txt | 2 +- datacatalog/v1beta1/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datacatalog/quickstart/requirements.txt 
b/datacatalog/quickstart/requirements.txt index 91eb87e64a1d..2701641827b0 100644 --- a/datacatalog/quickstart/requirements.txt +++ b/datacatalog/quickstart/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.6.2 +google-cloud-datacatalog==3.7.0 diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 91eb87e64a1d..2701641827b0 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.6.2 +google-cloud-datacatalog==3.7.0 diff --git a/datacatalog/v1beta1/requirements.txt b/datacatalog/v1beta1/requirements.txt index 91eb87e64a1d..2701641827b0 100644 --- a/datacatalog/v1beta1/requirements.txt +++ b/datacatalog/v1beta1/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.6.2 +google-cloud-datacatalog==3.7.0 From 7f82ee1197ddce367777e572300f74cca045dd1c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 3 Mar 2022 13:48:48 +0100 Subject: [PATCH 078/105] chore(deps): update all dependencies (#309) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- datacatalog/quickstart/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 08bda2ca58e3..ed9e33277b53 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==7.0.1 -google-cloud-bigquery==2.34.0 \ No newline at end of file +google-cloud-bigquery==2.34.1 \ No newline at end of file From bc9ad7755b66bed7b7fa7d1ca495ae572f61f24f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Mar 
2022 11:54:44 -0500 Subject: [PATCH 079/105] chore: Adding support for pytest-xdist and pytest-parallel (#312) Source-Link: https://github.com/googleapis/synthtool/commit/82f5cb283efffe96e1b6cd634738e0e7de2cd90a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5d8da01438ece4021d135433f2cf3227aa39ef0eaccc941d62aa35e6902832ae Co-authored-by: Owl Bot --- datacatalog/quickstart/noxfile.py | 78 +++++++++++++++++-------------- datacatalog/snippets/noxfile.py | 78 +++++++++++++++++-------------- datacatalog/v1beta1/noxfile.py | 78 +++++++++++++++++-------------- 3 files changed, 132 insertions(+), 102 deletions(-) diff --git a/datacatalog/quickstart/noxfile.py b/datacatalog/quickstart/noxfile.py index 20cdfc620138..85f5836dba3a 100644 --- a/datacatalog/quickstart/noxfile.py +++ b/datacatalog/quickstart/noxfile.py @@ -188,42 +188,52 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. 
This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) + elif "pytest-xdist" in packages: + concurrent_args.extend(["-n", "auto"]) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py index 20cdfc620138..85f5836dba3a 100644 --- a/datacatalog/snippets/noxfile.py +++ b/datacatalog/snippets/noxfile.py @@ -188,42 +188,52 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) + elif "pytest-xdist" in packages: + concurrent_args.extend(["-n", "auto"]) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/datacatalog/v1beta1/noxfile.py b/datacatalog/v1beta1/noxfile.py index 20cdfc620138..85f5836dba3a 100644 --- a/datacatalog/v1beta1/noxfile.py +++ b/datacatalog/v1beta1/noxfile.py @@ -188,42 +188,52 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) + elif "pytest-xdist" in packages: + concurrent_args.extend(["-n", "auto"]) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) From 2e1a22a37ebb345ad991a2cc93a6b854f8d3a642 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 8 Mar 2022 02:26:49 +0100 Subject: [PATCH 080/105] chore(deps): update all dependencies (#315) --- datacatalog/quickstart/requirements-test.txt | 2 +- datacatalog/quickstart/requirements.txt | 2 +- datacatalog/snippets/requirements.txt | 2 +- datacatalog/v1beta1/requirements.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index ed9e33277b53..7cc725e97bde 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==7.0.1 -google-cloud-bigquery==2.34.1 \ No newline at end of file +google-cloud-bigquery==2.34.2 \ No newline at end of file diff --git a/datacatalog/quickstart/requirements.txt b/datacatalog/quickstart/requirements.txt index 2701641827b0..6e6c7104830a 100644 --- a/datacatalog/quickstart/requirements.txt +++ b/datacatalog/quickstart/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.7.0 +google-cloud-datacatalog==3.7.1 diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 2701641827b0..6e6c7104830a 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.7.0 +google-cloud-datacatalog==3.7.1 diff --git a/datacatalog/v1beta1/requirements.txt b/datacatalog/v1beta1/requirements.txt index 2701641827b0..6e6c7104830a 100644 --- a/datacatalog/v1beta1/requirements.txt +++ b/datacatalog/v1beta1/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.7.0 +google-cloud-datacatalog==3.7.1 From 1b418ce5832a17b94ae62edbadec2d4da6bd8014 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: 
Sun, 13 Mar 2022 21:16:23 +0100 Subject: [PATCH 081/105] chore(deps): update dependency pytest to v7.1.0 (#319) --- datacatalog/quickstart/requirements-test.txt | 2 +- datacatalog/snippets/requirements-test.txt | 2 +- datacatalog/v1beta1/requirements-test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 7cc725e97bde..f5de837a6ac6 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==7.0.1 +pytest==7.1.0 google-cloud-bigquery==2.34.2 \ No newline at end of file diff --git a/datacatalog/snippets/requirements-test.txt b/datacatalog/snippets/requirements-test.txt index c2845bffbe89..824a8a7a0ce6 100644 --- a/datacatalog/snippets/requirements-test.txt +++ b/datacatalog/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==7.0.1 +pytest==7.1.0 diff --git a/datacatalog/v1beta1/requirements-test.txt b/datacatalog/v1beta1/requirements-test.txt index c2845bffbe89..824a8a7a0ce6 100644 --- a/datacatalog/v1beta1/requirements-test.txt +++ b/datacatalog/v1beta1/requirements-test.txt @@ -1 +1 @@ -pytest==7.0.1 +pytest==7.1.0 From 183fae717a680637a7759f401d21d4f598fbba90 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 19 Mar 2022 11:57:18 +0100 Subject: [PATCH 082/105] chore(deps): update dependency pytest to v7.1.1 (#320) --- datacatalog/quickstart/requirements-test.txt | 2 +- datacatalog/snippets/requirements-test.txt | 2 +- datacatalog/v1beta1/requirements-test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index f5de837a6ac6..3177d65e3a3c 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==7.1.0 +pytest==7.1.1 google-cloud-bigquery==2.34.2 \ No newline at end of file 
diff --git a/datacatalog/snippets/requirements-test.txt b/datacatalog/snippets/requirements-test.txt index 824a8a7a0ce6..4f6bf643fc5e 100644 --- a/datacatalog/snippets/requirements-test.txt +++ b/datacatalog/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.0 +pytest==7.1.1 diff --git a/datacatalog/v1beta1/requirements-test.txt b/datacatalog/v1beta1/requirements-test.txt index 824a8a7a0ce6..4f6bf643fc5e 100644 --- a/datacatalog/v1beta1/requirements-test.txt +++ b/datacatalog/v1beta1/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.0 +pytest==7.1.1 From 25c11f5ef67a2cca778c4f0104561e3472fe31e2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 28 Mar 2022 23:58:18 +0000 Subject: [PATCH 083/105] chore(python): use black==22.3.0 (#327) Source-Link: https://github.com/googleapis/synthtool/commit/6fab84af09f2cf89a031fd8671d1def6b2931b11 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe --- datacatalog/quickstart/noxfile.py | 4 ++-- datacatalog/snippets/noxfile.py | 4 ++-- datacatalog/v1beta1/noxfile.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/datacatalog/quickstart/noxfile.py b/datacatalog/quickstart/noxfile.py index 85f5836dba3a..25f87a215d4c 100644 --- a/datacatalog/quickstart/noxfile.py +++ b/datacatalog/quickstart/noxfile.py @@ -29,7 +29,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" # Copy `noxfile_config.py` to your directory and modify it instead. @@ -253,7 +253,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) for i in range(10): diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py index 85f5836dba3a..25f87a215d4c 100644 --- a/datacatalog/snippets/noxfile.py +++ b/datacatalog/snippets/noxfile.py @@ -29,7 +29,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" # Copy `noxfile_config.py` to your directory and modify it instead. @@ -253,7 +253,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) for i in range(10): diff --git a/datacatalog/v1beta1/noxfile.py b/datacatalog/v1beta1/noxfile.py index 85f5836dba3a..25f87a215d4c 100644 --- a/datacatalog/v1beta1/noxfile.py +++ b/datacatalog/v1beta1/noxfile.py @@ -29,7 +29,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" # Copy `noxfile_config.py` to your directory and modify it instead. @@ -253,7 +253,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) for i in range(10): From af019bdc99aff6c9fbc50649721f9bf63ee1c97c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 31 Mar 2022 01:32:46 +0200 Subject: [PATCH 084/105] chore(deps): update dependency google-cloud-bigquery to v3 (#328) --- datacatalog/quickstart/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 3177d65e3a3c..60cd8ca356d9 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==7.1.1 -google-cloud-bigquery==2.34.2 \ No newline at end of file +google-cloud-bigquery==3.0.1 \ No newline at end of file From f57c8b7ab11c84fac8fdf2293c6869f8f57f53ea Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 20 Apr 2022 20:52:14 -0400 Subject: [PATCH 085/105] chore(python): add nox session to sort python imports (#344) Source-Link: https://github.com/googleapis/synthtool/commit/1b71c10e20de7ed3f97f692f99a0e3399b67049f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 Co-authored-by: Owl Bot --- datacatalog/quickstart/conftest.py | 4 +--- datacatalog/quickstart/noxfile.py | 23 ++++++++++++++++++++- datacatalog/snippets/conftest.py | 1 - datacatalog/snippets/noxfile.py | 23 ++++++++++++++++++++- datacatalog/v1beta1/conftest.py | 1 - datacatalog/v1beta1/create_fileset_entry.py | 3 --- datacatalog/v1beta1/noxfile.py | 23 ++++++++++++++++++++- 7 files changed, 67 insertions(+), 11 deletions(-) diff --git a/datacatalog/quickstart/conftest.py b/datacatalog/quickstart/conftest.py index 71c7597c644d..138ead5d605e 100644 --- a/datacatalog/quickstart/conftest.py +++ b/datacatalog/quickstart/conftest.py @@ -17,9 +17,7 @@ import uuid import google.auth -from google.cloud 
import bigquery -from google.cloud import datacatalog_v1 - +from google.cloud import bigquery, datacatalog_v1 import pytest diff --git a/datacatalog/quickstart/noxfile.py b/datacatalog/quickstart/noxfile.py index 25f87a215d4c..3b3ffa5d2b0f 100644 --- a/datacatalog/quickstart/noxfile.py +++ b/datacatalog/quickstart/noxfile.py @@ -22,7 +22,6 @@ import nox - # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING # DO NOT EDIT THIS FILE EVER! @@ -30,6 +29,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" # Copy `noxfile_config.py` to your directory and modify it instead. @@ -168,12 +168,33 @@ def lint(session: nox.sessions.Session) -> None: @nox.session def blacken(session: nox.sessions.Session) -> None: + """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) python_files = [path for path in os.listdir(".") if path.endswith(".py")] session.run("black", *python_files) +# +# format = isort + black +# + + +@nox.session +def format(session: nox.sessions.Session) -> None: + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run("isort", "--fss", *python_files) + session.run("black", *python_files) + + # # Sample Tests # diff --git a/datacatalog/snippets/conftest.py b/datacatalog/snippets/conftest.py index 6060ec87721b..520d83042b7b 100644 --- a/datacatalog/snippets/conftest.py +++ b/datacatalog/snippets/conftest.py @@ -18,7 +18,6 @@ from google.api_core.exceptions import NotFound, PermissionDenied import google.auth from google.cloud import datacatalog_v1 - import pytest LOCATION = "us-central1" diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py index 25f87a215d4c..3b3ffa5d2b0f 100644 --- a/datacatalog/snippets/noxfile.py +++ b/datacatalog/snippets/noxfile.py @@ -22,7 +22,6 @@ import nox - # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING # DO NOT EDIT THIS FILE EVER! @@ -30,6 +29,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" # Copy `noxfile_config.py` to your directory and modify it instead. @@ -168,12 +168,33 @@ def lint(session: nox.sessions.Session) -> None: @nox.session def blacken(session: nox.sessions.Session) -> None: + """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) python_files = [path for path in os.listdir(".") if path.endswith(".py")] session.run("black", *python_files) +# +# format = isort + black +# + + +@nox.session +def format(session: nox.sessions.Session) -> None: + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run("isort", "--fss", *python_files) + session.run("black", *python_files) + + # # Sample Tests # diff --git a/datacatalog/v1beta1/conftest.py b/datacatalog/v1beta1/conftest.py index c9f42aca6543..e2745b5a63db 100644 --- a/datacatalog/v1beta1/conftest.py +++ b/datacatalog/v1beta1/conftest.py @@ -18,7 +18,6 @@ import google.auth from google.cloud import datacatalog_v1beta1 - import pytest diff --git a/datacatalog/v1beta1/create_fileset_entry.py b/datacatalog/v1beta1/create_fileset_entry.py index f798bfb6810b..d589a6a048ae 100644 --- a/datacatalog/v1beta1/create_fileset_entry.py +++ b/datacatalog/v1beta1/create_fileset_entry.py @@ -19,14 +19,11 @@ def create_fileset_entry(client, entry_group_name, entry_id): # TODO(developer): Construct a Data Catalog client object. # client = datacatalog_v1beta1.DataCatalogClient() - # TODO(developer): Set entry_group_name to the Name of the entry group # the entry will belong. # entry_group_name = "your_entry_group_name" - # TODO(developer): Set entry_id to the ID of the entry to create. # entry_id = "your_entry_id" - # Construct a full Entry object to send to the API. entry = datacatalog_v1beta1.types.Entry() entry.display_name = "My Fileset" diff --git a/datacatalog/v1beta1/noxfile.py b/datacatalog/v1beta1/noxfile.py index 25f87a215d4c..3b3ffa5d2b0f 100644 --- a/datacatalog/v1beta1/noxfile.py +++ b/datacatalog/v1beta1/noxfile.py @@ -22,7 +22,6 @@ import nox - # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING # DO NOT EDIT THIS FILE EVER! @@ -30,6 +29,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" # Copy `noxfile_config.py` to your directory and modify it instead. 
@@ -168,12 +168,33 @@ def lint(session: nox.sessions.Session) -> None: @nox.session def blacken(session: nox.sessions.Session) -> None: + """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) python_files = [path for path in os.listdir(".") if path.endswith(".py")] session.run("black", *python_files) +# +# format = isort + black +# + + +@nox.session +def format(session: nox.sessions.Session) -> None: + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run("isort", "--fss", *python_files) + session.run("black", *python_files) + + # # Sample Tests # From 0fc981bfa9f3beb5b47adc7a06831e33f0ab5686 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 25 Apr 2022 17:08:58 +0200 Subject: [PATCH 086/105] chore(deps): update dependency pytest to v7.1.2 (#347) --- datacatalog/quickstart/requirements-test.txt | 2 +- datacatalog/snippets/requirements-test.txt | 2 +- datacatalog/v1beta1/requirements-test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 60cd8ca356d9..e633a29b8df8 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==7.1.1 +pytest==7.1.2 google-cloud-bigquery==3.0.1 \ No newline at end of file diff --git a/datacatalog/snippets/requirements-test.txt b/datacatalog/snippets/requirements-test.txt index 4f6bf643fc5e..d00689e0623a 100644 --- a/datacatalog/snippets/requirements-test.txt +++ b/datacatalog/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.1 +pytest==7.1.2 diff --git 
a/datacatalog/v1beta1/requirements-test.txt b/datacatalog/v1beta1/requirements-test.txt index 4f6bf643fc5e..d00689e0623a 100644 --- a/datacatalog/v1beta1/requirements-test.txt +++ b/datacatalog/v1beta1/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.1 +pytest==7.1.2 From d33c971d97a1380d502910fc3b93656d79bb51ad Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 9 May 2022 22:56:22 +0200 Subject: [PATCH 087/105] chore(deps): update dependency google-cloud-bigquery to v3.1.0 (#352) --- datacatalog/quickstart/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index e633a29b8df8..e1ef4ceaa328 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==7.1.2 -google-cloud-bigquery==3.0.1 \ No newline at end of file +google-cloud-bigquery==3.1.0 \ No newline at end of file From 1ba6e3ffd54394b11d1d629f7f1026751e30687d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 19 May 2022 16:50:02 +0200 Subject: [PATCH 088/105] chore(deps): update dependency google-cloud-datacatalog to v3.8.0 (#353) --- datacatalog/quickstart/requirements.txt | 2 +- datacatalog/snippets/requirements.txt | 2 +- datacatalog/v1beta1/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datacatalog/quickstart/requirements.txt b/datacatalog/quickstart/requirements.txt index 6e6c7104830a..c27d9b1a1151 100644 --- a/datacatalog/quickstart/requirements.txt +++ b/datacatalog/quickstart/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.7.1 +google-cloud-datacatalog==3.8.0 diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 6e6c7104830a..c27d9b1a1151 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.7.1 
+google-cloud-datacatalog==3.8.0 diff --git a/datacatalog/v1beta1/requirements.txt b/datacatalog/v1beta1/requirements.txt index 6e6c7104830a..c27d9b1a1151 100644 --- a/datacatalog/v1beta1/requirements.txt +++ b/datacatalog/v1beta1/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.7.1 +google-cloud-datacatalog==3.8.0 From df94b942091aa9390f72b2fc55019b9a227c4a69 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 10 Jul 2022 07:10:46 -0400 Subject: [PATCH 089/105] fix: require python 3.7+ (#403) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): drop python 3.6 Source-Link: https://github.com/googleapis/synthtool/commit/4f89b13af10d086458f9b379e56a614f9d6dab7b Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c * add api_description to .repo-metadata.json * require python 3.7+ in setup.py * remove python 3.6 sample configs * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * trigger CI * add python_requires to setup.py Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- datacatalog/quickstart/noxfile.py | 2 +- datacatalog/snippets/noxfile.py | 2 +- datacatalog/v1beta1/noxfile.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datacatalog/quickstart/noxfile.py b/datacatalog/quickstart/noxfile.py index 3b3ffa5d2b0f..e9eb1cbfa5db 100644 --- a/datacatalog/quickstart/noxfile.py +++ b/datacatalog/quickstart/noxfile.py @@ -88,7 +88,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. 
-ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py index 3b3ffa5d2b0f..e9eb1cbfa5db 100644 --- a/datacatalog/snippets/noxfile.py +++ b/datacatalog/snippets/noxfile.py @@ -88,7 +88,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/datacatalog/v1beta1/noxfile.py b/datacatalog/v1beta1/noxfile.py index 3b3ffa5d2b0f..e9eb1cbfa5db 100644 --- a/datacatalog/v1beta1/noxfile.py +++ b/datacatalog/v1beta1/noxfile.py @@ -88,7 +88,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. 
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] From 2fdf5dc57413088d1cc84528b3343aee737eecb3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 16 Jul 2022 16:46:54 +0200 Subject: [PATCH 090/105] chore(deps): update all dependencies (#391) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * revert Co-authored-by: Owl Bot Co-authored-by: meredithslota Co-authored-by: Anthonios Partheniou --- datacatalog/quickstart/requirements.txt | 2 +- datacatalog/snippets/requirements.txt | 2 +- datacatalog/v1beta1/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datacatalog/quickstart/requirements.txt b/datacatalog/quickstart/requirements.txt index c27d9b1a1151..e04e19ff2145 100644 --- a/datacatalog/quickstart/requirements.txt +++ b/datacatalog/quickstart/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.8.0 +google-cloud-datacatalog==3.8.1 diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index c27d9b1a1151..e04e19ff2145 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.8.0 +google-cloud-datacatalog==3.8.1 diff --git a/datacatalog/v1beta1/requirements.txt b/datacatalog/v1beta1/requirements.txt index c27d9b1a1151..e04e19ff2145 100644 --- a/datacatalog/v1beta1/requirements.txt +++ b/datacatalog/v1beta1/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.8.0 +google-cloud-datacatalog==3.8.1 From a5385af80c98622275bbc8bfaed2cc922c9b7b4b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 2 Aug 2022 17:06:22 +0200 Subject: [PATCH 091/105] chore(deps): update all dependencies (#408) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 
8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * revert Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- datacatalog/quickstart/requirements-test.txt | 2 +- datacatalog/quickstart/requirements.txt | 2 +- datacatalog/snippets/requirements.txt | 2 +- datacatalog/v1beta1/requirements.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index e1ef4ceaa328..092ea16f0a8b 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==7.1.2 -google-cloud-bigquery==3.1.0 \ No newline at end of file +google-cloud-bigquery==3.3.0 \ No newline at end of file diff --git a/datacatalog/quickstart/requirements.txt b/datacatalog/quickstart/requirements.txt index e04e19ff2145..c157ac9f9d37 100644 --- a/datacatalog/quickstart/requirements.txt +++ b/datacatalog/quickstart/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.8.1 +google-cloud-datacatalog==3.9.0 diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index e04e19ff2145..c157ac9f9d37 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.8.1 +google-cloud-datacatalog==3.9.0 diff --git a/datacatalog/v1beta1/requirements.txt b/datacatalog/v1beta1/requirements.txt index e04e19ff2145..c157ac9f9d37 100644 --- a/datacatalog/v1beta1/requirements.txt +++ b/datacatalog/v1beta1/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.8.1 +google-cloud-datacatalog==3.9.0 From 1a77c5d223d58c118ec405b2be97ab9a36484ef0 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 9 Aug 2022 13:40:22 +0200 Subject: [PATCH 092/105] chore(deps): update all dependencies (#410) MIME-Version: 
1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * revert Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- datacatalog/quickstart/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 092ea16f0a8b..16c2ec48ddaf 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==7.1.2 -google-cloud-bigquery==3.3.0 \ No newline at end of file +google-cloud-bigquery==3.3.1 \ No newline at end of file From 2aea143605fd9fb2bef416bd64bc65d6967033e8 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 17 Aug 2022 16:26:31 +0200 Subject: [PATCH 093/105] chore(deps): update dependency google-cloud-datacatalog to v3.9.1 (#415) --- datacatalog/quickstart/requirements.txt | 2 +- datacatalog/snippets/requirements.txt | 2 +- datacatalog/v1beta1/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datacatalog/quickstart/requirements.txt b/datacatalog/quickstart/requirements.txt index c157ac9f9d37..7dc5df948109 100644 --- a/datacatalog/quickstart/requirements.txt +++ b/datacatalog/quickstart/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.9.0 +google-cloud-datacatalog==3.9.1 diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index c157ac9f9d37..7dc5df948109 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.9.0 +google-cloud-datacatalog==3.9.1 diff --git a/datacatalog/v1beta1/requirements.txt b/datacatalog/v1beta1/requirements.txt index c157ac9f9d37..7dc5df948109 100644 --- 
a/datacatalog/v1beta1/requirements.txt +++ b/datacatalog/v1beta1/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.9.0 +google-cloud-datacatalog==3.9.1 From 00fc3ee8f4e03a2f9ba3267455dfc68705566872 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 19 Aug 2022 18:57:27 +0200 Subject: [PATCH 094/105] chore(deps): update dependency google-cloud-bigquery to v3.3.2 (#416) --- datacatalog/quickstart/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 16c2ec48ddaf..f7d5e61c9b34 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==7.1.2 -google-cloud-bigquery==3.3.1 \ No newline at end of file +google-cloud-bigquery==3.3.2 \ No newline at end of file From 11d4f37b59efc9cd323c6ef93685779f374b668a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 6 Sep 2022 17:43:51 +0200 Subject: [PATCH 095/105] chore(deps): update dependency pytest to v7.1.3 (#426) --- datacatalog/quickstart/requirements-test.txt | 2 +- datacatalog/snippets/requirements-test.txt | 2 +- datacatalog/v1beta1/requirements-test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index f7d5e61c9b34..4f258a245ec0 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==7.1.2 +pytest==7.1.3 google-cloud-bigquery==3.3.2 \ No newline at end of file diff --git a/datacatalog/snippets/requirements-test.txt b/datacatalog/snippets/requirements-test.txt index d00689e0623a..e07168502ea9 100644 --- a/datacatalog/snippets/requirements-test.txt +++ b/datacatalog/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.2 +pytest==7.1.3 diff --git a/datacatalog/v1beta1/requirements-test.txt 
b/datacatalog/v1beta1/requirements-test.txt index d00689e0623a..e07168502ea9 100644 --- a/datacatalog/v1beta1/requirements-test.txt +++ b/datacatalog/v1beta1/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.2 +pytest==7.1.3 From e1babb83f38e85819356af9c5c39be405123ef17 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 13 Sep 2022 16:16:32 +0000 Subject: [PATCH 096/105] chore: detect samples tests in nested directories (#430) Source-Link: https://github.com/googleapis/synthtool/commit/50db768f450a50d7c1fd62513c113c9bb96fd434 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e09366bdf0fd9c8976592988390b24d53583dd9f002d476934da43725adbb978 --- datacatalog/quickstart/noxfile.py | 6 ++++-- datacatalog/snippets/noxfile.py | 6 ++++-- datacatalog/v1beta1/noxfile.py | 6 ++++-- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/datacatalog/quickstart/noxfile.py b/datacatalog/quickstart/noxfile.py index e9eb1cbfa5db..c1715136d645 100644 --- a/datacatalog/quickstart/noxfile.py +++ b/datacatalog/quickstart/noxfile.py @@ -207,8 +207,10 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("*_test.py") + glob.glob("test_*.py") - test_list.extend(glob.glob("tests")) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( + "**/test_*.py", recursive=True + ) + test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: print("No tests found, skipping directory.") diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py index e9eb1cbfa5db..c1715136d645 100644 --- a/datacatalog/snippets/noxfile.py +++ b/datacatalog/snippets/noxfile.py @@ -207,8 +207,10 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("*_test.py") + 
glob.glob("test_*.py") - test_list.extend(glob.glob("tests")) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( + "**/test_*.py", recursive=True + ) + test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: print("No tests found, skipping directory.") diff --git a/datacatalog/v1beta1/noxfile.py b/datacatalog/v1beta1/noxfile.py index e9eb1cbfa5db..c1715136d645 100644 --- a/datacatalog/v1beta1/noxfile.py +++ b/datacatalog/v1beta1/noxfile.py @@ -207,8 +207,10 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("*_test.py") + glob.glob("test_*.py") - test_list.extend(glob.glob("tests")) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( + "**/test_*.py", recursive=True + ) + test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: print("No tests found, skipping directory.") From a7dd6cfd71f152e0e72795de8e7fe1ca85d30d8c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 3 Oct 2022 18:13:51 +0200 Subject: [PATCH 097/105] chore(deps): update dependency google-cloud-bigquery to v3.3.3 (#434) --- datacatalog/quickstart/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 4f258a245ec0..7a1c3baea2a7 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==7.1.3 -google-cloud-bigquery==3.3.2 \ No newline at end of file +google-cloud-bigquery==3.3.3 \ No newline at end of file From 598195f4818f51557fbd26a0f9d44f2c249e9aea Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 4 Oct 2022 02:52:38 +0200 Subject: [PATCH 098/105] chore(deps): update dependency google-cloud-datacatalog to v3.9.2 (#436) --- datacatalog/quickstart/requirements.txt | 2 +- 
datacatalog/snippets/requirements.txt | 2 +- datacatalog/v1beta1/requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datacatalog/quickstart/requirements.txt b/datacatalog/quickstart/requirements.txt index 7dc5df948109..b050d9555709 100644 --- a/datacatalog/quickstart/requirements.txt +++ b/datacatalog/quickstart/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.9.1 +google-cloud-datacatalog==3.9.2 diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index 7dc5df948109..b050d9555709 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.9.1 +google-cloud-datacatalog==3.9.2 diff --git a/datacatalog/v1beta1/requirements.txt b/datacatalog/v1beta1/requirements.txt index 7dc5df948109..b050d9555709 100644 --- a/datacatalog/v1beta1/requirements.txt +++ b/datacatalog/v1beta1/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.9.1 +google-cloud-datacatalog==3.9.2 From 169212bf15a1a9a2b24b5ad811d8276e932f22d9 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 18 Oct 2022 15:18:03 +0200 Subject: [PATCH 099/105] chore(deps): update all dependencies (#439) --- datacatalog/quickstart/requirements-test.txt | 2 +- datacatalog/quickstart/requirements.txt | 2 +- datacatalog/snippets/requirements.txt | 2 +- datacatalog/v1beta1/requirements.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 7a1c3baea2a7..1baa3139318d 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ pytest==7.1.3 -google-cloud-bigquery==3.3.3 \ No newline at end of file +google-cloud-bigquery==3.3.5 \ No newline at end of file diff --git a/datacatalog/quickstart/requirements.txt b/datacatalog/quickstart/requirements.txt index b050d9555709..5f697f48245e 
100644 --- a/datacatalog/quickstart/requirements.txt +++ b/datacatalog/quickstart/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.9.2 +google-cloud-datacatalog==3.9.3 diff --git a/datacatalog/snippets/requirements.txt b/datacatalog/snippets/requirements.txt index b050d9555709..5f697f48245e 100644 --- a/datacatalog/snippets/requirements.txt +++ b/datacatalog/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.9.2 +google-cloud-datacatalog==3.9.3 diff --git a/datacatalog/v1beta1/requirements.txt b/datacatalog/v1beta1/requirements.txt index b050d9555709..5f697f48245e 100644 --- a/datacatalog/v1beta1/requirements.txt +++ b/datacatalog/v1beta1/requirements.txt @@ -1 +1 @@ -google-cloud-datacatalog==3.9.2 +google-cloud-datacatalog==3.9.3 From 354395f272d1d2ae576f02dd8fb460656b247025 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 26 Oct 2022 12:50:30 +0200 Subject: [PATCH 100/105] chore(deps): update dependency pytest to v7.2.0 (#440) --- datacatalog/quickstart/requirements-test.txt | 2 +- datacatalog/snippets/requirements-test.txt | 2 +- datacatalog/v1beta1/requirements-test.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/datacatalog/quickstart/requirements-test.txt b/datacatalog/quickstart/requirements-test.txt index 1baa3139318d..1955fcd9b307 100644 --- a/datacatalog/quickstart/requirements-test.txt +++ b/datacatalog/quickstart/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==7.1.3 +pytest==7.2.0 google-cloud-bigquery==3.3.5 \ No newline at end of file diff --git a/datacatalog/snippets/requirements-test.txt b/datacatalog/snippets/requirements-test.txt index e07168502ea9..49780e035690 100644 --- a/datacatalog/snippets/requirements-test.txt +++ b/datacatalog/snippets/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.3 +pytest==7.2.0 diff --git a/datacatalog/v1beta1/requirements-test.txt b/datacatalog/v1beta1/requirements-test.txt index e07168502ea9..49780e035690 100644 --- 
a/datacatalog/v1beta1/requirements-test.txt +++ b/datacatalog/v1beta1/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.3 +pytest==7.2.0 From ce660e99ca82e45e96b59e299543ca95fa176c9d Mon Sep 17 00:00:00 2001 From: Sampath M Date: Mon, 14 Nov 2022 16:32:56 +0100 Subject: [PATCH 101/105] Update license header --- datacatalog/snippets/lookup_entry.py | 4 ++-- datacatalog/snippets/lookup_entry_test.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/datacatalog/snippets/lookup_entry.py b/datacatalog/snippets/lookup_entry.py index 110001836b94..899edda26eb9 100644 --- a/datacatalog/snippets/lookup_entry.py +++ b/datacatalog/snippets/lookup_entry.py @@ -1,12 +1,12 @@ #!/usr/bin/env python -# Copyright 2019 Google Inc. All Rights Reserved. +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/datacatalog/snippets/lookup_entry_test.py b/datacatalog/snippets/lookup_entry_test.py index 55245a93f6c9..2e56485faf64 100644 --- a/datacatalog/snippets/lookup_entry_test.py +++ b/datacatalog/snippets/lookup_entry_test.py @@ -1,12 +1,12 @@ #!/usr/bin/env python -# Copyright 2019 Google Inc. All Rights Reserved. +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, From 281424cd156c7a77b83005c61db3f59d15378007 Mon Sep 17 00:00:00 2001 From: Sampath M Date: Mon, 14 Nov 2022 16:42:41 +0100 Subject: [PATCH 102/105] Remove (redundant) noxfile.py --- datacatalog/quickstart/noxfile.py | 312 ------------------------------ datacatalog/snippets/noxfile.py | 312 ------------------------------ datacatalog/v1beta1/noxfile.py | 312 ------------------------------ 3 files changed, 936 deletions(-) delete mode 100644 datacatalog/quickstart/noxfile.py delete mode 100644 datacatalog/snippets/noxfile.py delete mode 100644 datacatalog/v1beta1/noxfile.py diff --git a/datacatalog/quickstart/noxfile.py b/datacatalog/quickstart/noxfile.py deleted file mode 100644 index c1715136d645..000000000000 --- a/datacatalog/quickstart/noxfile.py +++ /dev/null @@ -1,312 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import glob -import os -from pathlib import Path -import sys -from typing import Callable, Dict, List, Optional - -import nox - -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING -# DO NOT EDIT THIS FILE EVER! 
-# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING - -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" - -# Copy `noxfile_config.py` to your directory and modify it instead. - -# `TEST_CONFIG` dict is a configuration hook that allows users to -# modify the test configurations. The values here should be in sync -# with `noxfile_config.py`. Users will copy `noxfile_config.py` into -# their directory and modify it. - -TEST_CONFIG = { - # You can opt out from the test for specific Python versions. - "ignored_versions": [], - # Old samples are opted out of enforcing Python type hints - # All new samples should feature them - "enforce_type_hints": False, - # An envvar key for determining the project id to use. Change it - # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a - # build specific Cloud project. You can also use your own string - # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", - # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - # If you need to use a specific version of pip, - # change pip_version_override to the string representation - # of the version number, for example, "20.2.4" - "pip_version_override": None, - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values. - "envs": {}, -} - - -try: - # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") - from noxfile_config import TEST_CONFIG_OVERRIDE -except ImportError as e: - print("No user noxfile_config found: detail: {}".format(e)) - TEST_CONFIG_OVERRIDE = {} - -# Update the TEST_CONFIG with the user supplied values. -TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) - - -def get_pytest_env_vars() -> Dict[str, str]: - """Returns a dict for pytest invocation.""" - ret = {} - - # Override the GCLOUD_PROJECT and the alias. 
- env_key = TEST_CONFIG["gcloud_project_env"] - # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] - - # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) - return ret - - -# DO NOT EDIT - automatically generated. -# All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] - -# Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] - -TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) - -INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( - "True", - "true", -) - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - -# -# Style Checks -# - - -def _determine_local_import_names(start_dir: str) -> List[str]: - """Determines all import names that should be considered "local". - - This is used when running the linter to insure that import order is - properly checked. - """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - -# Linting with flake8. 
-# -# We ignore the following rules: -# E203: whitespace before ‘:’ -# E266: too many leading ‘#’ for block comment -# E501: line too long -# I202: Additional newline in a section of imports -# -# We also need to specify the rules which are ignored by default: -# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -FLAKE8_COMMON_ARGS = [ - "--show-source", - "--builtin=gettext", - "--max-complexity=20", - "--import-order-style=google", - "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", - "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", - "--max-line-length=88", -] - - -@nox.session -def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8", "flake8-import-order") - else: - session.install("flake8", "flake8-import-order", "flake8-annotations") - - local_names = _determine_local_import_names(".") - args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), - ".", - ] - session.run("flake8", *args) - - -# -# Black -# - - -@nox.session -def blacken(session: nox.sessions.Session) -> None: - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - session.run("black", *python_files) - - -# -# format = isort + black -# - - -@nox.session -def format(session: nox.sessions.Session) -> None: - """ - Run isort to sort imports. Then run black - to format code to uniform standard. - """ - session.install(BLACK_VERSION, ISORT_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - # Use the --fss option to sort imports using strict alphabetical order. 
- # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run("isort", "--fss", *python_files) - session.run("black", *python_files) - - -# -# Sample Tests -# - - -PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] - - -def _session_tests( - session: nox.sessions.Session, post_install: Callable = None -) -> None: - # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( - "**/test_*.py", recursive=True - ) - test_list.extend(glob.glob("**/tests", recursive=True)) - - if len(test_list) == 0: - print("No tests found, skipping directory.") - return - - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - concurrent_args = [] - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - with open("requirements.txt") as rfile: - packages = rfile.read() - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - with open("requirements-test.txt") as rtfile: - packages += rtfile.read() - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - if "pytest-parallel" in packages: - concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) - elif "pytest-xdist" in packages: - concurrent_args.extend(["-n", "auto"]) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) - - -@nox.session(python=ALL_VERSIONS) -def py(session: nox.sessions.Session) -> None: - """Runs py.test for a sample using the specified version of Python.""" - if session.python in TESTED_VERSIONS: - _session_tests(session) - else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) - - -# -# Readmegen -# - - -def _get_repo_root() -> Optional[str]: - """Returns the root folder of the project.""" - # Get root of this repository. Assume we don't have directories nested deeper than 10 items. - p = Path(os.getcwd()) - for i in range(10): - if p is None: - break - if Path(p / ".git").exists(): - return str(p) - # .git is not available in repos cloned via Cloud Build - # setup.py is always in the library's root, so use that instead - # https://github.com/googleapis/synthtool/issues/792 - if Path(p / "setup.py").exists(): - return str(p) - p = p.parent - raise Exception("Unable to detect repository root.") - - -GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) - - -@nox.session -@nox.parametrize("path", GENERATED_READMES) -def readmegen(session: nox.sessions.Session, path: str) -> None: - """(Re-)generates the readme for a sample.""" - session.install("jinja2", "pyyaml") - dir_ = os.path.dirname(path) - - if os.path.exists(os.path.join(dir_, "requirements.txt")): - session.install("-r", os.path.join(dir_, "requirements.txt")) - - in_file = os.path.join(dir_, "README.rst.in") - session.run( - "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file - ) diff --git a/datacatalog/snippets/noxfile.py b/datacatalog/snippets/noxfile.py deleted file mode 100644 index c1715136d645..000000000000 --- a/datacatalog/snippets/noxfile.py +++ /dev/null @@ -1,312 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may 
not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import glob -import os -from pathlib import Path -import sys -from typing import Callable, Dict, List, Optional - -import nox - -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING -# DO NOT EDIT THIS FILE EVER! -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING - -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" - -# Copy `noxfile_config.py` to your directory and modify it instead. - -# `TEST_CONFIG` dict is a configuration hook that allows users to -# modify the test configurations. The values here should be in sync -# with `noxfile_config.py`. Users will copy `noxfile_config.py` into -# their directory and modify it. - -TEST_CONFIG = { - # You can opt out from the test for specific Python versions. - "ignored_versions": [], - # Old samples are opted out of enforcing Python type hints - # All new samples should feature them - "enforce_type_hints": False, - # An envvar key for determining the project id to use. Change it - # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a - # build specific Cloud project. You can also use your own string - # to use your own Cloud project. 
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", - # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - # If you need to use a specific version of pip, - # change pip_version_override to the string representation - # of the version number, for example, "20.2.4" - "pip_version_override": None, - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values. - "envs": {}, -} - - -try: - # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") - from noxfile_config import TEST_CONFIG_OVERRIDE -except ImportError as e: - print("No user noxfile_config found: detail: {}".format(e)) - TEST_CONFIG_OVERRIDE = {} - -# Update the TEST_CONFIG with the user supplied values. -TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) - - -def get_pytest_env_vars() -> Dict[str, str]: - """Returns a dict for pytest invocation.""" - ret = {} - - # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] - # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] - - # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) - return ret - - -# DO NOT EDIT - automatically generated. -# All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] - -# Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] - -TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) - -INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( - "True", - "true", -) - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - -# -# Style Checks -# - - -def _determine_local_import_names(start_dir: str) -> List[str]: - """Determines all import names that should be considered "local". - - This is used when running the linter to insure that import order is - properly checked. 
- """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - -# Linting with flake8. -# -# We ignore the following rules: -# E203: whitespace before ‘:’ -# E266: too many leading ‘#’ for block comment -# E501: line too long -# I202: Additional newline in a section of imports -# -# We also need to specify the rules which are ignored by default: -# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -FLAKE8_COMMON_ARGS = [ - "--show-source", - "--builtin=gettext", - "--max-complexity=20", - "--import-order-style=google", - "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", - "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", - "--max-line-length=88", -] - - -@nox.session -def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8", "flake8-import-order") - else: - session.install("flake8", "flake8-import-order", "flake8-annotations") - - local_names = _determine_local_import_names(".") - args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), - ".", - ] - session.run("flake8", *args) - - -# -# Black -# - - -@nox.session -def blacken(session: nox.sessions.Session) -> None: - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - session.run("black", *python_files) - - -# -# format = isort + black -# - - -@nox.session -def format(session: nox.sessions.Session) -> None: - """ - Run isort to sort imports. Then run black - to format code to uniform standard. 
- """ - session.install(BLACK_VERSION, ISORT_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - # Use the --fss option to sort imports using strict alphabetical order. - # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run("isort", "--fss", *python_files) - session.run("black", *python_files) - - -# -# Sample Tests -# - - -PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] - - -def _session_tests( - session: nox.sessions.Session, post_install: Callable = None -) -> None: - # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( - "**/test_*.py", recursive=True - ) - test_list.extend(glob.glob("**/tests", recursive=True)) - - if len(test_list) == 0: - print("No tests found, skipping directory.") - return - - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - concurrent_args = [] - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - with open("requirements.txt") as rfile: - packages = rfile.read() - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - with open("requirements-test.txt") as rtfile: - packages += rtfile.read() - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - if "pytest-parallel" in packages: - concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) - elif "pytest-xdist" in packages: - concurrent_args.extend(["-n", "auto"]) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS 
+ session.posargs + concurrent_args), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) - - -@nox.session(python=ALL_VERSIONS) -def py(session: nox.sessions.Session) -> None: - """Runs py.test for a sample using the specified version of Python.""" - if session.python in TESTED_VERSIONS: - _session_tests(session) - else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) - - -# -# Readmegen -# - - -def _get_repo_root() -> Optional[str]: - """Returns the root folder of the project.""" - # Get root of this repository. Assume we don't have directories nested deeper than 10 items. - p = Path(os.getcwd()) - for i in range(10): - if p is None: - break - if Path(p / ".git").exists(): - return str(p) - # .git is not available in repos cloned via Cloud Build - # setup.py is always in the library's root, so use that instead - # https://github.com/googleapis/synthtool/issues/792 - if Path(p / "setup.py").exists(): - return str(p) - p = p.parent - raise Exception("Unable to detect repository root.") - - -GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) - - -@nox.session -@nox.parametrize("path", GENERATED_READMES) -def readmegen(session: nox.sessions.Session, path: str) -> None: - """(Re-)generates the readme for a sample.""" - session.install("jinja2", "pyyaml") - dir_ = os.path.dirname(path) - - if os.path.exists(os.path.join(dir_, "requirements.txt")): - session.install("-r", os.path.join(dir_, "requirements.txt")) - - in_file = os.path.join(dir_, "README.rst.in") - session.run( - "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file - ) diff --git a/datacatalog/v1beta1/noxfile.py b/datacatalog/v1beta1/noxfile.py deleted file mode 100644 index c1715136d645..000000000000 --- 
a/datacatalog/v1beta1/noxfile.py +++ /dev/null @@ -1,312 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import glob -import os -from pathlib import Path -import sys -from typing import Callable, Dict, List, Optional - -import nox - -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING -# DO NOT EDIT THIS FILE EVER! -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING - -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" - -# Copy `noxfile_config.py` to your directory and modify it instead. - -# `TEST_CONFIG` dict is a configuration hook that allows users to -# modify the test configurations. The values here should be in sync -# with `noxfile_config.py`. Users will copy `noxfile_config.py` into -# their directory and modify it. - -TEST_CONFIG = { - # You can opt out from the test for specific Python versions. - "ignored_versions": [], - # Old samples are opted out of enforcing Python type hints - # All new samples should feature them - "enforce_type_hints": False, - # An envvar key for determining the project id to use. Change it - # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a - # build specific Cloud project. You can also use your own string - # to use your own Cloud project. 
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", - # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - # If you need to use a specific version of pip, - # change pip_version_override to the string representation - # of the version number, for example, "20.2.4" - "pip_version_override": None, - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values. - "envs": {}, -} - - -try: - # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") - from noxfile_config import TEST_CONFIG_OVERRIDE -except ImportError as e: - print("No user noxfile_config found: detail: {}".format(e)) - TEST_CONFIG_OVERRIDE = {} - -# Update the TEST_CONFIG with the user supplied values. -TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) - - -def get_pytest_env_vars() -> Dict[str, str]: - """Returns a dict for pytest invocation.""" - ret = {} - - # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] - # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] - - # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) - return ret - - -# DO NOT EDIT - automatically generated. -# All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] - -# Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] - -TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) - -INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( - "True", - "true", -) - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - -# -# Style Checks -# - - -def _determine_local_import_names(start_dir: str) -> List[str]: - """Determines all import names that should be considered "local". - - This is used when running the linter to insure that import order is - properly checked. 
- """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - -# Linting with flake8. -# -# We ignore the following rules: -# E203: whitespace before ‘:’ -# E266: too many leading ‘#’ for block comment -# E501: line too long -# I202: Additional newline in a section of imports -# -# We also need to specify the rules which are ignored by default: -# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -FLAKE8_COMMON_ARGS = [ - "--show-source", - "--builtin=gettext", - "--max-complexity=20", - "--import-order-style=google", - "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", - "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", - "--max-line-length=88", -] - - -@nox.session -def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8", "flake8-import-order") - else: - session.install("flake8", "flake8-import-order", "flake8-annotations") - - local_names = _determine_local_import_names(".") - args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), - ".", - ] - session.run("flake8", *args) - - -# -# Black -# - - -@nox.session -def blacken(session: nox.sessions.Session) -> None: - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - session.run("black", *python_files) - - -# -# format = isort + black -# - - -@nox.session -def format(session: nox.sessions.Session) -> None: - """ - Run isort to sort imports. Then run black - to format code to uniform standard. 
- """ - session.install(BLACK_VERSION, ISORT_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - # Use the --fss option to sort imports using strict alphabetical order. - # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run("isort", "--fss", *python_files) - session.run("black", *python_files) - - -# -# Sample Tests -# - - -PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] - - -def _session_tests( - session: nox.sessions.Session, post_install: Callable = None -) -> None: - # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( - "**/test_*.py", recursive=True - ) - test_list.extend(glob.glob("**/tests", recursive=True)) - - if len(test_list) == 0: - print("No tests found, skipping directory.") - return - - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - concurrent_args = [] - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - with open("requirements.txt") as rfile: - packages = rfile.read() - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - with open("requirements-test.txt") as rtfile: - packages += rtfile.read() - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - if "pytest-parallel" in packages: - concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) - elif "pytest-xdist" in packages: - concurrent_args.extend(["-n", "auto"]) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS 
+ session.posargs + concurrent_args), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) - - -@nox.session(python=ALL_VERSIONS) -def py(session: nox.sessions.Session) -> None: - """Runs py.test for a sample using the specified version of Python.""" - if session.python in TESTED_VERSIONS: - _session_tests(session) - else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) - - -# -# Readmegen -# - - -def _get_repo_root() -> Optional[str]: - """Returns the root folder of the project.""" - # Get root of this repository. Assume we don't have directories nested deeper than 10 items. - p = Path(os.getcwd()) - for i in range(10): - if p is None: - break - if Path(p / ".git").exists(): - return str(p) - # .git is not available in repos cloned via Cloud Build - # setup.py is always in the library's root, so use that instead - # https://github.com/googleapis/synthtool/issues/792 - if Path(p / "setup.py").exists(): - return str(p) - p = p.parent - raise Exception("Unable to detect repository root.") - - -GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) - - -@nox.session -@nox.parametrize("path", GENERATED_READMES) -def readmegen(session: nox.sessions.Session, path: str) -> None: - """(Re-)generates the readme for a sample.""" - session.install("jinja2", "pyyaml") - dir_ = os.path.dirname(path) - - if os.path.exists(os.path.join(dir_, "requirements.txt")): - session.install("-r", os.path.join(dir_, "requirements.txt")) - - in_file = os.path.join(dir_, "README.rst.in") - session.run( - "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file - ) From e2b55749829539af2b29e75b86e0dee3386ded14 Mon Sep 17 00:00:00 2001 From: Sampath M Date: Mon, 14 Nov 2022 16:52:11 +0100 Subject: [PATCH 103/105] 
Update lookup_entry.py region tag --- datacatalog/snippets/lookup_entry.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/datacatalog/snippets/lookup_entry.py b/datacatalog/snippets/lookup_entry.py index 899edda26eb9..13e57c4fd1f7 100644 --- a/datacatalog/snippets/lookup_entry.py +++ b/datacatalog/snippets/lookup_entry.py @@ -17,7 +17,7 @@ def lookup_entry(override_values): """Retrieves Data Catalog entry for the given Google Cloud Platform resource.""" - # [START datacatalog_lookup_dataset] + # [START data_catalog_lookup_dataset] # [START data_catalog_lookup_entry] from google.cloud import datacatalog_v1 @@ -93,4 +93,4 @@ def lookup_entry(override_values): f"Retrieved entry {entry.name} for Pub/Sub Topic resource {entry.linked_resource}" ) # [END data_catalog_lookup_entry] - # [END datacatalog_lookup_dataset] + # [END data_catalog_lookup_dataset] From 00402f559185f6f23e1603877a535e34e2940a0d Mon Sep 17 00:00:00 2001 From: Sampath M Date: Tue, 15 Nov 2022 11:52:14 +0100 Subject: [PATCH 104/105] Update CODEOWNERS & blunderbuss.yml --- .github/CODEOWNERS | 1 + .github/blunderbuss.yml | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 5cf115a2a35e..8c1dd587f69c 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -69,3 +69,4 @@ /trace/**/* @ymotongpoo @GoogleCloudPlatform/python-samples-reviewers /translate/**/* @nicain @GoogleCloudPlatform/python-samples-reviewers /workflows/**/* @GoogleCloudPlatform/python-samples-reviewers +/datacatalog/*/* @GoogleCloudPlatform/python-samples-reviewers \ No newline at end of file diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml index 97e0b4b04893..8cb2c62b2187 100644 --- a/.github/blunderbuss.yml +++ b/.github/blunderbuss.yml @@ -126,6 +126,10 @@ assign_issues_by: - 'api: monitoring' to: - GoogleCloudPlatform/dee-observability +- labels: + - 'api: datacatalog' + to: + - GoogleCloudPlatform/python-samples-reviewers assign_prs_by: - labels: 
From 88d1095f5c70d8cc951041100f33041aa0b4f531 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Tue, 15 Nov 2022 09:14:46 -0500 Subject: [PATCH 105/105] Update .github/CODEOWNERS --- .github/CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f56df444129a..aff55993e83f 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -71,5 +71,5 @@ /talent/**/* @GoogleCloudPlatform/python-samples-reviewers /vision/**/* @GoogleCloudPlatform/python-samples-reviewers /workflows/**/* @GoogleCloudPlatform/python-samples-reviewers -/datacatalog/*/* @GoogleCloudPlatform/python-samples-reviewers +/datacatalog/**/* @GoogleCloudPlatform/python-samples-reviewers /kms/**/** @GoogleCloudPlatform/dee-infra @GoogleCloudPlatform/python-samples-reviewers