From 9cbf581baf43fc9f680c18572a57b97d735acbd3 Mon Sep 17 00:00:00 2001
From: Seth Hollyman <shollyman@google.com>
Date: Fri, 24 Sep 2021 16:46:09 +0000
Subject: [PATCH] feat: generate v2alpha

---
 .../.coveragerc                               |   17 +
 .../google-cloud-bigquery-migration/.flake8   |   33 +
 .../.github/.OwlBot.lock.yaml                 |   17 +
 .../.github/.OwlBot.yaml                      |   25 +
 .../.github/CODEOWNERS                        |   11 +
 .../.github/CONTRIBUTING.md                   |   28 +
 .../.github/ISSUE_TEMPLATE/bug_report.md      |   43 +
 .../.github/ISSUE_TEMPLATE/feature_request.md |   18 +
 .../.github/ISSUE_TEMPLATE/support_request.md |    7 +
 .../.github/PULL_REQUEST_TEMPLATE.md          |    7 +
 .../.github/header-checker-lint.yml           |   15 +
 .../.github/release-please.yml                |    1 +
 .../.github/snippet-bot.yml                   |    0
 .../.gitignore                                |   63 +
 .../.kokoro/build.sh                          |   59 +
 .../.kokoro/continuous/common.cfg             |   27 +
 .../.kokoro/continuous/continuous.cfg         |    1 +
 .../.kokoro/docker/docs/Dockerfile            |   67 +
 .../.kokoro/docs/common.cfg                   |   65 +
 .../.kokoro/docs/docs-presubmit.cfg           |   28 +
 .../.kokoro/docs/docs.cfg                     |    1 +
 .../.kokoro/populate-secrets.sh               |   43 +
 .../.kokoro/presubmit/common.cfg              |   27 +
 .../.kokoro/presubmit/presubmit.cfg           |    1 +
 .../.kokoro/publish-docs.sh                   |   64 +
 .../.kokoro/release.sh                        |   32 +
 .../.kokoro/release/common.cfg                |   30 +
 .../.kokoro/release/release.cfg               |    1 +
 .../.kokoro/samples/lint/common.cfg           |   34 +
 .../.kokoro/samples/lint/continuous.cfg       |    6 +
 .../.kokoro/samples/lint/periodic.cfg         |    6 +
 .../.kokoro/samples/lint/presubmit.cfg        |    6 +
 .../.kokoro/samples/python3.6/common.cfg      |   40 +
 .../.kokoro/samples/python3.6/continuous.cfg  |    7 +
 .../samples/python3.6/periodic-head.cfg       |   11 +
 .../.kokoro/samples/python3.6/periodic.cfg    |    6 +
 .../.kokoro/samples/python3.6/presubmit.cfg   |    6 +
 .../.kokoro/samples/python3.7/common.cfg      |   40 +
 .../.kokoro/samples/python3.7/continuous.cfg  |    6 +
 .../samples/python3.7/periodic-head.cfg       |   11 +
 .../.kokoro/samples/python3.7/periodic.cfg    |    6 +
 .../.kokoro/samples/python3.7/presubmit.cfg   |    6 +
 .../.kokoro/samples/python3.8/common.cfg      |   40 +
 .../.kokoro/samples/python3.8/continuous.cfg  |    6 +
 .../samples/python3.8/periodic-head.cfg       |   11 +
 .../.kokoro/samples/python3.8/periodic.cfg    |    6 +
 .../.kokoro/samples/python3.8/presubmit.cfg   |    6 +
 .../.kokoro/samples/python3.9/common.cfg      |   40 +
 .../.kokoro/samples/python3.9/continuous.cfg  |    6 +
 .../samples/python3.9/periodic-head.cfg       |   11 +
 .../.kokoro/samples/python3.9/periodic.cfg    |    6 +
 .../.kokoro/samples/python3.9/presubmit.cfg   |    6 +
 .../.kokoro/test-samples-against-head.sh      |   28 +
 .../.kokoro/test-samples-impl.sh              |  102 +
 .../.kokoro/test-samples.sh                   |   46 +
 .../.kokoro/trampoline.sh                     |   28 +
 .../.kokoro/trampoline_v2.sh                  |  487 +++
 .../.pre-commit-config.yaml                   |   31 +
 .../.repo-metadata.json                       |   14 +
 .../.trampolinerc                             |   52 +
 .../CHANGELOG.md                              |    1 +
 .../CODE_OF_CONDUCT.md                        |   95 +
 .../CONTRIBUTING.rst                          |  279 ++
 .../google-cloud-bigquery-migration/LICENSE   |  202 ++
 .../MANIFEST.in                               |   25 +
 .../README.rst                                |   49 +
 .../SECURITY.md                               |    7 +
 .../docs/README.rst                           |    1 +
 .../docs/_static/custom.css                   |   20 +
 .../docs/_templates/layout.html               |   50 +
 .../docs/changelog.md                         |    1 +
 .../docs/conf.py                              |  381 ++
 .../docs/index.rst                            |   21 +
 .../migration_v2alpha/migration_service.rst   |   10 +
 .../docs/migration_v2alpha/services.rst       |    6 +
 .../docs/migration_v2alpha/types.rst          |    7 +
 .../docs/multiprocessing.rst                  |    7 +
 .../cloud/bigquery/migration/__init__.py      |   96 +
 .../google/cloud/bigquery/migration/py.typed  |    2 +
 .../bigquery/migration_v2alpha/__init__.py    |   62 +
 .../migration_v2alpha/gapic_metadata.json     |   93 +
 .../cloud/bigquery/migration_v2alpha/py.typed |    2 +
 .../migration_v2alpha/services/__init__.py    |   15 +
 .../services/migration_service/__init__.py    |   22 +
 .../migration_service/async_client.py         |  758 ++++
 .../services/migration_service/client.py      |  916 +++++
 .../services/migration_service/pagers.py      |  292 ++
 .../migration_service/transports/__init__.py  |   33 +
 .../migration_service/transports/base.py      |  320 ++
 .../migration_service/transports/grpc.py      |  432 +++
 .../transports/grpc_asyncio.py                |  439 +++
 .../migration_v2alpha/types/__init__.py       |   64 +
 .../types/migration_entities.py               |  205 ++
 .../types/migration_error_details.py          |   85 +
 .../types/migration_metrics.py                |  138 +
 .../types/migration_service.py                |  211 ++
 .../google-cloud-bigquery-migration/mypy.ini  |    3 +
 .../noxfile.py                                |  237 ++
 .../google-cloud-bigquery-migration/owlbot.py |   42 +
 .../renovate.json                             |   12 +
 .../scripts/decrypt-secrets.sh                |   46 +
 .../fixup_migration_v2alpha_keywords.py       |  182 +
 .../scripts/readme-gen/readme_gen.py          |   66 +
 .../readme-gen/templates/README.tmpl.rst      |   87 +
 .../readme-gen/templates/auth.tmpl.rst        |    9 +
 .../templates/auth_api_key.tmpl.rst           |   14 +
 .../templates/install_deps.tmpl.rst           |   29 +
 .../templates/install_portaudio.tmpl.rst      |   35 +
 .../google-cloud-bigquery-migration/setup.cfg |   19 +
 .../google-cloud-bigquery-migration/setup.py  |   55 +
 .../testing/.gitignore                        |    3 +
 .../tests/__init__.py                         |   15 +
 .../tests/unit/__init__.py                    |   15 +
 .../tests/unit/gapic/__init__.py              |   15 +
 .../unit/gapic/migration_v2alpha/__init__.py  |   15 +
 .../test_migration_service.py                 | 3066 +++++++++++++++++
 116 files changed, 11160 insertions(+)
 create mode 100644 packages/google-cloud-bigquery-migration/.coveragerc
 create mode 100644 packages/google-cloud-bigquery-migration/.flake8
 create mode 100644 packages/google-cloud-bigquery-migration/.github/.OwlBot.lock.yaml
 create mode 100644 packages/google-cloud-bigquery-migration/.github/.OwlBot.yaml
 create mode 100644 packages/google-cloud-bigquery-migration/.github/CODEOWNERS
 create mode 100644 packages/google-cloud-bigquery-migration/.github/CONTRIBUTING.md
 create mode 100644 packages/google-cloud-bigquery-migration/.github/ISSUE_TEMPLATE/bug_report.md
 create mode 100644 packages/google-cloud-bigquery-migration/.github/ISSUE_TEMPLATE/feature_request.md
 create mode 100644 packages/google-cloud-bigquery-migration/.github/ISSUE_TEMPLATE/support_request.md
 create mode 100644 packages/google-cloud-bigquery-migration/.github/PULL_REQUEST_TEMPLATE.md
 create mode 100644 packages/google-cloud-bigquery-migration/.github/header-checker-lint.yml
 create mode 100644 packages/google-cloud-bigquery-migration/.github/release-please.yml
 create mode 100644 packages/google-cloud-bigquery-migration/.github/snippet-bot.yml
 create mode 100644 packages/google-cloud-bigquery-migration/.gitignore
 create mode 100755 packages/google-cloud-bigquery-migration/.kokoro/build.sh
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/continuous/common.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/continuous/continuous.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/docker/docs/Dockerfile
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/docs/common.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/docs/docs-presubmit.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/docs/docs.cfg
 create mode 100755 packages/google-cloud-bigquery-migration/.kokoro/populate-secrets.sh
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/presubmit/common.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/presubmit/presubmit.cfg
 create mode 100755 packages/google-cloud-bigquery-migration/.kokoro/publish-docs.sh
 create mode 100755 packages/google-cloud-bigquery-migration/.kokoro/release.sh
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/release/common.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/release/release.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/lint/common.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/lint/continuous.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/lint/periodic.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/lint/presubmit.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/common.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/continuous.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/periodic-head.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/periodic.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/presubmit.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/common.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/continuous.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/periodic-head.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/periodic.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/presubmit.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/common.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/continuous.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/periodic-head.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/periodic.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/presubmit.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/common.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/continuous.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/periodic-head.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/periodic.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/presubmit.cfg
 create mode 100755 packages/google-cloud-bigquery-migration/.kokoro/test-samples-against-head.sh
 create mode 100755 packages/google-cloud-bigquery-migration/.kokoro/test-samples-impl.sh
 create mode 100755 packages/google-cloud-bigquery-migration/.kokoro/test-samples.sh
 create mode 100755 packages/google-cloud-bigquery-migration/.kokoro/trampoline.sh
 create mode 100755 packages/google-cloud-bigquery-migration/.kokoro/trampoline_v2.sh
 create mode 100644 packages/google-cloud-bigquery-migration/.pre-commit-config.yaml
 create mode 100644 packages/google-cloud-bigquery-migration/.repo-metadata.json
 create mode 100644 packages/google-cloud-bigquery-migration/.trampolinerc
 create mode 100644 packages/google-cloud-bigquery-migration/CHANGELOG.md
 create mode 100644 packages/google-cloud-bigquery-migration/CODE_OF_CONDUCT.md
 create mode 100644 packages/google-cloud-bigquery-migration/CONTRIBUTING.rst
 create mode 100644 packages/google-cloud-bigquery-migration/LICENSE
 create mode 100644 packages/google-cloud-bigquery-migration/MANIFEST.in
 create mode 100644 packages/google-cloud-bigquery-migration/README.rst
 create mode 100644 packages/google-cloud-bigquery-migration/SECURITY.md
 create mode 120000 packages/google-cloud-bigquery-migration/docs/README.rst
 create mode 100644 packages/google-cloud-bigquery-migration/docs/_static/custom.css
 create mode 100644 packages/google-cloud-bigquery-migration/docs/_templates/layout.html
 create mode 120000 packages/google-cloud-bigquery-migration/docs/changelog.md
 create mode 100644 packages/google-cloud-bigquery-migration/docs/conf.py
 create mode 100644 packages/google-cloud-bigquery-migration/docs/index.rst
 create mode 100644 packages/google-cloud-bigquery-migration/docs/migration_v2alpha/migration_service.rst
 create mode 100644 packages/google-cloud-bigquery-migration/docs/migration_v2alpha/services.rst
 create mode 100644 packages/google-cloud-bigquery-migration/docs/migration_v2alpha/types.rst
 create mode 100644 packages/google-cloud-bigquery-migration/docs/multiprocessing.rst
 create mode 100644 packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration/__init__.py
 create mode 100644 packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration/py.typed
 create mode 100644 packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/__init__.py
 create mode 100644 packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/gapic_metadata.json
 create mode 100644 packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/py.typed
 create mode 100644 packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/__init__.py
 create mode 100644 packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/__init__.py
 create mode 100644 packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/async_client.py
 create mode 100644 packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/client.py
 create mode 100644 packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/pagers.py
 create mode 100644 packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/transports/__init__.py
 create mode 100644 packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/transports/base.py
 create mode 100644 packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/transports/grpc.py
 create mode 100644 packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/transports/grpc_asyncio.py
 create mode 100644 packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/__init__.py
 create mode 100644 packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/migration_entities.py
 create mode 100644 packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/migration_error_details.py
 create mode 100644 packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/migration_metrics.py
 create mode 100644 packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/migration_service.py
 create mode 100644 packages/google-cloud-bigquery-migration/mypy.ini
 create mode 100644 packages/google-cloud-bigquery-migration/noxfile.py
 create mode 100644 packages/google-cloud-bigquery-migration/owlbot.py
 create mode 100644 packages/google-cloud-bigquery-migration/renovate.json
 create mode 100755 packages/google-cloud-bigquery-migration/scripts/decrypt-secrets.sh
 create mode 100644 packages/google-cloud-bigquery-migration/scripts/fixup_migration_v2alpha_keywords.py
 create mode 100644 packages/google-cloud-bigquery-migration/scripts/readme-gen/readme_gen.py
 create mode 100644 packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/README.tmpl.rst
 create mode 100644 packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/auth.tmpl.rst
 create mode 100644 packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/auth_api_key.tmpl.rst
 create mode 100644 packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/install_deps.tmpl.rst
 create mode 100644 packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/install_portaudio.tmpl.rst
 create mode 100644 packages/google-cloud-bigquery-migration/setup.cfg
 create mode 100644 packages/google-cloud-bigquery-migration/setup.py
 create mode 100644 packages/google-cloud-bigquery-migration/testing/.gitignore
 create mode 100644 packages/google-cloud-bigquery-migration/tests/__init__.py
 create mode 100644 packages/google-cloud-bigquery-migration/tests/unit/__init__.py
 create mode 100644 packages/google-cloud-bigquery-migration/tests/unit/gapic/__init__.py
 create mode 100644 packages/google-cloud-bigquery-migration/tests/unit/gapic/migration_v2alpha/__init__.py
 create mode 100644 packages/google-cloud-bigquery-migration/tests/unit/gapic/migration_v2alpha/test_migration_service.py

diff --git a/packages/google-cloud-bigquery-migration/.coveragerc b/packages/google-cloud-bigquery-migration/.coveragerc
new file mode 100644
index 000000000000..0654319ea3a2
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.coveragerc
@@ -0,0 +1,17 @@
+[run]
+branch = True
+
+[report]
+show_missing = True
+omit =
+    google/cloud/bigquery/migration/__init__.py
+exclude_lines =
+    # Re-enable the standard pragma
+    pragma: NO COVER
+    # Ignore debug-only repr
+    def __repr__
+    # Ignore pkg_resources exceptions.
+    # This is added at the module level as a safeguard in case someone
+    # generates the code and tries to run it without pip installing it.
+    # This makes it virtually impossible to test properly.
+    except pkg_resources.DistributionNotFound
diff --git a/packages/google-cloud-bigquery-migration/.flake8 b/packages/google-cloud-bigquery-migration/.flake8
new file mode 100644
index 000000000000..29227d4cf419
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.flake8
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
+[flake8]
+ignore = E203, E266, E501, W503
+exclude =
+  # Exclude generated code.
+  **/proto/**
+  **/gapic/**
+  **/services/**
+  **/types/**
+  *_pb2.py
+
+  # Standard linting exemptions.
+  **/.nox/**
+  __pycache__,
+  .git,
+  *.pyc,
+  conf.py
diff --git a/packages/google-cloud-bigquery-migration/.github/.OwlBot.lock.yaml b/packages/google-cloud-bigquery-migration/.github/.OwlBot.lock.yaml
new file mode 100644
index 000000000000..80347782c3c2
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.github/.OwlBot.lock.yaml
@@ -0,0 +1,17 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+docker:
+  image: gcr.io/repo-automation-bots/owlbot-python:latest
+  digest: sha256:0ccd9f4d714d36e311f60f407199dd460e43a99a125b5ca64b1d75f6e5f8581b
diff --git a/packages/google-cloud-bigquery-migration/.github/.OwlBot.yaml b/packages/google-cloud-bigquery-migration/.github/.OwlBot.yaml
new file mode 100644
index 000000000000..0283a73bd62f
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.github/.OwlBot.yaml
@@ -0,0 +1,25 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+docker:
+  image: gcr.io/repo-automation-bots/owlbot-python:latest
+
+deep-remove-regex:
+  - /owl-bot-staging
+
+deep-copy-regex:
+  - source: /google/cloud/bigquery/migration/(v.*)/.*-py/(.*)
+    dest: /owl-bot-staging/$1/$2
+
+begin-after-commit-hash: 70f7f0525414fe4dfeb2fc2e81546b073f83a621
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.github/CODEOWNERS b/packages/google-cloud-bigquery-migration/.github/CODEOWNERS
new file mode 100644
index 000000000000..8e1c1fb6c2a9
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.github/CODEOWNERS
@@ -0,0 +1,11 @@
+# Code owners file.
+# This file controls who is tagged for review for any given pull request.
+#
+# For syntax help see:
+# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
+
+# The @googleapis/api-bigquery team is the default owner for changes in this repo
+*               @googleapis/api-bigquery @googleapis/yoshi-python
+
+# The @googleapis/python-samples-owners team is the default owner for samples changes
+/samples/   @googleapis/python-samples-owners @googleapis/api-bigquery @googleapis/yoshi-python
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.github/CONTRIBUTING.md b/packages/google-cloud-bigquery-migration/.github/CONTRIBUTING.md
new file mode 100644
index 000000000000..939e5341e74d
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.github/CONTRIBUTING.md
@@ -0,0 +1,28 @@
+# How to Contribute
+
+We'd love to accept your patches and contributions to this project. There are
+just a few small guidelines you need to follow.
+
+## Contributor License Agreement
+
+Contributions to this project must be accompanied by a Contributor License
+Agreement. You (or your employer) retain the copyright to your contribution;
+this simply gives us permission to use and redistribute your contributions as
+part of the project. Head over to <https://cla.developers.google.com/> to see
+your current agreements on file or to sign a new one.
+
+You generally only need to submit a CLA once, so if you've already submitted one
+(even if it was for a different project), you probably don't need to do it
+again.
+
+## Code reviews
+
+All submissions, including submissions by project members, require review. We
+use GitHub pull requests for this purpose. Consult
+[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
+information on using pull requests.
+
+## Community Guidelines
+
+This project follows [Google's Open Source Community
+Guidelines](https://opensource.google.com/conduct/).
diff --git a/packages/google-cloud-bigquery-migration/.github/ISSUE_TEMPLATE/bug_report.md b/packages/google-cloud-bigquery-migration/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 000000000000..a7e10654485e
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,43 @@
+---
+name: Bug report
+about: Create a report to help us improve
+
+---
+
+Thanks for stopping by to let us know something could be better!
+
+**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
+
+Please run down the following list and make sure you've tried the usual "quick fixes":
+
+  - Search the issues already opened: https://github.com/googleapis/python-bigquery-migration/issues
+  - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python
+
+If you are still having issues, please be sure to include as much information as possible:
+
+#### Environment details
+
+  - OS type and version:
+  - Python version: `python --version`
+  - pip version: `pip --version`
+  - `google-cloud-bigquery-migration` version: `pip show google-cloud-bigquery-migration`
+
+#### Steps to reproduce
+
+  1. ?
+  2. ?
+
+#### Code example
+
+```python
+# example
+```
+
+#### Stack trace
+```
+# example
+```
+
+Following these steps will help ensure the quickest possible resolution.
+
+Thanks!
diff --git a/packages/google-cloud-bigquery-migration/.github/ISSUE_TEMPLATE/feature_request.md b/packages/google-cloud-bigquery-migration/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 000000000000..6365857f33c6
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,18 @@
+---
+name: Feature request
+about: Suggest an idea for this library
+
+---
+
+Thanks for stopping by to let us know something could be better!
+
+**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
+
+**Is your feature request related to a problem? Please describe.**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+**Describe the solution you'd like**
+A clear and concise description of what you want to happen.
+**Describe alternatives you've considered**
+A clear and concise description of any alternative solutions or features you've considered.
+**Additional context**
+Add any other context or screenshots about the feature request here.
diff --git a/packages/google-cloud-bigquery-migration/.github/ISSUE_TEMPLATE/support_request.md b/packages/google-cloud-bigquery-migration/.github/ISSUE_TEMPLATE/support_request.md
new file mode 100644
index 000000000000..995869032125
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.github/ISSUE_TEMPLATE/support_request.md
@@ -0,0 +1,7 @@
+---
+name: Support request
+about: If you have a support contract with Google, please create an issue in the Google Cloud Support console.
+
+---
+
+**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
diff --git a/packages/google-cloud-bigquery-migration/.github/PULL_REQUEST_TEMPLATE.md b/packages/google-cloud-bigquery-migration/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 000000000000..97ca9884e7e4
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,7 @@
+Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly:
+- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-bigquery-migration/issues/new/choose) before writing your code!  That way we can discuss the change, evaluate designs, and agree on the general idea
+- [ ] Ensure the tests and linter pass
+- [ ] Code coverage does not decrease (if any source code was changed)
+- [ ] Appropriate docs were updated (if necessary)
+
+Fixes #<issue_number_goes_here> 🦕
diff --git a/packages/google-cloud-bigquery-migration/.github/header-checker-lint.yml b/packages/google-cloud-bigquery-migration/.github/header-checker-lint.yml
new file mode 100644
index 000000000000..6fe78aa7987a
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.github/header-checker-lint.yml
@@ -0,0 +1,15 @@
+{"allowedCopyrightHolders": ["Google LLC"],
+ "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"],
+ "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"],
+ "sourceFileExtensions": [
+ 	"ts", 
+ 	"js", 
+ 	"java", 
+ 	"sh", 
+ 	"Dockerfile", 
+ 	"yaml", 
+ 	"py",
+ 	"html",
+ 	"txt"
+ ]
+}
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.github/release-please.yml b/packages/google-cloud-bigquery-migration/.github/release-please.yml
new file mode 100644
index 000000000000..4507ad0598a5
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.github/release-please.yml
@@ -0,0 +1 @@
+releaseType: python
diff --git a/packages/google-cloud-bigquery-migration/.github/snippet-bot.yml b/packages/google-cloud-bigquery-migration/.github/snippet-bot.yml
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/packages/google-cloud-bigquery-migration/.gitignore b/packages/google-cloud-bigquery-migration/.gitignore
new file mode 100644
index 000000000000..b4243ced74e4
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.gitignore
@@ -0,0 +1,63 @@
+*.py[cod]
+*.sw[op]
+
+# C extensions
+*.so
+
+# Packages
+*.egg
+*.egg-info
+dist
+build
+eggs
+.eggs
+parts
+bin
+var
+sdist
+develop-eggs
+.installed.cfg
+lib
+lib64
+__pycache__
+
+# Installer logs
+pip-log.txt
+
+# Unit test / coverage reports
+.coverage
+.nox
+.cache
+.pytest_cache
+
+
+# Mac
+.DS_Store
+
+# JetBrains
+.idea
+
+# VS Code
+.vscode
+
+# emacs
+*~
+
+# Built documentation
+docs/_build
+bigquery/docs/generated
+docs.metadata
+
+# Virtual environment
+env/
+
+# Test logs
+coverage.xml
+*sponge_log.xml
+
+# System test environment variables.
+system_tests/local_test_setup
+
+# Make sure a generated file isn't accidentally committed.
+pylintrc
+pylintrc.test
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/build.sh b/packages/google-cloud-bigquery-migration/.kokoro/build.sh
new file mode 100755
index 000000000000..6eb0e222646c
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/build.sh
@@ -0,0 +1,59 @@
+#!/bin/bash
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+if [[ -z "${PROJECT_ROOT:-}" ]]; then
+    PROJECT_ROOT="github/python-bigquery-migration"
+fi
+
+cd "${PROJECT_ROOT}"
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Setup service account credentials.
+export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+
+# Setup project id.
+export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
+
+# Remove old nox
+python3 -m pip uninstall --yes --quiet nox-automation
+
+# Install nox
+python3 -m pip install --upgrade --quiet nox
+python3 -m nox --version
+
+# If this is a continuous build, send the test log to the FlakyBot.
+# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
+  cleanup() {
+    chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+    $KOKORO_GFILE_DIR/linux_amd64/flakybot
+  }
+  trap cleanup EXIT HUP
+fi
+
+# If NOX_SESSION is set, run only the specified session;
+# otherwise run all the sessions.
+if [[ -n "${NOX_SESSION:-}" ]]; then
+    python3 -m nox -s ${NOX_SESSION:-}
+else
+    python3 -m nox
+fi
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/continuous/common.cfg b/packages/google-cloud-bigquery-migration/.kokoro/continuous/common.cfg
new file mode 100644
index 000000000000..5fca6ccf7cab
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/continuous/common.cfg
@@ -0,0 +1,27 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+  define_artifacts {
+    regex: "**/*sponge_log.xml"
+  }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-bigquery-migration/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+    key: "TRAMPOLINE_IMAGE"
+    value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-bigquery-migration/.kokoro/build.sh"
+}
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/continuous/continuous.cfg b/packages/google-cloud-bigquery-migration/.kokoro/continuous/continuous.cfg
new file mode 100644
index 000000000000..8f43917d92fe
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/continuous/continuous.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-bigquery-migration/.kokoro/docker/docs/Dockerfile
new file mode 100644
index 000000000000..4e1b1fb8b5a5
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/docker/docs/Dockerfile
@@ -0,0 +1,67 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM ubuntu:20.04
+
+ENV DEBIAN_FRONTEND noninteractive
+
+# Ensure local Python is preferred over distribution Python.
+ENV PATH /usr/local/bin:$PATH
+
+# Install dependencies.
+RUN apt-get update \
+  && apt-get install -y --no-install-recommends \
+    apt-transport-https \
+    build-essential \
+    ca-certificates \
+    curl \
+    dirmngr \
+    git \
+    gpg-agent \
+    graphviz \
+    libbz2-dev \
+    libdb5.3-dev \
+    libexpat1-dev \
+    libffi-dev \
+    liblzma-dev \
+    libreadline-dev \
+    libsnappy-dev \
+    libssl-dev \
+    libsqlite3-dev \
+    portaudio19-dev \
+    python3-distutils \
+    redis-server \
+    software-properties-common \
+    ssh \
+    sudo \
+    tcl \
+    tcl-dev \
+    tk \
+    tk-dev \
+    uuid-dev \
+    wget \
+    zlib1g-dev \
+  && add-apt-repository universe \
+  && apt-get update \
+  && apt-get -y install jq \
+  && apt-get clean autoclean \
+  && apt-get autoremove -y \
+  && rm -rf /var/lib/apt/lists/* \
+  && rm -f /var/cache/apt/archives/*.deb
+
+RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
+  && python3.8 /tmp/get-pip.py \
+  && rm /tmp/get-pip.py
+
+CMD ["python3.8"]
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/docs/common.cfg b/packages/google-cloud-bigquery-migration/.kokoro/docs/common.cfg
new file mode 100644
index 000000000000..7b600d007da9
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/docs/common.cfg
@@ -0,0 +1,65 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+  define_artifacts {
+    regex: "**/*sponge_log.xml"
+  }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-bigquery-migration/.kokoro/trampoline_v2.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+    key: "TRAMPOLINE_IMAGE"
+    value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs"
+}
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-bigquery-migration/.kokoro/publish-docs.sh"
+}
+
+env_vars: {
+    key: "STAGING_BUCKET"
+    value: "docs-staging"
+}
+
+env_vars: {
+    key: "V2_STAGING_BUCKET"
+    value: "docs-staging-v2"
+}
+
+# Upload the docker image after successful builds.
+env_vars: {
+    key: "TRAMPOLINE_IMAGE_UPLOAD"
+    value: "true"
+}
+
+# Always build the docker image.
+env_vars: {
+    key: "TRAMPOLINE_DOCKERFILE"
+    value: ".kokoro/docker/docs/Dockerfile"
+}
+
+# Fetch the token needed for reporting release status to GitHub
+before_action {
+  fetch_keystore {
+    keystore_resource {
+      keystore_config_id: 73713
+      keyname: "yoshi-automation-github-key"
+    }
+  }
+}
+
+before_action {
+  fetch_keystore {
+    keystore_resource {
+      keystore_config_id: 73713
+      keyname: "docuploader_service_account"
+    }
+  }
+}
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/docs/docs-presubmit.cfg b/packages/google-cloud-bigquery-migration/.kokoro/docs/docs-presubmit.cfg
new file mode 100644
index 000000000000..091e869ba628
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/docs/docs-presubmit.cfg
@@ -0,0 +1,28 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "STAGING_BUCKET"
+    value: "gcloud-python-test"
+}
+
+env_vars: {
+    key: "V2_STAGING_BUCKET"
+    value: "gcloud-python-test"
+}
+
+# We only upload the image in the main `docs` build.
+env_vars: {
+    key: "TRAMPOLINE_IMAGE_UPLOAD"
+    value: "false"
+}
+
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-bigquery-migration/.kokoro/build.sh"
+}
+
+# Only run these nox sessions.
+env_vars: {
+    key: "NOX_SESSION"
+    value: "docs docfx"
+}
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/docs/docs.cfg b/packages/google-cloud-bigquery-migration/.kokoro/docs/docs.cfg
new file mode 100644
index 000000000000..8f43917d92fe
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/docs/docs.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/populate-secrets.sh b/packages/google-cloud-bigquery-migration/.kokoro/populate-secrets.sh
new file mode 100755
index 000000000000..f52514257ef0
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/populate-secrets.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+# Copyright 2020 Google LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;}
+function msg { println "$*" >&2 ;}
+function println { printf '%s\n' "$(now) $*" ;}
+
+
+# Populate the secrets listed in SECRET_MANAGER_KEYS using the service account:
+# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com
+SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager"
+msg "Creating folder on disk for secrets: ${SECRET_LOCATION}"
+mkdir -p ${SECRET_LOCATION}
+for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g")
+do
+  msg "Retrieving secret ${key}"
+  docker run --entrypoint=gcloud \
+    --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \
+    gcr.io/google.com/cloudsdktool/cloud-sdk \
+    secrets versions access latest \
+    --project cloud-devrel-kokoro-resources \
+    --secret ${key} > \
+    "${SECRET_LOCATION}/${key}"
+  if [[ $? == 0 ]]; then
+    msg "Secret written to ${SECRET_LOCATION}/${key}"
+  else
+    msg "Error retrieving secret ${key}"
+  fi
+done
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/presubmit/common.cfg b/packages/google-cloud-bigquery-migration/.kokoro/presubmit/common.cfg
new file mode 100644
index 000000000000..5fca6ccf7cab
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/presubmit/common.cfg
@@ -0,0 +1,27 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+  define_artifacts {
+    regex: "**/*sponge_log.xml"
+  }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-bigquery-migration/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+    key: "TRAMPOLINE_IMAGE"
+    value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-bigquery-migration/.kokoro/build.sh"
+}
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/presubmit/presubmit.cfg b/packages/google-cloud-bigquery-migration/.kokoro/presubmit/presubmit.cfg
new file mode 100644
index 000000000000..8f43917d92fe
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/presubmit/presubmit.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/publish-docs.sh b/packages/google-cloud-bigquery-migration/.kokoro/publish-docs.sh
new file mode 100755
index 000000000000..8acb14e802b0
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/publish-docs.sh
@@ -0,0 +1,64 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+export PATH="${HOME}/.local/bin:${PATH}"
+
+# Install nox
+python3 -m pip install --user --upgrade --quiet nox
+python3 -m nox --version
+
+# build docs
+nox -s docs
+
+python3 -m pip install --user gcp-docuploader
+
+# create metadata
+python3 -m docuploader create-metadata \
+  --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
+  --version=$(python3 setup.py --version) \
+  --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
+  --distribution-name=$(python3 setup.py --name) \
+  --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
+  --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
+  --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
+
+cat docs.metadata
+
+# upload docs
+python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}"
+
+
+# docfx yaml files
+nox -s docfx
+
+# create metadata.
+python3 -m docuploader create-metadata \
+  --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
+  --version=$(python3 setup.py --version) \
+  --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
+  --distribution-name=$(python3 setup.py --name) \
+  --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
+  --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
+  --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
+
+cat docs.metadata
+
+# upload docs
+python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/release.sh b/packages/google-cloud-bigquery-migration/.kokoro/release.sh
new file mode 100755
index 000000000000..64243d5aa106
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/release.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+# Start the releasetool reporter
+python3 -m pip install gcp-releasetool
+python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script
+
+# Ensure that we have the latest versions of Twine, Wheel, and Setuptools.
+python3 -m pip install --upgrade twine wheel setuptools
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Move into the package, build the distribution and upload.
+TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token")
+cd github/python-bigquery-migration
+python3 setup.py sdist bdist_wheel
+twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/*
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/release/common.cfg b/packages/google-cloud-bigquery-migration/.kokoro/release/common.cfg
new file mode 100644
index 000000000000..d219b965e97c
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/release/common.cfg
@@ -0,0 +1,30 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+  define_artifacts {
+    regex: "**/*sponge_log.xml"
+  }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-bigquery-migration/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+    key: "TRAMPOLINE_IMAGE"
+    value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-bigquery-migration/.kokoro/release.sh"
+}
+
+# Tokens needed to report release status back to GitHub
+env_vars: {
+  key: "SECRET_MANAGER_KEYS"
+  value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token"
+}
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/release/release.cfg b/packages/google-cloud-bigquery-migration/.kokoro/release/release.cfg
new file mode 100644
index 000000000000..8f43917d92fe
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/release/release.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/lint/common.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/lint/common.cfg
new file mode 100644
index 000000000000..d53d2c6835aa
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/lint/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+  define_artifacts {
+    regex: "**/*sponge_log.xml"
+  }
+}
+
+# Specify which tests to run
+env_vars: {
+    key: "RUN_TESTS_SESSION"
+    value: "lint"
+}
+
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-bigquery-migration/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+    key: "TRAMPOLINE_IMAGE"
+    value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-bigquery-migration/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/lint/continuous.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/lint/continuous.cfg
new file mode 100644
index 000000000000..a1c8d9759c88
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/lint/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/lint/periodic.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/lint/periodic.cfg
new file mode 100644
index 000000000000..50fec9649732
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/lint/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "False"
+}
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/lint/presubmit.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/lint/presubmit.cfg
new file mode 100644
index 000000000000..a1c8d9759c88
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/lint/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/common.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/common.cfg
new file mode 100644
index 000000000000..877ccab3c371
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+  define_artifacts {
+    regex: "**/*sponge_log.xml"
+  }
+}
+
+# Specify which tests to run
+env_vars: {
+    key: "RUN_TESTS_SESSION"
+    value: "py-3.6"
+}
+
+# Declare build-specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py36"
+}
+
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-bigquery-migration/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+    key: "TRAMPOLINE_IMAGE"
+    value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-bigquery-migration/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/continuous.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/continuous.cfg
new file mode 100644
index 000000000000..7218af1499e5
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/continuous.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
+
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/periodic-head.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/periodic-head.cfg
new file mode 100644
index 000000000000..3b9b64689db9
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
+
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-bigquery-migration/.kokoro/test-samples-against-head.sh"
+}
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/periodic.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/periodic.cfg
new file mode 100644
index 000000000000..50fec9649732
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "False"
+}
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/presubmit.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/presubmit.cfg
new file mode 100644
index 000000000000..a1c8d9759c88
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.6/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/common.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/common.cfg
new file mode 100644
index 000000000000..e15813e654df
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+  define_artifacts {
+    regex: "**/*sponge_log.xml"
+  }
+}
+
+# Specify which tests to run
+env_vars: {
+    key: "RUN_TESTS_SESSION"
+    value: "py-3.7"
+}
+
+# Declare build-specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py37"
+}
+
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-bigquery-migration/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+    key: "TRAMPOLINE_IMAGE"
+    value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-bigquery-migration/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/continuous.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/continuous.cfg
new file mode 100644
index 000000000000..a1c8d9759c88
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/periodic-head.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/periodic-head.cfg
new file mode 100644
index 000000000000..3b9b64689db9
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
+
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-bigquery-migration/.kokoro/test-samples-against-head.sh"
+}
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/periodic.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/periodic.cfg
new file mode 100644
index 000000000000..50fec9649732
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "False"
+}
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/presubmit.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/presubmit.cfg
new file mode 100644
index 000000000000..a1c8d9759c88
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.7/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/common.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/common.cfg
new file mode 100644
index 000000000000..59180d5d14da
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+  define_artifacts {
+    regex: "**/*sponge_log.xml"
+  }
+}
+
+# Specify which tests to run
+env_vars: {
+    key: "RUN_TESTS_SESSION"
+    value: "py-3.8"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py38"
+}
+
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-bigquery-migration/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+    key: "TRAMPOLINE_IMAGE"
+    value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-bigquery-migration/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/continuous.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/continuous.cfg
new file mode 100644
index 000000000000..a1c8d9759c88
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/periodic-head.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/periodic-head.cfg
new file mode 100644
index 000000000000..3b9b64689db9
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
+
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-bigquery-migration/.kokoro/test-samples-against-head.sh"
+}
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/periodic.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/periodic.cfg
new file mode 100644
index 000000000000..50fec9649732
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "False"
+}
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/presubmit.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/presubmit.cfg
new file mode 100644
index 000000000000..a1c8d9759c88
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.8/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/common.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/common.cfg
new file mode 100644
index 000000000000..0e2815e954d8
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+  define_artifacts {
+    regex: "**/*sponge_log.xml"
+  }
+}
+
+# Specify which tests to run
+env_vars: {
+    key: "RUN_TESTS_SESSION"
+    value: "py-3.9"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py39"
+}
+
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-bigquery-migration/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+    key: "TRAMPOLINE_IMAGE"
+    value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-bigquery-migration/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/continuous.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/continuous.cfg
new file mode 100644
index 000000000000..a1c8d9759c88
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/periodic-head.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/periodic-head.cfg
new file mode 100644
index 000000000000..3b9b64689db9
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
+
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-bigquery-migration/.kokoro/test-samples-against-head.sh"
+}
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/periodic.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/periodic.cfg
new file mode 100644
index 000000000000..50fec9649732
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "False"
+}
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/presubmit.cfg b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/presubmit.cfg
new file mode 100644
index 000000000000..a1c8d9759c88
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/samples/python3.9/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+    key: "INSTALL_LIBRARY_FROM_SOURCE"
+    value: "True"
+}
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/test-samples-against-head.sh b/packages/google-cloud-bigquery-migration/.kokoro/test-samples-against-head.sh
new file mode 100755
index 000000000000..ff3868c88d1d
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/test-samples-against-head.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A customized test runner for samples.
+#
+# For periodic builds, you can specify this file for testing against head.
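+#
+# For example, the periodic-head.cfg files added in this change point Kokoro
+# at this runner with:
+#
+#   env_vars: {
+#       key: "TRAMPOLINE_BUILD_FILE"
+#       value: "github/python-bigquery-migration/.kokoro/test-samples-against-head.sh"
+#   }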
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+cd github/python-bigquery-migration
+
+exec .kokoro/test-samples-impl.sh
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/test-samples-impl.sh b/packages/google-cloud-bigquery-migration/.kokoro/test-samples-impl.sh
new file mode 100755
index 000000000000..8a324c9c7bc6
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/test-samples-impl.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+# Exit early if samples don't exist
+if ! find samples -name 'requirements.txt' | grep -q .; then
+  echo "No tests run. './samples/**/requirements.txt' not found"
+  exit 0
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+
+# Use the secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+    gcloud auth activate-service-account \
+	   --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+	   --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+       --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+    cd "$ROOT"
+    # Navigate to the project folder.
+    file=$(dirname "$file")
+    cd "$file"
+
+    echo "------------------------------------------------------------"
+    echo "- testing $file"
+    echo "------------------------------------------------------------"
+
+    # Use nox to execute the tests for the project.
+    python3.6 -m nox -s "$RUN_TESTS_SESSION"
+    EXIT=$?
+
+    # If this is a periodic build, send the test log to the FlakyBot.
+    # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
+    if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+      chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+      $KOKORO_GFILE_DIR/linux_amd64/flakybot
+    fi
+
+    if [[ $EXIT -ne 0 ]]; then
+      RTN=1
+      echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+    else
+      echo -e "\n Testing completed.\n"
+    fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/test-samples.sh b/packages/google-cloud-bigquery-migration/.kokoro/test-samples.sh
new file mode 100755
index 000000000000..daf16dec9882
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/test-samples.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# The default test runner for samples.
+#
+# For periodic builds, we rewind the repo to the latest release and
+# run test-samples-impl.sh.
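+#
+# Kokoro selects this runner via the TRAMPOLINE_BUILD_FILE env var defined in
+# the .kokoro/samples/*/common.cfg files (see elsewhere in this change).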
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+cd github/python-bigquery-migration
+
+# Run periodic samples tests at latest release
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+    # Preserve the test runner implementation.
+    cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh"
+    echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+    echo "Now we rewind the repo back to the latest release..."
+    LATEST_RELEASE=$(git describe --abbrev=0 --tags)
+    git checkout $LATEST_RELEASE
+    echo "The current head is: "
+    echo $(git rev-parse --verify HEAD)
+    echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+    # Restore the preserved test runner implementation if it doesn't exist at that release.
+    if [ ! -f .kokoro/test-samples-impl.sh ]; then
+	cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh
+    fi
+fi
+
+exec .kokoro/test-samples-impl.sh
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/trampoline.sh b/packages/google-cloud-bigquery-migration/.kokoro/trampoline.sh
new file mode 100755
index 000000000000..f39236e943a8
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/trampoline.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+# Always run the cleanup script, regardless of the success of bouncing into
+# the container.
+function cleanup() {
+    chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+    ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+    echo "cleanup";
+}
+trap cleanup EXIT
+
+$(dirname $0)/populate-secrets.sh # Secret Manager secrets.
+python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py"
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.kokoro/trampoline_v2.sh b/packages/google-cloud-bigquery-migration/.kokoro/trampoline_v2.sh
new file mode 100755
index 000000000000..4af6cdc26dbc
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.kokoro/trampoline_v2.sh
@@ -0,0 +1,487 @@
+#!/usr/bin/env bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# trampoline_v2.sh
+#
+# This script does 3 things.
+#
+# 1. Prepare the Docker image for the test
+# 2. Run the Docker with appropriate flags to run the test
+# 3. Upload the newly built Docker image
+#
+# in a way that is somewhat compatible with trampoline_v1.
+#
+# To run this script, first download a few files from GCS to /dev/shm.
+# (/dev/shm is passed into the container as KOKORO_GFILE_DIR).
+#
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm
+#
+# Then run the script.
+# .kokoro/trampoline_v2.sh
+#
+# These environment variables are required:
+# TRAMPOLINE_IMAGE: The docker image to use.
+# TRAMPOLINE_BUILD_FILE: The script to run in the docker container.
+#
+# You can optionally change these environment variables:
+# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. If set, the image
+#                        is built from this Dockerfile (using the pulled
+#                        image as a cache) instead of only being pulled.
+# TRAMPOLINE_IMAGE_UPLOAD:
+#     (true|false): Whether to upload the Docker image after the
+#                   successful builds.
+# TRAMPOLINE_WORKSPACE: The workspace path in the docker container.
+#                       Defaults to /workspace.
+# Potentially there are some repo specific envvars in .trampolinerc in
+# the project root.
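+#
+# A minimal local invocation might look like this (the image and build file
+# below are simply the ones used elsewhere in this repo; adjust as needed):
+#
+#   TRAMPOLINE_IMAGE="gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" \
+#   TRAMPOLINE_BUILD_FILE=".kokoro/test-samples.sh" \
+#   .kokoro/trampoline_v2.sh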
+
+
+set -euo pipefail
+
+TRAMPOLINE_VERSION="2.0.5"
+
+if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then
+  readonly IO_COLOR_RED="$(tput setaf 1)"
+  readonly IO_COLOR_GREEN="$(tput setaf 2)"
+  readonly IO_COLOR_YELLOW="$(tput setaf 3)"
+  readonly IO_COLOR_RESET="$(tput sgr0)"
+else
+  readonly IO_COLOR_RED=""
+  readonly IO_COLOR_GREEN=""
+  readonly IO_COLOR_YELLOW=""
+  readonly IO_COLOR_RESET=""
+fi
+
+function function_exists {
+    [ $(LC_ALL=C type -t $1)"" == "function" ]
+}
+
+# Logs a message using the given color. The first argument must be one
+# of the IO_COLOR_* variables defined above, such as
+# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the
+# given color. The log message will also have an RFC-3339 timestamp
+# prepended (in UTC). You can disable the color output by setting
+# TERM=vt100.
+function log_impl() {
+    local color="$1"
+    shift
+    local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")"
+    echo "================================================================"
+    echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}"
+    echo "================================================================"
+}
+
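+# Illustrative example: log_impl "${IO_COLOR_GREEN}" "Build succeeded."
+# In practice, the wrappers below (log, log_green, log_yellow, log_red) are used.
+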
+# Logs the given message with normal coloring and a timestamp.
+function log() {
+  log_impl "${IO_COLOR_RESET}" "$@"
+}
+
+# Logs the given message in green with a timestamp.
+function log_green() {
+  log_impl "${IO_COLOR_GREEN}" "$@"
+}
+
+# Logs the given message in yellow with a timestamp.
+function log_yellow() {
+  log_impl "${IO_COLOR_YELLOW}" "$@"
+}
+
+# Logs the given message in red with a timestamp.
+function log_red() {
+  log_impl "${IO_COLOR_RED}" "$@"
+}
+
+readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX)
+readonly tmphome="${tmpdir}/h"
+mkdir -p "${tmphome}"
+
+function cleanup() {
+    rm -rf "${tmpdir}"
+}
+trap cleanup EXIT
+
+RUNNING_IN_CI="${RUNNING_IN_CI:-false}"
+
+# The workspace in the container, defaults to /workspace.
+TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}"
+
+pass_down_envvars=(
+    # TRAMPOLINE_V2 variables.
+    # Tells scripts whether they are running as part of CI or not.
+    "RUNNING_IN_CI"
+    # Indicates which CI system we're in.
+    "TRAMPOLINE_CI"
+    # Indicates the version of the script.
+    "TRAMPOLINE_VERSION"
+)
+
+log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}"
+
+# Detect which CI systems we're in. If we're in any of the CI systems
+# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be
+# the name of the CI system. Both envvars will be passed down to the
+# container to tell it which CI system we're in.
+if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
+    # descriptive env var for indicating it's on CI.
+    RUNNING_IN_CI="true"
+    TRAMPOLINE_CI="kokoro"
+    if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then
+	if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then
+	    log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting."
+	    exit 1
+	fi
+	# This service account will be activated later.
+	TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json"
+    else
+	if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+	    gcloud auth list
+	fi
+	log_yellow "Configuring Container Registry access"
+	gcloud auth configure-docker --quiet
+    fi
+    pass_down_envvars+=(
+	# KOKORO dynamic variables.
+	"KOKORO_BUILD_NUMBER"
+	"KOKORO_BUILD_ID"
+	"KOKORO_JOB_NAME"
+	"KOKORO_GIT_COMMIT"
+	"KOKORO_GITHUB_COMMIT"
+	"KOKORO_GITHUB_PULL_REQUEST_NUMBER"
+	"KOKORO_GITHUB_PULL_REQUEST_COMMIT"
+	# For FlakyBot
+	"KOKORO_GITHUB_COMMIT_URL"
+	"KOKORO_GITHUB_PULL_REQUEST_URL"
+    )
+elif [[ "${TRAVIS:-}" == "true" ]]; then
+    RUNNING_IN_CI="true"
+    TRAMPOLINE_CI="travis"
+    pass_down_envvars+=(
+	"TRAVIS_BRANCH"
+	"TRAVIS_BUILD_ID"
+	"TRAVIS_BUILD_NUMBER"
+	"TRAVIS_BUILD_WEB_URL"
+	"TRAVIS_COMMIT"
+	"TRAVIS_COMMIT_MESSAGE"
+	"TRAVIS_COMMIT_RANGE"
+	"TRAVIS_JOB_NAME"
+	"TRAVIS_JOB_NUMBER"
+	"TRAVIS_JOB_WEB_URL"
+	"TRAVIS_PULL_REQUEST"
+	"TRAVIS_PULL_REQUEST_BRANCH"
+	"TRAVIS_PULL_REQUEST_SHA"
+	"TRAVIS_PULL_REQUEST_SLUG"
+	"TRAVIS_REPO_SLUG"
+	"TRAVIS_SECURE_ENV_VARS"
+	"TRAVIS_TAG"
+    )
+elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then
+    RUNNING_IN_CI="true"
+    TRAMPOLINE_CI="github-workflow"
+    pass_down_envvars+=(
+	"GITHUB_WORKFLOW"
+	"GITHUB_RUN_ID"
+	"GITHUB_RUN_NUMBER"
+	"GITHUB_ACTION"
+	"GITHUB_ACTIONS"
+	"GITHUB_ACTOR"
+	"GITHUB_REPOSITORY"
+	"GITHUB_EVENT_NAME"
+	"GITHUB_EVENT_PATH"
+	"GITHUB_SHA"
+	"GITHUB_REF"
+	"GITHUB_HEAD_REF"
+	"GITHUB_BASE_REF"
+    )
+elif [[ "${CIRCLECI:-}" == "true" ]]; then
+    RUNNING_IN_CI="true"
+    TRAMPOLINE_CI="circleci"
+    pass_down_envvars+=(
+	"CIRCLE_BRANCH"
+	"CIRCLE_BUILD_NUM"
+	"CIRCLE_BUILD_URL"
+	"CIRCLE_COMPARE_URL"
+	"CIRCLE_JOB"
+	"CIRCLE_NODE_INDEX"
+	"CIRCLE_NODE_TOTAL"
+	"CIRCLE_PREVIOUS_BUILD_NUM"
+	"CIRCLE_PROJECT_REPONAME"
+	"CIRCLE_PROJECT_USERNAME"
+	"CIRCLE_REPOSITORY_URL"
+	"CIRCLE_SHA1"
+	"CIRCLE_STAGE"
+	"CIRCLE_USERNAME"
+	"CIRCLE_WORKFLOW_ID"
+	"CIRCLE_WORKFLOW_JOB_ID"
+	"CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS"
+	"CIRCLE_WORKFLOW_WORKSPACE_ID"
+    )
+fi
+
+# Find the repository root by walking up from the given directory until a
+# .git directory is found.
+function repo_root() {
+    local dir="$1"
+    while [[ ! -d "${dir}/.git" ]]; do
+	dir="$(dirname "$dir")"
+    done
+    echo "${dir}"
+}
+
+# Detect the project root. In CI builds, we assume the script is in
+# the git tree and traverse from there, otherwise, traverse from `pwd`
+# to find `.git` directory.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+    PROGRAM_PATH="$(realpath "$0")"
+    PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")"
+    PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")"
+else
+    PROJECT_ROOT="$(repo_root $(pwd))"
+fi
+
+log_yellow "Changing to the project root: ${PROJECT_ROOT}."
+cd "${PROJECT_ROOT}"
+
+# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need
+# to use this environment variable in `PROJECT_ROOT`.
+if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then
+
+    mkdir -p "${tmpdir}/gcloud"
+    gcloud_config_dir="${tmpdir}/gcloud"
+
+    log_yellow "Using isolated gcloud config: ${gcloud_config_dir}."
+    export CLOUDSDK_CONFIG="${gcloud_config_dir}"
+
+    log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication."
+    gcloud auth activate-service-account \
+	   --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}"
+    log_yellow "Configuring Container Registry access"
+    gcloud auth configure-docker --quiet
+fi
+
+required_envvars=(
+    # The basic trampoline configurations.
+    "TRAMPOLINE_IMAGE"
+    "TRAMPOLINE_BUILD_FILE"
+)
+
+if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then
+    source "${PROJECT_ROOT}/.trampolinerc"
+fi
+
+log_yellow "Checking environment variables."
+for e in "${required_envvars[@]}"
+do
+    if [[ -z "${!e:-}" ]]; then
+	log "Missing ${e} env var. Aborting."
+	exit 1
+    fi
+done
+
+# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1
+# script: e.g. "github/repo-name/.kokoro/run_tests.sh"
+TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}"
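+# e.g. "github/python-bigquery-migration/.kokoro/test-samples.sh" becomes
+# ".kokoro/test-samples.sh".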
+log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}"
+
+# ignore error on docker operations and test execution
+set +e
+
+log_yellow "Preparing Docker image."
+# We only download the docker image in CI builds.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+    # Download the docker image specified by `TRAMPOLINE_IMAGE`
+
+    # We may want to add --max-concurrent-downloads flag.
+
+    log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+    if docker pull "${TRAMPOLINE_IMAGE}"; then
+	log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+	has_image="true"
+    else
+	log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+	has_image="false"
+    fi
+else
+    # For local run, check if we have the image.
+    if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then
+	has_image="true"
+    else
+	has_image="false"
+    fi
+fi
+
+
+# The default user for a Docker container has uid 0 (root). To avoid
+# creating root-owned files in the build directory we tell docker to
+# use the current user ID.
+user_uid="$(id -u)"
+user_gid="$(id -g)"
+user_name="$(id -un)"
+
+# To allow docker in docker, we add the user to the docker group in
+# the host os.
+docker_gid=$(cut -d: -f3 < <(getent group docker))
+
+update_cache="false"
+if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then
+    # Build the Docker image from the source.
+    context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}")
+    docker_build_flags=(
+	"-f" "${TRAMPOLINE_DOCKERFILE}"
+	"-t" "${TRAMPOLINE_IMAGE}"
+	"--build-arg" "UID=${user_uid}"
+	"--build-arg" "USERNAME=${user_name}"
+    )
+    if [[ "${has_image}" == "true" ]]; then
+	docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}")
+    fi
+
+    log_yellow "Start building the docker image."
+    if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then
+	echo "docker build" "${docker_build_flags[@]}" "${context_dir}"
+    fi
+
+    # ON CI systems, we want to suppress docker build logs, only
+    # output the logs when it fails.
+    if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+	if docker build "${docker_build_flags[@]}" "${context_dir}" \
+		  > "${tmpdir}/docker_build.log" 2>&1; then
+	    if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+		cat "${tmpdir}/docker_build.log"
+	    fi
+
+	    log_green "Finished building the docker image."
+	    update_cache="true"
+	else
+	    log_red "Failed to build the Docker image, aborting."
+	    log_yellow "Dumping the build logs:"
+	    cat "${tmpdir}/docker_build.log"
+	    exit 1
+	fi
+    else
+	if docker build "${docker_build_flags[@]}" "${context_dir}"; then
+	    log_green "Finished building the docker image."
+	    update_cache="true"
+	else
+	    log_red "Failed to build the Docker image, aborting."
+	    exit 1
+	fi
+    fi
+else
+    if [[ "${has_image}" != "true" ]]; then
+	log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting."
+	exit 1
+    fi
+fi
+
+# We use an array for the flags so they are easier to document.
+docker_flags=(
+    # Remove the container after it exits.
+    "--rm"
+
+    # Use the host network.
+    "--network=host"
+
+    # Run in privileged mode. We are not using docker for sandboxing or
+    # isolation, just for packaging our dev tools.
+    "--privileged"
+
+    # Run the docker script with the user id. Because the docker image gets to
+    # write in ${PWD} you typically want this to be your user id.
+    # To allow docker in docker, we need to use docker gid on the host.
+    "--user" "${user_uid}:${docker_gid}"
+
+    # Pass down the USER.
+    "--env" "USER=${user_name}"
+
+    # Mount the project directory inside the Docker container.
+    "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}"
+    "--workdir" "${TRAMPOLINE_WORKSPACE}"
+    "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}"
+
+    # Mount the temporary home directory.
+    "--volume" "${tmphome}:/h"
+    "--env" "HOME=/h"
+
+    # Allow docker in docker.
+    "--volume" "/var/run/docker.sock:/var/run/docker.sock"
+
+    # Mount the /tmp so that docker in docker can mount the files
+    # there correctly.
+    "--volume" "/tmp:/tmp"
+    # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR
+    # TODO(tmatsuo): This part is not portable.
+    "--env" "TRAMPOLINE_SECRET_DIR=/secrets"
+    "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile"
+    "--env" "KOKORO_GFILE_DIR=/secrets/gfile"
+    "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore"
+    "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore"
+)
+
+# Add an option for nicer output if the build gets a tty.
+if [[ -t 0 ]]; then
+    docker_flags+=("-it")
+fi
+
+# Passing down env vars
+for e in "${pass_down_envvars[@]}"
+do
+    if [[ -n "${!e:-}" ]]; then
+	docker_flags+=("--env" "${e}=${!e}")
+    fi
+done
+
+# If arguments are given, all arguments will become the commands run
+# in the container, otherwise run TRAMPOLINE_BUILD_FILE.
+if [[ $# -ge 1 ]]; then
+    log_yellow "Running the given commands '" "${@:1}" "' in the container."
+    readonly commands=("${@:1}")
+    if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+	echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+    fi
+    docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+else
+    log_yellow "Running the tests in a Docker container."
+    docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}")
+    if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+	echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+    fi
+    docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+fi
+
+
+test_retval=$?
+
+if [[ ${test_retval} -eq 0 ]]; then
+    log_green "Build finished with ${test_retval}"
+else
+    log_red "Build finished with ${test_retval}"
+fi
+
+# Only upload it when the test passes.
+if [[ "${update_cache}" == "true" ]] && \
+       [[ $test_retval == 0 ]] && \
+       [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then
+    log_yellow "Uploading the Docker image."
+    if docker push "${TRAMPOLINE_IMAGE}"; then
+	log_green "Finished uploading the Docker image."
+    else
+	log_red "Failed uploading the Docker image."
+    fi
+    # Call trampoline_after_upload_hook if it's defined.
+    if function_exists trampoline_after_upload_hook; then
+	trampoline_after_upload_hook
+    fi
+
+fi
+
+exit "${test_retval}"
diff --git a/packages/google-cloud-bigquery-migration/.pre-commit-config.yaml b/packages/google-cloud-bigquery-migration/.pre-commit-config.yaml
new file mode 100644
index 000000000000..62eb5a77d9a3
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.pre-commit-config.yaml
@@ -0,0 +1,31 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+repos:
+-   repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.0.1
+    hooks:
+    -   id: trailing-whitespace
+    -   id: end-of-file-fixer
+    -   id: check-yaml
+-   repo: https://github.com/psf/black
+    rev: 19.10b0
+    hooks:
+    - id: black
+-   repo: https://gitlab.com/pycqa/flake8
+    rev: 3.9.2
+    hooks:
+    - id: flake8
diff --git a/packages/google-cloud-bigquery-migration/.repo-metadata.json b/packages/google-cloud-bigquery-migration/.repo-metadata.json
new file mode 100644
index 000000000000..738a96be661f
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.repo-metadata.json
@@ -0,0 +1,14 @@
+{
+    "name": "bigquerymigration",
+    "name_pretty": "Google BigQuery Migration",
+    "product_documentation": "https://cloud.google.com/bigquery/docs/reference/migration/",
+    "client_documentation": "https://googleapis.dev/python/bigquerymigration/latest",
+    "issue_tracker": "https://issuetracker.google.com/savedsearches/559654",
+    "release_level": "alpha",
+    "language": "python",
+    "library_type": "GAPIC_AUTO",
+    "repo": "googleapis/python-bigquery-migration",
+    "distribution_name": "google-cloud-bigquery-migration",
+    "api_id": "bigquerymigration.googleapis.com",
+    "requires_billing": true
+  }
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/.trampolinerc b/packages/google-cloud-bigquery-migration/.trampolinerc
new file mode 100644
index 000000000000..383b6ec89fbc
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/.trampolinerc
@@ -0,0 +1,52 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Template for .trampolinerc
+
+# Add required env vars here.
+required_envvars+=(
+    "STAGING_BUCKET"
+    "V2_STAGING_BUCKET"
+)
+
+# Add env vars which are passed down into the container here.
+pass_down_envvars+=(
+    "STAGING_BUCKET"
+    "V2_STAGING_BUCKET"
+    "NOX_SESSION"
+)
+
+# Prevent unintentional override on the default image.
+if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \
+   [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+   echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image."
+   exit 1
+fi
+
+# Define the default value if it makes sense.
+if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then
+    TRAMPOLINE_IMAGE_UPLOAD=""
+fi
+
+if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+    TRAMPOLINE_IMAGE=""
+fi
+
+if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then
+    TRAMPOLINE_DOCKERFILE=""
+fi
+
+if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then
+    TRAMPOLINE_BUILD_FILE=""
+fi
diff --git a/packages/google-cloud-bigquery-migration/CHANGELOG.md b/packages/google-cloud-bigquery-migration/CHANGELOG.md
new file mode 100644
index 000000000000..825c32f0d03d
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/CHANGELOG.md
@@ -0,0 +1 @@
+# Changelog
diff --git a/packages/google-cloud-bigquery-migration/CODE_OF_CONDUCT.md b/packages/google-cloud-bigquery-migration/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000000..039f43681204
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/CODE_OF_CONDUCT.md
@@ -0,0 +1,95 @@
+<!-- Generated by synthtool. DO NOT EDIT! -->
+# Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of
+experience, education, socio-economic status, nationality, personal appearance,
+race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+*   Using welcoming and inclusive language
+*   Being respectful of differing viewpoints and experiences
+*   Gracefully accepting constructive criticism
+*   Focusing on what is best for the community
+*   Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+*   The use of sexualized language or imagery and unwelcome sexual attention or
+    advances
+*   Trolling, insulting/derogatory comments, and personal or political attacks
+*   Public or private harassment
+*   Publishing others' private information, such as a physical or electronic
+    address, without explicit permission
+*   Other conduct which could reasonably be considered inappropriate in a
+    professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or reject
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, or to ban temporarily or permanently any
+contributor for other behaviors that they deem inappropriate, threatening,
+offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+This Code of Conduct also applies outside the project spaces when the Project
+Steward has a reasonable belief that an individual's behavior may have a
+negative impact on the project or its community.
+
+## Conflict Resolution
+
+We do not believe that all conflict is bad; healthy debate and disagreement
+often yield positive results. However, it is never okay to be disrespectful or
+to engage in behavior that violates the project’s code of conduct.
+
+If you see someone violating the code of conduct, you are encouraged to address
+the behavior directly with those involved. Many issues can be resolved quickly
+and easily, and this gives people more control over the outcome of their
+dispute. If you are unable to resolve the matter for any reason, or if the
+behavior is threatening or harassing, report it. We are dedicated to providing
+an environment where participants feel welcome and safe.
+
+
+Reports should be directed to *googleapis-stewards@google.com*, the
+Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to
+receive and address reported violations of the code of conduct. They will then
+work with a committee consisting of representatives from the Open Source
+Programs Office and the Google Open Source Strategy team. If for any reason you
+are uncomfortable reaching out to the Project Steward, please email
+opensource@google.com.
+
+We will investigate every complaint, but you may not receive a direct response.
+We will use our discretion in determining when and how to follow up on reported
+incidents, which may range from not taking action to permanent expulsion from
+the project and project-sponsored spaces. We will notify the accused of the
+report and provide them an opportunity to discuss it before any action is taken.
+The identity of the reporter will be omitted from the details of the report
+supplied to the accused. In potentially harmful situations, such as ongoing
+harassment or threats to anyone's safety, we may take action without notice.
+
+## Attribution
+
+This Code of Conduct is adapted from the Contributor Covenant, version 1.4,
+available at
+https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/CONTRIBUTING.rst b/packages/google-cloud-bigquery-migration/CONTRIBUTING.rst
new file mode 100644
index 000000000000..e5de945d3d6d
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/CONTRIBUTING.rst
@@ -0,0 +1,279 @@
+.. Generated by synthtool. DO NOT EDIT!
+############
+Contributing
+############
+
+#. **Please sign one of the contributor license agreements below.**
+#. Fork the repo, develop and test your code changes, add docs.
+#. Make sure that your commit messages clearly describe the changes.
+#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_)
+
+.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews
+
+.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries.
+
+***************
+Adding Features
+***************
+
+In order to add a feature:
+
+- The feature must be documented in both the API and narrative
+  documentation.
+
+- The feature must work fully on the following CPython versions:
+  3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows.
+
+- The feature must not add unnecessary dependencies (where
+  "unnecessary" is of course subjective, but new dependencies should
+  be discussed).
+
+****************************
+Using a Development Checkout
+****************************
+
+You'll have to create a development environment using a Git checkout:
+
+- While logged into your GitHub account, navigate to the
+  ``python-bigquery-migration`` `repo`_ on GitHub.
+
+- Fork and clone the ``python-bigquery-migration`` repository to your GitHub account by
+  clicking the "Fork" button.
+
+- Clone your fork of ``python-bigquery-migration`` from your GitHub account to your local
+  computer, substituting your account username and specifying the destination
+  as ``hack-on-python-bigquery-migration``.  E.g.::
+
+   $ cd ${HOME}
+   $ git clone git@github.com:USERNAME/python-bigquery-migration.git hack-on-python-bigquery-migration
+   $ cd hack-on-python-bigquery-migration
+   # Configure remotes such that you can pull changes from the googleapis/python-bigquery-migration
+   # repository into your local repository.
+   $ git remote add upstream git@github.com:googleapis/python-bigquery-migration.git
+   # fetch and merge changes from upstream into main
+   $ git fetch upstream
+   $ git merge upstream/main
+
+Now your local repo is set up such that you will push changes to your GitHub
+repo, from which you can submit a pull request.
+
+To work on the codebase and run the tests, we recommend using ``nox``,
+but you can also use a ``virtualenv`` of your own creation.
+
+.. _repo: https://github.com/googleapis/python-bigquery-migration
+
+Using ``nox``
+=============
+
+We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+
+    $ nox -s unit
+
+- To run a single unit test::
+
+    $ nox -s unit-3.9 -- -k <name of test>
+
+
+  .. note::
+
+    The unit tests and system tests are described in the
+    ``noxfile.py`` files in each directory.
+
+.. _nox: https://pypi.org/project/nox/
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+  $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+- We use the automatic code formatter ``black``. You can run it using
+  the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+   $ nox -s blacken
+
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
+  If you have ``nox`` installed, you can test that you have not introduced
+  any non-compliant code via::
+
+   $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+  variables::
+
+   export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+   export GOOGLE_CLOUD_TESTING_BRANCH="main"
+
+  By doing this, you are specifying the location of the most up-to-date
+  version of ``python-bigquery-migration``. The
+  remote name ``upstream`` should point to the official ``googleapis``
+  checkout and the branch should be the default branch on that remote (``main``).
+
+- This repository contains configuration for the
+  `pre-commit <https://pre-commit.com/>`__ tool, which automates checking
+  our linters during a commit.  If you have it installed on your ``$PATH``,
+  you can enable enforcing those checks via:
+
+.. code-block:: bash
+
+   $ pre-commit install
+   pre-commit installed at .git/hooks/pre-commit
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+  "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+  Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
+
+********************
+Running System Tests
+********************
+
+- To run system tests, you can execute::
+
+   # Run all system tests
+   $ nox -s system
+
+   # Run a single system test
+   $ nox -s system-3.8 -- -k <name of test>
+
+
+  .. note::
+
+      System tests are only configured to run under Python 3.8.
+      For expediency, we do not run them in older versions of Python 3.
+
+  This alone will not run the tests. You'll need to change some local
+  auth settings and change some configuration in your project to
+  run all the tests.
+
+- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication <https://cloud.google.com/docs/authentication/best-practices-applications#local_development_and_testing_with_the>`__. Some tests require a service account. For those tests see `Authenticating as a service account <https://cloud.google.com/docs/authentication/production>`__.
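+
+  For example, to run them against a service account key instead of your
+  local gcloud credentials (the path below is illustrative)::
+
+     $ export GOOGLE_APPLICATION_CREDENTIALS="/path/to/service-account.json"
+     $ nox -s system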
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+  You can test coverage via ``nox -s cover``.
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via::
+
+   $ nox -s docs
+
+*************************
+Samples and code snippets
+*************************
+
+Code samples and snippets live in the ``samples/`` directory. Feel free to
+provide more examples, but make sure to write tests for those examples.
+Each folder containing example code requires its own ``noxfile.py`` script
+which automates testing. If you decide to create a new folder, you can
+base it on the ``samples/snippets`` folder (providing ``noxfile.py`` and
+the requirements files).
+
+The tests will run against a real Google Cloud Project, so you should
+configure them just like the System Tests.
+
+- To run sample tests, you can execute::
+
+   # Run all tests in a folder
+   $ cd samples/snippets
+   $ nox -s py-3.8
+
+   # Run a single sample test
+   $ cd samples/snippets
+   $ nox -s py-3.8 -- -k <name of test>
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/python-bigquery-migration/blob/main/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-cloud-bigquery-migration
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+-  `Python 3.6`_
+-  `Python 3.7`_
+-  `Python 3.8`_
+-  `Python 3.9`_
+
+.. _Python 3.6: https://docs.python.org/3.6/
+.. _Python 3.7: https://docs.python.org/3.7/
+.. _Python 3.8: https://docs.python.org/3.8/
+.. _Python 3.9: https://docs.python.org/3.9/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. _config: https://github.com/googleapis/python-bigquery-migration/blob/main/noxfile.py
+
+
+We also explicitly decided to support Python 3 beginning with version 3.6.
+Reasons for this include:
+
+-  Encouraging use of newest versions of Python 3
+-  Taking the lead of `prominent`_ open-source `projects`_
+-  `Unicode literal support`_ which allows for a cleaner codebase that
+   works in both Python 2 and Python 3
+
+.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django
+.. _projects: http://flask.pocoo.org/docs/0.10/python3/
+.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/
+
+**********
+Versioning
+**********
+
+This library follows `Semantic Versioning`_.
+
+.. _Semantic Versioning: http://semver.org/
+
+Some packages are currently in major version zero (``0.y.z``), which means that
+anything may change at any time and the public API should not be considered
+stable.
+
+******************************
+Contributor License Agreements
+******************************
+
+Before we can accept your pull requests you'll need to sign a Contributor
+License Agreement (CLA):
+
+- **If you are an individual writing original source code** and **you own the
+  intellectual property**, then you'll need to sign an
+  `individual CLA <https://developers.google.com/open-source/cla/individual>`__.
+- **If you work for a company that wants to allow you to contribute your work**,
+  then you'll need to sign a
+  `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__.
+
+You can sign these electronically (just scroll to the bottom). After that,
+we'll be able to accept your pull requests.
diff --git a/packages/google-cloud-bigquery-migration/LICENSE b/packages/google-cloud-bigquery-migration/LICENSE
new file mode 100644
index 000000000000..d64569567334
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/packages/google-cloud-bigquery-migration/MANIFEST.in b/packages/google-cloud-bigquery-migration/MANIFEST.in
new file mode 100644
index 000000000000..e783f4c6209b
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/MANIFEST.in
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
+include README.rst LICENSE
+recursive-include google *.json *.proto py.typed
+recursive-include tests *
+global-exclude *.py[co]
+global-exclude __pycache__
+
+# Exclude scripts for samples readmegen
+prune scripts/readme-gen
diff --git a/packages/google-cloud-bigquery-migration/README.rst b/packages/google-cloud-bigquery-migration/README.rst
new file mode 100644
index 000000000000..e78566d03b24
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/README.rst
@@ -0,0 +1,49 @@
+Python Client for Google Cloud Bigquery Migration API
+======================================================
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. Enable the Google Cloud Bigquery Migration API.
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install /path/to/library
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install \path\to\library
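For illustration, a minimal usage sketch to follow the installation steps above (assuming the
API is enabled for your project and application default credentials are configured; the
project ID and location below are placeholders):

.. code-block:: python

    from google.cloud.bigquery import migration_v2alpha

    # Construct a client; this picks up application default credentials.
    client = migration_v2alpha.MigrationServiceClient()

    # List previously created migration workflows under a project/location.
    parent = client.common_location_path("your-project-id", "us")
    for workflow in client.list_migration_workflows(parent=parent):
        print(workflow.name)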
diff --git a/packages/google-cloud-bigquery-migration/SECURITY.md b/packages/google-cloud-bigquery-migration/SECURITY.md
new file mode 100644
index 000000000000..8b58ae9c01ae
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for intake, and we coordinate disclosure here on GitHub, using a GitHub Security Advisory to privately discuss and fix the issue.
diff --git a/packages/google-cloud-bigquery-migration/docs/README.rst b/packages/google-cloud-bigquery-migration/docs/README.rst
new file mode 120000
index 000000000000..89a0106941ff
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/docs/README.rst
@@ -0,0 +1 @@
+../README.rst
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/docs/_static/custom.css b/packages/google-cloud-bigquery-migration/docs/_static/custom.css
new file mode 100644
index 000000000000..b0a295464b23
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/docs/_static/custom.css
@@ -0,0 +1,20 @@
+div#python2-eol {
+	border-color: red;
+	border-width: medium;
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+    min-width: 100px
+}
+
+/* Insert space between methods for readability */
+dl.method {
+	padding-top: 10px;
+	padding-bottom: 10px
+}
+
+/* Insert empty space between classes */
+dl.class {
+	padding-bottom: 50px
+}
diff --git a/packages/google-cloud-bigquery-migration/docs/_templates/layout.html b/packages/google-cloud-bigquery-migration/docs/_templates/layout.html
new file mode 100644
index 000000000000..6316a537f72b
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/docs/_templates/layout.html
@@ -0,0 +1,50 @@
+
+{% extends "!layout.html" %}
+{%- block content %}
+{%- if theme_fixed_sidebar|lower == 'true' %}
+  <div class="document">
+    {{ sidebar() }}
+    {%- block document %}
+      <div class="documentwrapper">
+      {%- if render_sidebar %}
+        <div class="bodywrapper">
+      {%- endif %}
+
+          {%- block relbar_top %}
+            {%- if theme_show_relbar_top|tobool %}
+              <div class="related top">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+          <div class="body" role="main">
+          	<div class="admonition" id="python2-eol"> 
+          	 As of January 1, 2020 this library no longer supports Python 2 on the latest released version. 
+          	 Library versions released prior to that date will continue to be available. For more information please
+          	 visit <a href="https://cloud.google.com/python/docs/python2-sunset/">Python 2 support on Google Cloud</a>.
+          	</div>
+            {% block body %} {% endblock %}
+          </div>
+
+          {%- block relbar_bottom %}
+            {%- if theme_show_relbar_bottom|tobool %}
+              <div class="related bottom">
+                &nbsp;
+                {{- rellink_markup () }}
+              </div>
+            {%- endif %}
+          {% endblock %}
+
+      {%- if render_sidebar %}
+        </div>
+      {%- endif %}
+      </div>
+    {%- endblock %}
+    <div class="clearer"></div>
+  </div>
+{%- else %}
+{{ super() }}
+{%- endif %}
+{%- endblock %}
diff --git a/packages/google-cloud-bigquery-migration/docs/changelog.md b/packages/google-cloud-bigquery-migration/docs/changelog.md
new file mode 120000
index 000000000000..04c99a55caae
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/docs/changelog.md
@@ -0,0 +1 @@
+../CHANGELOG.md
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/docs/conf.py b/packages/google-cloud-bigquery-migration/docs/conf.py
new file mode 100644
index 000000000000..87e5a71332ca
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/docs/conf.py
@@ -0,0 +1,381 @@
+# -*- coding: utf-8 -*-
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# google-cloud-bigquery-migration documentation build configuration file
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+import shlex
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath(".."))
+
+# For plugins that can not read conf.py.
+# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85
+sys.path.insert(0, os.path.abspath("."))
+
+__version__ = ""
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+needs_sphinx = "1.5.5"
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    "sphinx.ext.autodoc",
+    "sphinx.ext.autosummary",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.coverage",
+    "sphinx.ext.doctest",
+    "sphinx.ext.napoleon",
+    "sphinx.ext.todo",
+    "sphinx.ext.viewcode",
+    "recommonmark",
+]
+
+# autodoc/autosummary flags
+autoclass_content = "both"
+autodoc_default_options = {"members": True}
+autosummary_generate = True
+
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+# source_suffix = ['.rst', '.md']
+source_suffix = [".rst", ".md"]
+
+# The encoding of source files.
+# source_encoding = 'utf-8-sig'
+
+# The root toctree document.
+root_doc = "index"
+
+# General information about the project.
+project = "google-cloud-bigquery-migration"
+copyright = "2019, Google"
+author = "Google APIs"
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The full version, including alpha/beta/rc tags.
+release = __version__
+# The short X.Y version.
+version = ".".join(release.split(".")[0:2])
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+# today = ''
+# Else, today_fmt is used as the format for a strftime call.
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = [
+    "_build",
+    "**/.nox/**/*",
+    "samples/AUTHORING_GUIDE.md",
+    "samples/CONTRIBUTING.md",
+    "samples/snippets/README.rst",
+]
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+# default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+# add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+# show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = "sphinx"
+
+# A list of ignored prefixes for module index sorting.
+# modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+# keep_warnings = False
+
+# If true, `todo` and `todoList` produce output, else they produce nothing.
+todo_include_todos = True
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+html_theme = "alabaster"
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+html_theme_options = {
+    "description": "Google Cloud Client Libraries for google-cloud-bigquery-migration",
+    "github_user": "googleapis",
+    "github_repo": "python-bigquery-migration",
+    "github_banner": True,
+    "font_family": "'Roboto', Georgia, sans",
+    "head_font_family": "'Roboto', Georgia, serif",
+    "code_font_family": "'Roboto Mono', 'Consolas', monospace",
+}
+
+# Add any paths that contain custom themes here, relative to this directory.
+# html_theme_path = []
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+# html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+# html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+# html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+# html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+# html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+# html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+# html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+# html_additional_pages = {}
+
+# If false, no module index is generated.
+# html_domain_indices = True
+
+# If false, no index is generated.
+# html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+# html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+# html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+# html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+# html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+# html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+# html_file_suffix = None
+
+# Language to be used for generating the HTML full-text search index.
+# Sphinx supports the following languages:
+#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
+#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
+# html_search_language = 'en'
+
+# A dictionary with options for the search language support, empty by default.
+# Now only 'ja' uses this config value
+# html_search_options = {'type': 'default'}
+
+# The name of a javascript file (relative to the configuration directory) that
+# implements a search results scorer. If empty, the default will be used.
+# html_search_scorer = 'scorer.js'
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = "google-cloud-bigquery-migration-doc"
+
+# -- Options for warnings ------------------------------------------------------
+
+
+suppress_warnings = [
+    # Temporarily suppress this to avoid "more than one target found for
+    # cross-reference" warning, which are intractable for us to avoid while in
+    # a mono-repo.
+    # See https://github.com/sphinx-doc/sphinx/blob
+    # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
+    "ref.python"
+]
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+    # The paper size ('letterpaper' or 'a4paper').
+    #'papersize': 'letterpaper',
+    # The font size ('10pt', '11pt' or '12pt').
+    #'pointsize': '10pt',
+    # Additional stuff for the LaTeX preamble.
+    #'preamble': '',
+    # Latex figure (float) alignment
+    #'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+#  author, documentclass [howto, manual, or own class]).
+latex_documents = [
+    (
+        root_doc,
+        "google-cloud-bigquery-migration.tex",
+        "google-cloud-bigquery-migration Documentation",
+        author,
+        "manual",
+    )
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+# latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+# latex_use_parts = False
+
+# If true, show page references after internal links.
+# latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+# latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+# latex_appendices = []
+
+# If false, no module index is generated.
+# latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    (
+        root_doc,
+        "google-cloud-bigquery-migration",
+        "google-cloud-bigquery-migration Documentation",
+        [author],
+        1,
+    )
+]
+
+# If true, show URL addresses after external links.
+# man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+    (
+        root_doc,
+        "google-cloud-bigquery-migration",
+        "google-cloud-bigquery-migration Documentation",
+        author,
+        "google-cloud-bigquery-migration",
+        "google-cloud-bigquery-migration Library",
+        "APIs",
+    )
+]
+
+# Documents to append as an appendix to all manuals.
+# texinfo_appendices = []
+
+# If false, no module index is generated.
+# texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+# texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+# texinfo_no_detailmenu = False
+
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {
+    "python": ("https://python.readthedocs.org/en/latest/", None),
+    "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
+    "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
+    "grpc": ("https://grpc.github.io/grpc/python/", None),
+    "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
+    "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
+}
+
+
+# Napoleon settings
+napoleon_google_docstring = True
+napoleon_numpy_docstring = True
+napoleon_include_private_with_doc = False
+napoleon_include_special_with_doc = True
+napoleon_use_admonition_for_examples = False
+napoleon_use_admonition_for_notes = False
+napoleon_use_admonition_for_references = False
+napoleon_use_ivar = False
+napoleon_use_param = True
+napoleon_use_rtype = True
diff --git a/packages/google-cloud-bigquery-migration/docs/index.rst b/packages/google-cloud-bigquery-migration/docs/index.rst
new file mode 100644
index 000000000000..2f7845367b0a
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/docs/index.rst
@@ -0,0 +1,21 @@
+.. include:: README.rst
+
+.. include:: multiprocessing.rst
+
+API Reference
+-------------
+.. toctree::
+    :maxdepth: 2
+
+    migration_v2alpha/services
+    migration_v2alpha/types
+
+Changelog
+---------
+
+For a list of all ``google-cloud-bigquery-migration`` releases:
+
+.. toctree::
+   :maxdepth: 2
+
+   changelog
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/docs/migration_v2alpha/migration_service.rst b/packages/google-cloud-bigquery-migration/docs/migration_v2alpha/migration_service.rst
new file mode 100644
index 000000000000..bf74826580a7
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/docs/migration_v2alpha/migration_service.rst
@@ -0,0 +1,10 @@
+MigrationService
+----------------------------------
+
+.. automodule:: google.cloud.bigquery.migration_v2alpha.services.migration_service
+    :members:
+    :inherited-members:
+
+.. automodule:: google.cloud.bigquery.migration_v2alpha.services.migration_service.pagers
+    :members:
+    :inherited-members:
diff --git a/packages/google-cloud-bigquery-migration/docs/migration_v2alpha/services.rst b/packages/google-cloud-bigquery-migration/docs/migration_v2alpha/services.rst
new file mode 100644
index 000000000000..64b04ab048be
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/docs/migration_v2alpha/services.rst
@@ -0,0 +1,6 @@
+Services for Google Cloud Bigquery Migration v2alpha API
+========================================================
+.. toctree::
+    :maxdepth: 2
+
+    migration_service
diff --git a/packages/google-cloud-bigquery-migration/docs/migration_v2alpha/types.rst b/packages/google-cloud-bigquery-migration/docs/migration_v2alpha/types.rst
new file mode 100644
index 000000000000..cf262b71bb08
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/docs/migration_v2alpha/types.rst
@@ -0,0 +1,7 @@
+Types for Google Cloud Bigquery Migration v2alpha API
+=====================================================
+
+.. automodule:: google.cloud.bigquery.migration_v2alpha.types
+    :members:
+    :undoc-members:
+    :show-inheritance:
diff --git a/packages/google-cloud-bigquery-migration/docs/multiprocessing.rst b/packages/google-cloud-bigquery-migration/docs/multiprocessing.rst
new file mode 100644
index 000000000000..536d17b2ea65
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/docs/multiprocessing.rst
@@ -0,0 +1,7 @@
+.. note::
+
+   Because this client uses the :mod:`grpc` library, it is safe to
+   share instances across threads. In multiprocessing scenarios, the best
+   practice is to create client instances *after* the invocation of
+   :func:`os.fork` by :class:`multiprocessing.pool.Pool` or
+   :class:`multiprocessing.Process`.
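For illustration, a minimal sketch of that guidance (the package and client names are from this
patch, but the pool size, project ID, and location are placeholders): each worker process
constructs its own client after the fork via the pool initializer, instead of sharing one
created in the parent.

.. code-block:: python

    import multiprocessing

    from google.cloud.bigquery import migration_v2alpha

    _client = None  # one client per worker process, created after the fork


    def _init_worker():
        # Runs once inside each worker process, so the underlying gRPC channel
        # is created post-fork rather than inherited from the parent process.
        global _client
        _client = migration_v2alpha.MigrationServiceClient()


    def _workflow_names(parent):
        # Uses the per-process client created in _init_worker.
        return [w.name for w in _client.list_migration_workflows(parent=parent)]


    if __name__ == "__main__":
        parents = ["projects/your-project-id/locations/us"]  # placeholder parent
        with multiprocessing.Pool(processes=2, initializer=_init_worker) as pool:
            print(pool.map(_workflow_names, parents))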
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration/__init__.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration/__init__.py
new file mode 100644
index 000000000000..eefbf2845ac3
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration/__init__.py
@@ -0,0 +1,96 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.cloud.bigquery.migration_v2alpha.services.migration_service.client import (
+    MigrationServiceClient,
+)
+from google.cloud.bigquery.migration_v2alpha.services.migration_service.async_client import (
+    MigrationServiceAsyncClient,
+)
+
+from google.cloud.bigquery.migration_v2alpha.types.migration_entities import (
+    MigrationSubtask,
+)
+from google.cloud.bigquery.migration_v2alpha.types.migration_entities import (
+    MigrationTask,
+)
+from google.cloud.bigquery.migration_v2alpha.types.migration_entities import (
+    MigrationWorkflow,
+)
+from google.cloud.bigquery.migration_v2alpha.types.migration_error_details import (
+    ErrorDetail,
+)
+from google.cloud.bigquery.migration_v2alpha.types.migration_error_details import (
+    ErrorLocation,
+)
+from google.cloud.bigquery.migration_v2alpha.types.migration_error_details import (
+    ResourceErrorDetail,
+)
+from google.cloud.bigquery.migration_v2alpha.types.migration_metrics import Point
+from google.cloud.bigquery.migration_v2alpha.types.migration_metrics import TimeInterval
+from google.cloud.bigquery.migration_v2alpha.types.migration_metrics import TimeSeries
+from google.cloud.bigquery.migration_v2alpha.types.migration_metrics import TypedValue
+from google.cloud.bigquery.migration_v2alpha.types.migration_service import (
+    CreateMigrationWorkflowRequest,
+)
+from google.cloud.bigquery.migration_v2alpha.types.migration_service import (
+    DeleteMigrationWorkflowRequest,
+)
+from google.cloud.bigquery.migration_v2alpha.types.migration_service import (
+    GetMigrationSubtaskRequest,
+)
+from google.cloud.bigquery.migration_v2alpha.types.migration_service import (
+    GetMigrationWorkflowRequest,
+)
+from google.cloud.bigquery.migration_v2alpha.types.migration_service import (
+    ListMigrationSubtasksRequest,
+)
+from google.cloud.bigquery.migration_v2alpha.types.migration_service import (
+    ListMigrationSubtasksResponse,
+)
+from google.cloud.bigquery.migration_v2alpha.types.migration_service import (
+    ListMigrationWorkflowsRequest,
+)
+from google.cloud.bigquery.migration_v2alpha.types.migration_service import (
+    ListMigrationWorkflowsResponse,
+)
+from google.cloud.bigquery.migration_v2alpha.types.migration_service import (
+    StartMigrationWorkflowRequest,
+)
+
+__all__ = (
+    "MigrationServiceClient",
+    "MigrationServiceAsyncClient",
+    "MigrationSubtask",
+    "MigrationTask",
+    "MigrationWorkflow",
+    "ErrorDetail",
+    "ErrorLocation",
+    "ResourceErrorDetail",
+    "Point",
+    "TimeInterval",
+    "TimeSeries",
+    "TypedValue",
+    "CreateMigrationWorkflowRequest",
+    "DeleteMigrationWorkflowRequest",
+    "GetMigrationSubtaskRequest",
+    "GetMigrationWorkflowRequest",
+    "ListMigrationSubtasksRequest",
+    "ListMigrationSubtasksResponse",
+    "ListMigrationWorkflowsRequest",
+    "ListMigrationWorkflowsResponse",
+    "StartMigrationWorkflowRequest",
+)
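For illustration, a minimal sketch of the two import surfaces involved here (assuming a normal
installation; at this point both re-export the same generated v2alpha client):

.. code-block:: python

    # Versionless surface: tracks whichever API version the package defaults to.
    from google.cloud.bigquery.migration import MigrationServiceClient

    # Version-pinned surface: explicit about the v2alpha API.
    from google.cloud.bigquery.migration_v2alpha import (
        MigrationServiceClient as PinnedMigrationServiceClient,
    )

    # Both names resolve to the same generated client class.
    assert MigrationServiceClient is PinnedMigrationServiceClient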
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration/py.typed b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration/py.typed
new file mode 100644
index 000000000000..fcbd0b586dd2
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration/py.typed
@@ -0,0 +1,2 @@
+# Marker file for PEP 561.
+# The google-cloud-bigquery-migration package uses inline types.
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/__init__.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/__init__.py
new file mode 100644
index 000000000000..4b5dcfac8ddb
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/__init__.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from .services.migration_service import MigrationServiceClient
+from .services.migration_service import MigrationServiceAsyncClient
+
+from .types.migration_entities import MigrationSubtask
+from .types.migration_entities import MigrationTask
+from .types.migration_entities import MigrationWorkflow
+from .types.migration_error_details import ErrorDetail
+from .types.migration_error_details import ErrorLocation
+from .types.migration_error_details import ResourceErrorDetail
+from .types.migration_metrics import Point
+from .types.migration_metrics import TimeInterval
+from .types.migration_metrics import TimeSeries
+from .types.migration_metrics import TypedValue
+from .types.migration_service import CreateMigrationWorkflowRequest
+from .types.migration_service import DeleteMigrationWorkflowRequest
+from .types.migration_service import GetMigrationSubtaskRequest
+from .types.migration_service import GetMigrationWorkflowRequest
+from .types.migration_service import ListMigrationSubtasksRequest
+from .types.migration_service import ListMigrationSubtasksResponse
+from .types.migration_service import ListMigrationWorkflowsRequest
+from .types.migration_service import ListMigrationWorkflowsResponse
+from .types.migration_service import StartMigrationWorkflowRequest
+
+__all__ = (
+    "MigrationServiceAsyncClient",
+    "CreateMigrationWorkflowRequest",
+    "DeleteMigrationWorkflowRequest",
+    "ErrorDetail",
+    "ErrorLocation",
+    "GetMigrationSubtaskRequest",
+    "GetMigrationWorkflowRequest",
+    "ListMigrationSubtasksRequest",
+    "ListMigrationSubtasksResponse",
+    "ListMigrationWorkflowsRequest",
+    "ListMigrationWorkflowsResponse",
+    "MigrationServiceClient",
+    "MigrationSubtask",
+    "MigrationTask",
+    "MigrationWorkflow",
+    "Point",
+    "ResourceErrorDetail",
+    "StartMigrationWorkflowRequest",
+    "TimeInterval",
+    "TimeSeries",
+    "TypedValue",
+)
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/gapic_metadata.json b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/gapic_metadata.json
new file mode 100644
index 000000000000..515d090a4548
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/gapic_metadata.json
@@ -0,0 +1,93 @@
+ {
+  "comment": "This file maps proto services/RPCs to the corresponding library clients/methods",
+  "language": "python",
+  "libraryPackage": "google.cloud.bigquery.migration_v2alpha",
+  "protoPackage": "google.cloud.bigquery.migration.v2alpha",
+  "schema": "1.0",
+  "services": {
+    "MigrationService": {
+      "clients": {
+        "grpc": {
+          "libraryClient": "MigrationServiceClient",
+          "rpcs": {
+            "CreateMigrationWorkflow": {
+              "methods": [
+                "create_migration_workflow"
+              ]
+            },
+            "DeleteMigrationWorkflow": {
+              "methods": [
+                "delete_migration_workflow"
+              ]
+            },
+            "GetMigrationSubtask": {
+              "methods": [
+                "get_migration_subtask"
+              ]
+            },
+            "GetMigrationWorkflow": {
+              "methods": [
+                "get_migration_workflow"
+              ]
+            },
+            "ListMigrationSubtasks": {
+              "methods": [
+                "list_migration_subtasks"
+              ]
+            },
+            "ListMigrationWorkflows": {
+              "methods": [
+                "list_migration_workflows"
+              ]
+            },
+            "StartMigrationWorkflow": {
+              "methods": [
+                "start_migration_workflow"
+              ]
+            }
+          }
+        },
+        "grpc-async": {
+          "libraryClient": "MigrationServiceAsyncClient",
+          "rpcs": {
+            "CreateMigrationWorkflow": {
+              "methods": [
+                "create_migration_workflow"
+              ]
+            },
+            "DeleteMigrationWorkflow": {
+              "methods": [
+                "delete_migration_workflow"
+              ]
+            },
+            "GetMigrationSubtask": {
+              "methods": [
+                "get_migration_subtask"
+              ]
+            },
+            "GetMigrationWorkflow": {
+              "methods": [
+                "get_migration_workflow"
+              ]
+            },
+            "ListMigrationSubtasks": {
+              "methods": [
+                "list_migration_subtasks"
+              ]
+            },
+            "ListMigrationWorkflows": {
+              "methods": [
+                "list_migration_workflows"
+              ]
+            },
+            "StartMigrationWorkflow": {
+              "methods": [
+                "start_migration_workflow"
+              ]
+            }
+          }
+        }
+      }
+    }
+  }
+}
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/py.typed b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/py.typed
new file mode 100644
index 000000000000..fcbd0b586dd2
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/py.typed
@@ -0,0 +1,2 @@
+# Marker file for PEP 561.
+# The google-cloud-bigquery-migration package uses inline types.
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/__init__.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/__init__.py
new file mode 100644
index 000000000000..4de65971c238
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/__init__.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/__init__.py
new file mode 100644
index 000000000000..5af6b09eb3c3
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/__init__.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from .client import MigrationServiceClient
+from .async_client import MigrationServiceAsyncClient
+
+__all__ = (
+    "MigrationServiceClient",
+    "MigrationServiceAsyncClient",
+)
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/async_client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/async_client.py
new file mode 100644
index 000000000000..4f1fe7b28442
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/async_client.py
@@ -0,0 +1,758 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+import functools
+import re
+from typing import Dict, Sequence, Tuple, Type, Union
+import pkg_resources
+
+import google.api_core.client_options as ClientOptions  # type: ignore
+from google.api_core import exceptions as core_exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.bigquery.migration_v2alpha.services.migration_service import pagers
+from google.cloud.bigquery.migration_v2alpha.types import migration_entities
+from google.cloud.bigquery.migration_v2alpha.types import migration_error_details
+from google.cloud.bigquery.migration_v2alpha.types import migration_metrics
+from google.cloud.bigquery.migration_v2alpha.types import migration_service
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.rpc import error_details_pb2  # type: ignore
+from .transports.base import MigrationServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc_asyncio import MigrationServiceGrpcAsyncIOTransport
+from .client import MigrationServiceClient
+
+
+class MigrationServiceAsyncClient:
+    """Service to handle EDW migrations."""
+
+    _client: MigrationServiceClient
+
+    DEFAULT_ENDPOINT = MigrationServiceClient.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = MigrationServiceClient.DEFAULT_MTLS_ENDPOINT
+
+    migration_subtask_path = staticmethod(MigrationServiceClient.migration_subtask_path)
+    parse_migration_subtask_path = staticmethod(
+        MigrationServiceClient.parse_migration_subtask_path
+    )
+    migration_workflow_path = staticmethod(
+        MigrationServiceClient.migration_workflow_path
+    )
+    parse_migration_workflow_path = staticmethod(
+        MigrationServiceClient.parse_migration_workflow_path
+    )
+    common_billing_account_path = staticmethod(
+        MigrationServiceClient.common_billing_account_path
+    )
+    parse_common_billing_account_path = staticmethod(
+        MigrationServiceClient.parse_common_billing_account_path
+    )
+    common_folder_path = staticmethod(MigrationServiceClient.common_folder_path)
+    parse_common_folder_path = staticmethod(
+        MigrationServiceClient.parse_common_folder_path
+    )
+    common_organization_path = staticmethod(
+        MigrationServiceClient.common_organization_path
+    )
+    parse_common_organization_path = staticmethod(
+        MigrationServiceClient.parse_common_organization_path
+    )
+    common_project_path = staticmethod(MigrationServiceClient.common_project_path)
+    parse_common_project_path = staticmethod(
+        MigrationServiceClient.parse_common_project_path
+    )
+    common_location_path = staticmethod(MigrationServiceClient.common_location_path)
+    parse_common_location_path = staticmethod(
+        MigrationServiceClient.parse_common_location_path
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MigrationServiceAsyncClient: The constructed client.
+        """
+        return MigrationServiceClient.from_service_account_info.__func__(MigrationServiceAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MigrationServiceAsyncClient: The constructed client.
+        """
+        return MigrationServiceClient.from_service_account_file.__func__(MigrationServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> MigrationServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            MigrationServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(
+        type(MigrationServiceClient).get_transport_class, type(MigrationServiceClient)
+    )
+
+    def __init__(
+        self,
+        *,
+        credentials: ga_credentials.Credentials = None,
+        transport: Union[str, MigrationServiceTransport] = "grpc_asyncio",
+        client_options: ClientOptions = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the migration service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.MigrationServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = MigrationServiceClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+        )
+
+    async def create_migration_workflow(
+        self,
+        request: migration_service.CreateMigrationWorkflowRequest = None,
+        *,
+        parent: str = None,
+        migration_workflow: migration_entities.MigrationWorkflow = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> migration_entities.MigrationWorkflow:
+        r"""Creates a migration workflow.
+
+        Args:
+            request (:class:`google.cloud.bigquery.migration_v2alpha.types.CreateMigrationWorkflowRequest`):
+                The request object. Request to create a migration
+                workflow resource.
+            parent (:class:`str`):
+                Required. The name of the project to which this
+                migration workflow belongs. Example:
+                ``projects/foo/locations/bar``
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            migration_workflow (:class:`google.cloud.bigquery.migration_v2alpha.types.MigrationWorkflow`):
+                Required. The migration workflow to
+                create.
+
+                This corresponds to the ``migration_workflow`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery.migration_v2alpha.types.MigrationWorkflow:
+                A migration workflow which specifies
+                what needs to be done for an EDW
+                migration.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, migration_workflow])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = migration_service.CreateMigrationWorkflowRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if migration_workflow is not None:
+            request.migration_workflow = migration_workflow
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.create_migration_workflow,
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    async def get_migration_workflow(
+        self,
+        request: migration_service.GetMigrationWorkflowRequest = None,
+        *,
+        name: str = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> migration_entities.MigrationWorkflow:
+        r"""Gets a previously created migration workflow.
+
+        Args:
+            request (:class:`google.cloud.bigquery.migration_v2alpha.types.GetMigrationWorkflowRequest`):
+                The request object. A request to get a previously
+                created migration workflow.
+            name (:class:`str`):
+                Required. The unique identifier for the migration
+                workflow. Example:
+                ``projects/123/locations/us/workflows/1234``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery.migration_v2alpha.types.MigrationWorkflow:
+                A migration workflow which specifies
+                what needs to be done for an EDW
+                migration.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = migration_service.GetMigrationWorkflowRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.get_migration_workflow,
+            default_retry=retries.Retry(
+                initial=1.0,
+                maximum=10.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=120.0,
+            ),
+            default_timeout=120.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
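+    # Hedged sketch of the request-object calling convention that the guard in
+    # get_migration_workflow above enforces: build the request message and pass
+    # it instead of flattened fields (the resource name is a placeholder):
+    #
+    #     request = migration_service.GetMigrationWorkflowRequest(
+    #         name="projects/my-project/locations/us/workflows/1234",
+    #     )
+    #     workflow = await client.get_migration_workflow(request=request)
+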
+    async def list_migration_workflows(
+        self,
+        request: migration_service.ListMigrationWorkflowsRequest = None,
+        *,
+        parent: str = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListMigrationWorkflowsAsyncPager:
+        r"""Lists previously created migration workflow.
+
+        Args:
+            request (:class:`google.cloud.bigquery.migration_v2alpha.types.ListMigrationWorkflowsRequest`):
+                The request object. A request to list previously created
+                migration workflows.
+            parent (:class:`str`):
+                Required. The project and location of the migration
+                workflows to list. Example:
+                ``projects/123/locations/us``
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery.migration_v2alpha.services.migration_service.pagers.ListMigrationWorkflowsAsyncPager:
+                Response object for a ListMigrationWorkflows call.
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = migration_service.ListMigrationWorkflowsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.list_migration_workflows,
+            default_retry=retries.Retry(
+                initial=1.0,
+                maximum=10.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=120.0,
+            ),
+            default_timeout=120.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListMigrationWorkflowsAsyncPager(
+            method=rpc, request=request, response=response, metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
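+    # Hedged sketch of consuming the ListMigrationWorkflowsAsyncPager returned
+    # above; iterating the pager fetches additional pages transparently (each
+    # item is a MigrationWorkflow; printing `workflow.name` assumes the resource
+    # carries its name field):
+    #
+    #     pager = await client.list_migration_workflows(
+    #         parent="projects/my-project/locations/us",
+    #     )
+    #     async for workflow in pager:
+    #         print(workflow.name)
+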
+    async def delete_migration_workflow(
+        self,
+        request: migration_service.DeleteMigrationWorkflowRequest = None,
+        *,
+        name: str = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> None:
+        r"""Deletes a migration workflow by name.
+
+        Args:
+            request (:class:`google.cloud.bigquery.migration_v2alpha.types.DeleteMigrationWorkflowRequest`):
+                The request object. A request to delete a previously
+                created migration workflow.
+            name (:class:`str`):
+                Required. The unique identifier for the migration
+                workflow. Example:
+                ``projects/123/locations/us/workflows/1234``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = migration_service.DeleteMigrationWorkflowRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.delete_migration_workflow,
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        await rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,
+        )
+
+    async def start_migration_workflow(
+        self,
+        request: migration_service.StartMigrationWorkflowRequest = None,
+        *,
+        name: str = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> None:
+        r"""Starts a previously created migration workflow. I.e.,
+        the state transitions from DRAFT to RUNNING. This is a
+        no-op if the state is already RUNNING. An error will be
+        signaled if the state is anything other than DRAFT or
+        RUNNING.
+
+        Args:
+            request (:class:`google.cloud.bigquery.migration_v2alpha.types.StartMigrationWorkflowRequest`):
+                The request object. A request to start a previously
+                created migration workflow.
+            name (:class:`str`):
+                Required. The unique identifier for the migration
+                workflow. Example:
+                ``projects/123/locations/us/workflows/1234``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = migration_service.StartMigrationWorkflowRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.start_migration_workflow,
+            default_retry=retries.Retry(
+                initial=1.0,
+                maximum=10.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=120.0,
+            ),
+            default_timeout=120.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        await rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,
+        )
+
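+    # Hedged sketch for start_migration_workflow above: starting is a no-op for
+    # workflows already RUNNING and raises for states other than DRAFT or
+    # RUNNING, so callers typically just await the call (resource name is a
+    # placeholder):
+    #
+    #     await client.start_migration_workflow(
+    #         name="projects/my-project/locations/us/workflows/1234",
+    #     )
+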
+    async def get_migration_subtask(
+        self,
+        request: migration_service.GetMigrationSubtaskRequest = None,
+        *,
+        name: str = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> migration_entities.MigrationSubtask:
+        r"""Gets a previously created migration subtask.
+
+        Args:
+            request (:class:`google.cloud.bigquery.migration_v2alpha.types.GetMigrationSubtaskRequest`):
+                The request object. A request to get a previously
+                created migration subtask.
+            name (:class:`str`):
+                Required. The unique identifier for the migration
+                subtask. Example:
+                ``projects/123/locations/us/workflows/1234/subtasks/543``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery.migration_v2alpha.types.MigrationSubtask:
+                A subtask for a migration which
+                carries details about the configuration
+                of the subtask. The content of the
+                details should not matter to the end
+                user, but is a contract between the
+                subtask creator and subtask worker.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = migration_service.GetMigrationSubtaskRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.get_migration_subtask,
+            default_retry=retries.Retry(
+                initial=1.0,
+                maximum=10.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=120.0,
+            ),
+            default_timeout=120.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    async def list_migration_subtasks(
+        self,
+        request: migration_service.ListMigrationSubtasksRequest = None,
+        *,
+        parent: str = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListMigrationSubtasksAsyncPager:
+        r"""Lists previously created migration subtasks.
+
+        Args:
+            request (:class:`google.cloud.bigquery.migration_v2alpha.types.ListMigrationSubtasksRequest`):
+                The request object. A request to list previously created
+                migration subtasks.
+            parent (:class:`str`):
+                Required. The migration task of the subtasks to list.
+                Example: ``projects/123/locations/us/workflows/1234``
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery.migration_v2alpha.services.migration_service.pagers.ListMigrationSubtasksAsyncPager:
+                Response object for a ListMigrationSubtasks call.
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        request = migration_service.ListMigrationSubtasksRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.list_migration_subtasks,
+            default_retry=retries.Retry(
+                initial=1.0,
+                maximum=10.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=120.0,
+            ),
+            default_timeout=120.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListMigrationSubtasksAsyncPager(
+            method=rpc, request=request, response=response, metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+
+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution(
+            "google-cloud-bigquery-migration",
+        ).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+__all__ = ("MigrationServiceAsyncClient",)
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/client.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/client.py
new file mode 100644
index 000000000000..bd57bfb28263
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/client.py
@@ -0,0 +1,916 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Dict, Optional, Sequence, Tuple, Type, Union
+import pkg_resources
+
+from google.api_core import client_options as client_options_lib  # type: ignore
+from google.api_core import exceptions as core_exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.bigquery.migration_v2alpha.services.migration_service import pagers
+from google.cloud.bigquery.migration_v2alpha.types import migration_entities
+from google.cloud.bigquery.migration_v2alpha.types import migration_error_details
+from google.cloud.bigquery.migration_v2alpha.types import migration_metrics
+from google.cloud.bigquery.migration_v2alpha.types import migration_service
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.rpc import error_details_pb2  # type: ignore
+from .transports.base import MigrationServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import MigrationServiceGrpcTransport
+from .transports.grpc_asyncio import MigrationServiceGrpcAsyncIOTransport
+
+
+class MigrationServiceClientMeta(type):
+    """Metaclass for the MigrationService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = (
+        OrderedDict()
+    )  # type: Dict[str, Type[MigrationServiceTransport]]
+    _transport_registry["grpc"] = MigrationServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = MigrationServiceGrpcAsyncIOTransport
+
+    def get_transport_class(cls, label: str = None,) -> Type[MigrationServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class MigrationServiceClient(metaclass=MigrationServiceClientMeta):
+    """Service to handle EDW migrations."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
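+    # For illustration, the conversion implemented above behaves as follows
+    # (a hedged sketch; the helper is internal to the client class):
+    #
+    #     MigrationServiceClient._get_default_mtls_endpoint(
+    #         "bigquerymigration.googleapis.com"
+    #     )  # -> "bigquerymigration.mtls.googleapis.com"
+    #     MigrationServiceClient._get_default_mtls_endpoint(
+    #         "bigquerymigration.sandbox.googleapis.com"
+    #     )  # -> "bigquerymigration.mtls.sandbox.googleapis.com"
+    #     MigrationServiceClient._get_default_mtls_endpoint(
+    #         "localhost:8080"
+    #     )  # -> "localhost:8080" (unchanged: no googleapis.com suffix)
+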
+    DEFAULT_ENDPOINT = "bigquerymigration.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MigrationServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            MigrationServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
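+    # Hedged usage sketch for the service-account constructors above (the key
+    # file path is a placeholder; from_service_account_json is the same
+    # constructor under an alias):
+    #
+    #     client = MigrationServiceClient.from_service_account_file(
+    #         "/path/to/service-account.json",
+    #     )
+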
+    @property
+    def transport(self) -> MigrationServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            MigrationServiceTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def migration_subtask_path(
+        project: str, location: str, workflow: str, subtask: str,
+    ) -> str:
+        """Returns a fully-qualified migration_subtask string."""
+        return "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}".format(
+            project=project, location=location, workflow=workflow, subtask=subtask,
+        )
+
+    @staticmethod
+    def parse_migration_subtask_path(path: str) -> Dict[str, str]:
+        """Parses a migration_subtask path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/workflows/(?P<workflow>.+?)/subtasks/(?P<subtask>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def migration_workflow_path(project: str, location: str, workflow: str,) -> str:
+        """Returns a fully-qualified migration_workflow string."""
+        return "projects/{project}/locations/{location}/workflows/{workflow}".format(
+            project=project, location=location, workflow=workflow,
+        )
+
+    @staticmethod
+    def parse_migration_workflow_path(path: str) -> Dict[str, str]:
+        """Parses a migration_workflow path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/workflows/(?P<workflow>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
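+    # Hedged round-trip sketch for the workflow path helpers above
+    # ("my-project", "us", and "1234" are placeholders):
+    #
+    #     path = MigrationServiceClient.migration_workflow_path("my-project", "us", "1234")
+    #     # -> "projects/my-project/locations/us/workflows/1234"
+    #     MigrationServiceClient.parse_migration_workflow_path(path)
+    #     # -> {"project": "my-project", "location": "us", "workflow": "1234"}
+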
+    @staticmethod
+    def common_billing_account_path(billing_account: str,) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str,) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder,)
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str,) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization,)
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str,) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project,)
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str,) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project, location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, MigrationServiceTransport, None] = None,
+        client_options: Optional[client_options_lib.ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the migration service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, MigrationServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        if isinstance(client_options, dict):
+            client_options = client_options_lib.from_dict(client_options)
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+
+        # Create SSL credentials for mutual TLS if needed.
+        use_client_cert = bool(
+            util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+        )
+
+        client_cert_source_func = None
+        is_mtls = False
+        if use_client_cert:
+            if client_options.client_cert_source:
+                is_mtls = True
+                client_cert_source_func = client_options.client_cert_source
+            else:
+                is_mtls = mtls.has_default_client_cert_source()
+                if is_mtls:
+                    client_cert_source_func = mtls.default_client_cert_source()
+                else:
+                    client_cert_source_func = None
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        else:
+            use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+            if use_mtls_env == "never":
+                api_endpoint = self.DEFAULT_ENDPOINT
+            elif use_mtls_env == "always":
+                api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+            elif use_mtls_env == "auto":
+                if is_mtls:
+                    api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+                else:
+                    api_endpoint = self.DEFAULT_ENDPOINT
+            else:
+                raise MutualTLSChannelError(
+                    "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
+                    "values: never, auto, always"
+                )
+
+        # Save or instantiate the transport.
+        # Ordinarily, we provide the transport, but allowing a custom transport
+        # instance provides an extensibility point for unusual situations.
+        if isinstance(transport, MigrationServiceTransport):
+            # transport is a MigrationServiceTransport instance.
+            if credentials or client_options.credentials_file:
+                raise ValueError(
+                    "When providing a transport instance, "
+                    "provide its credentials directly."
+                )
+            if client_options.scopes:
+                raise ValueError(
+                    "When providing a transport instance, provide its scopes "
+                    "directly."
+                )
+            self._transport = transport
+        else:
+            Transport = type(self).get_transport_class(transport)
+            self._transport = Transport(
+                credentials=credentials,
+                credentials_file=client_options.credentials_file,
+                host=api_endpoint,
+                scopes=client_options.scopes,
+                client_cert_source_for_mtls=client_cert_source_func,
+                quota_project_id=client_options.quota_project_id,
+                client_info=client_info,
+                always_use_jwt_access=(
+                    Transport == type(self).get_transport_class("grpc")
+                    or Transport == type(self).get_transport_class("grpc_asyncio")
+                ),
+            )
+
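+    # Hedged construction sketch: with no explicit options the endpoint and mTLS
+    # behavior follow the environment-variable logic above; an explicit
+    # api_endpoint always takes precedence (the endpoint shown is the default):
+    #
+    #     from google.api_core.client_options import ClientOptions
+    #
+    #     client = MigrationServiceClient(
+    #         client_options=ClientOptions(
+    #             api_endpoint="bigquerymigration.googleapis.com",
+    #         ),
+    #     )
+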
+    def create_migration_workflow(
+        self,
+        request: Union[migration_service.CreateMigrationWorkflowRequest, dict] = None,
+        *,
+        parent: str = None,
+        migration_workflow: migration_entities.MigrationWorkflow = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> migration_entities.MigrationWorkflow:
+        r"""Creates a migration workflow.
+
+        Args:
+            request (Union[google.cloud.bigquery.migration_v2alpha.types.CreateMigrationWorkflowRequest, dict]):
+                The request object. Request to create a migration
+                workflow resource.
+            parent (str):
+                Required. The name of the project to which this
+                migration workflow belongs. Example:
+                ``projects/foo/locations/bar``
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            migration_workflow (google.cloud.bigquery.migration_v2alpha.types.MigrationWorkflow):
+                Required. The migration workflow to
+                create.
+
+                This corresponds to the ``migration_workflow`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery.migration_v2alpha.types.MigrationWorkflow:
+                A migration workflow which specifies
+                what needs to be done for an EDW
+                migration.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, migration_workflow])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a migration_service.CreateMigrationWorkflowRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, migration_service.CreateMigrationWorkflowRequest):
+            request = migration_service.CreateMigrationWorkflowRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+            if migration_workflow is not None:
+                request.migration_workflow = migration_workflow
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[
+            self._transport.create_migration_workflow
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
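+    # Hedged sketch of the dict form accepted by the Union[..., dict] signature
+    # above; the mapping is coerced into a CreateMigrationWorkflowRequest by the
+    # isinstance branch ("my-project" is a placeholder):
+    #
+    #     created = client.create_migration_workflow(
+    #         request={
+    #             "parent": "projects/my-project/locations/us",
+    #             "migration_workflow": {},
+    #         },
+    #     )
+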
+    def get_migration_workflow(
+        self,
+        request: Union[migration_service.GetMigrationWorkflowRequest, dict] = None,
+        *,
+        name: str = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> migration_entities.MigrationWorkflow:
+        r"""Gets a previously created migration workflow.
+
+        Args:
+            request (Union[google.cloud.bigquery.migration_v2alpha.types.GetMigrationWorkflowRequest, dict]):
+                The request object. A request to get a previously
+                created migration workflow.
+            name (str):
+                Required. The unique identifier for the migration
+                workflow. Example:
+                ``projects/123/locations/us/workflows/1234``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery.migration_v2alpha.types.MigrationWorkflow:
+                A migration workflow which specifies
+                what needs to be done for an EDW
+                migration.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a migration_service.GetMigrationWorkflowRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, migration_service.GetMigrationWorkflowRequest):
+            request = migration_service.GetMigrationWorkflowRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_migration_workflow]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    def list_migration_workflows(
+        self,
+        request: Union[migration_service.ListMigrationWorkflowsRequest, dict] = None,
+        *,
+        parent: str = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListMigrationWorkflowsPager:
+        r"""Lists previously created migration workflow.
+
+        Args:
+            request (Union[google.cloud.bigquery.migration_v2alpha.types.ListMigrationWorkflowsRequest, dict]):
+                The request object. A request to list previously created
+                migration workflows.
+            parent (str):
+                Required. The project and location of the migration
+                workflows to list. Example:
+                ``projects/123/locations/us``
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery.migration_v2alpha.services.migration_service.pagers.ListMigrationWorkflowsPager:
+                Response object for a ListMigrationWorkflows call.
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a migration_service.ListMigrationWorkflowsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, migration_service.ListMigrationWorkflowsRequest):
+            request = migration_service.ListMigrationWorkflowsRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_migration_workflows]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListMigrationWorkflowsPager(
+            method=rpc, request=request, response=response, metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
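+    # Hedged sketch of page-level iteration over the ListMigrationWorkflowsPager
+    # above; `pages` is the standard pager attribute, and the repeated field name
+    # on each response page is assumed to be `migration_workflows`:
+    #
+    #     pager = client.list_migration_workflows(parent="projects/my-project/locations/us")
+    #     for page in pager.pages:
+    #         for workflow in page.migration_workflows:
+    #             print(workflow.name)
+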
+    def delete_migration_workflow(
+        self,
+        request: Union[migration_service.DeleteMigrationWorkflowRequest, dict] = None,
+        *,
+        name: str = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> None:
+        r"""Deletes a migration workflow by name.
+
+        Args:
+            request (Union[google.cloud.bigquery.migration_v2alpha.types.DeleteMigrationWorkflowRequest, dict]):
+                The request object. A request to delete a previously
+                created migration workflow.
+            name (str):
+                Required. The unique identifier for the migration
+                workflow. Example:
+                ``projects/123/locations/us/workflows/1234``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a migration_service.DeleteMigrationWorkflowRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, migration_service.DeleteMigrationWorkflowRequest):
+            request = migration_service.DeleteMigrationWorkflowRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[
+            self._transport.delete_migration_workflow
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,
+        )
+
+    def start_migration_workflow(
+        self,
+        request: Union[migration_service.StartMigrationWorkflowRequest, dict] = None,
+        *,
+        name: str = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> None:
+        r"""Starts a previously created migration workflow. I.e.,
+        the state transitions from DRAFT to RUNNING. This is a
+        no-op if the state is already RUNNING. An error will be
+        signaled if the state is anything other than DRAFT or
+        RUNNING.
+
+        Args:
+            request (Union[google.cloud.bigquery.migration_v2alpha.types.StartMigrationWorkflowRequest, dict]):
+                The request object. A request to start a previously
+                created migration workflow.
+            name (str):
+                Required. The unique identifier for the migration
+                workflow. Example:
+                ``projects/123/locations/us/workflows/1234``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a migration_service.StartMigrationWorkflowRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, migration_service.StartMigrationWorkflowRequest):
+            request = migration_service.StartMigrationWorkflowRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.start_migration_workflow]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,
+        )
+
+    def get_migration_subtask(
+        self,
+        request: Union[migration_service.GetMigrationSubtaskRequest, dict] = None,
+        *,
+        name: str = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> migration_entities.MigrationSubtask:
+        r"""Gets a previously created migration subtask.
+
+        Args:
+            request (Union[google.cloud.bigquery.migration_v2alpha.types.GetMigrationSubtaskRequest, dict]):
+                The request object. A request to get a previously
+                created migration subtask.
+            name (str):
+                Required. The unique identifier for the migration
+                subtask. Example:
+                ``projects/123/locations/us/workflows/1234/subtasks/543``
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery.migration_v2alpha.types.MigrationSubtask:
+                A subtask for a migration which
+                carries details about the configuration
+                of the subtask. The content of the
+                details should not matter to the end
+                user, but is a contract between the
+                subtask creator and subtask worker.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a migration_service.GetMigrationSubtaskRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, migration_service.GetMigrationSubtaskRequest):
+            request = migration_service.GetMigrationSubtaskRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_migration_subtask]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    def list_migration_subtasks(
+        self,
+        request: Union[migration_service.ListMigrationSubtasksRequest, dict] = None,
+        *,
+        parent: str = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListMigrationSubtasksPager:
+        r"""Lists previously created migration subtasks.
+
+        Args:
+            request (Union[google.cloud.bigquery.migration_v2alpha.types.ListMigrationSubtasksRequest, dict]):
+                The request object. A request to list previously created
+                migration subtasks.
+            parent (str):
+                Required. The migration task of the subtasks to list.
+                Example: ``projects/123/locations/us/workflows/1234``
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.bigquery.migration_v2alpha.services.migration_service.pagers.ListMigrationSubtasksPager:
+                Response object for a ListMigrationSubtasks call.
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a migration_service.ListMigrationSubtasksRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, migration_service.ListMigrationSubtasksRequest):
+            request = migration_service.ListMigrationSubtasksRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_migration_subtasks]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListMigrationSubtasksPager(
+            method=rpc, request=request, response=response, metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+
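+# A minimal usage sketch (illustrative only, not part of the generated surface;
+# the resource names below are assumptions).
+def _example_usage():
+    client = MigrationServiceClient()
+    subtask = client.get_migration_subtask(
+        name="projects/123/locations/us/workflows/1234/subtasks/543"
+    )
+    # The pager fetches additional pages transparently while iterating.
+    pager = client.list_migration_subtasks(
+        parent="projects/123/locations/us/workflows/1234"
+    )
+    return subtask, [s.name for s in pager]
+
+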
+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution(
+            "google-cloud-bigquery-migration",
+        ).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+__all__ = ("MigrationServiceClient",)
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/pagers.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/pagers.py
new file mode 100644
index 000000000000..fcfa97daef05
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/pagers.py
@@ -0,0 +1,292 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import (
+    Any,
+    AsyncIterable,
+    Awaitable,
+    Callable,
+    Iterable,
+    Sequence,
+    Tuple,
+    Optional,
+)
+
+from google.cloud.bigquery.migration_v2alpha.types import migration_entities
+from google.cloud.bigquery.migration_v2alpha.types import migration_service
+
+
+class ListMigrationWorkflowsPager:
+    """A pager for iterating through ``list_migration_workflows`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.bigquery.migration_v2alpha.types.ListMigrationWorkflowsResponse` object, and
+    provides an ``__iter__`` method to iterate through its
+    ``migration_workflows`` field.
+
+    If there are more pages, the ``__iter__`` method will make additional
+    ``ListMigrationWorkflows`` requests and continue to iterate
+    through the ``migration_workflows`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.bigquery.migration_v2alpha.types.ListMigrationWorkflowsResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+
+    def __init__(
+        self,
+        method: Callable[..., migration_service.ListMigrationWorkflowsResponse],
+        request: migration_service.ListMigrationWorkflowsRequest,
+        response: migration_service.ListMigrationWorkflowsResponse,
+        *,
+        metadata: Sequence[Tuple[str, str]] = ()
+    ):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.bigquery.migration_v2alpha.types.ListMigrationWorkflowsRequest):
+                The initial request object.
+            response (google.cloud.bigquery.migration_v2alpha.types.ListMigrationWorkflowsResponse):
+                The initial response object.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = migration_service.ListMigrationWorkflowsRequest(request)
+        self._response = response
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    def pages(self) -> Iterable[migration_service.ListMigrationWorkflowsResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = self._method(self._request, metadata=self._metadata)
+            yield self._response
+
+    def __iter__(self) -> Iterable[migration_entities.MigrationWorkflow]:
+        for page in self.pages:
+            yield from page.migration_workflows
+
+    def __repr__(self) -> str:
+        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
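+# Illustrative sketch (an assumption, not generated code): because ``__getattr__``
+# above delegates to the most recently fetched response, response attributes such
+# as ``next_page_token`` can be read directly off the pager.
+def _latest_page_token(pager: "ListMigrationWorkflowsPager") -> str:
+    return pager.next_page_token
+
+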
+class ListMigrationWorkflowsAsyncPager:
+    """A pager for iterating through ``list_migration_workflows`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.bigquery.migration_v2alpha.types.ListMigrationWorkflowsResponse` object, and
+    provides an ``__aiter__`` method to iterate through its
+    ``migration_workflows`` field.
+
+    If there are more pages, the ``__aiter__`` method will make additional
+    ``ListMigrationWorkflows`` requests and continue to iterate
+    through the ``migration_workflows`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.bigquery.migration_v2alpha.types.ListMigrationWorkflowsResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+
+    def __init__(
+        self,
+        method: Callable[
+            ..., Awaitable[migration_service.ListMigrationWorkflowsResponse]
+        ],
+        request: migration_service.ListMigrationWorkflowsRequest,
+        response: migration_service.ListMigrationWorkflowsResponse,
+        *,
+        metadata: Sequence[Tuple[str, str]] = ()
+    ):
+        """Instantiates the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.bigquery.migration_v2alpha.types.ListMigrationWorkflowsRequest):
+                The initial request object.
+            response (google.cloud.bigquery.migration_v2alpha.types.ListMigrationWorkflowsResponse):
+                The initial response object.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = migration_service.ListMigrationWorkflowsRequest(request)
+        self._response = response
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    async def pages(
+        self,
+    ) -> AsyncIterable[migration_service.ListMigrationWorkflowsResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, metadata=self._metadata)
+            yield self._response
+
+    def __aiter__(self) -> AsyncIterable[migration_entities.MigrationWorkflow]:
+        async def async_generator():
+            async for page in self.pages:
+                for response in page.migration_workflows:
+                    yield response
+
+        return async_generator()
+
+    def __repr__(self) -> str:
+        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
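+# Illustrative sketch (an assumption, not generated code): drain an async pager
+# by iterating items; ``__aiter__`` resolves further pages transparently.
+async def _collect_workflows(pager: "ListMigrationWorkflowsAsyncPager"):
+    return [workflow async for workflow in pager]
+
+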
+class ListMigrationSubtasksPager:
+    """A pager for iterating through ``list_migration_subtasks`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.bigquery.migration_v2alpha.types.ListMigrationSubtasksResponse` object, and
+    provides an ``__iter__`` method to iterate through its
+    ``migration_subtasks`` field.
+
+    If there are more pages, the ``__iter__`` method will make additional
+    ``ListMigrationSubtasks`` requests and continue to iterate
+    through the ``migration_subtasks`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.bigquery.migration_v2alpha.types.ListMigrationSubtasksResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+
+    def __init__(
+        self,
+        method: Callable[..., migration_service.ListMigrationSubtasksResponse],
+        request: migration_service.ListMigrationSubtasksRequest,
+        response: migration_service.ListMigrationSubtasksResponse,
+        *,
+        metadata: Sequence[Tuple[str, str]] = ()
+    ):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.bigquery.migration_v2alpha.types.ListMigrationSubtasksRequest):
+                The initial request object.
+            response (google.cloud.bigquery.migration_v2alpha.types.ListMigrationSubtasksResponse):
+                The initial response object.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = migration_service.ListMigrationSubtasksRequest(request)
+        self._response = response
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    def pages(self) -> Iterable[migration_service.ListMigrationSubtasksResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = self._method(self._request, metadata=self._metadata)
+            yield self._response
+
+    def __iter__(self) -> Iterable[migration_entities.MigrationSubtask]:
+        for page in self.pages:
+            yield from page.migration_subtasks
+
+    def __repr__(self) -> str:
+        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class ListMigrationSubtasksAsyncPager:
+    """A pager for iterating through ``list_migration_subtasks`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.cloud.bigquery.migration_v2alpha.types.ListMigrationSubtasksResponse` object, and
+    provides an ``__aiter__`` method to iterate through its
+    ``migration_subtasks`` field.
+
+    If there are more pages, the ``__aiter__`` method will make additional
+    ``ListMigrationSubtasks`` requests and continue to iterate
+    through the ``migration_subtasks`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.cloud.bigquery.migration_v2alpha.types.ListMigrationSubtasksResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+
+    def __init__(
+        self,
+        method: Callable[
+            ..., Awaitable[migration_service.ListMigrationSubtasksResponse]
+        ],
+        request: migration_service.ListMigrationSubtasksRequest,
+        response: migration_service.ListMigrationSubtasksResponse,
+        *,
+        metadata: Sequence[Tuple[str, str]] = ()
+    ):
+        """Instantiates the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.cloud.bigquery.migration_v2alpha.types.ListMigrationSubtasksRequest):
+                The initial request object.
+            response (google.cloud.bigquery.migration_v2alpha.types.ListMigrationSubtasksResponse):
+                The initial response object.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        self._request = migration_service.ListMigrationSubtasksRequest(request)
+        self._response = response
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._response, name)
+
+    @property
+    async def pages(
+        self,
+    ) -> AsyncIterable[migration_service.ListMigrationSubtasksResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, metadata=self._metadata)
+            yield self._response
+
+    def __aiter__(self) -> AsyncIterable[migration_entities.MigrationSubtask]:
+        async def async_generator():
+            async for page in self.pages:
+                for response in page.migration_subtasks:
+                    yield response
+
+        return async_generator()
+
+    def __repr__(self) -> str:
+        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
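+
+
+# Illustrative sketch (an assumption, not part of the generated module): walk
+# pages explicitly via ``pages`` to reach whole-response attributes instead of
+# flattening items with ``__iter__``.
+def _page_sizes(pager: "ListMigrationSubtasksPager"):
+    # Each ``page`` is a full ListMigrationSubtasksResponse; the pager consumes
+    # next_page_token and re-issues the request between pages.
+    return [len(page.migration_subtasks) for page in pager.pages]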
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/transports/__init__.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/transports/__init__.py
new file mode 100644
index 000000000000..565048f2a351
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/transports/__init__.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import MigrationServiceTransport
+from .grpc import MigrationServiceGrpcTransport
+from .grpc_asyncio import MigrationServiceGrpcAsyncIOTransport
+
+
+# Compile a registry of transports.
+_transport_registry = OrderedDict()  # type: Dict[str, Type[MigrationServiceTransport]]
+_transport_registry["grpc"] = MigrationServiceGrpcTransport
+_transport_registry["grpc_asyncio"] = MigrationServiceGrpcAsyncIOTransport
+
+__all__ = (
+    "MigrationServiceTransport",
+    "MigrationServiceGrpcTransport",
+    "MigrationServiceGrpcAsyncIOTransport",
+)
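+
+
+# Illustrative sketch (an assumption, not generated code): the registry above is
+# what allows a transport to be selected by its short name.
+def _resolve_transport(label: str) -> Type[MigrationServiceTransport]:
+    return _transport_registry[label]  # e.g. "grpc" or "grpc_asyncio"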
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/transports/base.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/transports/base.py
new file mode 100644
index 000000000000..6136fafb2843
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/transports/base.py
@@ -0,0 +1,320 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+import packaging.version
+import pkg_resources
+
+import google.auth  # type: ignore
+import google.api_core  # type: ignore
+from google.api_core import exceptions as core_exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.bigquery.migration_v2alpha.types import migration_entities
+from google.cloud.bigquery.migration_v2alpha.types import migration_service
+from google.protobuf import empty_pb2  # type: ignore
+
+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution(
+            "google-cloud-bigquery-migration",
+        ).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+try:
+    # google.auth.__version__ was added in 1.26.0
+    _GOOGLE_AUTH_VERSION = google.auth.__version__
+except AttributeError:
+    try:  # try pkg_resources if it is available
+        _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
+    except pkg_resources.DistributionNotFound:  # pragma: NO COVER
+        _GOOGLE_AUTH_VERSION = None
+
+
+class MigrationServiceTransport(abc.ABC):
+    """Abstract transport class for MigrationService."""
+
+    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
+
+    DEFAULT_HOST: str = "bigquerymigration.googleapis.com"
+
+    def __init__(
+        self,
+        *,
+        host: str = DEFAULT_HOST,
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+        """
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+        scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+    # TODO(busunkim): This method is in the base transport
+    # to avoid duplicating code across the transport classes. These functions
+    # should be deleted once the minimum required version of google-auth is increased.
+
+    # TODO: Remove this function once google-auth >= 1.25.0 is required
+    @classmethod
+    def _get_scopes_kwargs(
+        cls, host: str, scopes: Optional[Sequence[str]]
+    ) -> Dict[str, Optional[Sequence[str]]]:
+        """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""
+
+        scopes_kwargs = {}
+
+        if _GOOGLE_AUTH_VERSION and (
+            packaging.version.parse(_GOOGLE_AUTH_VERSION)
+            >= packaging.version.parse("1.25.0")
+        ):
+            scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
+        else:
+            scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}
+
+        return scopes_kwargs
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+        self._wrapped_methods = {
+            self.create_migration_workflow: gapic_v1.method.wrap_method(
+                self.create_migration_workflow,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_migration_workflow: gapic_v1.method.wrap_method(
+                self.get_migration_workflow,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=120.0,
+                ),
+                default_timeout=120.0,
+                client_info=client_info,
+            ),
+            self.list_migration_workflows: gapic_v1.method.wrap_method(
+                self.list_migration_workflows,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=120.0,
+                ),
+                default_timeout=120.0,
+                client_info=client_info,
+            ),
+            self.delete_migration_workflow: gapic_v1.method.wrap_method(
+                self.delete_migration_workflow,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.start_migration_workflow: gapic_v1.method.wrap_method(
+                self.start_migration_workflow,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=120.0,
+                ),
+                default_timeout=120.0,
+                client_info=client_info,
+            ),
+            self.get_migration_subtask: gapic_v1.method.wrap_method(
+                self.get_migration_subtask,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=120.0,
+                ),
+                default_timeout=120.0,
+                client_info=client_info,
+            ),
+            self.list_migration_subtasks: gapic_v1.method.wrap_method(
+                self.list_migration_subtasks,
+                default_retry=retries.Retry(
+                    initial=1.0,
+                    maximum=10.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=120.0,
+                ),
+                default_timeout=120.0,
+                client_info=client_info,
+            ),
+        }
+
+    @property
+    def create_migration_workflow(
+        self,
+    ) -> Callable[
+        [migration_service.CreateMigrationWorkflowRequest],
+        Union[
+            migration_entities.MigrationWorkflow,
+            Awaitable[migration_entities.MigrationWorkflow],
+        ],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def get_migration_workflow(
+        self,
+    ) -> Callable[
+        [migration_service.GetMigrationWorkflowRequest],
+        Union[
+            migration_entities.MigrationWorkflow,
+            Awaitable[migration_entities.MigrationWorkflow],
+        ],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def list_migration_workflows(
+        self,
+    ) -> Callable[
+        [migration_service.ListMigrationWorkflowsRequest],
+        Union[
+            migration_service.ListMigrationWorkflowsResponse,
+            Awaitable[migration_service.ListMigrationWorkflowsResponse],
+        ],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def delete_migration_workflow(
+        self,
+    ) -> Callable[
+        [migration_service.DeleteMigrationWorkflowRequest],
+        Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def start_migration_workflow(
+        self,
+    ) -> Callable[
+        [migration_service.StartMigrationWorkflowRequest],
+        Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def get_migration_subtask(
+        self,
+    ) -> Callable[
+        [migration_service.GetMigrationSubtaskRequest],
+        Union[
+            migration_entities.MigrationSubtask,
+            Awaitable[migration_entities.MigrationSubtask],
+        ],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def list_migration_subtasks(
+        self,
+    ) -> Callable[
+        [migration_service.ListMigrationSubtasksRequest],
+        Union[
+            migration_service.ListMigrationSubtasksResponse,
+            Awaitable[migration_service.ListMigrationSubtasksResponse],
+        ],
+    ]:
+        raise NotImplementedError()
+
+
+__all__ = ("MigrationServiceTransport",)
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/transports/grpc.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/transports/grpc.py
new file mode 100644
index 000000000000..c8bd5beb52a7
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/transports/grpc.py
@@ -0,0 +1,432 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+
+from google.cloud.bigquery.migration_v2alpha.types import migration_entities
+from google.cloud.bigquery.migration_v2alpha.types import migration_service
+from google.protobuf import empty_pb2  # type: ignore
+from .base import MigrationServiceTransport, DEFAULT_CLIENT_INFO
+
+
+class MigrationServiceGrpcTransport(MigrationServiceTransport):
+    """gRPC backend transport for MigrationService.
+
+    Service to handle EDW migrations.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "bigquerymigration.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Sequence[str] = None,
+        channel: grpc.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                credentials=self._credentials,
+                credentials_file=credentials_file,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists.
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "bigquerymigration.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def create_migration_workflow(
+        self,
+    ) -> Callable[
+        [migration_service.CreateMigrationWorkflowRequest],
+        migration_entities.MigrationWorkflow,
+    ]:
+        r"""Return a callable for the create migration workflow method over gRPC.
+
+        Creates a migration workflow.
+
+        Returns:
+            Callable[[~.CreateMigrationWorkflowRequest],
+                    ~.MigrationWorkflow]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_migration_workflow" not in self._stubs:
+            self._stubs["create_migration_workflow"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.migration.v2alpha.MigrationService/CreateMigrationWorkflow",
+                request_serializer=migration_service.CreateMigrationWorkflowRequest.serialize,
+                response_deserializer=migration_entities.MigrationWorkflow.deserialize,
+            )
+        return self._stubs["create_migration_workflow"]
+
+    @property
+    def get_migration_workflow(
+        self,
+    ) -> Callable[
+        [migration_service.GetMigrationWorkflowRequest],
+        migration_entities.MigrationWorkflow,
+    ]:
+        r"""Return a callable for the get migration workflow method over gRPC.
+
+        Gets a previously created migration workflow.
+
+        Returns:
+            Callable[[~.GetMigrationWorkflowRequest],
+                    ~.MigrationWorkflow]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_migration_workflow" not in self._stubs:
+            self._stubs["get_migration_workflow"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.migration.v2alpha.MigrationService/GetMigrationWorkflow",
+                request_serializer=migration_service.GetMigrationWorkflowRequest.serialize,
+                response_deserializer=migration_entities.MigrationWorkflow.deserialize,
+            )
+        return self._stubs["get_migration_workflow"]
+
+    @property
+    def list_migration_workflows(
+        self,
+    ) -> Callable[
+        [migration_service.ListMigrationWorkflowsRequest],
+        migration_service.ListMigrationWorkflowsResponse,
+    ]:
+        r"""Return a callable for the list migration workflows method over gRPC.
+
+        Lists previously created migration workflows.
+
+        Returns:
+            Callable[[~.ListMigrationWorkflowsRequest],
+                    ~.ListMigrationWorkflowsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_migration_workflows" not in self._stubs:
+            self._stubs["list_migration_workflows"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.migration.v2alpha.MigrationService/ListMigrationWorkflows",
+                request_serializer=migration_service.ListMigrationWorkflowsRequest.serialize,
+                response_deserializer=migration_service.ListMigrationWorkflowsResponse.deserialize,
+            )
+        return self._stubs["list_migration_workflows"]
+
+    @property
+    def delete_migration_workflow(
+        self,
+    ) -> Callable[[migration_service.DeleteMigrationWorkflowRequest], empty_pb2.Empty]:
+        r"""Return a callable for the delete migration workflow method over gRPC.
+
+        Deletes a migration workflow by name.
+
+        Returns:
+            Callable[[~.DeleteMigrationWorkflowRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_migration_workflow" not in self._stubs:
+            self._stubs["delete_migration_workflow"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.migration.v2alpha.MigrationService/DeleteMigrationWorkflow",
+                request_serializer=migration_service.DeleteMigrationWorkflowRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs["delete_migration_workflow"]
+
+    @property
+    def start_migration_workflow(
+        self,
+    ) -> Callable[[migration_service.StartMigrationWorkflowRequest], empty_pb2.Empty]:
+        r"""Return a callable for the start migration workflow method over gRPC.
+
+        Starts a previously created migration workflow. I.e.,
+        the state transitions from DRAFT to RUNNING. This is a
+        no-op if the state is already RUNNING. An error will be
+        signaled if the state is anything other than DRAFT or
+        RUNNING.
+
+        Returns:
+            Callable[[~.StartMigrationWorkflowRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "start_migration_workflow" not in self._stubs:
+            self._stubs["start_migration_workflow"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.migration.v2alpha.MigrationService/StartMigrationWorkflow",
+                request_serializer=migration_service.StartMigrationWorkflowRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs["start_migration_workflow"]
+
+    @property
+    def get_migration_subtask(
+        self,
+    ) -> Callable[
+        [migration_service.GetMigrationSubtaskRequest],
+        migration_entities.MigrationSubtask,
+    ]:
+        r"""Return a callable for the get migration subtask method over gRPC.
+
+        Gets a previously created migration subtask.
+
+        Returns:
+            Callable[[~.GetMigrationSubtaskRequest],
+                    ~.MigrationSubtask]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_migration_subtask" not in self._stubs:
+            self._stubs["get_migration_subtask"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.migration.v2alpha.MigrationService/GetMigrationSubtask",
+                request_serializer=migration_service.GetMigrationSubtaskRequest.serialize,
+                response_deserializer=migration_entities.MigrationSubtask.deserialize,
+            )
+        return self._stubs["get_migration_subtask"]
+
+    @property
+    def list_migration_subtasks(
+        self,
+    ) -> Callable[
+        [migration_service.ListMigrationSubtasksRequest],
+        migration_service.ListMigrationSubtasksResponse,
+    ]:
+        r"""Return a callable for the list migration subtasks method over gRPC.
+
+        Lists previously created migration subtasks.
+
+        Returns:
+            Callable[[~.ListMigrationSubtasksRequest],
+                    ~.ListMigrationSubtasksResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_migration_subtasks" not in self._stubs:
+            self._stubs["list_migration_subtasks"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.migration.v2alpha.MigrationService/ListMigrationSubtasks",
+                request_serializer=migration_service.ListMigrationSubtasksRequest.serialize,
+                response_deserializer=migration_service.ListMigrationSubtasksResponse.deserialize,
+            )
+        return self._stubs["list_migration_subtasks"]
+
+
+__all__ = ("MigrationServiceGrpcTransport",)
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/transports/grpc_asyncio.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/transports/grpc_asyncio.py
new file mode 100644
index 000000000000..82d69f9ccb2f
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/services/migration_service/transports/grpc_asyncio.py
@@ -0,0 +1,439 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import grpc_helpers_async  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+import packaging.version
+
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.bigquery.migration_v2alpha.types import migration_entities
+from google.cloud.bigquery.migration_v2alpha.types import migration_service
+from google.protobuf import empty_pb2  # type: ignore
+from .base import MigrationServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import MigrationServiceGrpcTransport
+
+
+class MigrationServiceGrpcAsyncIOTransport(MigrationServiceTransport):
+    """gRPC AsyncIO backend transport for MigrationService.
+
+    Service to handle EDW migrations.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "bigquerymigration.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "bigquerymigration.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: aio.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+        quota_project_id=None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+              creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                credentials=self._credentials,
+                credentials_file=credentials_file,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def create_migration_workflow(
+        self,
+    ) -> Callable[
+        [migration_service.CreateMigrationWorkflowRequest],
+        Awaitable[migration_entities.MigrationWorkflow],
+    ]:
+        r"""Return a callable for the create migration workflow method over gRPC.
+
+        Creates a migration workflow.
+
+        Returns:
+            Callable[[~.CreateMigrationWorkflowRequest],
+                    Awaitable[~.MigrationWorkflow]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_migration_workflow" not in self._stubs:
+            self._stubs["create_migration_workflow"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.migration.v2alpha.MigrationService/CreateMigrationWorkflow",
+                request_serializer=migration_service.CreateMigrationWorkflowRequest.serialize,
+                response_deserializer=migration_entities.MigrationWorkflow.deserialize,
+            )
+        return self._stubs["create_migration_workflow"]
+
+    @property
+    def get_migration_workflow(
+        self,
+    ) -> Callable[
+        [migration_service.GetMigrationWorkflowRequest],
+        Awaitable[migration_entities.MigrationWorkflow],
+    ]:
+        r"""Return a callable for the get migration workflow method over gRPC.
+
+        Gets a previously created migration workflow.
+
+        Returns:
+            Callable[[~.GetMigrationWorkflowRequest],
+                    Awaitable[~.MigrationWorkflow]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_migration_workflow" not in self._stubs:
+            self._stubs["get_migration_workflow"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.migration.v2alpha.MigrationService/GetMigrationWorkflow",
+                request_serializer=migration_service.GetMigrationWorkflowRequest.serialize,
+                response_deserializer=migration_entities.MigrationWorkflow.deserialize,
+            )
+        return self._stubs["get_migration_workflow"]
+
+    @property
+    def list_migration_workflows(
+        self,
+    ) -> Callable[
+        [migration_service.ListMigrationWorkflowsRequest],
+        Awaitable[migration_service.ListMigrationWorkflowsResponse],
+    ]:
+        r"""Return a callable for the list migration workflows method over gRPC.
+
+        Lists previously created migration workflows.
+
+        Returns:
+            Callable[[~.ListMigrationWorkflowsRequest],
+                    Awaitable[~.ListMigrationWorkflowsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_migration_workflows" not in self._stubs:
+            self._stubs["list_migration_workflows"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.migration.v2alpha.MigrationService/ListMigrationWorkflows",
+                request_serializer=migration_service.ListMigrationWorkflowsRequest.serialize,
+                response_deserializer=migration_service.ListMigrationWorkflowsResponse.deserialize,
+            )
+        return self._stubs["list_migration_workflows"]
+
+    @property
+    def delete_migration_workflow(
+        self,
+    ) -> Callable[
+        [migration_service.DeleteMigrationWorkflowRequest], Awaitable[empty_pb2.Empty]
+    ]:
+        r"""Return a callable for the delete migration workflow method over gRPC.
+
+        Deletes a migration workflow by name.
+
+        Returns:
+            Callable[[~.DeleteMigrationWorkflowRequest],
+                    Awaitable[~.Empty]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_migration_workflow" not in self._stubs:
+            self._stubs["delete_migration_workflow"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.migration.v2alpha.MigrationService/DeleteMigrationWorkflow",
+                request_serializer=migration_service.DeleteMigrationWorkflowRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs["delete_migration_workflow"]
+
+    @property
+    def start_migration_workflow(
+        self,
+    ) -> Callable[
+        [migration_service.StartMigrationWorkflowRequest], Awaitable[empty_pb2.Empty]
+    ]:
+        r"""Return a callable for the start migration workflow method over gRPC.
+
+        Starts a previously created migration workflow. I.e.,
+        the state transitions from DRAFT to RUNNING. This is a
+        no-op if the state is already RUNNING. An error will be
+        signaled if the state is anything other than DRAFT or
+        RUNNING.
+
+        Returns:
+            Callable[[~.StartMigrationWorkflowRequest],
+                    Awaitable[~.Empty]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "start_migration_workflow" not in self._stubs:
+            self._stubs["start_migration_workflow"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.migration.v2alpha.MigrationService/StartMigrationWorkflow",
+                request_serializer=migration_service.StartMigrationWorkflowRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs["start_migration_workflow"]
+
+    @property
+    def get_migration_subtask(
+        self,
+    ) -> Callable[
+        [migration_service.GetMigrationSubtaskRequest],
+        Awaitable[migration_entities.MigrationSubtask],
+    ]:
+        r"""Return a callable for the get migration subtask method over gRPC.
+
+        Gets a previously created migration subtask.
+
+        Returns:
+            Callable[[~.GetMigrationSubtaskRequest],
+                    Awaitable[~.MigrationSubtask]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_migration_subtask" not in self._stubs:
+            self._stubs["get_migration_subtask"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.migration.v2alpha.MigrationService/GetMigrationSubtask",
+                request_serializer=migration_service.GetMigrationSubtaskRequest.serialize,
+                response_deserializer=migration_entities.MigrationSubtask.deserialize,
+            )
+        return self._stubs["get_migration_subtask"]
+
+    @property
+    def list_migration_subtasks(
+        self,
+    ) -> Callable[
+        [migration_service.ListMigrationSubtasksRequest],
+        Awaitable[migration_service.ListMigrationSubtasksResponse],
+    ]:
+        r"""Return a callable for the list migration subtasks method over gRPC.
+
+        Lists previously created migration subtasks.
+
+        Returns:
+            Callable[[~.ListMigrationSubtasksRequest],
+                    Awaitable[~.ListMigrationSubtasksResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_migration_subtasks" not in self._stubs:
+            self._stubs["list_migration_subtasks"] = self.grpc_channel.unary_unary(
+                "/google.cloud.bigquery.migration.v2alpha.MigrationService/ListMigrationSubtasks",
+                request_serializer=migration_service.ListMigrationSubtasksRequest.serialize,
+                response_deserializer=migration_service.ListMigrationSubtasksResponse.deserialize,
+            )
+        return self._stubs["list_migration_subtasks"]
+
+
+__all__ = ("MigrationServiceGrpcAsyncIOTransport",)
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/__init__.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/__init__.py
new file mode 100644
index 000000000000..8a7d29f753d8
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/__init__.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from .migration_entities import (
+    MigrationSubtask,
+    MigrationTask,
+    MigrationWorkflow,
+)
+from .migration_error_details import (
+    ErrorDetail,
+    ErrorLocation,
+    ResourceErrorDetail,
+)
+from .migration_metrics import (
+    Point,
+    TimeInterval,
+    TimeSeries,
+    TypedValue,
+)
+from .migration_service import (
+    CreateMigrationWorkflowRequest,
+    DeleteMigrationWorkflowRequest,
+    GetMigrationSubtaskRequest,
+    GetMigrationWorkflowRequest,
+    ListMigrationSubtasksRequest,
+    ListMigrationSubtasksResponse,
+    ListMigrationWorkflowsRequest,
+    ListMigrationWorkflowsResponse,
+    StartMigrationWorkflowRequest,
+)
+
+__all__ = (
+    "MigrationSubtask",
+    "MigrationTask",
+    "MigrationWorkflow",
+    "ErrorDetail",
+    "ErrorLocation",
+    "ResourceErrorDetail",
+    "Point",
+    "TimeInterval",
+    "TimeSeries",
+    "TypedValue",
+    "CreateMigrationWorkflowRequest",
+    "DeleteMigrationWorkflowRequest",
+    "GetMigrationSubtaskRequest",
+    "GetMigrationWorkflowRequest",
+    "ListMigrationSubtasksRequest",
+    "ListMigrationSubtasksResponse",
+    "ListMigrationWorkflowsRequest",
+    "ListMigrationWorkflowsResponse",
+    "StartMigrationWorkflowRequest",
+)
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/migration_entities.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/migration_entities.py
new file mode 100644
index 000000000000..0ce56d4cce3d
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/migration_entities.py
@@ -0,0 +1,205 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import proto  # type: ignore
+
+from google.cloud.bigquery.migration_v2alpha.types import migration_error_details
+from google.cloud.bigquery.migration_v2alpha.types import migration_metrics
+from google.protobuf import any_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.rpc import error_details_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package="google.cloud.bigquery.migration.v2alpha",
+    manifest={"MigrationWorkflow", "MigrationTask", "MigrationSubtask",},
+)
+
+
+class MigrationWorkflow(proto.Message):
+    r"""A migration workflow which specifies what needs to be done
+    for an EDW migration.
+
+    Attributes:
+        name (str):
+            Output only. Immutable. The unique identifier for the
+            migration workflow. The ID is server-generated.
+
+            Example: ``projects/123/locations/us/workflows/345``
+        display_name (str):
+            The display name of the workflow. This can be
+            set to give a workflow a descriptive name. There
+            is no guarantee or enforcement of uniqueness.
+        tasks (Sequence[google.cloud.bigquery.migration_v2alpha.types.MigrationWorkflow.TasksEntry]):
+            The tasks in a workflow in a named map. The
+            name (i.e. key) has no meaning and is merely a
+            convenient way to address a specific task in a
+            workflow.
+        state (google.cloud.bigquery.migration_v2alpha.types.MigrationWorkflow.State):
+            Output only. The status of the workflow.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Time when the workflow was created.
+        last_update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Time when the workflow was last updated.
+    """
+
+    class State(proto.Enum):
+        r"""Possible migration workflow states."""
+        STATE_UNSPECIFIED = 0
+        DRAFT = 1
+        RUNNING = 2
+        PAUSED = 3
+        COMPLETED = 4
+
+    name = proto.Field(proto.STRING, number=1,)
+    display_name = proto.Field(proto.STRING, number=6,)
+    tasks = proto.MapField(
+        proto.STRING, proto.MESSAGE, number=2, message="MigrationTask",
+    )
+    state = proto.Field(proto.ENUM, number=3, enum=State,)
+    create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,)
+    last_update_time = proto.Field(
+        proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,
+    )
+
+
+class MigrationTask(proto.Message):
+    r"""A single task for a migration which has details about the
+    configuration of the task.
+
+    Attributes:
+        id (str):
+            Output only. Immutable. The unique identifier
+            for the migration task. The ID is
+            server-generated.
+        type_ (str):
+            The type of the task. This must be a
+            supported task type.
+        details (google.protobuf.any_pb2.Any):
+            The details of the task. The type URL must be
+            one of the supported task details messages and
+            correspond to the Task's type.
+        state (google.cloud.bigquery.migration_v2alpha.types.MigrationTask.State):
+            Output only. The current state of the task.
+        processing_error (google.rpc.error_details_pb2.ErrorInfo):
+            Output only. An explanation that may be
+            populated when the task is in FAILED state.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Time when the task was created.
+        last_update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Time when the task was last updated.
+    """
+
+    class State(proto.Enum):
+        r"""Possible states of a migration task."""
+        STATE_UNSPECIFIED = 0
+        PENDING = 1
+        ORCHESTRATING = 2
+        RUNNING = 3
+        PAUSED = 4
+        SUCCEEDED = 5
+        FAILED = 6
+
+    id = proto.Field(proto.STRING, number=1,)
+    type_ = proto.Field(proto.STRING, number=2,)
+    details = proto.Field(proto.MESSAGE, number=3, message=any_pb2.Any,)
+    state = proto.Field(proto.ENUM, number=4, enum=State,)
+    processing_error = proto.Field(
+        proto.MESSAGE, number=5, message=error_details_pb2.ErrorInfo,
+    )
+    create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,)
+    last_update_time = proto.Field(
+        proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp,
+    )
+
+
+class MigrationSubtask(proto.Message):
+    r"""A subtask for a migration which carries details about the
+    configuration of the subtask. The content of the details should
+    not matter to the end user, but is a contract between the
+    subtask creator and subtask worker.
+
+    Attributes:
+        name (str):
+            Output only. Immutable. The resource name for the migration
+            subtask. The ID is server-generated.
+
+            Example:
+            ``projects/123/locations/us/workflows/345/subtasks/678``
+        task_id (str):
+            The unique ID of the task to which this
+            subtask belongs.
+        type_ (str):
+            The type of the Subtask. The migration
+            service does not check whether this is a known
+            type. It is up to the task creator (i.e.
+            orchestrator or worker) to ensure it only
+            creates subtasks for which there are compatible
+            workers polling for Subtasks.
+        state (google.cloud.bigquery.migration_v2alpha.types.MigrationSubtask.State):
+            Output only. The current state of the
+            subtask.
+        processing_error (google.rpc.error_details_pb2.ErrorInfo):
+            Output only. An explanation that may be
+            populated when the task is in FAILED state.
+        resource_error_details (Sequence[google.cloud.bigquery.migration_v2alpha.types.ResourceErrorDetail]):
+            Output only. Provides details of errors and
+            issues encountered while processing the subtask.
+            Presence of error details does not mean that the
+            subtask failed.
+        resource_error_count (int):
+            The number of resources with errors. Note: This is not the
+            total number of errors as each resource can have more than
+            one error. This is used to indicate truncation by having a
+            ``resource_error_count`` that is higher than the size of
+            ``resource_error_details``.
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Time when the subtask was created.
+        last_update_time (google.protobuf.timestamp_pb2.Timestamp):
+            Time when the subtask was last updated.
+        metrics (Sequence[google.cloud.bigquery.migration_v2alpha.types.TimeSeries]):
+            The metrics for the subtask.
+    """
+
+    class State(proto.Enum):
+        r"""Possible states of a migration subtask."""
+        STATE_UNSPECIFIED = 0
+        ACTIVE = 1
+        RUNNING = 2
+        SUCCEEDED = 3
+        FAILED = 4
+        PAUSED = 5
+
+    name = proto.Field(proto.STRING, number=1,)
+    task_id = proto.Field(proto.STRING, number=2,)
+    type_ = proto.Field(proto.STRING, number=3,)
+    state = proto.Field(proto.ENUM, number=5, enum=State,)
+    processing_error = proto.Field(
+        proto.MESSAGE, number=6, message=error_details_pb2.ErrorInfo,
+    )
+    resource_error_details = proto.RepeatedField(
+        proto.MESSAGE, number=12, message=migration_error_details.ResourceErrorDetail,
+    )
+    resource_error_count = proto.Field(proto.INT32, number=13,)
+    create_time = proto.Field(proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp,)
+    last_update_time = proto.Field(
+        proto.MESSAGE, number=8, message=timestamp_pb2.Timestamp,
+    )
+    metrics = proto.RepeatedField(
+        proto.MESSAGE, number=11, message=migration_metrics.TimeSeries,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
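
As a quick illustration of the entity messages above (not part of the generated sources), the following proto-plus sketch builds a workflow with one task in the named map; the task type string is a placeholder, not a verified task type.

from google.cloud.bigquery.migration_v2alpha.types import (
    MigrationTask,
    MigrationWorkflow,
)

# The map key ("translate") is only a convenient handle; it carries no meaning.
workflow = MigrationWorkflow(
    display_name="example workflow",
    tasks={"translate": MigrationTask(type_="Translation_Teradata2BQ")},
)
print(MigrationWorkflow.to_json(workflow))
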
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/migration_error_details.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/migration_error_details.py
new file mode 100644
index 000000000000..c7d99e19d811
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/migration_error_details.py
@@ -0,0 +1,85 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import proto  # type: ignore
+
+from google.rpc import error_details_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package="google.cloud.bigquery.migration.v2alpha",
+    manifest={"ResourceErrorDetail", "ErrorDetail", "ErrorLocation",},
+)
+
+
+class ResourceErrorDetail(proto.Message):
+    r"""Provides details for errors and the corresponding resources.
+    Attributes:
+        resource_info (google.rpc.error_details_pb2.ResourceInfo):
+            Required. Information about the resource
+            where the error is located.
+        error_details (Sequence[google.cloud.bigquery.migration_v2alpha.types.ErrorDetail]):
+            Required. The error details for the resource.
+        error_count (int):
+            Required. How many errors there are in total for the
+            resource. Truncation can be indicated by having an
+            ``error_count`` that is higher than the size of
+            ``error_details``.
+    """
+
+    resource_info = proto.Field(
+        proto.MESSAGE, number=1, message=error_details_pb2.ResourceInfo,
+    )
+    error_details = proto.RepeatedField(proto.MESSAGE, number=2, message="ErrorDetail",)
+    error_count = proto.Field(proto.INT32, number=3,)
+
+
+class ErrorDetail(proto.Message):
+    r"""Provides details for errors, e.g. issues that where
+    encountered when processing a subtask.
+
+    Attributes:
+        location (google.cloud.bigquery.migration_v2alpha.types.ErrorLocation):
+            Optional. The exact location within the
+            resource (if applicable).
+        error_info (google.rpc.error_details_pb2.ErrorInfo):
+            Required. Describes the cause of the error
+            with structured detail.
+    """
+
+    location = proto.Field(proto.MESSAGE, number=1, message="ErrorLocation",)
+    error_info = proto.Field(
+        proto.MESSAGE, number=2, message=error_details_pb2.ErrorInfo,
+    )
+
+
+class ErrorLocation(proto.Message):
+    r"""Holds information about where the error is located.
+    Attributes:
+        line (int):
+            Optional. If applicable, denotes the line
+            where the error occurred. A zero value means
+            that there is no line information.
+        column (int):
+            Optional. If applicable, denotes the column
+            where the error occurred. A zero value means
+            that there is no column information.
+    """
+
+    line = proto.Field(proto.INT32, number=1,)
+    column = proto.Field(proto.INT32, number=2,)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
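
To make the truncation convention concrete (an illustrative sketch, not part of the patch): error_count may exceed the number of entries in error_details when details were dropped.

from google.cloud.bigquery.migration_v2alpha.types import (
    ErrorDetail,
    ErrorLocation,
    ResourceErrorDetail,
)

detail = ResourceErrorDetail(
    error_details=[ErrorDetail(location=ErrorLocation(line=12, column=3))],
    error_count=5,  # more errors exist for this resource than are listed
)
# True when the service truncated the per-resource error details.
print(detail.error_count > len(detail.error_details))
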
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/migration_metrics.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/migration_metrics.py
new file mode 100644
index 000000000000..30851946924e
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/migration_metrics.py
@@ -0,0 +1,138 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import proto  # type: ignore
+
+from google.api import distribution_pb2  # type: ignore
+from google.api import metric_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package="google.cloud.bigquery.migration.v2alpha",
+    manifest={"TimeSeries", "Point", "TimeInterval", "TypedValue",},
+)
+
+
+class TimeSeries(proto.Message):
+    r"""The metrics object for a SubTask.
+    Attributes:
+        metric (str):
+            Required. The name of the metric.
+            If the metric is not known by the service yet,
+            it will be auto-created.
+        value_type (google.api.metric_pb2.ValueType):
+            Required. The value type of the time series.
+        metric_kind (google.api.metric_pb2.MetricKind):
+            Optional. The metric kind of the time series.
+
+            If present, it must be the same as the metric kind of the
+            associated metric. If the associated metric's descriptor
+            must be auto-created, then this field specifies the metric
+            kind of the new descriptor and must be either ``GAUGE`` (the
+            default) or ``CUMULATIVE``.
+        points (Sequence[google.cloud.bigquery.migration_v2alpha.types.Point]):
+            Required. The data points of this time series. When listing
+            time series, points are returned in reverse time order.
+
+            When creating a time series, this field must contain exactly
+            one point and the point's type must be the same as the value
+            type of the associated metric. If the associated metric's
+            descriptor must be auto-created, then the value type of the
+            descriptor is determined by the point's type, which must be
+            ``BOOL``, ``INT64``, ``DOUBLE``, or ``DISTRIBUTION``.
+    """
+
+    metric = proto.Field(proto.STRING, number=1,)
+    value_type = proto.Field(
+        proto.ENUM, number=2, enum=metric_pb2.MetricDescriptor.ValueType,
+    )
+    metric_kind = proto.Field(
+        proto.ENUM, number=3, enum=metric_pb2.MetricDescriptor.MetricKind,
+    )
+    points = proto.RepeatedField(proto.MESSAGE, number=4, message="Point",)
+
+
+class Point(proto.Message):
+    r"""A single data point in a time series.
+    Attributes:
+        interval (google.cloud.bigquery.migration_v2alpha.types.TimeInterval):
+            The time interval to which the data point applies. For
+            ``GAUGE`` metrics, the start time does not need to be
+            supplied, but if it is supplied, it must equal the end time.
+            For ``DELTA`` metrics, the start and end time should specify
+            a non-zero interval, with subsequent points specifying
+            contiguous and non-overlapping intervals. For ``CUMULATIVE``
+            metrics, the start and end time should specify a non-zero
+            interval, with subsequent points specifying the same start
+            time and increasing end times, until an event resets the
+            cumulative value to zero and sets a new start time for the
+            following points.
+        value (google.cloud.bigquery.migration_v2alpha.types.TypedValue):
+            The value of the data point.
+    """
+
+    interval = proto.Field(proto.MESSAGE, number=1, message="TimeInterval",)
+    value = proto.Field(proto.MESSAGE, number=2, message="TypedValue",)
+
+
+class TimeInterval(proto.Message):
+    r"""A time interval extending just after a start time through an
+    end time. If the start time is the same as the end time, then
+    the interval represents a single point in time.
+
+    Attributes:
+        start_time (google.protobuf.timestamp_pb2.Timestamp):
+            Optional. The beginning of the time interval.
+            The default value for the start time is the end
+            time. The start time must not be later than the
+            end time.
+        end_time (google.protobuf.timestamp_pb2.Timestamp):
+            Required. The end of the time interval.
+    """
+
+    start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,)
+    end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,)
+
+
+class TypedValue(proto.Message):
+    r"""A single strongly-typed value.
+    Attributes:
+        bool_value (bool):
+            A Boolean value: ``true`` or ``false``.
+        int64_value (int):
+            A 64-bit integer. Its range is approximately
+            +/-9.2x10^18.
+        double_value (float):
+            A 64-bit double-precision floating-point
+            number. Its magnitude is approximately
+            +/-10^(+/-300) and it has 16 significant digits
+            of precision.
+        string_value (str):
+            A variable-length string value.
+        distribution_value (google.api.distribution_pb2.Distribution):
+            A distribution value.
+    """
+
+    bool_value = proto.Field(proto.BOOL, number=1, oneof="value",)
+    int64_value = proto.Field(proto.INT64, number=2, oneof="value",)
+    double_value = proto.Field(proto.DOUBLE, number=3, oneof="value",)
+    string_value = proto.Field(proto.STRING, number=4, oneof="value",)
+    distribution_value = proto.Field(
+        proto.MESSAGE, number=5, oneof="value", message=distribution_pb2.Distribution,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
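
The sketch below (illustrative only, not part of the generated sources) reports a single GAUGE data point using the metrics types above; the metric name is a placeholder.

from google.api import metric_pb2
from google.protobuf import timestamp_pb2

from google.cloud.bigquery.migration_v2alpha.types import (
    Point,
    TimeInterval,
    TimeSeries,
    TypedValue,
)

now = timestamp_pb2.Timestamp()
now.GetCurrentTime()

series = TimeSeries(
    metric="processed_rows",
    value_type=metric_pb2.MetricDescriptor.ValueType.INT64,
    points=[
        Point(
            # GAUGE points only need an end time for the interval.
            interval=TimeInterval(end_time=now),
            # Exactly one field of the TypedValue oneof is set.
            value=TypedValue(int64_value=1024),
        )
    ],
)
print(series)
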
diff --git a/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/migration_service.py b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/migration_service.py
new file mode 100644
index 000000000000..531063bf4ab2
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/google/cloud/bigquery/migration_v2alpha/types/migration_service.py
@@ -0,0 +1,211 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import proto  # type: ignore
+
+from google.cloud.bigquery.migration_v2alpha.types import migration_entities
+from google.protobuf import field_mask_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package="google.cloud.bigquery.migration.v2alpha",
+    manifest={
+        "CreateMigrationWorkflowRequest",
+        "GetMigrationWorkflowRequest",
+        "ListMigrationWorkflowsRequest",
+        "ListMigrationWorkflowsResponse",
+        "DeleteMigrationWorkflowRequest",
+        "StartMigrationWorkflowRequest",
+        "GetMigrationSubtaskRequest",
+        "ListMigrationSubtasksRequest",
+        "ListMigrationSubtasksResponse",
+    },
+)
+
+
+class CreateMigrationWorkflowRequest(proto.Message):
+    r"""Request to create a migration workflow resource.
+    Attributes:
+        parent (str):
+            Required. The name of the project to which this migration
+            workflow belongs. Example: ``projects/foo/locations/bar``
+        migration_workflow (google.cloud.bigquery.migration_v2alpha.types.MigrationWorkflow):
+            Required. The migration workflow to create.
+    """
+
+    parent = proto.Field(proto.STRING, number=1,)
+    migration_workflow = proto.Field(
+        proto.MESSAGE, number=2, message=migration_entities.MigrationWorkflow,
+    )
+
+
+class GetMigrationWorkflowRequest(proto.Message):
+    r"""A request to get a previously created migration workflow.
+    Attributes:
+        name (str):
+            Required. The unique identifier for the migration workflow.
+            Example: ``projects/123/locations/us/workflows/1234``
+        read_mask (google.protobuf.field_mask_pb2.FieldMask):
+            The list of fields to be retrieved.
+    """
+
+    name = proto.Field(proto.STRING, number=1,)
+    read_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask,)
+
+
+class ListMigrationWorkflowsRequest(proto.Message):
+    r"""A request to list previously created migration workflows.
+    Attributes:
+        parent (str):
+            Required. The project and location of the migration
+            workflows to list. Example: ``projects/123/locations/us``
+        read_mask (google.protobuf.field_mask_pb2.FieldMask):
+            The list of fields to be retrieved.
+        page_size (int):
+            The maximum number of migration workflows to
+            return. The service may return fewer than this
+            number.
+        page_token (str):
+            A page token, received from a previous
+            ``ListMigrationWorkflows`` call. Provide this to retrieve
+            the subsequent page.
+
+            When paginating, all other parameters provided to
+            ``ListMigrationWorkflows`` must match the call that provided
+            the page token.
+    """
+
+    parent = proto.Field(proto.STRING, number=1,)
+    read_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask,)
+    page_size = proto.Field(proto.INT32, number=3,)
+    page_token = proto.Field(proto.STRING, number=4,)
+
+
+class ListMigrationWorkflowsResponse(proto.Message):
+    r"""Response object for a ``ListMigrationWorkflows`` call.
+    Attributes:
+        migration_workflows (Sequence[google.cloud.bigquery.migration_v2alpha.types.MigrationWorkflow]):
+            The migration workflows for the specified
+            project / location.
+        next_page_token (str):
+            A token, which can be sent as ``page_token`` to retrieve the
+            next page. If this field is omitted, there are no subsequent
+            pages.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    migration_workflows = proto.RepeatedField(
+        proto.MESSAGE, number=1, message=migration_entities.MigrationWorkflow,
+    )
+    next_page_token = proto.Field(proto.STRING, number=2,)
+
+
+class DeleteMigrationWorkflowRequest(proto.Message):
+    r"""A request to delete a previously created migration workflow.
+    Attributes:
+        name (str):
+            Required. The unique identifier for the migration workflow.
+            Example: ``projects/123/locations/us/workflows/1234``
+    """
+
+    name = proto.Field(proto.STRING, number=1,)
+
+
+class StartMigrationWorkflowRequest(proto.Message):
+    r"""A request to start a previously created migration workflow.
+    Attributes:
+        name (str):
+            Required. The unique identifier for the migration workflow.
+            Example: ``projects/123/locations/us/workflows/1234``
+    """
+
+    name = proto.Field(proto.STRING, number=1,)
+
+
+class GetMigrationSubtaskRequest(proto.Message):
+    r"""A request to get a previously created migration subtasks.
+    Attributes:
+        name (str):
+            Required. The unique identifier for the migration subtask.
+            Example:
+            ``projects/123/locations/us/workflows/1234/subtasks/543``
+        read_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Optional. The list of fields to be retrieved.
+    """
+
+    name = proto.Field(proto.STRING, number=1,)
+    read_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask,)
+
+
+class ListMigrationSubtasksRequest(proto.Message):
+    r"""A request to list previously created migration subtasks.
+    Attributes:
+        parent (str):
+            Required. The migration task of the subtasks to list.
+            Example: ``projects/123/locations/us/workflows/1234``
+        read_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Optional. The list of fields to be retrieved.
+        page_size (int):
+            Optional. The maximum number of migration
+            subtasks to return. The service may return fewer
+            than this number.
+        page_token (str):
+            Optional. A page token, received from a previous
+            ``ListMigrationSubtasks`` call. Provide this to retrieve the
+            subsequent page.
+
+            When paginating, all other parameters provided to
+            ``ListMigrationSubtasks`` must match the call that provided
+            the page token.
+        filter (str):
+            Optional. The filter to apply. This can be used to get the
+            subtasks of a specific task in a workflow, e.g.
+            ``migration_task = "ab012"`` where ``"ab012"`` is the task
+            ID (not the name in the named map).
+    """
+
+    parent = proto.Field(proto.STRING, number=1,)
+    read_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask,)
+    page_size = proto.Field(proto.INT32, number=3,)
+    page_token = proto.Field(proto.STRING, number=4,)
+    filter = proto.Field(proto.STRING, number=5,)
+
+
+class ListMigrationSubtasksResponse(proto.Message):
+    r"""Response object for a ``ListMigrationSubtasks`` call.
+    Attributes:
+        migration_subtasks (Sequence[google.cloud.bigquery.migration_v2alpha.types.MigrationSubtask]):
+            The migration subtasks for the specified
+            task.
+        next_page_token (str):
+            A token, which can be sent as ``page_token`` to retrieve the
+            next page. If this field is omitted, there are no subsequent
+            pages.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    migration_subtasks = proto.RepeatedField(
+        proto.MESSAGE, number=1, message=migration_entities.MigrationSubtask,
+    )
+    next_page_token = proto.Field(proto.STRING, number=2,)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
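
For reference, an illustrative request (not part of the patch) combining the field mask, paging, and task-ID filter described above; the identifiers are placeholders.

from google.protobuf import field_mask_pb2

from google.cloud.bigquery.migration_v2alpha.types import ListMigrationSubtasksRequest

request = ListMigrationSubtasksRequest(
    parent="projects/123/locations/us/workflows/1234",
    filter='migration_task = "ab012"',  # only subtasks of task ID "ab012"
    read_mask=field_mask_pb2.FieldMask(paths=["name", "state"]),
    page_size=50,
)
print(request)
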
diff --git a/packages/google-cloud-bigquery-migration/mypy.ini b/packages/google-cloud-bigquery-migration/mypy.ini
new file mode 100644
index 000000000000..4505b485436b
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.6
+namespace_packages = True
diff --git a/packages/google-cloud-bigquery-migration/noxfile.py b/packages/google-cloud-bigquery-migration/noxfile.py
new file mode 100644
index 000000000000..935a92413a90
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/noxfile.py
@@ -0,0 +1,237 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
+
+from __future__ import absolute_import
+import os
+import pathlib
+import shutil
+
+import nox
+
+
+BLACK_VERSION = "black==19.10b0"
+BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]
+
+DEFAULT_PYTHON_VERSION = "3.8"
+SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
+UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
+
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
+nox.options.sessions = [
+    "unit",
+    "system",
+    "cover",
+    "lint",
+    "lint_setup_py",
+    "blacken",
+    "docs",
+]
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def lint(session):
+    """Run linters.
+
+    Returns a failure if the linters find linting errors or sufficiently
+    serious code quality issues.
+    """
+    session.install("flake8", BLACK_VERSION)
+    session.run(
+        "black", "--check", *BLACK_PATHS,
+    )
+    session.run("flake8", "google", "tests")
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def blacken(session):
+    """Run black. Format code to uniform standard."""
+    session.install(BLACK_VERSION)
+    session.run(
+        "black", *BLACK_PATHS,
+    )
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def lint_setup_py(session):
+    """Verify that setup.py is valid (including RST check)."""
+    session.install("docutils", "pygments")
+    session.run("python", "setup.py", "check", "--restructuredtext", "--strict")
+
+
+def default(session):
+    # Install all test dependencies, then install this package in-place.
+
+    constraints_path = str(
+        CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+    )
+    session.install(
+        "mock",
+        "asyncmock",
+        "pytest",
+        "pytest-cov",
+        "pytest-asyncio",
+        "-c",
+        constraints_path,
+    )
+
+    session.install("-e", ".", "-c", constraints_path)
+
+    # Run py.test against the unit tests.
+    session.run(
+        "py.test",
+        "--quiet",
+        f"--junitxml=unit_{session.python}_sponge_log.xml",
+        "--cov=google/cloud",
+        "--cov=tests/unit",
+        "--cov-append",
+        "--cov-config=.coveragerc",
+        "--cov-report=",
+        "--cov-fail-under=0",
+        os.path.join("tests", "unit"),
+        *session.posargs,
+    )
+
+
+@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
+def unit(session):
+    """Run the unit test suite."""
+    default(session)
+
+
+@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
+def system(session):
+    """Run the system test suite."""
+    constraints_path = str(
+        CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+    )
+    system_test_path = os.path.join("tests", "system.py")
+    system_test_folder_path = os.path.join("tests", "system")
+
+    # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true.
+    if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false":
+        session.skip("RUN_SYSTEM_TESTS is set to false, skipping")
+    # Install pyopenssl for mTLS testing.
+    if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true":
+        session.install("pyopenssl")
+
+    system_test_exists = os.path.exists(system_test_path)
+    system_test_folder_exists = os.path.exists(system_test_folder_path)
+    # Sanity check: only run tests if found.
+    if not system_test_exists and not system_test_folder_exists:
+        session.skip("System tests were not found")
+
+    # Use pre-release gRPC for system tests.
+    session.install("--pre", "grpcio")
+
+    # Install all test dependencies, then install this package into the
+    # virtualenv's dist-packages.
+    session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path)
+    session.install("-e", ".", "-c", constraints_path)
+
+    # Run py.test against the system tests.
+    if system_test_exists:
+        session.run(
+            "py.test",
+            "--quiet",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_path,
+            *session.posargs,
+        )
+    if system_test_folder_exists:
+        session.run(
+            "py.test",
+            "--quiet",
+            f"--junitxml=system_{session.python}_sponge_log.xml",
+            system_test_folder_path,
+            *session.posargs,
+        )
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def cover(session):
+    """Run the final coverage report.
+
+    This outputs the coverage report aggregating coverage from the unit
+    test runs (not system test runs), and then erases coverage data.
+    """
+    session.install("coverage", "pytest-cov")
+    session.run("coverage", "report", "--show-missing", "--fail-under=100")
+
+    session.run("coverage", "erase")
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def docs(session):
+    """Build the docs for this library."""
+
+    session.install("-e", ".")
+    session.install("sphinx==4.0.1", "alabaster", "recommonmark")
+
+    shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+    session.run(
+        "sphinx-build",
+        "-W",  # warnings as errors
+        "-T",  # show full traceback on exception
+        "-N",  # no colors
+        "-b",
+        "html",
+        "-d",
+        os.path.join("docs", "_build", "doctrees", ""),
+        os.path.join("docs", ""),
+        os.path.join("docs", "_build", "html", ""),
+    )
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def docfx(session):
+    """Build the docfx yaml files for this library."""
+
+    session.install("-e", ".")
+    session.install(
+        "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml"
+    )
+
+    shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+    session.run(
+        "sphinx-build",
+        "-T",  # show full traceback on exception
+        "-N",  # no colors
+        "-D",
+        (
+            "extensions=sphinx.ext.autodoc,"
+            "sphinx.ext.autosummary,"
+            "docfx_yaml.extension,"
+            "sphinx.ext.intersphinx,"
+            "sphinx.ext.coverage,"
+            "sphinx.ext.napoleon,"
+            "sphinx.ext.todo,"
+            "sphinx.ext.viewcode,"
+            "recommonmark"
+        ),
+        "-b",
+        "html",
+        "-d",
+        os.path.join("docs", "_build", "doctrees", ""),
+        os.path.join("docs", ""),
+        os.path.join("docs", "_build", "html", ""),
+    )
diff --git a/packages/google-cloud-bigquery-migration/owlbot.py b/packages/google-cloud-bigquery-migration/owlbot.py
new file mode 100644
index 000000000000..0a0f83991638
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/owlbot.py
@@ -0,0 +1,42 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import synthtool as s
+import synthtool.gcp as gcp
+from synthtool.languages import python
+
+# ----------------------------------------------------------------------------
+# Copy the generated client from the owl-bot staging directory
+# ----------------------------------------------------------------------------
+
+default_version = "v1"
+
+for library in s.get_staging_dirs(default_version):
+    s.move(library, excludes=["setup.py", "README.rst", "docs/index.rst"])
+s.remove_staging_dirs()
+
+# ----------------------------------------------------------------------------
+# Add templated files
+# ----------------------------------------------------------------------------
+
+templated_files = gcp.CommonTemplates().py_library(microgenerator=True)
+s.move(templated_files, excludes=[".coveragerc"]) # the microgenerator has a good coveragerc file
+
+python.py_samples(skip_readmes=True)
+
+# ----------------------------------------------------------------------------
+# Run blacken session
+# ----------------------------------------------------------------------------
+
+s.shell.run(["nox", "-s", "blacken"], hide_output=False)
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/renovate.json b/packages/google-cloud-bigquery-migration/renovate.json
new file mode 100644
index 000000000000..c21036d385e5
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/renovate.json
@@ -0,0 +1,12 @@
+{
+  "extends": [
+    "config:base",
+    "group:all",
+    ":preserveSemverRanges",
+    ":disableDependencyDashboard"
+  ],
+  "ignorePaths": [".pre-commit-config.yaml"],
+  "pip_requirements": {
+    "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
+  }
+}
diff --git a/packages/google-cloud-bigquery-migration/scripts/decrypt-secrets.sh b/packages/google-cloud-bigquery-migration/scripts/decrypt-secrets.sh
new file mode 100755
index 000000000000..21f6d2a26d90
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/scripts/decrypt-secrets.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$( dirname "$DIR" )
+
+# Work from the project root.
+cd "$ROOT"
+
+# Prevent the script from overwriting existing files.
+# We recommend that sample authors use their own service account files and cloud project;
+# in that case, they should prepare these files themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+       [[ -f "testing/service-account.json" ]] || \
+       [[ -f "testing/client-secrets.json" ]]; then
+    echo "One or more target files exist, aborting."
+    exit 1
+fi
+
+# Use SECRET_MANAGER_PROJECT if set, falling back to cloud-devrel-kokoro-resources.
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
+
+gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+       --project="${PROJECT_ID}" \
+       > testing/test-env.sh
+gcloud secrets versions access latest \
+       --secret="python-docs-samples-service-account" \
+       --project="${PROJECT_ID}" \
+       > testing/service-account.json
+gcloud secrets versions access latest \
+       --secret="python-docs-samples-client-secrets" \
+       --project="${PROJECT_ID}" \
+       > testing/client-secrets.json
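
For reference, the same secrets can be fetched programmatically rather than via gcloud; a minimal sketch, assuming the google-cloud-secret-manager client library is installed and the caller has access to these secrets (not part of this patch):

    # Minimal sketch; assumes google-cloud-secret-manager and suitable credentials.
    import os
    from google.cloud import secretmanager

    project_id = os.environ.get("SECRET_MANAGER_PROJECT", "cloud-devrel-kokoro-resources")
    client = secretmanager.SecretManagerServiceClient()
    os.makedirs("testing", exist_ok=True)

    for secret_id, out_path in [
        ("python-docs-samples-test-env", "testing/test-env.sh"),
        ("python-docs-samples-service-account", "testing/service-account.json"),
        ("python-docs-samples-client-secrets", "testing/client-secrets.json"),
    ]:
        name = f"projects/{project_id}/secrets/{secret_id}/versions/latest"
        payload = client.access_secret_version(name=name).payload.data
        with open(out_path, "wb") as f:
            f.write(payload)  # write the secret payload to the testing directory
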
diff --git a/packages/google-cloud-bigquery-migration/scripts/fixup_migration_v2alpha_keywords.py b/packages/google-cloud-bigquery-migration/scripts/fixup_migration_v2alpha_keywords.py
new file mode 100644
index 000000000000..6ff8cc7ab7a2
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/scripts/fixup_migration_v2alpha_keywords.py
@@ -0,0 +1,182 @@
+#! /usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import argparse
+import os
+import libcst as cst
+import pathlib
+import sys
+from typing import (Any, Callable, Dict, List, Sequence, Tuple)
+
+
+def partition(
+    predicate: Callable[[Any], bool],
+    iterator: Sequence[Any]
+) -> Tuple[List[Any], List[Any]]:
+    """A stable, out-of-place partition."""
+    results = ([], [])
+
+    for i in iterator:
+        results[int(predicate(i))].append(i)
+
+    # Returns trueList, falseList
+    return results[1], results[0]
+
+
+class migrationCallTransformer(cst.CSTTransformer):
+    CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
+    METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
+        'create_migration_workflow': ('parent', 'migration_workflow', ),
+        'delete_migration_workflow': ('name', ),
+        'get_migration_subtask': ('name', 'read_mask', ),
+        'get_migration_workflow': ('name', 'read_mask', ),
+        'list_migration_subtasks': ('parent', 'read_mask', 'page_size', 'page_token', 'filter', ),
+        'list_migration_workflows': ('parent', 'read_mask', 'page_size', 'page_token', ),
+        'start_migration_workflow': ('name', ),
+    }
+
+    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
+        try:
+            key = original.func.attr.value
+            kword_params = self.METHOD_TO_PARAMS[key]
+        except (AttributeError, KeyError):
+            # Either not a method from the API or too convoluted to be sure.
+            return updated
+
+        # If the existing code is valid, keyword args come after positional args.
+        # Therefore, all positional args must map to the first parameters.
+        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
+        if any(k.keyword.value == "request" for k in kwargs):
+            # We've already fixed this file, don't fix it again.
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=migrationCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.
+
+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+        updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Generate the updated source file at the corresponding path.
+        with open(updated_path, 'w') as f:
+            f.write(updated.code)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description="""Fix up source that uses the migration client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool makes a best-effort attempt to convert positional
+      parameters in client method calls to keyword-based parameters.
+      Cases where it WILL FAIL include:
+      A) * or ** expansion in a method call.
+      B) Calls via a function or method alias (including free function calls).
+      C) Indirect or dispatched calls (e.g. the method is looked up dynamically).
+
+      These all constitute false negatives. The tool will also detect false
+      positives when an API method shares a name with another method.
+""")
+    parser.add_argument(
+        '-d',
+        '--input-directory',
+        required=True,
+        dest='input_dir',
+        help='the input directory to walk for python files to fix up',
+    )
+    parser.add_argument(
+        '-o',
+        '--output-directory',
+        required=True,
+        dest='output_dir',
+        help='the directory to output files fixed via un-flattening',
+    )
+    args = parser.parse_args()
+    input_dir = pathlib.Path(args.input_dir)
+    output_dir = pathlib.Path(args.output_dir)
+    if not input_dir.is_dir():
+        print(
+            f"input directory '{input_dir}' does not exist or is not a directory",
+            file=sys.stderr,
+        )
+        sys.exit(-1)
+
+    if not output_dir.is_dir():
+        print(
+            f"output directory '{output_dir}' does not exist or is not a directory",
+            file=sys.stderr,
+        )
+        sys.exit(-1)
+
+    if os.listdir(output_dir):
+        print(
+            f"output directory '{output_dir}' is not empty",
+            file=sys.stderr,
+        )
+        sys.exit(-1)
+
+    fix_files(input_dir, output_dir)
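
The transformer above folds positional arguments into the request-object form used by the generated client, leaving control parameters (retry, timeout, metadata) as separate keyword arguments. An illustrative before/after for one of the mapped methods (the parent value below is a placeholder, not from this patch):

    # Before: positional arguments against the old surface.
    client.create_migration_workflow("projects/my-project/locations/us", workflow)

    # After running the fixup script: the mapped parameters ('parent',
    # 'migration_workflow') become keys of a request dict.
    client.create_migration_workflow(
        request={
            "parent": "projects/my-project/locations/us",
            "migration_workflow": workflow,
        }
    )
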
diff --git a/packages/google-cloud-bigquery-migration/scripts/readme-gen/readme_gen.py b/packages/google-cloud-bigquery-migration/scripts/readme-gen/readme_gen.py
new file mode 100644
index 000000000000..d309d6e97518
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/scripts/readme-gen/readme_gen.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+
+# Copyright 2016 Google Inc
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generates READMEs using configuration defined in yaml."""
+
+import argparse
+import io
+import os
+import subprocess
+
+import jinja2
+import yaml
+
+
+jinja_env = jinja2.Environment(
+    trim_blocks=True,
+    loader=jinja2.FileSystemLoader(
+        os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates'))))
+
+README_TMPL = jinja_env.get_template('README.tmpl.rst')
+
+
+def get_help(file):
+    return subprocess.check_output(['python', file, '--help']).decode()
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument('source')
+    parser.add_argument('--destination', default='README.rst')
+
+    args = parser.parse_args()
+
+    source = os.path.abspath(args.source)
+    root = os.path.dirname(source)
+    destination = os.path.join(root, args.destination)
+
+    jinja_env.globals['get_help'] = get_help
+
+    with io.open(source, 'r') as f:
+        config = yaml.safe_load(f)
+
+    # This allows get_help to execute in the right directory.
+    os.chdir(root)
+
+    output = README_TMPL.render(config)
+
+    with io.open(destination, 'w') as f:
+        f.write(output)
+
+
+if __name__ == '__main__':
+    main()
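
A hypothetical invocation of the generator (the YAML path below is illustrative; any config consumed by the templates that follow would do):

    # Render README.rst next to a samples config file.
    import subprocess

    subprocess.run(
        ["python", "scripts/readme-gen/readme_gen.py", "samples/README.rst.yaml"],
        check=True,
    )
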
diff --git a/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/README.tmpl.rst
new file mode 100644
index 000000000000..4fd239765b0a
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/README.tmpl.rst
@@ -0,0 +1,87 @@
+{# The following line is a lie. BUT! Once jinja2 is done with it, it will
+    become truth! #}
+.. This file is automatically generated. Do not edit this file directly.
+
+{{product.name}} Python Samples
+===============================================================================
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+   :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst
+
+
+This directory contains samples for {{product.name}}. {{product.description}}
+
+{{description}}
+
+.. _{{product.name}}: {{product.url}}
+
+{% if required_api_url %}
+To run the sample, you need to enable the API at: {{required_api_url}}
+{% endif %}
+
+{% if required_role %}
+To run the sample, you need to have the `{{required_role}}` role.
+{% endif %}
+
+{{other_required_steps}}
+
+{% if setup %}
+Setup
+-------------------------------------------------------------------------------
+
+{% for section in setup %}
+
+{% include section + '.tmpl.rst' %}
+
+{% endfor %}
+{% endif %}
+
+{% if samples %}
+Samples
+-------------------------------------------------------------------------------
+
+{% for sample in samples %}
+{{sample.name}}
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+{% if not sample.hide_cloudshell_button %}
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+   :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst
+{% endif %}
+
+
+{{sample.description}}
+
+To run this sample:
+
+.. code-block:: bash
+
+    $ python {{sample.file}}
+{% if sample.show_help %}
+
+    {{get_help(sample.file)|indent}}
+{% endif %}
+
+
+{% endfor %}
+{% endif %}
+
+{% if cloud_client_library %}
+
+The client library
+-------------------------------------------------------------------------------
+
+This sample uses the `Google Cloud Client Library for Python`_.
+You can read the documentation for more details on API usage and use GitHub
+to `browse the source`_ and `report issues`_.
+
+.. _Google Cloud Client Library for Python:
+    https://googlecloudplatform.github.io/google-cloud-python/
+.. _browse the source:
+    https://github.com/GoogleCloudPlatform/google-cloud-python
+.. _report issues:
+    https://github.com/GoogleCloudPlatform/google-cloud-python/issues
+
+{% endif %}
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/auth.tmpl.rst
new file mode 100644
index 000000000000..1446b94a5e3a
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/auth.tmpl.rst
@@ -0,0 +1,9 @@
+Authentication
+++++++++++++++
+
+This sample requires you to have authentication setup. Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. _Authentication Getting Started Guide:
+    https://cloud.google.com/docs/authentication/getting-started
diff --git a/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/auth_api_key.tmpl.rst
new file mode 100644
index 000000000000..11957ce2714a
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/auth_api_key.tmpl.rst
@@ -0,0 +1,14 @@
+Authentication
+++++++++++++++
+
+Authentication for this service is done via an `API Key`_. To obtain an API
+Key:
+
+1. Open the `Cloud Platform Console`_.
+2. Make sure that billing is enabled for your project.
+3. From the **Credentials** page, create a new **API Key** or use an existing
+   one for your project.
+
+.. _API Key:
+    https://developers.google.com/api-client-library/python/guide/aaa_apikeys
+.. _Cloud Platform Console: https://console.cloud.google.com/project
diff --git a/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/install_deps.tmpl.rst
new file mode 100644
index 000000000000..275d649890d7
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -0,0 +1,29 @@
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+    .. code-block:: bash
+
+        $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+   .. _Python Development Environment Setup Guide:
+       https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 3.6+.
+
+    .. code-block:: bash
+
+        $ virtualenv env
+        $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+    .. code-block:: bash
+
+        $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. _virtualenv: https://virtualenv.pypa.io/
diff --git a/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/install_portaudio.tmpl.rst
new file mode 100644
index 000000000000..5ea33d18c00c
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/scripts/readme-gen/templates/install_portaudio.tmpl.rst
@@ -0,0 +1,35 @@
+Install PortAudio
++++++++++++++++++
+
+Install `PortAudio`_. This is required by the `PyAudio`_ library to stream
+audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the
+platform.
+
+* For Mac OS X, you can use `Homebrew`_::
+
+      brew install portaudio
+
+  **Note**: if you encounter an error when running `pip install` that indicates
+  it can't find `portaudio.h`, try running `pip install` with the following
+  flags::
+
+      pip install --global-option='build_ext' \
+          --global-option='-I/usr/local/include' \
+          --global-option='-L/usr/local/lib' \
+          pyaudio
+
+* For Debian / Ubuntu Linux::
+
+      apt-get install portaudio19-dev python-all-dev
+
+* Windows may work without having to install PortAudio explicitly (it will get
+  installed with PyAudio).
+
+For more details, see the `PyAudio installation`_ page.
+
+
+.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/
+.. _PortAudio: http://www.portaudio.com/
+.. _PyAudio installation:
+  https://people.csail.mit.edu/hubert/pyaudio/#downloads
+.. _Homebrew: http://brew.sh
diff --git a/packages/google-cloud-bigquery-migration/setup.cfg b/packages/google-cloud-bigquery-migration/setup.cfg
new file mode 100644
index 000000000000..c3a2b39f6528
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/setup.cfg
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
+[bdist_wheel]
+universal = 1
diff --git a/packages/google-cloud-bigquery-migration/setup.py b/packages/google-cloud-bigquery-migration/setup.py
new file mode 100644
index 000000000000..99dd0bd8c768
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/setup.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import io
+import os
+import setuptools  # type: ignore
+
+version = "0.1.0"
+
+package_root = os.path.abspath(os.path.dirname(__file__))
+
+readme_filename = os.path.join(package_root, "README.rst")
+with io.open(readme_filename, encoding="utf-8") as readme_file:
+    readme = readme_file.read()
+
+setuptools.setup(
+    name="google-cloud-bigquery-migration",
+    version=version,
+    long_description=readme,
+    packages=setuptools.PEP420PackageFinder.find(),
+    namespace_packages=("google", "google.cloud", "google.cloud.bigquery"),
+    platforms="Posix; MacOS X; Windows",
+    include_package_data=True,
+    install_requires=(
+        "google-api-core[grpc] >= 1.27.0, < 3.0.0dev",
+        "libcst >= 0.2.5",
+        "proto-plus >= 1.15.0",
+        "packaging >= 14.3",
+    ),
+    python_requires=">=3.6",
+    classifiers=[
+        "Development Status :: 3 - Alpha",
+        "Intended Audience :: Developers",
+        "Operating System :: OS Independent",
+        "Programming Language :: Python :: 3.6",
+        "Programming Language :: Python :: 3.7",
+        "Programming Language :: Python :: 3.8",
+        "Programming Language :: Python :: 3.9",
+        "Topic :: Internet",
+        "Topic :: Software Development :: Libraries :: Python Modules",
+    ],
+    zip_safe=False,
+)
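
Once the package is installed (for example with `pip install google-cloud-bigquery-migration`), the generated client lives under the namespace declared above. A minimal usage sketch, assuming Application Default Credentials are available; the project path is a placeholder:

    # Minimal sketch; the parent value below is illustrative only.
    from google.cloud.bigquery.migration_v2alpha.services.migration_service import (
        MigrationServiceClient,
    )

    client = MigrationServiceClient()
    # List workflows under a project/location; the pager yields MigrationWorkflow messages.
    for workflow in client.list_migration_workflows(
        request={"parent": "projects/my-project/locations/us"}
    ):
        print(workflow.name)
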
diff --git a/packages/google-cloud-bigquery-migration/testing/.gitignore b/packages/google-cloud-bigquery-migration/testing/.gitignore
new file mode 100644
index 000000000000..b05fbd630881
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/testing/.gitignore
@@ -0,0 +1,3 @@
+test-env.sh
+service-account.json
+client-secrets.json
\ No newline at end of file
diff --git a/packages/google-cloud-bigquery-migration/tests/__init__.py b/packages/google-cloud-bigquery-migration/tests/__init__.py
new file mode 100644
index 000000000000..4de65971c238
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/tests/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/packages/google-cloud-bigquery-migration/tests/unit/__init__.py b/packages/google-cloud-bigquery-migration/tests/unit/__init__.py
new file mode 100644
index 000000000000..4de65971c238
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/tests/unit/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/__init__.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/__init__.py
new file mode 100644
index 000000000000..4de65971c238
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/migration_v2alpha/__init__.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/migration_v2alpha/__init__.py
new file mode 100644
index 000000000000..4de65971c238
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/migration_v2alpha/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/packages/google-cloud-bigquery-migration/tests/unit/gapic/migration_v2alpha/test_migration_service.py b/packages/google-cloud-bigquery-migration/tests/unit/gapic/migration_v2alpha/test_migration_service.py
new file mode 100644
index 000000000000..3e4c6ffbd4da
--- /dev/null
+++ b/packages/google-cloud-bigquery-migration/tests/unit/gapic/migration_v2alpha/test_migration_service.py
@@ -0,0 +1,3066 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+import mock
+import packaging.version
+
+import grpc
+from grpc.experimental import aio
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.bigquery.migration_v2alpha.services.migration_service import (
+    MigrationServiceAsyncClient,
+)
+from google.cloud.bigquery.migration_v2alpha.services.migration_service import (
+    MigrationServiceClient,
+)
+from google.cloud.bigquery.migration_v2alpha.services.migration_service import pagers
+from google.cloud.bigquery.migration_v2alpha.services.migration_service import (
+    transports,
+)
+from google.cloud.bigquery.migration_v2alpha.services.migration_service.transports.base import (
+    _GOOGLE_AUTH_VERSION,
+)
+from google.cloud.bigquery.migration_v2alpha.types import migration_entities
+from google.cloud.bigquery.migration_v2alpha.types import migration_error_details
+from google.cloud.bigquery.migration_v2alpha.types import migration_metrics
+from google.cloud.bigquery.migration_v2alpha.types import migration_service
+from google.oauth2 import service_account
+from google.protobuf import any_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.rpc import error_details_pb2  # type: ignore
+import google.auth
+
+
+# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively
+# through google-api-core:
+# - Delete the auth "less than" test cases
+# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
+    packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
+    reason="This test requires google-auth < 1.25.0",
+)
+requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
+    packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
+    reason="This test requires google-auth >= 1.25.0",
+)
+
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+    return (
+        "foo.googleapis.com"
+        if ("localhost" in client.DEFAULT_ENDPOINT)
+        else client.DEFAULT_ENDPOINT
+    )
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert MigrationServiceClient._get_default_mtls_endpoint(None) is None
+    assert (
+        MigrationServiceClient._get_default_mtls_endpoint(api_endpoint)
+        == api_mtls_endpoint
+    )
+    assert (
+        MigrationServiceClient._get_default_mtls_endpoint(api_mtls_endpoint)
+        == api_mtls_endpoint
+    )
+    assert (
+        MigrationServiceClient._get_default_mtls_endpoint(sandbox_endpoint)
+        == sandbox_mtls_endpoint
+    )
+    assert (
+        MigrationServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+        == sandbox_mtls_endpoint
+    )
+    assert (
+        MigrationServiceClient._get_default_mtls_endpoint(non_googleapi)
+        == non_googleapi
+    )
+
+
+@pytest.mark.parametrize(
+    "client_class", [MigrationServiceClient, MigrationServiceAsyncClient,]
+)
+def test_migration_service_client_from_service_account_info(client_class):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_info"
+    ) as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == "bigquerymigration.googleapis.com:443"
+
+
+@pytest.mark.parametrize(
+    "transport_class,transport_name",
+    [
+        (transports.MigrationServiceGrpcTransport, "grpc"),
+        (transports.MigrationServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+    ],
+)
+def test_migration_service_client_service_account_always_use_jwt(
+    transport_class, transport_name
+):
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(
+        service_account.Credentials, "with_always_use_jwt_access", create=True
+    ) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize(
+    "client_class", [MigrationServiceClient, MigrationServiceAsyncClient,]
+)
+def test_migration_service_client_from_service_account_file(client_class):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(
+        service_account.Credentials, "from_service_account_file"
+    ) as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file("dummy/file/path.json")
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json("dummy/file/path.json")
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == "bigquerymigration.googleapis.com:443"
+
+
+def test_migration_service_client_get_transport_class():
+    transport = MigrationServiceClient.get_transport_class()
+    available_transports = [
+        transports.MigrationServiceGrpcTransport,
+    ]
+    assert transport in available_transports
+
+    transport = MigrationServiceClient.get_transport_class("grpc")
+    assert transport == transports.MigrationServiceGrpcTransport
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name",
+    [
+        (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc"),
+        (
+            MigrationServiceAsyncClient,
+            transports.MigrationServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+        ),
+    ],
+)
+@mock.patch.object(
+    MigrationServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(MigrationServiceClient),
+)
+@mock.patch.object(
+    MigrationServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(MigrationServiceAsyncClient),
+)
+def test_migration_service_client_client_options(
+    client_class, transport_class, transport_name
+):
+    # Check that if channel is provided we won't create a new one.
+    with mock.patch.object(MigrationServiceClient, "get_transport_class") as gtc:
+        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if channel is provided via str we will create a new one.
+    with mock.patch.object(MigrationServiceClient, "get_transport_class") as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class()
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class()
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class()
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class()
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (
+            MigrationServiceClient,
+            transports.MigrationServiceGrpcTransport,
+            "grpc",
+            "true",
+        ),
+        (
+            MigrationServiceAsyncClient,
+            transports.MigrationServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (
+            MigrationServiceClient,
+            transports.MigrationServiceGrpcTransport,
+            "grpc",
+            "false",
+        ),
+        (
+            MigrationServiceAsyncClient,
+            transports.MigrationServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+    ],
+)
+@mock.patch.object(
+    MigrationServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(MigrationServiceClient),
+)
+@mock.patch.object(
+    MigrationServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(MigrationServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_migration_service_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
+        options = client_options.ClientOptions(
+            client_cert_source=client_cert_source_callback
+        )
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options)
+
+            if use_client_cert_env == "false":
+                expected_client_cert_source = None
+                expected_host = client.DEFAULT_ENDPOINT
+            else:
+                expected_client_cert_source = client_cert_source_callback
+                expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=expected_host,
+                scopes=None,
+                client_cert_source_for_mtls=expected_client_cert_source,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+            )
+
+    # Check the case ADC client cert is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            with mock.patch(
+                "google.auth.transport.mtls.has_default_client_cert_source",
+                return_value=True,
+            ):
+                with mock.patch(
+                    "google.auth.transport.mtls.default_client_cert_source",
+                    return_value=client_cert_source_callback,
+                ):
+                    if use_client_cert_env == "false":
+                        expected_host = client.DEFAULT_ENDPOINT
+                        expected_client_cert_source = None
+                    else:
+                        expected_host = client.DEFAULT_MTLS_ENDPOINT
+                        expected_client_cert_source = client_cert_source_callback
+
+                    patched.return_value = None
+                    client = client_class()
+                    patched.assert_called_once_with(
+                        credentials=None,
+                        credentials_file=None,
+                        host=expected_host,
+                        scopes=None,
+                        client_cert_source_for_mtls=expected_client_cert_source,
+                        quota_project_id=None,
+                        client_info=transports.base.DEFAULT_CLIENT_INFO,
+                        always_use_jwt_access=True,
+                    )
+
+    # Check the case client_cert_source and ADC client cert are not provided.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            with mock.patch(
+                "google.auth.transport.mtls.has_default_client_cert_source",
+                return_value=False,
+            ):
+                patched.return_value = None
+                client = client_class()
+                patched.assert_called_once_with(
+                    credentials=None,
+                    credentials_file=None,
+                    host=client.DEFAULT_ENDPOINT,
+                    scopes=None,
+                    client_cert_source_for_mtls=None,
+                    quota_project_id=None,
+                    client_info=transports.base.DEFAULT_CLIENT_INFO,
+                    always_use_jwt_access=True,
+                )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name",
+    [
+        (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc"),
+        (
+            MigrationServiceAsyncClient,
+            transports.MigrationServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+        ),
+    ],
+)
+def test_migration_service_client_client_options_scopes(
+    client_class, transport_class, transport_name
+):
+    # Check the case scopes are provided.
+    options = client_options.ClientOptions(scopes=["1", "2"],)
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=["1", "2"],
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name",
+    [
+        (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc"),
+        (
+            MigrationServiceAsyncClient,
+            transports.MigrationServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+        ),
+    ],
+)
+def test_migration_service_client_client_options_credentials_file(
+    client_class, transport_class, transport_name
+):
+    # Check the case credentials file is provided.
+    options = client_options.ClientOptions(credentials_file="credentials.json")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file="credentials.json",
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+        )
+
+
+def test_migration_service_client_client_options_from_dict():
+    with mock.patch(
+        "google.cloud.bigquery.migration_v2alpha.services.migration_service.transports.MigrationServiceGrpcTransport.__init__"
+    ) as grpc_transport:
+        grpc_transport.return_value = None
+        client = MigrationServiceClient(
+            client_options={"api_endpoint": "squid.clam.whelk"}
+        )
+        grpc_transport.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+        )
+
+
+def test_create_migration_workflow(
+    transport: str = "grpc",
+    request_type=migration_service.CreateMigrationWorkflowRequest,
+):
+    client = MigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_migration_workflow), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = migration_entities.MigrationWorkflow(
+            name="name_value",
+            display_name="display_name_value",
+            state=migration_entities.MigrationWorkflow.State.DRAFT,
+        )
+        response = client.create_migration_workflow(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.CreateMigrationWorkflowRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, migration_entities.MigrationWorkflow)
+    assert response.name == "name_value"
+    assert response.display_name == "display_name_value"
+    assert response.state == migration_entities.MigrationWorkflow.State.DRAFT
+
+
+def test_create_migration_workflow_from_dict():
+    test_create_migration_workflow(request_type=dict)
+
+
+def test_create_migration_workflow_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_migration_workflow), "__call__"
+    ) as call:
+        client.create_migration_workflow()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.CreateMigrationWorkflowRequest()
+
+
+@pytest.mark.asyncio
+async def test_create_migration_workflow_async(
+    transport: str = "grpc_asyncio",
+    request_type=migration_service.CreateMigrationWorkflowRequest,
+):
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_migration_workflow), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            migration_entities.MigrationWorkflow(
+                name="name_value",
+                display_name="display_name_value",
+                state=migration_entities.MigrationWorkflow.State.DRAFT,
+            )
+        )
+        response = await client.create_migration_workflow(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.CreateMigrationWorkflowRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, migration_entities.MigrationWorkflow)
+    assert response.name == "name_value"
+    assert response.display_name == "display_name_value"
+    assert response.state == migration_entities.MigrationWorkflow.State.DRAFT
+
+
+@pytest.mark.asyncio
+async def test_create_migration_workflow_async_from_dict():
+    await test_create_migration_workflow_async(request_type=dict)
+
+
+def test_create_migration_workflow_field_headers():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = migration_service.CreateMigrationWorkflowRequest()
+
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_migration_workflow), "__call__"
+    ) as call:
+        call.return_value = migration_entities.MigrationWorkflow()
+        client.create_migration_workflow(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_create_migration_workflow_field_headers_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = migration_service.CreateMigrationWorkflowRequest()
+
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_migration_workflow), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            migration_entities.MigrationWorkflow()
+        )
+        await client.create_migration_workflow(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_create_migration_workflow_flattened():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_migration_workflow), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = migration_entities.MigrationWorkflow()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_migration_workflow(
+            parent="parent_value",
+            migration_workflow=migration_entities.MigrationWorkflow(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == "parent_value"
+        assert args[0].migration_workflow == migration_entities.MigrationWorkflow(
+            name="name_value"
+        )
+
+
+def test_create_migration_workflow_flattened_error():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_migration_workflow(
+            migration_service.CreateMigrationWorkflowRequest(),
+            parent="parent_value",
+            migration_workflow=migration_entities.MigrationWorkflow(name="name_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_migration_workflow_flattened_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_migration_workflow), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = migration_entities.MigrationWorkflow()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            migration_entities.MigrationWorkflow()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_migration_workflow(
+            parent="parent_value",
+            migration_workflow=migration_entities.MigrationWorkflow(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == "parent_value"
+        assert args[0].migration_workflow == migration_entities.MigrationWorkflow(
+            name="name_value"
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_migration_workflow_flattened_error_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_migration_workflow(
+            migration_service.CreateMigrationWorkflowRequest(),
+            parent="parent_value",
+            migration_workflow=migration_entities.MigrationWorkflow(name="name_value"),
+        )
+
+
+def test_get_migration_workflow(
+    transport: str = "grpc", request_type=migration_service.GetMigrationWorkflowRequest
+):
+    client = MigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_migration_workflow), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = migration_entities.MigrationWorkflow(
+            name="name_value",
+            display_name="display_name_value",
+            state=migration_entities.MigrationWorkflow.State.DRAFT,
+        )
+        response = client.get_migration_workflow(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.GetMigrationWorkflowRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, migration_entities.MigrationWorkflow)
+    assert response.name == "name_value"
+    assert response.display_name == "display_name_value"
+    assert response.state == migration_entities.MigrationWorkflow.State.DRAFT
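+
+
+# Proto3 scalar fields carry implicit defaults, so an empty request is a valid
+# message; roughly (a sketch, assuming proto-plus equality semantics):
+#
+#   migration_service.GetMigrationWorkflowRequest() == migration_service.GetMigrationWorkflowRequest(name="")
+#
+# which is why request_type() with no arguments is enough to exercise the
+# mocked transport in the test above.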
+
+
+def test_get_migration_workflow_from_dict():
+    test_get_migration_workflow(request_type=dict)
+
+
+def test_get_migration_workflow_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_migration_workflow), "__call__"
+    ) as call:
+        client.get_migration_workflow()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.GetMigrationWorkflowRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_migration_workflow_async(
+    transport: str = "grpc_asyncio",
+    request_type=migration_service.GetMigrationWorkflowRequest,
+):
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_migration_workflow), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            migration_entities.MigrationWorkflow(
+                name="name_value",
+                display_name="display_name_value",
+                state=migration_entities.MigrationWorkflow.State.DRAFT,
+            )
+        )
+        response = await client.get_migration_workflow(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.GetMigrationWorkflowRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, migration_entities.MigrationWorkflow)
+    assert response.name == "name_value"
+    assert response.display_name == "display_name_value"
+    assert response.state == migration_entities.MigrationWorkflow.State.DRAFT
+
+
+@pytest.mark.asyncio
+async def test_get_migration_workflow_async_from_dict():
+    await test_get_migration_workflow_async(request_type=dict)
+
+
+def test_get_migration_workflow_field_headers():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = migration_service.GetMigrationWorkflowRequest()
+
+    request.name = "name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_migration_workflow), "__call__"
+    ) as call:
+        call.return_value = migration_entities.MigrationWorkflow()
+        client.get_migration_workflow(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_migration_workflow_field_headers_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = migration_service.GetMigrationWorkflowRequest()
+
+    request.name = "name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_migration_workflow), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            migration_entities.MigrationWorkflow()
+        )
+        await client.get_migration_workflow(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_get_migration_workflow_flattened():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_migration_workflow), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = migration_entities.MigrationWorkflow()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_migration_workflow(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == "name_value"
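+
+
+# Flattened keyword arguments are expected to be folded into the request
+# message before the transport stub is invoked; roughly (illustrative sketch):
+#
+#   client.get_migration_workflow(name="name_value")
+#   # should result in the stub receiving
+#   migration_service.GetMigrationWorkflowRequest(name="name_value")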
+
+
+def test_get_migration_workflow_flattened_error():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_migration_workflow(
+            migration_service.GetMigrationWorkflowRequest(), name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_migration_workflow_flattened_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_migration_workflow), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            migration_entities.MigrationWorkflow()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_migration_workflow(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_get_migration_workflow_flattened_error_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_migration_workflow(
+            migration_service.GetMigrationWorkflowRequest(), name="name_value",
+        )
+
+
+def test_list_migration_workflows(
+    transport: str = "grpc",
+    request_type=migration_service.ListMigrationWorkflowsRequest,
+):
+    client = MigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_workflows), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = migration_service.ListMigrationWorkflowsResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_migration_workflows(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.ListMigrationWorkflowsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListMigrationWorkflowsPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_migration_workflows_from_dict():
+    test_list_migration_workflows(request_type=dict)
+
+
+def test_list_migration_workflows_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_workflows), "__call__"
+    ) as call:
+        client.list_migration_workflows()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.ListMigrationWorkflowsRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_migration_workflows_async(
+    transport: str = "grpc_asyncio",
+    request_type=migration_service.ListMigrationWorkflowsRequest,
+):
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_workflows), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            migration_service.ListMigrationWorkflowsResponse(
+                next_page_token="next_page_token_value",
+            )
+        )
+        response = await client.list_migration_workflows(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.ListMigrationWorkflowsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListMigrationWorkflowsAsyncPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+@pytest.mark.asyncio
+async def test_list_migration_workflows_async_from_dict():
+    await test_list_migration_workflows_async(request_type=dict)
+
+
+def test_list_migration_workflows_field_headers():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = migration_service.ListMigrationWorkflowsRequest()
+
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_workflows), "__call__"
+    ) as call:
+        call.return_value = migration_service.ListMigrationWorkflowsResponse()
+        client.list_migration_workflows(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_migration_workflows_field_headers_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = migration_service.ListMigrationWorkflowsRequest()
+
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_workflows), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            migration_service.ListMigrationWorkflowsResponse()
+        )
+        await client.list_migration_workflows(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_list_migration_workflows_flattened():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_workflows), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = migration_service.ListMigrationWorkflowsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_migration_workflows(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == "parent_value"
+
+
+def test_list_migration_workflows_flattened_error():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_migration_workflows(
+            migration_service.ListMigrationWorkflowsRequest(), parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_migration_workflows_flattened_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_workflows), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            migration_service.ListMigrationWorkflowsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_migration_workflows(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == "parent_value"
+
+
+@pytest.mark.asyncio
+async def test_list_migration_workflows_flattened_error_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_migration_workflows(
+            migration_service.ListMigrationWorkflowsRequest(), parent="parent_value",
+        )
+
+
+def test_list_migration_workflows_pager():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_workflows), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            migration_service.ListMigrationWorkflowsResponse(
+                migration_workflows=[
+                    migration_entities.MigrationWorkflow(),
+                    migration_entities.MigrationWorkflow(),
+                    migration_entities.MigrationWorkflow(),
+                ],
+                next_page_token="abc",
+            ),
+            migration_service.ListMigrationWorkflowsResponse(
+                migration_workflows=[], next_page_token="def",
+            ),
+            migration_service.ListMigrationWorkflowsResponse(
+                migration_workflows=[migration_entities.MigrationWorkflow(),],
+                next_page_token="ghi",
+            ),
+            migration_service.ListMigrationWorkflowsResponse(
+                migration_workflows=[
+                    migration_entities.MigrationWorkflow(),
+                    migration_entities.MigrationWorkflow(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_migration_workflows(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, migration_entities.MigrationWorkflow) for i in results)
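+
+
+# Iterating the pager is expected to walk every response page in turn and yield
+# the individual MigrationWorkflow items (3 + 0 + 1 + 2 = 6 above), hiding
+# pagination from the caller. A usage sketch (illustrative only):
+#
+#   for workflow in client.list_migration_workflows(request={"parent": "parent_value"}):
+#       ...  # each item is a migration_entities.MigrationWorkflow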
+
+
+def test_list_migration_workflows_pages():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_workflows), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            migration_service.ListMigrationWorkflowsResponse(
+                migration_workflows=[
+                    migration_entities.MigrationWorkflow(),
+                    migration_entities.MigrationWorkflow(),
+                    migration_entities.MigrationWorkflow(),
+                ],
+                next_page_token="abc",
+            ),
+            migration_service.ListMigrationWorkflowsResponse(
+                migration_workflows=[], next_page_token="def",
+            ),
+            migration_service.ListMigrationWorkflowsResponse(
+                migration_workflows=[migration_entities.MigrationWorkflow(),],
+                next_page_token="ghi",
+            ),
+            migration_service.ListMigrationWorkflowsResponse(
+                migration_workflows=[
+                    migration_entities.MigrationWorkflow(),
+                    migration_entities.MigrationWorkflow(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_migration_workflows(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
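+
+
+# The .pages view exposes one ListMigrationWorkflowsResponse per underlying RPC
+# instead of flattened items; each raw_page carries its next_page_token
+# ("abc", "def", "ghi", then the empty token on the final page), which is what
+# the loop above checks. A usage sketch (illustrative only):
+#
+#   for page in client.list_migration_workflows(request={}).pages:
+#       token = page.raw_page.next_page_token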
+
+
+@pytest.mark.asyncio
+async def test_list_migration_workflows_async_pager():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_workflows),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            migration_service.ListMigrationWorkflowsResponse(
+                migration_workflows=[
+                    migration_entities.MigrationWorkflow(),
+                    migration_entities.MigrationWorkflow(),
+                    migration_entities.MigrationWorkflow(),
+                ],
+                next_page_token="abc",
+            ),
+            migration_service.ListMigrationWorkflowsResponse(
+                migration_workflows=[], next_page_token="def",
+            ),
+            migration_service.ListMigrationWorkflowsResponse(
+                migration_workflows=[migration_entities.MigrationWorkflow(),],
+                next_page_token="ghi",
+            ),
+            migration_service.ListMigrationWorkflowsResponse(
+                migration_workflows=[
+                    migration_entities.MigrationWorkflow(),
+                    migration_entities.MigrationWorkflow(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_migration_workflows(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(
+            isinstance(i, migration_entities.MigrationWorkflow) for i in responses
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_migration_workflows_async_pages():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_workflows),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            migration_service.ListMigrationWorkflowsResponse(
+                migration_workflows=[
+                    migration_entities.MigrationWorkflow(),
+                    migration_entities.MigrationWorkflow(),
+                    migration_entities.MigrationWorkflow(),
+                ],
+                next_page_token="abc",
+            ),
+            migration_service.ListMigrationWorkflowsResponse(
+                migration_workflows=[], next_page_token="def",
+            ),
+            migration_service.ListMigrationWorkflowsResponse(
+                migration_workflows=[migration_entities.MigrationWorkflow(),],
+                next_page_token="ghi",
+            ),
+            migration_service.ListMigrationWorkflowsResponse(
+                migration_workflows=[
+                    migration_entities.MigrationWorkflow(),
+                    migration_entities.MigrationWorkflow(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_migration_workflows(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_delete_migration_workflow(
+    transport: str = "grpc",
+    request_type=migration_service.DeleteMigrationWorkflowRequest,
+):
+    client = MigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_migration_workflow), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_migration_workflow(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.DeleteMigrationWorkflowRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_migration_workflow_from_dict():
+    test_delete_migration_workflow(request_type=dict)
+
+
+def test_delete_migration_workflow_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_migration_workflow), "__call__"
+    ) as call:
+        client.delete_migration_workflow()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.DeleteMigrationWorkflowRequest()
+
+
+@pytest.mark.asyncio
+async def test_delete_migration_workflow_async(
+    transport: str = "grpc_asyncio",
+    request_type=migration_service.DeleteMigrationWorkflowRequest,
+):
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_migration_workflow), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.delete_migration_workflow(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.DeleteMigrationWorkflowRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.asyncio
+async def test_delete_migration_workflow_async_from_dict():
+    await test_delete_migration_workflow_async(request_type=dict)
+
+
+def test_delete_migration_workflow_field_headers():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = migration_service.DeleteMigrationWorkflowRequest()
+
+    request.name = "name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_migration_workflow), "__call__"
+    ) as call:
+        call.return_value = None
+        client.delete_migration_workflow(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_migration_workflow_field_headers_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = migration_service.DeleteMigrationWorkflowRequest()
+
+    request.name = "name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_migration_workflow), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_migration_workflow(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_delete_migration_workflow_flattened():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_migration_workflow), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_migration_workflow(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == "name_value"
+
+
+def test_delete_migration_workflow_flattened_error():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_migration_workflow(
+            migration_service.DeleteMigrationWorkflowRequest(), name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_migration_workflow_flattened_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_migration_workflow), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_migration_workflow(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_delete_migration_workflow_flattened_error_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_migration_workflow(
+            migration_service.DeleteMigrationWorkflowRequest(), name="name_value",
+        )
+
+
+def test_start_migration_workflow(
+    transport: str = "grpc",
+    request_type=migration_service.StartMigrationWorkflowRequest,
+):
+    client = MigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.start_migration_workflow), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.start_migration_workflow(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.StartMigrationWorkflowRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_start_migration_workflow_from_dict():
+    test_start_migration_workflow(request_type=dict)
+
+
+def test_start_migration_workflow_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.start_migration_workflow), "__call__"
+    ) as call:
+        client.start_migration_workflow()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.StartMigrationWorkflowRequest()
+
+
+@pytest.mark.asyncio
+async def test_start_migration_workflow_async(
+    transport: str = "grpc_asyncio",
+    request_type=migration_service.StartMigrationWorkflowRequest,
+):
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.start_migration_workflow), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.start_migration_workflow(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.StartMigrationWorkflowRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.asyncio
+async def test_start_migration_workflow_async_from_dict():
+    await test_start_migration_workflow_async(request_type=dict)
+
+
+def test_start_migration_workflow_field_headers():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = migration_service.StartMigrationWorkflowRequest()
+
+    request.name = "name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.start_migration_workflow), "__call__"
+    ) as call:
+        call.return_value = None
+        client.start_migration_workflow(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_start_migration_workflow_field_headers_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = migration_service.StartMigrationWorkflowRequest()
+
+    request.name = "name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.start_migration_workflow), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.start_migration_workflow(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_start_migration_workflow_flattened():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.start_migration_workflow), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.start_migration_workflow(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == "name_value"
+
+
+def test_start_migration_workflow_flattened_error():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.start_migration_workflow(
+            migration_service.StartMigrationWorkflowRequest(), name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_start_migration_workflow_flattened_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.start_migration_workflow), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.start_migration_workflow(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_start_migration_workflow_flattened_error_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.start_migration_workflow(
+            migration_service.StartMigrationWorkflowRequest(), name="name_value",
+        )
+
+
+def test_get_migration_subtask(
+    transport: str = "grpc", request_type=migration_service.GetMigrationSubtaskRequest
+):
+    client = MigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_migration_subtask), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = migration_entities.MigrationSubtask(
+            name="name_value",
+            task_id="task_id_value",
+            type_="type__value",
+            state=migration_entities.MigrationSubtask.State.ACTIVE,
+            resource_error_count=2169,
+        )
+        response = client.get_migration_subtask(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.GetMigrationSubtaskRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, migration_entities.MigrationSubtask)
+    assert response.name == "name_value"
+    assert response.task_id == "task_id_value"
+    assert response.type_ == "type__value"
+    assert response.state == migration_entities.MigrationSubtask.State.ACTIVE
+    assert response.resource_error_count == 2169
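+
+
+# Note on naming: the underlying proto field is "type", which the generator
+# surfaces as the Python attribute "type_" (presumably to avoid clashing with
+# the builtin); the doubled underscore in "type__value" is simply the canned
+# test value derived from that attribute name.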
+
+
+def test_get_migration_subtask_from_dict():
+    test_get_migration_subtask(request_type=dict)
+
+
+def test_get_migration_subtask_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_migration_subtask), "__call__"
+    ) as call:
+        client.get_migration_subtask()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.GetMigrationSubtaskRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_migration_subtask_async(
+    transport: str = "grpc_asyncio",
+    request_type=migration_service.GetMigrationSubtaskRequest,
+):
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_migration_subtask), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            migration_entities.MigrationSubtask(
+                name="name_value",
+                task_id="task_id_value",
+                type_="type__value",
+                state=migration_entities.MigrationSubtask.State.ACTIVE,
+                resource_error_count=2169,
+            )
+        )
+        response = await client.get_migration_subtask(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.GetMigrationSubtaskRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, migration_entities.MigrationSubtask)
+    assert response.name == "name_value"
+    assert response.task_id == "task_id_value"
+    assert response.type_ == "type__value"
+    assert response.state == migration_entities.MigrationSubtask.State.ACTIVE
+    assert response.resource_error_count == 2169
+
+
+@pytest.mark.asyncio
+async def test_get_migration_subtask_async_from_dict():
+    await test_get_migration_subtask_async(request_type=dict)
+
+
+def test_get_migration_subtask_field_headers():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = migration_service.GetMigrationSubtaskRequest()
+
+    request.name = "name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_migration_subtask), "__call__"
+    ) as call:
+        call.return_value = migration_entities.MigrationSubtask()
+        client.get_migration_subtask(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_migration_subtask_field_headers_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = migration_service.GetMigrationSubtaskRequest()
+
+    request.name = "name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_migration_subtask), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            migration_entities.MigrationSubtask()
+        )
+        await client.get_migration_subtask(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_get_migration_subtask_flattened():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_migration_subtask), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = migration_entities.MigrationSubtask()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_migration_subtask(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == "name_value"
+
+
+def test_get_migration_subtask_flattened_error():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_migration_subtask(
+            migration_service.GetMigrationSubtaskRequest(), name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_migration_subtask_flattened_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.get_migration_subtask), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            migration_entities.MigrationSubtask()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_migration_subtask(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_get_migration_subtask_flattened_error_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_migration_subtask(
+            migration_service.GetMigrationSubtaskRequest(), name="name_value",
+        )
+
+
+def test_list_migration_subtasks(
+    transport: str = "grpc", request_type=migration_service.ListMigrationSubtasksRequest
+):
+    client = MigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_subtasks), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = migration_service.ListMigrationSubtasksResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_migration_subtasks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.ListMigrationSubtasksRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListMigrationSubtasksPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_migration_subtasks_from_dict():
+    test_list_migration_subtasks(request_type=dict)
+
+
+def test_list_migration_subtasks_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_subtasks), "__call__"
+    ) as call:
+        client.list_migration_subtasks()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.ListMigrationSubtasksRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_migration_subtasks_async(
+    transport: str = "grpc_asyncio",
+    request_type=migration_service.ListMigrationSubtasksRequest,
+):
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_subtasks), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            migration_service.ListMigrationSubtasksResponse(
+                next_page_token="next_page_token_value",
+            )
+        )
+        response = await client.list_migration_subtasks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == migration_service.ListMigrationSubtasksRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListMigrationSubtasksAsyncPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+@pytest.mark.asyncio
+async def test_list_migration_subtasks_async_from_dict():
+    await test_list_migration_subtasks_async(request_type=dict)
+
+
+def test_list_migration_subtasks_field_headers():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = migration_service.ListMigrationSubtasksRequest()
+
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_subtasks), "__call__"
+    ) as call:
+        call.return_value = migration_service.ListMigrationSubtasksResponse()
+        client.list_migration_subtasks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_migration_subtasks_field_headers_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = migration_service.ListMigrationSubtasksRequest()
+
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_subtasks), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            migration_service.ListMigrationSubtasksResponse()
+        )
+        await client.list_migration_subtasks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_list_migration_subtasks_flattened():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_subtasks), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = migration_service.ListMigrationSubtasksResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_migration_subtasks(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == "parent_value"
+
+
+def test_list_migration_subtasks_flattened_error():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_migration_subtasks(
+            migration_service.ListMigrationSubtasksRequest(), parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_migration_subtasks_flattened_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_subtasks), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            migration_service.ListMigrationSubtasksResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_migration_subtasks(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == "parent_value"
+
+
+@pytest.mark.asyncio
+async def test_list_migration_subtasks_flattened_error_async():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_migration_subtasks(
+            migration_service.ListMigrationSubtasksRequest(), parent="parent_value",
+        )
+
+
+def test_list_migration_subtasks_pager():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_subtasks), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            migration_service.ListMigrationSubtasksResponse(
+                migration_subtasks=[
+                    migration_entities.MigrationSubtask(),
+                    migration_entities.MigrationSubtask(),
+                    migration_entities.MigrationSubtask(),
+                ],
+                next_page_token="abc",
+            ),
+            migration_service.ListMigrationSubtasksResponse(
+                migration_subtasks=[], next_page_token="def",
+            ),
+            migration_service.ListMigrationSubtasksResponse(
+                migration_subtasks=[migration_entities.MigrationSubtask(),],
+                next_page_token="ghi",
+            ),
+            migration_service.ListMigrationSubtasksResponse(
+                migration_subtasks=[
+                    migration_entities.MigrationSubtask(),
+                    migration_entities.MigrationSubtask(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_migration_subtasks(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, migration_entities.MigrationSubtask) for i in results)
+
+
+def test_list_migration_subtasks_pages():
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_subtasks), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            migration_service.ListMigrationSubtasksResponse(
+                migration_subtasks=[
+                    migration_entities.MigrationSubtask(),
+                    migration_entities.MigrationSubtask(),
+                    migration_entities.MigrationSubtask(),
+                ],
+                next_page_token="abc",
+            ),
+            migration_service.ListMigrationSubtasksResponse(
+                migration_subtasks=[], next_page_token="def",
+            ),
+            migration_service.ListMigrationSubtasksResponse(
+                migration_subtasks=[migration_entities.MigrationSubtask(),],
+                next_page_token="ghi",
+            ),
+            migration_service.ListMigrationSubtasksResponse(
+                migration_subtasks=[
+                    migration_entities.MigrationSubtask(),
+                    migration_entities.MigrationSubtask(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_migration_subtasks(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_migration_subtasks_async_pager():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_subtasks),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            migration_service.ListMigrationSubtasksResponse(
+                migration_subtasks=[
+                    migration_entities.MigrationSubtask(),
+                    migration_entities.MigrationSubtask(),
+                    migration_entities.MigrationSubtask(),
+                ],
+                next_page_token="abc",
+            ),
+            migration_service.ListMigrationSubtasksResponse(
+                migration_subtasks=[], next_page_token="def",
+            ),
+            migration_service.ListMigrationSubtasksResponse(
+                migration_subtasks=[migration_entities.MigrationSubtask(),],
+                next_page_token="ghi",
+            ),
+            migration_service.ListMigrationSubtasksResponse(
+                migration_subtasks=[
+                    migration_entities.MigrationSubtask(),
+                    migration_entities.MigrationSubtask(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_migration_subtasks(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(
+            isinstance(i, migration_entities.MigrationSubtask) for i in responses
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_migration_subtasks_async_pages():
+    client = MigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_migration_subtasks),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            migration_service.ListMigrationSubtasksResponse(
+                migration_subtasks=[
+                    migration_entities.MigrationSubtask(),
+                    migration_entities.MigrationSubtask(),
+                    migration_entities.MigrationSubtask(),
+                ],
+                next_page_token="abc",
+            ),
+            migration_service.ListMigrationSubtasksResponse(
+                migration_subtasks=[], next_page_token="def",
+            ),
+            migration_service.ListMigrationSubtasksResponse(
+                migration_subtasks=[migration_entities.MigrationSubtask(),],
+                next_page_token="ghi",
+            ),
+            migration_service.ListMigrationSubtasksResponse(
+                migration_subtasks=[
+                    migration_entities.MigrationSubtask(),
+                    migration_entities.MigrationSubtask(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_migration_subtasks(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
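+# A minimal usage sketch (illustrative only, not exercised by the tests above):
+# callers normally consume the pager directly instead of handling next_page_token
+# themselves. The helper assumes an already-configured client and a parent
+# resource name.
+def _example_collect_all_migration_subtasks(client, parent):
+    # Iterating the pager transparently fetches successive pages.
+    return [subtask for subtask in client.list_migration_subtasks(parent=parent)]
+
+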
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.MigrationServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = MigrationServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.MigrationServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = MigrationServiceClient(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide scopes and a transport instance.
+    transport = transports.MigrationServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = MigrationServiceClient(
+            client_options={"scopes": ["1", "2"]}, transport=transport,
+        )
+
+
+def test_transport_instance():
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.MigrationServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    client = MigrationServiceClient(transport=transport)
+    assert client.transport is transport
+
+
+def test_transport_get_channel():
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.MigrationServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    channel = transport.grpc_channel
+    assert channel
+
+    transport = transports.MigrationServiceGrpcAsyncIOTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    channel = transport.grpc_channel
+    assert channel
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.MigrationServiceGrpcTransport,
+        transports.MigrationServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_transport_adc(transport_class):
+    # Test default credentials are used if not provided.
+    with mock.patch.object(google.auth, "default") as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class()
+        adc.assert_called_once()
+
+
+def test_transport_grpc_default():
+    # A client should use the gRPC transport by default.
+    client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+    assert isinstance(client.transport, transports.MigrationServiceGrpcTransport,)
+
+
+def test_migration_service_base_transport_error():
+    # Passing both a credentials object and credentials_file should raise an error
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+        transport = transports.MigrationServiceTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+            credentials_file="credentials.json",
+        )
+
+
+def test_migration_service_base_transport():
+    # Instantiate the base transport.
+    with mock.patch(
+        "google.cloud.bigquery.migration_v2alpha.services.migration_service.transports.MigrationServiceTransport.__init__"
+    ) as Transport:
+        Transport.return_value = None
+        transport = transports.MigrationServiceTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
+    methods = (
+        "create_migration_workflow",
+        "get_migration_workflow",
+        "list_migration_workflows",
+        "delete_migration_workflow",
+        "start_migration_workflow",
+        "get_migration_subtask",
+        "list_migration_subtasks",
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+
+@requires_google_auth_gte_1_25_0
+def test_migration_service_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch(
+        "google.cloud.bigquery.migration_v2alpha.services.migration_service.transports.MigrationServiceTransport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.MigrationServiceTransport(
+            credentials_file="credentials.json", quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with(
+            "credentials.json",
+            scopes=None,
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
+
+
+@requires_google_auth_lt_1_25_0
+def test_migration_service_base_transport_with_credentials_file_old_google_auth():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch(
+        "google.cloud.bigquery.migration_v2alpha.services.migration_service.transports.MigrationServiceTransport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.MigrationServiceTransport(
+            credentials_file="credentials.json", quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with(
+            "credentials.json",
+            scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
+
+
+def test_migration_service_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
+        "google.cloud.bigquery.migration_v2alpha.services.migration_service.transports.MigrationServiceTransport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.MigrationServiceTransport()
+        adc.assert_called_once()
+
+
+@requires_google_auth_gte_1_25_0
+def test_migration_service_auth_adc():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        MigrationServiceClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id=None,
+        )
+
+
+@requires_google_auth_lt_1_25_0
+def test_migration_service_auth_adc_old_google_auth():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        MigrationServiceClient()
+        adc.assert_called_once_with(
+            scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id=None,
+        )
+
+
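+# A minimal sketch (illustrative only): the ADC tests above mock google.auth.default;
+# when called for real, this helper assumes Application Default Credentials are
+# configured in the environment.
+def _example_client_from_adc():
+    # google.auth.default() resolves Application Default Credentials; the client
+    # performs the same lookup when no credentials are passed explicitly.
+    credentials, _ = google.auth.default(
+        scopes=("https://www.googleapis.com/auth/cloud-platform",)
+    )
+    return MigrationServiceClient(credentials=credentials)
+
+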
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.MigrationServiceGrpcTransport,
+        transports.MigrationServiceGrpcAsyncIOTransport,
+    ],
+)
+@requires_google_auth_gte_1_25_0
+def test_migration_service_transport_auth_adc(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.MigrationServiceGrpcTransport,
+        transports.MigrationServiceGrpcAsyncIOTransport,
+    ],
+)
+@requires_google_auth_lt_1_25_0
+def test_migration_service_transport_auth_adc_old_google_auth(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus")
+        adc.assert_called_once_with(
+            scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.MigrationServiceGrpcTransport, grpc_helpers),
+        (transports.MigrationServiceGrpcAsyncIOTransport, grpc_helpers_async),
+    ],
+)
+def test_migration_service_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+        create_channel.assert_called_with(
+            "bigquerymigration.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            scopes=["1", "2"],
+            default_host="bigquerymigration.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.MigrationServiceGrpcTransport,
+        transports.MigrationServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_migration_service_grpc_transport_client_cert_source_for_mtls(transport_class):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds,
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check that client_cert_source_for_mtls is used when ssl_channel_credentials
+    # is not provided.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
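+# A minimal sketch (illustrative only): a caller that already holds channel
+# credentials can pass them via ssl_channel_credentials instead of the
+# client_cert_source_for_mtls callback exercised above. The host and anonymous
+# credentials below are placeholders; constructing the transport does not open
+# a connection.
+def _example_transport_with_channel_credentials(host="squid.clam.whelk"):
+    return transports.MigrationServiceGrpcTransport(
+        host=host,
+        credentials=ga_credentials.AnonymousCredentials(),
+        ssl_channel_credentials=grpc.local_channel_credentials(),
+    )
+
+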
+def test_migration_service_host_no_port():
+    client = MigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="bigquerymigration.googleapis.com"
+        ),
+    )
+    assert client.transport._host == "bigquerymigration.googleapis.com:443"
+
+
+def test_migration_service_host_with_port():
+    client = MigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="bigquerymigration.googleapis.com:8000"
+        ),
+    )
+    assert client.transport._host == "bigquerymigration.googleapis.com:8000"
+
+
+def test_migration_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.MigrationServiceGrpcTransport(
+        host="squid.clam.whelk", channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_migration_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.MigrationServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk", channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when the deprecated arguments (api_mtls_endpoint, client_cert_source)
+# are removed from the grpc/grpc_asyncio transport constructors.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.MigrationServiceGrpcTransport,
+        transports.MigrationServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_migration_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when the deprecated arguments (api_mtls_endpoint, client_cert_source)
+# are removed from the grpc/grpc_asyncio transport constructors.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.MigrationServiceGrpcTransport,
+        transports.MigrationServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_migration_service_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_migration_subtask_path():
+    project = "squid"
+    location = "clam"
+    workflow = "whelk"
+    subtask = "octopus"
+    expected = "projects/{project}/locations/{location}/workflows/{workflow}/subtasks/{subtask}".format(
+        project=project, location=location, workflow=workflow, subtask=subtask,
+    )
+    actual = MigrationServiceClient.migration_subtask_path(
+        project, location, workflow, subtask
+    )
+    assert expected == actual
+
+
+def test_parse_migration_subtask_path():
+    expected = {
+        "project": "oyster",
+        "location": "nudibranch",
+        "workflow": "cuttlefish",
+        "subtask": "mussel",
+    }
+    path = MigrationServiceClient.migration_subtask_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = MigrationServiceClient.parse_migration_subtask_path(path)
+    assert expected == actual
+
+
+def test_migration_workflow_path():
+    project = "winkle"
+    location = "nautilus"
+    workflow = "scallop"
+    expected = "projects/{project}/locations/{location}/workflows/{workflow}".format(
+        project=project, location=location, workflow=workflow,
+    )
+    actual = MigrationServiceClient.migration_workflow_path(project, location, workflow)
+    assert expected == actual
+
+
+def test_parse_migration_workflow_path():
+    expected = {
+        "project": "abalone",
+        "location": "squid",
+        "workflow": "clam",
+    }
+    path = MigrationServiceClient.migration_workflow_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = MigrationServiceClient.parse_migration_workflow_path(path)
+    assert expected == actual
+
+
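+# A minimal usage sketch (illustrative only): the path helpers are plain string
+# builders, so they can be called without credentials or network access. The
+# resource IDs below are placeholders.
+def _example_workflow_resource_name(project="my-project", location="us", workflow="wf"):
+    # Produces "projects/my-project/locations/us/workflows/wf".
+    return MigrationServiceClient.migration_workflow_path(project, location, workflow)
+
+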
+def test_common_billing_account_path():
+    billing_account = "whelk"
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
+    actual = MigrationServiceClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "octopus",
+    }
+    path = MigrationServiceClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = MigrationServiceClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+
+def test_common_folder_path():
+    folder = "oyster"
+    expected = "folders/{folder}".format(folder=folder,)
+    actual = MigrationServiceClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "nudibranch",
+    }
+    path = MigrationServiceClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = MigrationServiceClient.parse_common_folder_path(path)
+    assert expected == actual
+
+
+def test_common_organization_path():
+    organization = "cuttlefish"
+    expected = "organizations/{organization}".format(organization=organization,)
+    actual = MigrationServiceClient.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "mussel",
+    }
+    path = MigrationServiceClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = MigrationServiceClient.parse_common_organization_path(path)
+    assert expected == actual
+
+
+def test_common_project_path():
+    project = "winkle"
+    expected = "projects/{project}".format(project=project,)
+    actual = MigrationServiceClient.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    expected = {
+        "project": "nautilus",
+    }
+    path = MigrationServiceClient.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = MigrationServiceClient.parse_common_project_path(path)
+    assert expected == actual
+
+
+def test_common_location_path():
+    project = "scallop"
+    location = "abalone"
+    expected = "projects/{project}/locations/{location}".format(
+        project=project, location=location,
+    )
+    actual = MigrationServiceClient.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    expected = {
+        "project": "squid",
+        "location": "clam",
+    }
+    path = MigrationServiceClient.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = MigrationServiceClient.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_default_client_info():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(
+        transports.MigrationServiceTransport, "_prep_wrapped_messages"
+    ) as prep:
+        client = MigrationServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(
+        transports.MigrationServiceTransport, "_prep_wrapped_messages"
+    ) as prep:
+        transport_class = MigrationServiceClient.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)