diff --git a/scripts/generate_json_docs.py b/scripts/generate_json_docs.py index 6bc303219df4..f8376568d8e5 100644 --- a/scripts/generate_json_docs.py +++ b/scripts/generate_json_docs.py @@ -26,6 +26,7 @@ from parinx.errors import MethodParsingException import six +from script_utils import PROJECT_ROOT from verify_included_modules import get_public_modules @@ -601,7 +602,7 @@ def main(): parser.add_argument('--tag', help='The version of the documentation.', default='master') parser.add_argument('--basepath', help='Path to the library.', - default=os.path.join(os.path.dirname(__file__), '..')) + default=PROJECT_ROOT) parser.add_argument('--show-toc', help='Prints partial table of contents', default=False) args = parser.parse_args() @@ -635,10 +636,9 @@ def main(): } } - BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) - BASE_JSON_DOCS_DIR = os.path.join(BASE_DIR, 'docs', 'json') + BASE_JSON_DOCS_DIR = os.path.join(PROJECT_ROOT, 'docs', 'json') - DOCS_BUILD_DIR = os.path.join(BASE_DIR, 'docs', '_build') + DOCS_BUILD_DIR = os.path.join(PROJECT_ROOT, 'docs', '_build') JSON_DOCS_DIR = os.path.join(DOCS_BUILD_DIR, 'json', args.tag) LIB_DIR = os.path.abspath(args.basepath) @@ -646,7 +646,7 @@ def main(): public_mods = get_public_modules(library_dir, base_package='google.cloud') - generate_module_docs(public_mods, JSON_DOCS_DIR, BASE_DIR, toc) + generate_module_docs(public_mods, JSON_DOCS_DIR, PROJECT_ROOT, toc) generate_doc_types_json(public_mods, os.path.join(JSON_DOCS_DIR, 'types.json')) package_files(JSON_DOCS_DIR, DOCS_BUILD_DIR, BASE_JSON_DOCS_DIR) diff --git a/scripts/make_datastore_grpc.py b/scripts/make_datastore_grpc.py index 33db9b313a0f..b0e67ffc7f62 100644 --- a/scripts/make_datastore_grpc.py +++ b/scripts/make_datastore_grpc.py @@ -20,13 +20,13 @@ import sys import tempfile +from script_utils import PROJECT_ROOT -ROOT_DIR = os.path.abspath( - os.path.join(os.path.dirname(__file__), '..')) -PROTOS_DIR = os.path.join(ROOT_DIR, 
'googleapis-pb') + +PROTOS_DIR = os.path.join(PROJECT_ROOT, 'googleapis-pb') PROTO_PATH = os.path.join(PROTOS_DIR, 'google', 'datastore', 'v1', 'datastore.proto') -GRPC_ONLY_FILE = os.path.join(ROOT_DIR, 'datastore', +GRPC_ONLY_FILE = os.path.join(PROJECT_ROOT, 'datastore', 'google', 'cloud', 'datastore', '_generated', 'datastore_grpc_pb2.py') GRPCIO_VIRTUALENV = os.getenv('GRPCIO_VIRTUALENV') diff --git a/scripts/run_pylint.py b/scripts/run_pylint.py index b9b2a7731c23..c18204c50e3c 100644 --- a/scripts/run_pylint.py +++ b/scripts/run_pylint.py @@ -30,6 +30,7 @@ import sys from script_utils import get_affected_files +from script_utils import PROJECT_ROOT IGNORED_DIRECTORIES = [ @@ -44,7 +45,7 @@ os.path.join('google', 'cloud', '__init__.py'), 'setup.py', ] -SCRIPTS_DIR = os.path.abspath(os.path.dirname(__file__)) +SCRIPTS_DIR = os.path.join(PROJECT_ROOT, 'scripts') PRODUCTION_RC = os.path.join(SCRIPTS_DIR, 'pylintrc_default') TEST_RC = os.path.join(SCRIPTS_DIR, 'pylintrc_reduced') TEST_DISABLED_MESSAGES = [ diff --git a/scripts/run_unit_tests.py b/scripts/run_unit_tests.py index 4254f2f8e6b9..44334a6f3f0b 100644 --- a/scripts/run_unit_tests.py +++ b/scripts/run_unit_tests.py @@ -27,14 +27,14 @@ import sys from script_utils import check_output +from script_utils import follow_dependencies from script_utils import get_changed_packages from script_utils import in_travis from script_utils import in_travis_pr +from script_utils import PROJECT_ROOT from script_utils import travis_branch -PROJECT_ROOT = os.path.abspath( - os.path.join(os.path.dirname(__file__), '..')) IGNORED_DIRECTORIES = ( 'appveyor', 'docs', @@ -121,7 +121,9 @@ def get_test_packages(): * Check command line for packages passed in as positional arguments * Check if in Travis, then limit the subset based on changes in a Pull Request ("push" builds to branches may not have - any filtering) + any filtering). 
Once the filtered list of **changed** packages + is found, the package dependency graph is used to add any + additional packages which depend on the changed packages. * Just use all packages :rtype: list @@ -136,7 +138,8 @@ def get_test_packages(): verify_packages(args.packages, all_packages) return sorted(args.packages) elif in_travis(): - return get_travis_directories(all_packages) + changed_packages = get_travis_directories(all_packages) + return follow_dependencies(changed_packages, all_packages) else: return all_packages diff --git a/scripts/script_utils.py b/scripts/script_utils.py index 370408e6faa3..9c8c66fc1ea8 100644 --- a/scripts/script_utils.py +++ b/scripts/script_utils.py @@ -20,11 +20,14 @@ import subprocess +PROJECT_ROOT = os.path.abspath( + os.path.join(os.path.dirname(__file__), '..')) LOCAL_REMOTE_ENV = 'GOOGLE_CLOUD_TESTING_REMOTE' LOCAL_BRANCH_ENV = 'GOOGLE_CLOUD_TESTING_BRANCH' IN_TRAVIS_ENV = 'TRAVIS' TRAVIS_PR_ENV = 'TRAVIS_PULL_REQUEST' TRAVIS_BRANCH_ENV = 'TRAVIS_BRANCH' +PACKAGE_PREFIX = 'google-cloud-' def in_travis(): @@ -213,3 +216,79 @@ def get_affected_files(allow_limited=True): result = subprocess.check_output(['git', 'ls-files']) return result.rstrip('\n').split('\n'), diff_base + + +def get_required_packages(file_contents): + """Get required packages from a requirements.txt file. + + .. note:: + + This could be done in a bit more complete way via + https://pypi.python.org/pypi/requirements-parser + + :type file_contents: str + :param file_contents: The contents of a requirements.txt file. + + :rtype: list + :returns: The list of required packages. + """ + requirements = file_contents.strip().split('\n') + result = [] + for required in requirements: + parts = required.split() + result.append(parts[0]) + return result + + +def get_dependency_graph(package_list): + """Get a directed graph of package dependencies. + + :type package_list: list + :param package_list: The list of **all** valid packages. 
+
+    :rtype: dict
+    :returns: A dictionary where keys are packages and values are
+              the set of packages that depend on the key.
+    """
+    result = {package: set() for package in package_list}
+    for package in package_list:
+        reqs_file = os.path.join(PROJECT_ROOT, package,
+                                 'requirements.txt')
+        with open(reqs_file, 'r') as file_obj:
+            file_contents = file_obj.read()
+
+        requirements = get_required_packages(file_contents)
+        for requirement in requirements:
+            if not requirement.startswith(PACKAGE_PREFIX):
+                continue
+            _, req_package = requirement.split(PACKAGE_PREFIX)
+            req_package = req_package.replace('-', '_')
+            result[req_package].add(package)
+
+    return result
+
+
+def follow_dependencies(subset, package_list):
+    """Expand a subset of packages to include those that depend on them.
+
+    :type subset: list
+    :param subset: List of a subset of package names.
+
+    :type package_list: list
+    :param package_list: The list of **all** valid packages.
+
+    :rtype: list
+    :returns: An expanded list of packages containing everything
+              in ``subset`` and any packages that depend on those. 
+ """ + dependency_graph = get_dependency_graph(package_list) + + curr_pkgs = None + updated_pkgs = set(subset) + while curr_pkgs != updated_pkgs: + curr_pkgs = updated_pkgs + updated_pkgs = set(curr_pkgs) + for package in curr_pkgs: + updated_pkgs.update(dependency_graph[package]) + + return sorted(curr_pkgs) diff --git a/scripts/verify_included_modules.py b/scripts/verify_included_modules.py index f528966278f3..ed447585e2d5 100644 --- a/scripts/verify_included_modules.py +++ b/scripts/verify_included_modules.py @@ -24,10 +24,10 @@ from sphinx.ext.intersphinx import fetch_inventory +from script_utils import PROJECT_ROOT -BASE_DIR = os.path.abspath( - os.path.join(os.path.dirname(__file__), '..')) -DOCS_DIR = os.path.join(BASE_DIR, 'docs') + +DOCS_DIR = os.path.join(PROJECT_ROOT, 'docs') IGNORED_PREFIXES = ('test_', '_') IGNORED_MODULES = frozenset([ 'google.cloud.__init__', @@ -153,7 +153,7 @@ def verify_modules(build_root='_build'): public_mods = set() for package in PACKAGES: - library_dir = os.path.join(BASE_DIR, package, 'google', 'cloud') + library_dir = os.path.join(PROJECT_ROOT, package, 'google', 'cloud') package_mods = get_public_modules(library_dir, base_package='google.cloud') public_mods.update(package_mods)