From df1f2b22547b7ca86bbdb791ad930003a815a677 Mon Sep 17 00:00:00 2001
From: James Winegar
Date: Tue, 20 Nov 2018 14:42:11 -0600
Subject: [PATCH] Update submit_job_to_cluster.py [(#1708)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1708)

Switch region to the new 'global' region and remove the now-unnecessary
function.
---
 samples/snippets/submit_job_to_cluster.py | 10 +---------
 1 file changed, 1 insertion(+), 9 deletions(-)

diff --git a/samples/snippets/submit_job_to_cluster.py b/samples/snippets/submit_job_to_cluster.py
index f06d5981..ed49013d 100644
--- a/samples/snippets/submit_job_to_cluster.py
+++ b/samples/snippets/submit_job_to_cluster.py
@@ -34,14 +34,6 @@ def get_pyspark_file(filename):
     return f, os.path.basename(filename)
 
 
-def get_region_from_zone(zone):
-    try:
-        region_as_list = zone.split('-')[:-1]
-        return '-'.join(region_as_list)
-    except (AttributeError, IndexError, ValueError):
-        raise ValueError('Invalid zone provided, please check your input.')
-
-
 def upload_pyspark_file(project_id, bucket_name, filename, file):
     """Uploads the PySpark file in this directory to the configured input
     bucket."""
@@ -199,7 +191,7 @@ def get_client():
 def main(project_id, zone, cluster_name, bucket_name, pyspark_file=None,
          create_new_cluster=True):
     dataproc = get_client()
-    region = get_region_from_zone(zone)
+    region = 'global'
     try:
         if pyspark_file:
             spark_file, spark_filename = get_pyspark_file(pyspark_file)