Skip to content

Commit

Permalink
fix(components): replace resource name class with vertex ai (#5702)
Browse files Browse the repository at this point in the history
* fix: replace resource name class with vertex ai

* fix: remove pipeline_root arg from compile in tests

* fix: remove aiplatformresourcenoun reference from test

* fix: change output_path to package_path

* fix: update output artifacts to be compatible with latest KFP SDK

* fix: update tests
  • Loading branch information
sasha-gitg authored May 20, 2021
1 parent 54ac9a6 commit a1ecb3d
Show file tree
Hide file tree
Showing 7 changed files with 21 additions and 24 deletions.
2 changes: 1 addition & 1 deletion components/google-cloud/dependencies.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
def make_required_install_packages():
    """Return the pinned install-time dependencies for the package.

    Post-commit state of the diff: the `google-cloud-aiplatform` floor is
    raised to 1.0.0 (the release that renamed `AiPlatformResourceNoun` to
    `VertexAiResourceNoun`), replacing the old `>=0.7.1,<1.0.0` pin.

    Returns:
        list[str]: pip requirement specifiers consumed by setup().
    """
    return [
        # KFP v1 SDK only; v2 changed the compiler API surface.
        "kfp>=1.4.0,<2.0.0",
        # 1.0.0+ required for aiplatform.base.VertexAiResourceNoun.
        "google-cloud-aiplatform>=1.0.0",
    ]


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,8 @@ def write_to_artifact(executor_input, text):
# "bq://": For BigQuery resources.
elif text.startswith(RESOURCE_PREFIX.get('bigquery')):
uri_with_prefix = text
else:
uri_with_prefix = text

runtime_artifact = {
"name": artifact.get('name'),
Expand All @@ -111,7 +113,7 @@ def write_to_artifact(executor_input, text):
def resolve_input_args(value, type_to_resolve):
"""If this is an input from Pipelines, read it directly from gcs."""
if inspect.isclass(type_to_resolve) and issubclass(
type_to_resolve, aiplatform.base.AiPlatformResourceNoun):
type_to_resolve, aiplatform.base.VertexAiResourceNoun):
# Remove '/gcs/' prefix before attempting to remove `aiplatform` prefix
if value.startswith(RESOURCE_PREFIX['google_cloud_storage_gcs_fuse']):
value = value[len(RESOURCE_PREFIX['google_cloud_storage_gcs_fuse']):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@

def get_forward_reference(
annotation: Any
) -> Optional[aiplatform.base.AiPlatformResourceNoun]:
) -> Optional[aiplatform.base.VertexAiResourceNoun]:
"""Resolves forward references to AiPlatform Class."""

def get_aiplatform_class_by_name(_annotation):
Expand Down Expand Up @@ -84,7 +84,7 @@ def resolve_annotation(annotation: Any) -> Any:

# if this is an Ai Platform resource noun
if inspect.isclass(annotation):
if issubclass(annotation, aiplatform.base.AiPlatformResourceNoun):
if issubclass(annotation, aiplatform.base.VertexAiResourceNoun):
return annotation

# handle forward references
Expand Down Expand Up @@ -129,7 +129,7 @@ def is_mb_sdk_resource_noun_type(mb_sdk_type: Any) -> bool:
True if this is a resource noun
"""
if inspect.isclass(mb_sdk_type):
return issubclass(mb_sdk_type, aiplatform.base.AiPlatformResourceNoun)
return issubclass(mb_sdk_type, aiplatform.base.VertexAiResourceNoun)
return False


Expand Down Expand Up @@ -163,7 +163,7 @@ def get_deserializer(annotation: Any) -> Optional[Callable[..., str]]:


def map_resource_to_metadata_type(
mb_sdk_type: aiplatform.base.AiPlatformResourceNoun
mb_sdk_type: aiplatform.base.VertexAiResourceNoun
) -> Tuple[str, str]:
"""Maps an MB SDK type to Metadata type.
Expand All @@ -187,7 +187,7 @@ def map_resource_to_metadata_type(
# handles the case of exported_dataset
# TODO generalize to all serializable outputs
if is_serializable_to_json(mb_sdk_type):
return "exported_dataset", "JsonArray"
return "exported_dataset", "Dataset"

# handles the case of imported datasets
if mb_sdk_type == '_Dataset':
Expand All @@ -197,7 +197,7 @@ def map_resource_to_metadata_type(
def should_be_metadata_type(mb_sdk_type: Any) -> bool:
"""Determines if type passed in should be a metadata type."""
if inspect.isclass(mb_sdk_type):
return issubclass(mb_sdk_type, aiplatform.base.AiPlatformResourceNoun)
return issubclass(mb_sdk_type, aiplatform.base.VertexAiResourceNoun)
return False


Expand All @@ -215,15 +215,15 @@ def is_resource_name_parameter_name(param_name: str) -> bool:
def filter_signature(
signature: inspect.Signature,
is_init_signature: bool = False,
self_type: Optional[aiplatform.base.AiPlatformResourceNoun] = None,
self_type: Optional[aiplatform.base.VertexAiResourceNoun] = None,
component_param_name_to_mb_sdk_param_name: Dict[str, str] = None
) -> inspect.Signature:
"""Removes unused params from signature.
Args:
signature (inspect.Signature): Model Builder SDK Method Signature.
is_init_signature (bool): is this constructor signature
self_type (aiplatform.base.AiPlatformResourceNoun): This is used to
self_type (aiplatform.base.VertexAiResourceNoun): This is used to
replace *_name str fields with resource name type.
component_param_name_to_mb_sdk_param_name dict[str, str]: Mapping to
keep track of param names changed to make them component
Expand Down Expand Up @@ -371,7 +371,7 @@ def generate_docstring(


def convert_method_to_component(
cls: aiplatform.base.AiPlatformResourceNoun, method: Callable
cls: aiplatform.base.VertexAiResourceNoun, method: Callable
) -> Callable:
"""Converts a MB SDK Method to a Component wrapper.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,4 @@
# limitations under the License.
"""Contains the version string of Google Cloud Pipeline Components."""

__version__ = "0.0.1.dev"
__version__ = "0.0.2.dev"
15 changes: 5 additions & 10 deletions components/google-cloud/tests/aiplatform/test_componets_compile.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,8 +108,7 @@ def pipeline():

compiler.Compiler().compile(
pipeline_func=pipeline,
pipeline_root=self._pipeline_root,
output_path="pipeline.json"
package_path="pipeline.json"
)

def test_tabular_data_pipeline_component_ops_compile(self):
Expand Down Expand Up @@ -159,8 +158,7 @@ def pipeline():

compiler.Compiler().compile(
pipeline_func=pipeline,
pipeline_root=self._pipeline_root,
output_path="pipeline.json"
package_path="pipeline.json"
)

def test_text_data_pipeline_component_ops_compile(self):
Expand Down Expand Up @@ -215,8 +213,7 @@ def pipeline():

compiler.Compiler().compile(
pipeline_func=pipeline,
pipeline_root=self._pipeline_root,
output_path="pipeline.json"
package_path="pipeline.json"
)

def test_video_data_pipeline_component_ops_compile(self):
Expand Down Expand Up @@ -270,8 +267,7 @@ def pipeline():

compiler.Compiler().compile(
pipeline_func=pipeline,
pipeline_root=self._pipeline_root,
output_path="pipeline.json"
package_path="pipeline.json"
)

def test_model_pipeline_component_ops_compile(self):
Expand Down Expand Up @@ -304,6 +300,5 @@ def pipeline():

compiler.Compiler().compile(
pipeline_func=pipeline,
pipeline_root=self._pipeline_root,
output_path="pipeline.json"
package_path="pipeline.json"
)
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,7 @@ def test_resolve_input_args_resource_noun_removes_gcs_prefix(self):
self.assertEqual(result, expected_result)

def test_resolve_input_args_resource_noun_not_changed(self):
type_to_resolve = aiplatform.base.AiPlatformResourceNoun
type_to_resolve = aiplatform.base.VertexAiResourceNoun
value = 'test_resource_name'
expected_result = 'test_resource_name'

Expand Down
2 changes: 1 addition & 1 deletion components/google-cloud/tests/aiplatform/test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,7 @@ def test_map_resource_to_metadata_type_with_serializable_type(self):
mb_sdk_type
)
self.assertEqual(parameter_name, 'exported_dataset')
self.assertEqual(parameter_type, 'JsonArray')
self.assertEqual(parameter_type, 'Dataset')

def test_map_resource_to_metadata_type_with__Dataset_type(self):
mb_sdk_type = '_Dataset'
Expand Down

0 comments on commit a1ecb3d

Please sign in to comment.