Add gcp secret parameter to container op (#261)
* add secret

* add secret to container op

* update comments

* address comments

* update logic

* fix
IronPan authored and k8s-ci-robot committed Nov 15, 2018
1 parent d606b39 commit 7e34b12
Showing 5 changed files with 223 additions and 3 deletions.
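
In practice, the new gcp_secret parameter is set directly on a ContainerOp. A minimal usage sketch (not part of this commit; it mirrors the testdata sample added below and assumes a Kubernetes secret named user-gcp-sa already exists in the cluster):

import kfp.dsl as dsl

@dsl.pipeline(
  name='GCP Secret Example',
  description='Runs a step with a mounted GCP service-account secret.'
)
def gcp_secret_example():
  dsl.ContainerOp(
      name='list',
      image='google/cloud-sdk',
      command=['sh', '-c'],
      arguments=['ls | tee /tmp/results.txt'],
      # Mounts the secret "user-gcp-sa" and points GOOGLE_APPLICATION_CREDENTIALS at it.
      gcp_secret='user-gcp-sa',
      file_outputs={'listing': '/tmp/results.txt'})
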
31 changes: 31 additions & 0 deletions sdk/python/kfp/compiler/compiler.py
@@ -144,6 +144,19 @@ def _op_to_template(self, op):
    if op.cpu_request:
      template['container']['resources']['requests']['cpu'] = op.cpu_request

    if op.gcp_secret:
      template['container']['env'] = [
        {
          'name': 'GOOGLE_APPLICATION_CREDENTIALS',
          'value': ('/secret/gcp-credentials/%s.json' % op.gcp_secret),
        },
      ]
      template['container']['volumeMounts'] = [
        {
          'name': op.name + '-gcp-credentials',
          'mountPath': '/secret/gcp-credentials',
        },
      ]
    return template

  def _get_groups_for_ops(self, root_group):
@@ -424,6 +437,21 @@ def _create_templates(self, pipeline):
      templates.append(self._op_to_template(op))
    return templates

  def _create_volumes(self, pipeline):
    """Create volumes required for the templates"""
    volumes = []
    for op in pipeline.ops.values():
      if op.gcp_secret:
        volume = {
          'name': op.name + '-gcp-credentials',
          'secret': {
            'secretName': op.gcp_secret,
          }
        }
        volumes.append(volume)
    volumes.sort(key=lambda x: x['name'])
    return volumes

  def _create_pipeline_workflow(self, args, pipeline):
    """Create workflow for the pipeline."""

@@ -443,6 +471,7 @@ def _create_pipeline_workflow(self, args, pipeline):
    if first_group.type == 'exit_handler':
      exit_handler = first_group.exit_op

    volumes = self._create_volumes(pipeline)
    workflow = {
      'apiVersion': 'argoproj.io/v1alpha1',
      'kind': 'Workflow',
@@ -456,6 +485,8 @@
    }
    if exit_handler:
      workflow['spec']['onExit'] = exit_handler.name
    if volumes:
      workflow['spec']['volumes'] = volumes
    return workflow

  def _validate_args(self, argspec):
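Note that _create_volumes only references the secret by name (secretName); the compiler does not create it. A sketch of provisioning a matching secret with the official Kubernetes Python client, under the assumption that the key must be named '<secret-name>.json' so it lines up with the GOOGLE_APPLICATION_CREDENTIALS path set in _op_to_template (the namespace and key-file path are placeholders):

from kubernetes import client, config

config.load_kube_config()  # or config.load_incluster_config() when running inside the cluster

with open('gcp-service-account-key.json') as f:  # placeholder path to a downloaded SA key
  key_json = f.read()

secret = client.V1Secret(
  metadata=client.V1ObjectMeta(name='user-gcp-sa'),
  # The key becomes the file name under /secret/gcp-credentials, so it must be '<secret-name>.json'.
  string_data={'user-gcp-sa.json': key_json},
)
client.CoreV1Api().create_namespaced_secret(namespace='kubeflow', body=secret)
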
7 changes: 5 additions & 2 deletions sdk/python/kfp/dsl/_container_op.py
@@ -24,7 +24,7 @@ class ContainerOp(object):

  def __init__(self, name: str, image: str, command: str=None, arguments: str=None,
               file_inputs : Dict[_pipeline_param.PipelineParam, str]=None,
               file_outputs : Dict[str, str]=None, is_exit_handler=False):
               file_outputs : Dict[str, str]=None, gcp_secret: str=None, is_exit_handler=False):
    """Create a new instance of ContainerOp.
    Args:
@@ -41,6 +41,8 @@ def __init__(self, name: str, image: str, command: str=None, arguments: str=None
      file_outputs: Maps output labels to local file paths. At pipeline run time,
        the value of a PipelineParam is saved to its corresponding local file. It's
        one way for the outside world to receive outputs of the container.
      gcp_secret: The name of the secret to mount to the container for accessing
        GCP APIs.
      is_exit_handler: Whether it is used as an exit handler.
    """

@@ -52,6 +54,7 @@ def __init__(self, name: str, image: str, command: str=None, arguments: str=None
    self.image = image
    self.command = command
    self.arguments = arguments
    self.gcp_secret = gcp_secret
    self.is_exit_handler = is_exit_handler
    self.memory_limit = None
    self.memory_request = None
@@ -150,6 +153,6 @@ def set_cpu_limit(self, cpu):

    self._validate_cpu_string(cpu)
    self.cpu_limit = cpu

  def __repr__(self):
    return str({self.__class__.__name__: self.__dict__})
18 changes: 17 additions & 1 deletion sdk/python/tests/compiler/compiler_tests.py
@@ -35,14 +35,26 @@ def test_operator_to_template(self):
    msg2 = dsl.PipelineParam('msg2', value='value2')
    op = dsl.ContainerOp(name='echo', image='image', command=['sh', '-c'],
                         arguments=['echo %s %s | tee /tmp/message.txt' % (msg1, msg2)],
                         file_outputs={'merged': '/tmp/message.txt'})
                         file_outputs={'merged': '/tmp/message.txt'}, gcp_secret='user-gcp-sa')
    golden_output = {
      'container': {
        'image': 'image',
        'args': [
          'echo {{inputs.parameters.msg1}} {{inputs.parameters.msg2}} | tee /tmp/message.txt'
        ],
        'command': ['sh', '-c'],
        'env': [
          {
            'name': 'GOOGLE_APPLICATION_CREDENTIALS',
            'value': '/secret/gcp-credentials/user-gcp-sa.json'
          }
        ],
        'volumeMounts':[
          {
            'mountPath': '/secret/gcp-credentials',
            'name': 'echo-gcp-credentials'
          }
        ]
      },
      'inputs': {'parameters':
        [
@@ -226,3 +238,7 @@ def test_py_compile_default_value(self):
    """Test a pipeline with a parameter with default value."""
    self._test_py_compile('default_value')

  def test_py_secret(self):
    """Test a pipeline with a GCP secret."""
    self._test_py_compile('secret')

36 changes: 36 additions & 0 deletions sdk/python/tests/compiler/testdata/secret.py
@@ -0,0 +1,36 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


import kfp.dsl as dsl


@dsl.pipeline(
  name='GCP Secret',
  description='A pipeline with gcp secret.'
)
def gcp_secret_pipeline():
  op1 = dsl.ContainerOp(
      name='download',
      image='google/cloud-sdk',
      command=['sh', '-c'],
      arguments=['ls | tee /tmp/results.txt'],
      gcp_secret='user-gcp-sa',
      file_outputs={'downloaded': '/tmp/results.txt'})
  op2 = dsl.ContainerOp(
      name='echo',
      image='library/bash',
      command=['sh', '-c'],
      arguments=['echo %s' % op1.output],
      gcp_secret='admin-gcp-sa')
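
The golden secret.yaml below is what compiling this sample produces. A quick way to regenerate it locally (a sketch; the output path is arbitrary, and the resulting .tar.gz contains the workflow YAML):

import kfp.compiler as compiler
# Assumes the testdata module above is importable as `secret`.
from secret import gcp_secret_pipeline

compiler.Compiler().compile(gcp_secret_pipeline, 'gcp_secret_pipeline.tar.gz')
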
134 changes: 134 additions & 0 deletions sdk/python/tests/compiler/testdata/secret.yaml
@@ -0,0 +1,134 @@
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
  generateName: gcp-secret-
spec:
  arguments:
    parameters: []
  entrypoint: gcp-secret
  serviceAccountName: pipeline-runner
  templates:
  - container:
      args:
      - ls | tee /tmp/results.txt
      command:
      - sh
      - -c
      env:
      - name: GOOGLE_APPLICATION_CREDENTIALS
        value: /secret/gcp-credentials/user-gcp-sa.json
      image: google/cloud-sdk
      volumeMounts:
      - mountPath: /secret/gcp-credentials
        name: download-gcp-credentials
    name: download
    outputs:
      artifacts:
      - name: mlpipeline-ui-metadata
        path: /mlpipeline-ui-metadata.json
        s3:
          accessKeySecret:
            key: accesskey
            name: mlpipeline-minio-artifact
          bucket: mlpipeline
          endpoint: minio-service.kubeflow:9000
          insecure: true
          key: runs/{{workflow.uid}}/{{pod.name}}/mlpipeline-ui-metadata.tgz
          secretKeySecret:
            key: secretkey
            name: mlpipeline-minio-artifact
      - name: mlpipeline-metrics
        path: /mlpipeline-metrics.json
        s3:
          accessKeySecret:
            key: accesskey
            name: mlpipeline-minio-artifact
          bucket: mlpipeline
          endpoint: minio-service.kubeflow:9000
          insecure: true
          key: runs/{{workflow.uid}}/{{pod.name}}/mlpipeline-metrics.tgz
          secretKeySecret:
            key: secretkey
            name: mlpipeline-minio-artifact
      parameters:
      - name: download-downloaded
        valueFrom:
          path: /tmp/results.txt
  - container:
      args:
      - echo {{inputs.parameters.download-downloaded}}
      command:
      - sh
      - -c
      env:
      - name: GOOGLE_APPLICATION_CREDENTIALS
        value: /secret/gcp-credentials/admin-gcp-sa.json
      image: library/bash
      volumeMounts:
      - mountPath: /secret/gcp-credentials
        name: echo-gcp-credentials
    inputs:
      parameters:
      - name: download-downloaded
    name: echo
    outputs:
      artifacts:
      - name: mlpipeline-ui-metadata
        path: /mlpipeline-ui-metadata.json
        s3:
          accessKeySecret:
            key: accesskey
            name: mlpipeline-minio-artifact
          bucket: mlpipeline
          endpoint: minio-service.kubeflow:9000
          insecure: true
          key: runs/{{workflow.uid}}/{{pod.name}}/mlpipeline-ui-metadata.tgz
          secretKeySecret:
            key: secretkey
            name: mlpipeline-minio-artifact
      - name: mlpipeline-metrics
        path: /mlpipeline-metrics.json
        s3:
          accessKeySecret:
            key: accesskey
            name: mlpipeline-minio-artifact
          bucket: mlpipeline
          endpoint: minio-service.kubeflow:9000
          insecure: true
          key: runs/{{workflow.uid}}/{{pod.name}}/mlpipeline-metrics.tgz
          secretKeySecret:
            key: secretkey
            name: mlpipeline-minio-artifact
  - dag:
      tasks:
      - name: download
        template: download
      - arguments:
          parameters:
          - name: download-downloaded
            value: '{{tasks.download.outputs.parameters.download-downloaded}}'
        dependencies:
        - download
        name: echo
        template: echo
    name: gcp-secret
  volumes:
  - name: download-gcp-credentials
    secret:
      secretName: user-gcp-sa
  - name: echo-gcp-credentials
    secret:
      secretName: admin-gcp-sa
