add compile step in the samples to generate zip files #1866

Merged (2 commits, Aug 16, 2019). Changes shown are from 1 commit.
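The change applies a single pattern across the samples: each pipeline script gains a `__main__` guard that compiles its pipeline function into a `.zip` archive instead of a `.tar.gz`. A minimal sketch of the pattern (the `my_pipeline` function below is hypothetical, not a file from this PR):

```python
import kfp
import kfp.dsl as dsl


@dsl.pipeline(name='my-pipeline', description='Illustrates the compile-to-zip pattern.')
def my_pipeline():
    dsl.ContainerOp(name='echo', image='busybox', command=['echo', 'hello'])


if __name__ == '__main__':
    # Compile to <script>.zip, the convention this PR adopts across samples.
    kfp.compiler.Compiler().compile(my_pipeline, __file__ + '.zip')
```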
13 changes: 11 additions & 2 deletions samples/core/ai-platform/Chicago Crime Pipeline.ipynb
@@ -267,7 +267,7 @@
"metadata": {},
"outputs": [],
"source": [
"pipeline_filename = PIPELINE_FILENAME_PREFIX + '.pipeline.tar.gz'\n",
"pipeline_filename = PIPELINE_FILENAME_PREFIX + '.pipeline.zip'\n",
"\n",
"compiler.Compiler().compile(pipeline_func, pipeline_filename)"
]
@@ -318,8 +318,17 @@
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
-}
+},
+"pycharm": {
+"stem_cell": {
+"cell_type": "raw",
+"source": [],
+"metadata": {
+"collapsed": false
+}
+}
+}
},
"nbformat": 4,
"nbformat_minor": 2
}
4 changes: 4 additions & 0 deletions samples/core/artifact_location/artifact_location.py
@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

+import kfp
from kfp import dsl
from kubernetes.client import V1SecretKeySelector

@@ -40,3 +41,6 @@ def custom_artifact_location(

# artifacts in this op are stored to endpoint `minio-service.<namespace>:9000`
op = dsl.ContainerOp(name="foo", image="busybox:%s" % tag)
+
+if __name__ == '__main__':
+    kfp.compiler.Compiler().compile(custom_artifact_location, __file__ + '.zip')
1 change: 0 additions & 1 deletion samples/core/condition/condition.py
@@ -13,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

-
import kfp
from kfp import dsl

@@ -259,7 +259,7 @@
" a = task_factory_a(field_l=12)\n",
" b = task_factory_b(field_x=a.outputs['field_n'], field_y=a.outputs['field_o'], field_z=a.outputs['field_m'])\n",
"\n",
"compiler.Compiler().compile(pipeline_a, 'pipeline_a.tar.gz', type_check=True)"
"compiler.Compiler().compile(pipeline_a, 'pipeline_a.zip', type_check=True)"
]
},
{
@@ -365,7 +365,7 @@
" b = task_factory_b(field_x=a.outputs['field_n'], field_y=a.outputs['field_o'], field_z=a.outputs['field_m'])\n",
"\n",
"try:\n",
" compiler.Compiler().compile(pipeline_b, 'pipeline_b.tar.gz', type_check=True)\n",
" compiler.Compiler().compile(pipeline_b, 'pipeline_b.zip', type_check=True)\n",
"except InconsistentTypeException as e:\n",
" print(e)"
]
@@ -384,7 +384,7 @@
"outputs": [],
"source": [
"# Disable the type_check\n",
"compiler.Compiler().compile(pipeline_b, 'pipeline_b.tar.gz', type_check=False)"
"compiler.Compiler().compile(pipeline_b, 'pipeline_b.zip', type_check=False)"
]
},
{
@@ -474,7 +474,7 @@
" a = task_factory_a(field_l=12)\n",
" b = task_factory_b(field_x=a.outputs['field_n'], field_y=a.outputs['field_o'], field_z=a.outputs['field_m'])\n",
"\n",
"compiler.Compiler().compile(pipeline_c, 'pipeline_c.tar.gz', type_check=True)"
"compiler.Compiler().compile(pipeline_c, 'pipeline_c.zip', type_check=True)"
]
},
{
@@ -572,7 +572,7 @@
" b = task_factory_b(field_x=a.outputs['field_n'], field_y=a.outputs['field_o'], field_z=a.outputs['field_m'])\n",
"\n",
"try:\n",
" compiler.Compiler().compile(pipeline_d, 'pipeline_d.tar.gz', type_check=True)\n",
" compiler.Compiler().compile(pipeline_d, 'pipeline_d.zip', type_check=True)\n",
"except InconsistentTypeException as e:\n",
" print(e)"
]
@@ -597,7 +597,7 @@
" a = task_factory_a(field_l=12)\n",
" # For each of the arguments, authors can also ignore the types by calling ignore_type function.\n",
" b = task_factory_b(field_x=a.outputs['field_n'], field_y=a.outputs['field_o'], field_z=a.outputs['field_m'].ignore_type())\n",
"compiler.Compiler().compile(pipeline_d, 'pipeline_d.tar.gz', type_check=True)"
"compiler.Compiler().compile(pipeline_d, 'pipeline_d.zip', type_check=True)"
]
},
{
@@ -684,7 +684,7 @@
" a = task_factory_a(field_l=12)\n",
" b = task_factory_b(field_x=a.outputs['field_n'], field_y=a.outputs['field_o'], field_z=a.outputs['field_m'])\n",
"\n",
"compiler.Compiler().compile(pipeline_e, 'pipeline_e.tar.gz', type_check=True)"
"compiler.Compiler().compile(pipeline_e, 'pipeline_e.zip', type_check=True)"
]
},
{
@@ -707,7 +707,7 @@
" a = task_factory_a(field_l=12)\n",
" b = task_factory_b(a.outputs['field_n'], a.outputs['field_o'], field_z=a.outputs['field_m'])\n",
"\n",
"compiler.Compiler().compile(pipeline_f, 'pipeline_f.tar.gz', type_check=True)"
"compiler.Compiler().compile(pipeline_f, 'pipeline_f.zip', type_check=True)"
]
},
{
@@ -750,7 +750,7 @@
" task_factory_a(field_m=a, field_o=b)\n",
"\n",
"try:\n",
" compiler.Compiler().compile(pipeline_g, 'pipeline_g.tar.gz', type_check=True)\n",
" compiler.Compiler().compile(pipeline_g, 'pipeline_g.zip', type_check=True)\n",
"except InconsistentTypeException as e:\n",
" print(e)"
]
@@ -769,7 +769,7 @@
"outputs": [],
"source": [
"from pathlib import Path\n",
"for p in Path(\".\").glob(\"pipeline_[a-g].tar.gz\"):\n",
"for p in Path(\".\").glob(\"pipeline_[a-g].zip\"):\n",
" p.unlink()"
]
}
@@ -792,8 +792,17 @@
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.5"
-}
+},
+"pycharm": {
+"stem_cell": {
+"cell_type": "raw",
+"source": [],
+"metadata": {
+"collapsed": false
+}
+}
+}
},
"nbformat": 4,
"nbformat_minor": 2
}
4 changes: 4 additions & 0 deletions samples/core/imagepullsecrets/imagepullsecrets.py
@@ -15,6 +15,7 @@
container registry.
"""

+import kfp
import kfp.dsl as dsl
from kubernetes import client as k8s_client

@@ -52,3 +53,6 @@ def save_most_frequent_word(message: str):
# Call set_image_pull_secrets after get_pipeline_conf().
dsl.get_pipeline_conf()\
.set_image_pull_secrets([k8s_client.V1ObjectReference(name="secretA")])
+
+if __name__ == '__main__':
+    kfp.compiler.Compiler().compile(save_most_frequent_word, __file__ + '.zip')
2 changes: 1 addition & 1 deletion samples/core/recursion/recursion.py
@@ -66,4 +66,4 @@ def flipcoin():


if __name__ == '__main__':
-    kfp.compiler.Compiler().compile(flipcoin, __file__ + '.tar.gz')
+    kfp.compiler.Compiler().compile(flipcoin, __file__ + '.zip')
8 changes: 3 additions & 5 deletions samples/core/resource_ops/resourceop_basic.py
@@ -18,7 +18,7 @@
It is not a good practice to put password as a pipeline argument, since it will
be visible on KFP UI.
"""

import kfp
from kubernetes import client as k8s_client
import kfp.dsl as dsl

@@ -54,7 +54,5 @@ def resourceop_basic(username, password):
pvolumes={"/etc/secret-volume": secret}
)

-
-if __name__ == "__main__":
-    import kfp.compiler as compiler
-    compiler.Compiler().compile(resourceop_basic, __file__ + ".tar.gz")
+if __name__ == '__main__':
+    kfp.compiler.Compiler().compile(resourceop_basic, __file__ + '.zip')
5 changes: 4 additions & 1 deletion samples/core/sidecar/sidecar.py
@@ -13,9 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

+import kfp
import kfp.dsl as dsl

-
@dsl.pipeline(
name="pipeline_with_sidecar",
description="A pipeline that demonstrates how to add a sidecar to an operation."
@@ -47,3 +47,6 @@ def pipeline_with_sidecar(sleep_ms: int = 10):
command=["sh", "-c"],
arguments=["echo %s" % op1.output], # print out content of op1 output
)
+
+if __name__ == '__main__':
+    kfp.compiler.Compiler().compile(pipeline_with_sidecar, __file__ + '.zip')
8 changes: 3 additions & 5 deletions samples/core/volume_ops/volumeop.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

-
+import kfp
import kfp.dsl as dsl


@@ -36,7 +36,5 @@ def volumeop_basic(size):
pvolumes={"/mnt": vop.volume}
)

-
-if __name__ == "__main__":
-    import kfp.compiler as compiler
-    compiler.Compiler().compile(volumeop_basic, __file__ + ".tar.gz")
+if __name__ == '__main__':

[Review thread on this line]
Reviewer: nit: Actually, for top-level components two blank lines would be better.
Contributor (author): Not sure I understand, leaving two blank lines above main?
+    kfp.compiler.Compiler().compile(volumeop_basic, __file__ + '.zip')
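The reviewer's nit presumably refers to the PEP 8 convention of two blank lines around top-level code. A small sketch of the suggested layout (names are illustrative, not from this PR):

```python
import kfp
import kfp.dsl as dsl


@dsl.pipeline(name='example')
def example_pipeline():
    dsl.ContainerOp(name='echo', image='busybox', command=['echo', 'hello'])


# Two blank lines separate the pipeline definition from this top-level block.
if __name__ == '__main__':
    kfp.compiler.Compiler().compile(example_pipeline, __file__ + '.zip')
```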
9 changes: 3 additions & 6 deletions samples/core/volume_snapshot_ops/volume_snapshot_op.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

-
+import kfp
import kfp.dsl as dsl


@@ -80,8 +80,5 @@ def volume_snapshotop_sequential(url):
pvolumes={"/data": step3.pvolume}
)

-
-if __name__ == "__main__":
-    import kfp.compiler as compiler
-    compiler.Compiler().compile(volume_snapshotop_sequential,
-                                __file__ + ".tar.gz")
+if __name__ == '__main__':
+    kfp.compiler.Compiler().compile(volume_snapshotop_sequential, __file__ + '.zip')
4 changes: 2 additions & 2 deletions samples/core/xgboost_training_cm/README.md
@@ -14,11 +14,11 @@ Preprocessing uses Google Cloud DataProc. Therefore, you must enable the [DataPr

## Compile

-Follow the guide to [building a pipeline](https://www.kubeflow.org/docs/guides/pipelines/build-pipeline/) to install the Kubeflow Pipelines SDK and compile the sample Python into a workflow specification. The specification takes the form of a YAML file compressed into a `.tar.gz` file.
+Follow the guide to [building a pipeline](https://www.kubeflow.org/docs/guides/pipelines/build-pipeline/) to install the Kubeflow Pipelines SDK and compile the sample Python into a workflow specification. The specification takes the form of a YAML file compressed into a `.zip` file.
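As an aside (not part of the diff), the "YAML file compressed into a `.zip`" claim can be checked directly on the compiled package; a short sketch, assuming the archive name used here:

```python
import zipfile

# The compiled package is a zip archive wrapping the Argo workflow YAML.
with zipfile.ZipFile('xgboost_training_cm.py.zip') as zf:
    print(zf.namelist())  # expect something like ['pipeline.yaml']
```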

## Deploy

-Open the Kubeflow pipelines UI. Create a new pipeline, and then upload the compiled specification (`.tar.gz` file) as a new pipeline template.
+Open the Kubeflow pipelines UI. Create a new pipeline, and then upload the compiled specification (`.zip` file) as a new pipeline template.
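The upload can also be scripted with the SDK client instead of the UI; a minimal sketch, assuming a reachable KFP API endpoint (the host below is hypothetical):

```python
import kfp

# Point the client at your cluster's KFP API address.
client = kfp.Client(host='http://localhost:8080')
# Upload the compiled .zip package as a new pipeline template.
client.upload_pipeline('xgboost_training_cm.py.zip')
```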

## Run
