Change how OSS Keras imports Python deps.
1. Remove TF as a workspace dependency.
2. Retrieve all Python code via pip install rather than a Bazel pip rule. This is possible because TF is no longer in the workspace, where it took priority over the pip packages in the local venv.
3. Install all required Python deps via "pip install -r requirements.txt".

This setup should let us run "pip uninstall keras-nightly" once keras-nightly is added to tf-nightly as a dependency, since all deps are controlled in the local venv by the build script.

PiperOrigin-RevId: 364604880
qlzh727 authored and tensorflower-gardener committed Mar 23, 2021
1 parent 2d2e738 commit 9372aa7
Showing 4 changed files with 26 additions and 83 deletions.
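
Before the per-file diffs, here is a minimal sketch of the workflow this commit moves to, assuming the venv layout used by the Kokoro build scripts changed below; the venv-creation step and the uninstall flag are illustrative, not part of the commit:

# Create and activate a local virtualenv, then install every Python dep
# from requirements.txt instead of relying on Bazel pip rules.
python3 -m venv venv
source venv/bin/activate
pip install -r requirements.txt

# Once keras-nightly is bundled into tf-nightly as a dependency, the build
# can drop that bundled copy and test against the local source tree instead:
pip uninstall -y keras-nightly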
50 changes: 12 additions & 38 deletions BUILD
@@ -1,14 +1,10 @@
load("@keras_deps//:requirements.bzl", "requirement")

py_library(
name = "expect_absl_installed",
# This is a dummy rule used as a absl dependency in open-source.
# We expect absl to already be installed on the system, e.g. via
# `pip install absl`
visibility = ["//visibility:public"],
deps = [
requirement("absl-py"),
],
deps = [],
)

py_library(
@@ -17,9 +13,7 @@ py_library(
# We expect h5py to already be installed on the system, e.g. via
# `pip install h5py'
visibility = ["//visibility:public"],
deps = [
requirement("h5py"),
],
deps = [],
)

py_library(
@@ -28,9 +22,7 @@ py_library(
# We expect keras_preprocessing to already be installed on the system, e.g. via
# `pip install keras_preprocessing`
visibility = ["//visibility:public"],
deps = [
requirement("keras_preprocessing"),
],
deps = [],
)

py_library(
@@ -39,9 +31,7 @@ py_library(
# We expect numpy to already be installed on the system, e.g. via
# `pip install numpy`
visibility = ["//visibility:public"],
deps = [
requirement("numpy"),
],
deps = [],
)

py_library(
@@ -50,9 +40,7 @@ py_library(
# We expect pandas to already be installed on the system, e.g. via
# `pip install pandas'
visibility = ["//visibility:public"],
deps = [
requirement("pandas"),
],
deps = [],
)

py_library(
@@ -61,9 +49,7 @@ py_library(
# We expect pillow to already be installed on the system, e.g. via
# `pip install Pillow'
visibility = ["//visibility:public"],
deps = [
requirement("Pillow"),
],
deps = [],
)

# Note that this dependency is for testing only.
@@ -73,9 +59,7 @@ py_library(
# We expect portpicker to already be installed on the system, e.g. via
# `pip install portpicker'
visibility = ["//visibility:public"],
deps = [
requirement("portpicker"),
],
deps = [],
)

py_library(
@@ -84,9 +68,7 @@ py_library(
# We expect pydot to already be installed on the system, e.g. via
# `pip install pydot'
visibility = ["//visibility:public"],
deps = [
requirement("pydot"),
],
deps = [],
)

py_library(
@@ -95,9 +77,7 @@ py_library(
# We expect scipy to already be installed on the system, e.g. via
# `pip install scipy'
visibility = ["//visibility:public"],
deps = [
requirement("scipy"),
],
deps = [],
)

py_library(
@@ -106,9 +86,7 @@ py_library(
# We expect six to already be installed on the system, e.g. via
# `pip install six`
visibility = ["//visibility:public"],
deps = [
requirement("six"),
],
deps = [],
)

py_library(
@@ -126,9 +104,7 @@ py_library(
# We expect tensorflow to already be installed on the system, e.g. via
# `pip install tensorflow`
visibility = ["//visibility:public"],
deps = [
requirement("tf-nightly"),
],
deps = [],
)

py_library(
@@ -137,7 +113,5 @@ py_library(
# We expect yaml to already be installed on the system, e.g. via
# `pip install yaml`
visibility = ["//visibility:public"],
deps = [
requirement("pyyaml"),
],
deps = [],
)
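
With the requirement() entries removed, every expect_*_installed rule above is now an empty placeholder: the package it stands for must already be importable in the Python environment Bazel runs in, i.e. the venv prepared by the build scripts below. A quick sanity check along these lines (package list taken from the comments above; this snippet is a sketch, not part of the commit):

source venv/bin/activate
python -c "import absl, h5py, keras_preprocessing, numpy, pandas, PIL, portpicker, pydot, scipy, six, yaml, tensorflow"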
39 changes: 0 additions & 39 deletions WORKSPACE
@@ -2,24 +2,6 @@ workspace(name = "org_keras")

load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

# The rules_python is used to install pip package for tensorflow, that will be
# used as python dependency.
http_archive(
name = "rules_python",
url = "https://github.com/bazelbuild/rules_python/releases/download/0.1.0/rules_python-0.1.0.tar.gz",
sha256 = "b6d46438523a3ec0f3cead544190ee13223a52f6a6765a29eae7b7cc24cc83a0",
)

http_archive(
name = "io_bazel_rules_closure",
sha256 = "5b00383d08dd71f28503736db0500b6fb4dda47489ff5fc6bed42557c07c6ba9",
strip_prefix = "rules_closure-308b05b2419edb5c8ee0471b67a40403df940149",
urls = [
"https://storage.googleapis.com/mirror.tensorflow.org/github.com/bazelbuild/rules_closure/archive/308b05b2419edb5c8ee0471b67a40403df940149.tar.gz",
"https://github.com/bazelbuild/rules_closure/archive/308b05b2419edb5c8ee0471b67a40403df940149.tar.gz", # 2019-06-13
],
)

http_archive(
name = "com_google_protobuf",
sha256 = "b10bf4e2d1a7586f54e64a5d9e7837e5188fc75ae69e36f215eb01def4f9721b",
@@ -28,24 +10,3 @@ http_archive(
)
load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps")
protobuf_deps()

# TF package is used as dependency for protobuf and bzl files. It is NOT used
# for python dependency.
http_archive(
name = "org_tensorflow",
strip_prefix = "tensorflow-2.3.0",
sha256 = "1a6f24d9e3b1cf5cc55ecfe076d3a61516701bc045925915b26a9d39f4084c34",
urls = [
"https://github.com/tensorflow/tensorflow/archive/v2.3.0.zip"
],
)

load("@org_tensorflow//tensorflow:workspace.bzl", "tf_workspace")
tf_workspace(tf_repo_name="@org_tensorflow")

load("@rules_python//python:pip.bzl", "pip_install", "pip_repositories")
pip_repositories()
pip_install(
name = "keras_deps",
requirements = "//:requirements.txt",
)
10 changes: 7 additions & 3 deletions keras/kokoro/github/ubuntu/cpu/build.sh
@@ -30,12 +30,16 @@ source venv/bin/activate
python --version
python3 --version

# numpy is needed by tensorflow as setup dependency.
pip install -U pip setuptools numpy

cd "src/github/keras"

# Keep pip version at 20.1.1 to avoid the slow resolver issue.
pip install -U pip==20.1.1 setuptools
pip install -r requirements.txt

# TODO(scottzhu): Using --define=use_fast_cpp_protos=false to suppress the
# protobuf build issue for now. We should have a proper solution for this.
bazel test --test_timeout 300,450,1200,3600 --test_output=errors --keep_going \
--define=use_fast_cpp_protos=false \
--build_tests_only \
--build_tag_filters="-no_oss" \
--test_tag_filters="-no_oss" \
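
Reconstructed from the hunk above (the GPU script below gets the same treatment), the setup phase of the CPU build script after this change reads roughly as follows; lines outside the hunk are omitted and may differ:

source venv/bin/activate
python --version
python3 --version

cd "src/github/keras"

# Keep pip version at 20.1.1 to avoid the slow resolver issue.
pip install -U pip==20.1.1 setuptools
pip install -r requirements.txt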
10 changes: 7 additions & 3 deletions keras/kokoro/github/ubuntu/gpu/build.sh
@@ -30,11 +30,12 @@ source venv/bin/activate
python --version
python3 --version

# numpy is needed by tensorflow as setup dependency.
pip install -U pip setuptools numpy

cd "src/github/keras"

# Keep pip version at 20.1.1 to avoid the slow resolver issue.
pip install -U pip==20.1.1 setuptools
pip install -r requirements.txt

export LD_LIBRARY_PATH="/usr/local/cuda/lib64:/usr/local/cuda/extras/CUPTI/lib64"
export TF_CUDA_COMPUTE_CAPABILITIES=6.0
TF_CUDA_CONFIG_REPO="@ubuntu16.04-py3-gcc7_manylinux2010-cuda10.1-cudnn7-tensorrt6.0_config_cuda"
@@ -45,7 +46,10 @@ TF_GPU_COUNT=4
TF_TESTS_PER_GPU=8
LOCAL_TEST_JOBS=32 # TF_GPU_COUNT * TF_TESTS_PER_GPU

# TODO(scottzhu): Using --define=use_fast_cpp_protos=false to suppress the
# protobuf build issue for now. We should have a proper solution for this.
bazel test --test_timeout 300,600,1200,3600 --test_output=errors --keep_going \
--define=use_fast_cpp_protos=false \
--build_tests_only \
--action_env=TF_CUDA_COMPUTE_CAPABILITIES="${TF_CUDA_COMPUTE_CAPABILITIES}" \
--action_env=TF_CUDA_CONFIG_REPO="${TF_CUDA_CONFIG_REPO}" \
