Skip to content
This repository has been archived by the owner on Nov 15, 2021. It is now read-only.

Commit

Permalink
Merge with ray master
Browse files Browse the repository at this point in the history
  • Loading branch information
stefanpantic committed Jun 26, 2019
2 parents 2e0eec9 + d639737 commit 108bfa2
Show file tree
Hide file tree
Showing 103 changed files with 2,338 additions and 2,039 deletions.
96 changes: 47 additions & 49 deletions BUILD.bazel
Original file line number Diff line number Diff line change
@@ -1,22 +1,55 @@
# Bazel build
# C/C++ documentation: https://docs.bazel.build/versions/master/be/c-cpp.html

load("@com_github_grpc_grpc//bazel:grpc_build_system.bzl", "grpc_proto_library")
load("@com_github_grpc_grpc//bazel:cc_grpc_library.bzl", "cc_grpc_library")
load("@build_stack_rules_proto//python:python_proto_compile.bzl", "python_proto_compile")
load("@com_github_google_flatbuffers//:build_defs.bzl", "flatbuffer_cc_library")
load("@//bazel:ray.bzl", "flatbuffer_py_library")
load("@//bazel:cython_library.bzl", "pyx_library")

COPTS = ["-DRAY_USE_GLOG"]

# Node manager gRPC lib.
grpc_proto_library(
name = "node_manager_grpc_lib",
# === Begin of protobuf definitions ===

proto_library(
name = "gcs_proto",
srcs = ["src/ray/protobuf/gcs.proto"],
visibility = ["//java:__subpackages__"],
)

cc_proto_library(
name = "gcs_cc_proto",
deps = [":gcs_proto"],
)

python_proto_compile(
name = "gcs_py_proto",
deps = [":gcs_proto"],
)

proto_library(
name = "node_manager_proto",
srcs = ["src/ray/protobuf/node_manager.proto"],
)

cc_proto_library(
name = "node_manager_cc_proto",
deps = ["node_manager_proto"],
)

# === End of protobuf definitions ===

# Node manager gRPC lib.
cc_grpc_library(
name = "node_manager_cc_grpc",
srcs = [":node_manager_proto"],
grpc_only = True,
deps = [":node_manager_cc_proto"],
)

# Node manager server and client.
cc_library(
name = "node_manager_rpc_lib",
name = "node_manager_rpc",
srcs = glob([
"src/ray/rpc/*.cc",
]),
Expand All @@ -25,7 +58,7 @@ cc_library(
]),
copts = COPTS,
deps = [
":node_manager_grpc_lib",
":node_manager_cc_grpc",
":ray_common",
"@boost//:asio",
"@com_github_grpc_grpc//:grpc++",
Expand Down Expand Up @@ -114,7 +147,7 @@ cc_library(
":gcs",
":gcs_fbs",
":node_manager_fbs",
":node_manager_rpc_lib",
":node_manager_rpc",
":object_manager",
":ray_common",
":ray_util",
Expand Down Expand Up @@ -422,9 +455,11 @@ cc_library(
"src/ray/gcs/format",
],
deps = [
":gcs_cc_proto",
":gcs_fbs",
":hiredis",
":node_manager_fbs",
":node_manager_rpc",
":ray_common",
":ray_util",
":stats_lib",
Expand Down Expand Up @@ -555,46 +590,6 @@ filegroup(
visibility = ["//java:__subpackages__"],
)

flatbuffer_py_library(
name = "python_gcs_fbs",
srcs = [
":gcs_fbs_file",
],
outs = [
"ActorCheckpointIdData.py",
"ActorState.py",
"ActorTableData.py",
"Arg.py",
"ClassTableData.py",
"ClientTableData.py",
"ConfigTableData.py",
"CustomSerializerData.py",
"DriverTableData.py",
"EntryType.py",
"ErrorTableData.py",
"ErrorType.py",
"FunctionTableData.py",
"GcsEntry.py",
"HeartbeatBatchTableData.py",
"HeartbeatTableData.py",
"Language.py",
"ObjectTableData.py",
"ProfileEvent.py",
"ProfileTableData.py",
"RayResource.py",
"ResourcePair.py",
"SchedulingState.py",
"TablePrefix.py",
"TablePubsub.py",
"TaskInfo.py",
"TaskLeaseData.py",
"TaskReconstructionData.py",
"TaskTableData.py",
"TaskTableTestAndUpdate.py",
],
out_prefix = "python/ray/core/generated/",
)

flatbuffer_py_library(
name = "python_node_manager_fbs",
srcs = [
Expand Down Expand Up @@ -679,6 +674,7 @@ cc_binary(
linkstatic = 1,
visibility = ["//java:__subpackages__"],
deps = [
":gcs_cc_proto",
":ray_common",
],
)
Expand All @@ -688,7 +684,7 @@ genrule(
srcs = [
"python/ray/_raylet.so",
"//:python_sources",
"//:python_gcs_fbs",
"//:gcs_py_proto",
"//:python_node_manager_fbs",
"//:redis-server",
"//:redis-cli",
Expand All @@ -710,11 +706,13 @@ genrule(
cp -f $(location //:raylet_monitor) $$WORK_DIR/python/ray/core/src/ray/raylet/ &&
cp -f $(location @plasma//:plasma_store_server) $$WORK_DIR/python/ray/core/src/plasma/ &&
cp -f $(location //:raylet) $$WORK_DIR/python/ray/core/src/ray/raylet/ &&
for f in $(locations //:python_gcs_fbs); do cp -f $$f $$WORK_DIR/python/ray/core/generated/; done &&
mkdir -p $$WORK_DIR/python/ray/core/generated/ray/protocol/ &&
for f in $(locations //:python_node_manager_fbs); do
cp -f $$f $$WORK_DIR/python/ray/core/generated/ray/protocol/;
done &&
for f in $(locations //:gcs_py_proto); do
cp -f $$f $$WORK_DIR/python/ray/core/generated/;
done &&
echo $$WORK_DIR > $@
""",
local = 1,
Expand Down
4 changes: 4 additions & 0 deletions bazel/ray_deps_build_all.bzl
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@ load("@com_github_jupp0r_prometheus_cpp//:repositories.bzl", "prometheus_cpp_rep
load("@com_github_ray_project_ray//bazel:python_configure.bzl", "python_configure")
load("@com_github_checkstyle_java//:repo.bzl", "checkstyle_deps")
load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps")
load("@build_stack_rules_proto//java:deps.bzl", "java_proto_compile")
load("@build_stack_rules_proto//python:deps.bzl", "python_proto_compile")


def ray_deps_build_all():
Expand All @@ -13,4 +15,6 @@ def ray_deps_build_all():
prometheus_cpp_repositories()
python_configure(name = "local_config_python")
grpc_deps()
java_proto_compile()
python_proto_compile()

11 changes: 9 additions & 2 deletions bazel/ray_deps_setup.bzl
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,14 @@ def ray_deps_setup():
http_archive(
name = "com_github_grpc_grpc",
urls = [
"https://github.com/grpc/grpc/archive/7741e806a213cba63c96234f16d712a8aa101a49.tar.gz",
"https://github.com/grpc/grpc/archive/76a381869413834692b8ed305fbe923c0f9c4472.tar.gz",
],
strip_prefix = "grpc-7741e806a213cba63c96234f16d712a8aa101a49",
strip_prefix = "grpc-76a381869413834692b8ed305fbe923c0f9c4472",
)

http_archive(
name = "build_stack_rules_proto",
urls = ["https://github.com/stackb/rules_proto/archive/b93b544f851fdcd3fc5c3d47aee3b7ca158a8841.tar.gz"],
sha256 = "c62f0b442e82a6152fcd5b1c0b7c4028233a9e314078952b6b04253421d56d61",
strip_prefix = "rules_proto-b93b544f851fdcd3fc5c3d47aee3b7ca158a8841",
)
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ pushd "$ROOT_DIR"

python -m pip install pytest-benchmark

pip install -U https://s3-us-west-2.amazonaws.com/ray-wheels/latest/ray-0.8.0.dev1-cp27-cp27mu-manylinux1_x86_64.whl
pip install -U https://ray-wheels.s3-us-west-2.amazonaws.com/latest/ray-0.8.0.dev1-cp36-cp36m-manylinux1_x86_64.whl
python -m pytest --benchmark-autosave --benchmark-min-rounds=10 --benchmark-columns="min, max, mean" $ROOT_DIR/../../../python/ray/tests/perf_integration_tests/test_perf_integration.py

pushd $ROOT_DIR/../../../python
Expand Down
8 changes: 4 additions & 4 deletions ci/jenkins_tests/run_tune_tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -78,16 +78,16 @@ $SUPPRESS_OUTPUT docker run --rm --shm-size=${SHM_SIZE} --memory=${MEMORY_SIZE}
--smoke-test

# Runs only on Python3
# docker run --rm --shm-size=${SHM_SIZE} --memory=${MEMORY_SIZE} $DOCKER_SHA \
# python3 /ray/python/ray/tune/examples/nevergrad_example.py \
# --smoke-test
$SUPPRESS_OUTPUT docker run --rm --shm-size=${SHM_SIZE} --memory=${MEMORY_SIZE} $DOCKER_SHA \
python /ray/python/ray/tune/examples/nevergrad_example.py \
--smoke-test

$SUPPRESS_OUTPUT docker run --rm --shm-size=${SHM_SIZE} --memory=${MEMORY_SIZE} $DOCKER_SHA \
python /ray/python/ray/tune/examples/tune_mnist_keras.py \
--smoke-test

$SUPPRESS_OUTPUT docker run --rm --shm-size=${SHM_SIZE} --memory=${MEMORY_SIZE} $DOCKER_SHA \
python /ray/python/ray/tune/examples/mnist_pytorch.py --smoke-test --no-cuda
python /ray/python/ray/tune/examples/mnist_pytorch.py --smoke-test

$SUPPRESS_OUTPUT docker run --rm --shm-size=${SHM_SIZE} --memory=${MEMORY_SIZE} $DOCKER_SHA \
python /ray/python/ray/tune/examples/mnist_pytorch_trainable.py \
Expand Down
15 changes: 1 addition & 14 deletions doc/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,20 +23,7 @@
"gym.spaces",
"ray._raylet",
"ray.core.generated",
"ray.core.generated.ActorCheckpointIdData",
"ray.core.generated.ClientTableData",
"ray.core.generated.DriverTableData",
"ray.core.generated.EntryType",
"ray.core.generated.ErrorTableData",
"ray.core.generated.ErrorType",
"ray.core.generated.GcsEntry",
"ray.core.generated.HeartbeatBatchTableData",
"ray.core.generated.HeartbeatTableData",
"ray.core.generated.Language",
"ray.core.generated.ObjectTableData",
"ray.core.generated.ProfileTableData",
"ray.core.generated.TablePrefix",
"ray.core.generated.TablePubsub",
"ray.core.generated.gcs_pb2",
"ray.core.generated.ray.protocol.Task",
"scipy",
"scipy.signal",
Expand Down
6 changes: 6 additions & 0 deletions doc/source/tune-usage.rst
Original file line number Diff line number Diff line change
Expand Up @@ -355,6 +355,12 @@ Then, after you run an experiment, you can visualize your experiment with TensorB
$ tensorboard --logdir=~/ray_results/my_experiment
If you are running Ray on a remote multi-user cluster where you do not have sudo access, you can run the following commands to make sure tensorboard is able to write to the tmp directory:

.. code-block:: bash
$ export TMPDIR=/tmp/$USER; mkdir -p $TMPDIR; tensorboard --logdir=~/ray_results
.. image:: ray-tune-tensorboard.png

To use rllab's VisKit (you may have to install some dependencies), run:
Expand Down
2 changes: 1 addition & 1 deletion docker/base-deps/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ RUN apt-get update \
&& apt-get clean \
&& echo 'export PATH=/opt/conda/bin:$PATH' > /etc/profile.d/conda.sh \
&& wget \
--quiet 'https://repo.continuum.io/archive/Anaconda2-5.2.0-Linux-x86_64.sh' \
--quiet 'https://repo.continuum.io/archive/Anaconda3-5.2.0-Linux-x86_64.sh' \
-O /tmp/anaconda.sh \
&& /bin/bash /tmp/anaconda.sh -b -p /opt/conda \
&& rm /tmp/anaconda.sh \
Expand Down
5 changes: 4 additions & 1 deletion docker/examples/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,14 @@ FROM ray-project/deploy
# This updates numpy to 1.14 and mutes errors from other libraries
RUN conda install -y numpy
RUN apt-get install -y zlib1g-dev
# The following is needed to support TensorFlow 1.14
RUN conda remove -y --force wrapt
RUN pip install gym[atari] opencv-python-headless tensorflow lz4 keras pytest-timeout smart_open
RUN pip install -U h5py # Mutes FutureWarnings
RUN pip install --upgrade bayesian-optimization
RUN pip install --upgrade git+git://github.com/hyperopt/hyperopt.git
RUN pip install --upgrade sigopt
# RUN pip install --upgrade nevergrad
RUN pip install --upgrade nevergrad
RUN pip install --upgrade scikit-optimize
RUN pip install -U pytest-remotedata>=0.3.1
RUN conda install pytorch-cpu torchvision-cpu -c pytorch
2 changes: 1 addition & 1 deletion docker/stress_test/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ FROM ray-project/base-deps

# We install ray and boto3 to enable the ray autoscaler as
# a test runner.
RUN pip install -U https://s3-us-west-2.amazonaws.com/ray-wheels/latest/ray-0.8.0.dev1-cp27-cp27mu-manylinux1_x86_64.whl boto3
RUN pip install -U https://ray-wheels.s3-us-west-2.amazonaws.com/latest/ray-0.8.0.dev1-cp36-cp36m-manylinux1_x86_64.whl boto3
RUN mkdir -p /root/.ssh/

# We port the source code in so that we run the most up-to-date stress tests.
Expand Down
11 changes: 8 additions & 3 deletions docker/tune_test/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -4,22 +4,27 @@ FROM ray-project/base-deps

# We install ray and boto3 to enable the ray autoscaler as
# a test runner.
RUN pip install -U https://s3-us-west-2.amazonaws.com/ray-wheels/latest/ray-0.8.0.dev1-cp27-cp27mu-manylinux1_x86_64.whl boto3
RUN conda install -y -c anaconda wrapt=1.11.1
RUN conda install -y -c anaconda numpy=1.16.4
RUN pip install -U https://ray-wheels.s3-us-west-2.amazonaws.com/latest/ray-0.8.0.dev1-cp36-cp36m-manylinux1_x86_64.whl boto3
# We install this after the latest wheels -- this should not override the latest wheels.
RUN apt-get install -y zlib1g-dev
# The following is needed to support TensorFlow 1.14
RUN conda remove -y --force wrapt
RUN pip install gym[atari]==0.10.11 opencv-python-headless tensorflow lz4 keras pytest-timeout smart_open
RUN pip install --upgrade bayesian-optimization
RUN pip install --upgrade git+git://github.com/hyperopt/hyperopt.git
RUN pip install --upgrade sigopt
# RUN pip install --upgrade nevergrad
RUN pip install --upgrade nevergrad
RUN pip install --upgrade scikit-optimize
RUN pip install -U pytest-remotedata>=0.3.1
RUN conda install pytorch-cpu torchvision-cpu -c pytorch

# RUN mkdir -p /root/.ssh/

# We port the source code in so that we run the most up-to-date stress tests.
ADD ray.tar /ray
ADD git-rev /ray/git-rev
RUN python /ray/python/ray/rllib/setup-rllib-dev.py --yes
RUN python /ray/python/ray/setup-dev.py --yes

WORKDIR /ray
Loading

0 comments on commit 108bfa2

Please sign in to comment.