diff --git a/dockerfiles/Dockerfile.cuda b/dockerfiles/Dockerfile.cuda
index 0358629b28e8f..1813e3fb7c3be 100644
--- a/dockerfiles/Dockerfile.cuda
+++ b/dockerfiles/Dockerfile.cuda
@@ -4,8 +4,8 @@
# --------------------------------------------------------------
# Dockerfile to run ONNXRuntime with CUDA, CUDNN integration
-# nVidia cuda 10.0 Base Image
-FROM nvidia/cuda:10.0-cudnn7-devel
+# nVidia cuda 10.1 Base Image
+FROM nvidia/cuda:10.1-cudnn7-devel
MAINTAINER Vinitra Swamy "viswamy@microsoft.com"
ARG ONNXRUNTIME_REPO=https://github.com/Microsoft/onnxruntime
@@ -17,10 +17,9 @@ RUN apt-get update &&\
WORKDIR /code
ENV PATH /usr/local/nvidia/bin:/usr/local/cuda/bin:/code/cmake-3.14.3-Linux-x86_64/bin:/opt/miniconda/bin:${PATH}
-# Prepare onnxruntime repository & build onnxruntime with TensorRT
-ADD scripts /tmp/scripts
-RUN /bin/sh /tmp/scripts/install_common_deps.sh && \
- git clone --single-branch --branch ${ONNXRUNTIME_SERVER_BRANCH} --recursive ${ONNXRUNTIME_REPO} onnxruntime &&\
+# Prepare onnxruntime repository & build onnxruntime with CUDA
+RUN git clone --single-branch --branch ${ONNXRUNTIME_SERVER_BRANCH} --recursive ${ONNXRUNTIME_REPO} onnxruntime &&\
+ /bin/sh onnxruntime/dockerfiles/scripts/install_common_deps.sh &&\
cp onnxruntime/ThirdPartyNotices.txt /code/ThirdPartyNotices.txt &&\
cp onnxruntime/dockerfiles/LICENSE-IMAGE.txt /code/LICENSE-IMAGE.txt &&\
cd onnxruntime &&\
diff --git a/dockerfiles/Dockerfile.openvino b/dockerfiles/Dockerfile.openvino
index 3574725654b4c..86ce2a25d5502 100644
--- a/dockerfiles/Dockerfile.openvino
+++ b/dockerfiles/Dockerfile.openvino
@@ -56,9 +56,8 @@ ENV LANG en_US.UTF-8
WORKDIR /code
ENV PATH /opt/miniconda/bin:/code/cmake-3.14.3-Linux-x86_64/bin:$PATH
-ADD scripts /tmp/scripts
-RUN /bin/sh /tmp/scripts/install_common_deps.sh && \
- git clone --recursive -b $ONNXRUNTIME_BRANCH $ONNXRUNTIME_REPO /onnxruntime && \
+RUN git clone --recursive -b $ONNXRUNTIME_BRANCH $ONNXRUNTIME_REPO /onnxruntime && \
+ /bin/sh /onnxruntime/dockerfiles/scripts/install_common_deps.sh && \
cd /onnxruntime/cmake/external/onnx && python3 setup.py install && \
cp /onnxruntime/dockerfiles/LICENSE-IMAGE.txt /code/LICENSE-IMAGE.txt && \
cp /onnxruntime/ThirdPartyNotices.txt /code/ThirdPartyNotices.txt && \
diff --git a/dockerfiles/Dockerfile.source b/dockerfiles/Dockerfile.source
index a0880ee68d84e..7da71b1a204e5 100644
--- a/dockerfiles/Dockerfile.source
+++ b/dockerfiles/Dockerfile.source
@@ -17,10 +17,9 @@ RUN apt-get update &&\
WORKDIR /code
ENV PATH /opt/miniconda/bin:/code/cmake-3.14.3-Linux-x86_64/bin:${PATH}
-ADD scripts /tmp/scripts
# Prepare onnxruntime repository & build onnxruntime
-RUN /bin/sh /tmp/scripts/install_common_deps.sh &&\
- git clone --single-branch --branch ${ONNXRUNTIME_SERVER_BRANCH} --recursive ${ONNXRUNTIME_REPO} onnxruntime &&\
+RUN git clone --single-branch --branch ${ONNXRUNTIME_SERVER_BRANCH} --recursive ${ONNXRUNTIME_REPO} onnxruntime &&\
+ /bin/sh onnxruntime/dockerfiles/scripts/install_common_deps.sh &&\
cd onnxruntime &&\
/bin/sh ./build.sh --config Release --build_wheel --update --build --cmake_extra_defines ONNXRUNTIME_VERSION=$(cat ./VERSION_NUMBER) &&\
pip install /code/onnxruntime/build/Linux/Release/dist/*.whl &&\
diff --git a/dockerfiles/Dockerfile.tensorrt b/dockerfiles/Dockerfile.tensorrt
index 6ffd3b19f3f1a..831d66baf2ecb 100644
--- a/dockerfiles/Dockerfile.tensorrt
+++ b/dockerfiles/Dockerfile.tensorrt
@@ -5,7 +5,7 @@
# Dockerfile to run ONNXRuntime with TensorRT integration
# nVidia TensorRT Base Image
-FROM nvcr.io/nvidia/tensorrt:19.02-py3
+FROM nvcr.io/nvidia/tensorrt:19.06-py3
MAINTAINER Vinitra Swamy "viswamy@microsoft.com"
ARG ONNXRUNTIME_REPO=https://github.com/Microsoft/onnxruntime
@@ -17,10 +17,9 @@ RUN apt-get update &&\
WORKDIR /code
ENV PATH /usr/local/nvidia/bin:/usr/local/cuda/bin:/code/cmake-3.14.3-Linux-x86_64/bin:/opt/miniconda/bin:${PATH}
-ADD scripts /tmp/scripts
# Prepare onnxruntime repository & build onnxruntime with TensorRT
-RUN /bin/sh /tmp/scripts/install_common_deps.sh && \
- git clone --single-branch --branch ${ONNXRUNTIME_SERVER_BRANCH} --recursive ${ONNXRUNTIME_REPO} onnxruntime &&\
+RUN git clone --single-branch --branch ${ONNXRUNTIME_SERVER_BRANCH} --recursive ${ONNXRUNTIME_REPO} onnxruntime &&\
+ /bin/sh onnxruntime/dockerfiles/scripts/install_common_deps.sh &&\
cp onnxruntime/dockerfiles/LICENSE-IMAGE.txt /code/LICENSE-IMAGE.txt &&\
cp onnxruntime/ThirdPartyNotices.txt /code/ThirdPartyNotices.txt &&\
cd onnxruntime &&\
diff --git a/dockerfiles/README.md b/dockerfiles/README.md
index 8d10f8dadaf3c..edcf7b8c87407 100644
--- a/dockerfiles/README.md
+++ b/dockerfiles/README.md
@@ -1,5 +1,7 @@
# Docker containers for ONNX Runtime
+#### Build Instructions
+
- [Arm 32v7](Dockerfile.arm32v7)
- [Build from source (CPU)](Dockerfile.source)
- [CUDA + CUDNN](Dockerfile.cuda)
@@ -9,7 +11,20 @@
- [ONNX Runtime Server](Dockerfile.server)
- [Nuphar](Dockerfile.nuphar)
-**Preparation step:** download `scripts` to your local folder before running the `docker build` command for any of the options below.
+#### Published Microsoft Container Registry (MCR) Images
+
+Use `docker pull` with any of the images and tags below to pull an image and try for yourself. Note that the build from source (CPU), CUDA, and TensorRT images include additional dependencies like miniconda for compatibility with AzureML image deployment.
+
+**Example**: Run `docker pull mcr.microsoft.com/azureml/onnxruntime:latest-cuda` to pull the latest released docker image with ONNX Runtime GPU, CUDA, and CUDNN support.
+
+| Build Flavor | Base Image | ONNX Runtime Docker Image tags | Latest |
+|-------------------|---------------------------------------|--------------------------------------------------|------------------|
+| Source (CPU) | mcr.microsoft.com/azureml/onnxruntime | :v0.4.0, :v0.5.0 | :latest |
+| CUDA (GPU) | mcr.microsoft.com/azureml/onnxruntime | :v0.4.0-cuda10.0-cudnn7, :v0.5.0-cuda10.1-cudnn7 | :latest-cuda |
+| TensorRT (x86) | mcr.microsoft.com/azureml/onnxruntime | :v0.4.0-tensorrt19.03, :v0.5.0-tensorrt19.06 | :latest-tensorrt |
+| OpenVino (VAD-M) | mcr.microsoft.com/azureml/onnxruntime | TBA | TBA |
+| OpenVino (MYRIAD) | mcr.microsoft.com/azureml/onnxruntime | TBA | TBA |
+| Server | mcr.microsoft.com/onnxruntime/server | :v0.4.0, :v0.5.0 | :latest |
## Build from Source
#### Linux 16.04, CPU, Python Bindings
@@ -86,7 +101,7 @@
## OpenVINO (Public Preview)
#### Linux 16.04, Python Bindings
-1. Build the onnxruntime image for all the accelerators supported as below
+1. Build the onnxruntime image for all the accelerators supported as below
Retrieve your docker image in one of the following ways.
@@ -95,7 +110,7 @@
docker build -t onnxruntime --build-arg DEVICE=$DEVICE .
```
- Pull the official image from DockerHub.
-
+
2. DEVICE: Specifies the hardware target for building OpenVINO Execution Provider. Below are the options for different Intel target devices.
@@ -107,12 +122,12 @@
| MYRIAD_FP16
| Intel MovidiusTM USB sticks |
| VAD-M_FP16
| Intel Vision Accelerator Design based on MovidiusTM MyriadX VPUs |
-## CPU
+## CPU
1. Retrieve your docker image in one of the following ways.
- - Build the docker image from the DockerFile in this repository.
-
+ - Build the docker image from the DockerFile in this repository.
+
```
docker build -t onnxruntime-cpu --build-arg DEVICE=CPU_FP32 --network host .
```
@@ -127,10 +142,10 @@
## GPU
-1. Retrieve your docker image in one of the following ways.
+1. Retrieve your docker image in one of the following ways.
- Build the docker image from the DockerFile in this repository.
- ```
- docker build -t onnxruntime-gpu --build-arg DEVICE=GPU_FP32 --network host .
+ ```
+ docker build -t onnxruntime-gpu --build-arg DEVICE=GPU_FP32 --network host .
```
- Pull the official image from DockerHub.
```
@@ -141,12 +156,12 @@
```
docker run -it --device /dev/dri:/dev/dri onnxruntime-gpu:latest
```
-## Myriad VPU Accelerator
+## Myriad VPU Accelerator
-1. Retrieve your docker image in one of the following ways.
+1. Retrieve your docker image in one of the following ways.
- Build the docker image from the DockerFile in this repository.
- ```
- docker build -t onnxruntime-myriad --build-arg DEVICE=MYRIAD_FP16 --network host .
+ ```
+ docker build -t onnxruntime-myriad --build-arg DEVICE=MYRIAD_FP16 --network host .
```
- Pull the official image from DockerHub.
```
@@ -159,12 +174,12 @@
```
=======
-## VAD-M Accelerator Version
+## VAD-M Accelerator Version
-1. Retrieve your docker image in one of the following ways.
+1. Retrieve your docker image in one of the following ways.
- Build the docker image from the DockerFile in this repository.
- ```
- docker build -t onnxruntime-vadr --build-arg DEVICE=VAD-M_FP16 --network host .
+ ```
+ docker build -t onnxruntime-vadr --build-arg DEVICE=VAD-M_FP16 --network host .
```
- Pull the official image from DockerHub.
```
@@ -212,4 +227,4 @@
# If you have a Linux machine, preface this command with "sudo"
docker run -it onnxruntime-nuphar
- ```
\ No newline at end of file
+ ```