This repository has been archived by the owner on Dec 4, 2024. It is now read-only.

Commit e686c93
Temporarily pinned the hdfs version to get 'test_s3' CI test passing again. (#195)
susanxhuynh authored Oct 23, 2017
1 parent: 7c09bfd
Showing 1 changed file, tests/utils.py, with 9 additions and 3 deletions.
@@ -38,7 +38,12 @@ def is_strict():
 def require_hdfs():
     LOGGER.info("Ensuring HDFS is installed.")
 
-    _require_package(HDFS_PACKAGE_NAME, _get_hdfs_options())
+    _require_package(
+        HDFS_PACKAGE_NAME,
+        _get_hdfs_options(),
+        # Remove after HDFS-483 is fixed
+        package_version='2.0.1-2.6.0-cdh5.11.0'
+    )
     _wait_for_hdfs()
 
 
@@ -51,7 +56,7 @@ def require_spark(options={}, service_name=None):
 
 
 # This should be in shakedown (DCOS_OSS-679)
-def _require_package(pkg_name, service_name=None, options={}):
+def _require_package(pkg_name, service_name=None, options={}, package_version=None):
     pkg_manager = dcos.package.get_package_manager()
     installed_pkgs = dcos.package.installed_packages(
         pkg_manager,
@@ -71,7 +76,8 @@ def _require_package(pkg_name, service_name=None, options={}):
         shakedown.install_package(
             pkg_name,
             options_json=options,
-            wait_for_completion=True)
+            wait_for_completion=True,
+            package_version=package_version)
 
 
 def _wait_for_spark(service_name=None):
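For reference, here is a minimal sketch of how the pinned install could be driven through the updated helper. The wrapper function and its default argument are illustrative assumptions, not part of this commit; the helper names, the version string, and the HDFS-483 reference come from the diff above, and the sketch assumes it lives alongside those helpers in tests/utils.py.

def require_pinned_hdfs(version='2.0.1-2.6.0-cdh5.11.0'):
    """Install HDFS at a fixed package version, then wait for it to come up.

    The default is the temporary pin from this commit; the pin can be
    dropped (version=None) once HDFS-483 is fixed.
    """
    # Hypothetical wrapper: mirrors the call added to require_hdfs() above,
    # with the version string parameterized instead of hard-coded.
    _require_package(
        HDFS_PACKAGE_NAME,
        _get_hdfs_options(),
        # Forwarded to shakedown.install_package() by _require_package()
        package_version=version)
    _wait_for_hdfs()

Because package_version defaults to None in _require_package(), existing callers such as require_spark() keep their previous behaviour and only require_hdfs() picks up the pin.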
