diff --git a/.github/workflows/CI.yaml b/.github/workflows/CI.yaml index 2648acfd6f..a3fe4f176c 100644 --- a/.github/workflows/CI.yaml +++ b/.github/workflows/CI.yaml @@ -16,10 +16,10 @@ jobs: steps: - uses: actions/checkout@v2 - - name: Set up Python 3.7 + - name: Set up Python 3.8 uses: actions/setup-python@v1 with: - python-version: 3.7 + python-version: 3.8 - name: Install dependencies run: | @@ -31,7 +31,8 @@ jobs: - name: Check Formatting run: | - black --line-length=100 --target-version=py27 --check --diff . + # TODO(kleesc): Re-enable after buildman rewrite + black --line-length=100 --target-version=py38 --check --diff --exclude "/(\.eggs|\.git|\.hg|\.mypy_cache|\.nox|\.tox|\.venv|_build|buck-out|build|dist|buildman)/" . unit: name: Unit Test @@ -39,20 +40,20 @@ jobs: steps: - uses: actions/checkout@v2 - - name: Set up Python 2.7 + - name: Set up Python 3.8 uses: actions/setup-python@v1 with: - python-version: 2.7 + python-version: 3.8 - name: Install dependencies run: | sudo apt-get update - sudo apt-get install libgpgme-dev libldap2-dev libsasl2-dev + sudo apt-get install libgpgme-dev libldap2-dev libsasl2-dev swig python -m pip install --upgrade pip cat requirements-dev.txt | grep tox | xargs pip install - name: tox - run: tox -e py27-unit + run: tox -e py38-unit registry: name: E2E Registry Tests @@ -60,20 +61,20 @@ jobs: steps: - uses: actions/checkout@v2 - - name: Set up Python 2.7 + - name: Set up Python 3.8 uses: actions/setup-python@v1 with: - python-version: 2.7 + python-version: 3.8 - name: Install dependencies run: | sudo apt-get update - sudo apt-get install libgpgme-dev libldap2-dev libsasl2-dev + sudo apt-get install libgpgme-dev libldap2-dev libsasl2-dev swig python -m pip install --upgrade pip cat requirements-dev.txt | grep tox | xargs pip install - name: tox - run: tox -e py27-registry + run: tox -e py38-registry docker: name: Docker Build @@ -89,15 +90,15 @@ jobs: steps: - uses: actions/checkout@v2 - - name: Set up Python 2.7 + - name: Set up Python 3.8 uses: actions/setup-python@v1 with: - python-version: 2.7 + python-version: 3.8 - name: Install dependencies run: | sudo apt-get update - sudo apt-get install libgpgme-dev libldap2-dev libsasl2-dev docker.io + sudo apt-get install libgpgme-dev libldap2-dev libsasl2-dev swig docker.io sudo systemctl unmask docker sudo systemctl start docker docker version @@ -105,7 +106,7 @@ jobs: cat requirements-dev.txt | grep tox | xargs pip install - name: tox - run: tox -e py27-mysql + run: tox -e py38-mysql psql: name: E2E Postgres Test @@ -113,15 +114,15 @@ jobs: steps: - uses: actions/checkout@v2 - - name: Set up Python 2.7 + - name: Set up Python 3.8 uses: actions/setup-python@v1 with: - python-version: 2.7 + python-version: 3.8 - name: Install dependencies run: | sudo apt-get update - sudo apt-get install libgpgme-dev libldap2-dev libsasl2-dev docker.io + sudo apt-get install libgpgme-dev libldap2-dev libsasl2-dev swig docker.io sudo systemctl unmask docker sudo systemctl start docker docker version @@ -129,7 +130,7 @@ jobs: cat requirements-dev.txt | grep tox | xargs pip install - name: tox - run: tox -e py27-psql + run: tox -e py38-psql oci: name: OCI Conformance @@ -142,10 +143,10 @@ jobs: repository: opencontainers/distribution-spec path: dist-spec - - name: Set up Python 2.7 + - name: Set up Python 3.8 uses: actions/setup-python@v1 with: - python-version: 2.7 + python-version: 3.8 - name: Set up Go 1.14 uses: actions/setup-go@v1 @@ -162,7 +163,7 @@ jobs: run: | # Quay sudo apt-get update - sudo apt-get install 
libgpgme-dev libldap2-dev libsasl2-dev + sudo apt-get install libgpgme-dev libldap2-dev libsasl2-dev swig python -m pip install --upgrade pip pip install -r <(cat requirements.txt requirements-dev.txt) @@ -172,4 +173,4 @@ jobs: CGO_ENABLED=0 go test -c -o conformance.test - name: conformance - run: TEST=true PYTHONPATH=. pytest test/registry/conformance_tests.py -s -vv + run: TEST=true PYTHONPATH=. pytest test/registry/conformance_tests.py -s -vv --ignore=buildman # TODO(kleesc): Remove --ignore=buildman after rewrite diff --git a/Dockerfile b/Dockerfile index 57b27d366b..8dfba105eb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,14 +1,14 @@ -FROM centos:7 +FROM centos:8 LABEL maintainer "thomasmckay@redhat.com" ENV OS=linux \ ARCH=amd64 \ - PYTHON_VERSION=2.7 \ + PYTHON_VERSION=3.6 \ PATH=$HOME/.local/bin/:$PATH \ PYTHONUNBUFFERED=1 \ PYTHONIOENCODING=UTF-8 \ - LC_ALL=en_US.UTF-8 \ - LANG=en_US.UTF-8 \ + LC_ALL=C.UTF-8 \ + LANG=C.UTF-8 \ PIP_NO_CACHE_DIR=off ENV QUAYDIR /quay-registry @@ -19,54 +19,36 @@ RUN mkdir $QUAYDIR WORKDIR $QUAYDIR RUN INSTALL_PKGS="\ - python27 \ - python27-python-pip \ - rh-nginx112 rh-nginx112-nginx \ + python3 \ + nginx \ openldap \ - scl-utils \ gcc-c++ git \ openldap-devel \ - gpgme-devel \ + python3-devel \ + python3-gpg \ dnsmasq \ memcached \ openssl \ skopeo \ " && \ - yum install -y yum-utils && \ - yum install -y epel-release centos-release-scl && \ yum -y --setopt=tsflags=nodocs --setopt=skip_missing_names_on_install=False install $INSTALL_PKGS && \ yum -y update && \ yum -y clean all COPY . . -RUN scl enable python27 "\ - pip install --upgrade setuptools==44 pip && \ - pip install -r requirements.txt --no-cache && \ - pip install -r requirements-dev.txt --no-cache && \ - pip freeze && \ +RUN alternatives --set python /usr/bin/python3 && \ + python -m pip install --upgrade setuptools pip && \ + python -m pip install -r requirements.txt --no-cache && \ + python -m pip freeze && \ mkdir -p $QUAYDIR/static/webfonts && \ mkdir -p $QUAYDIR/static/fonts && \ mkdir -p $QUAYDIR/static/ldn && \ - PYTHONPATH=$QUAYPATH python -m external_libraries \ - " - -RUN cp -r $QUAYDIR/static/ldn $QUAYDIR/config_app/static/ldn && \ + PYTHONPATH=$QUAYPATH python -m external_libraries && \ + cp -r $QUAYDIR/static/ldn $QUAYDIR/config_app/static/ldn && \ cp -r $QUAYDIR/static/fonts $QUAYDIR/config_app/static/fonts && \ cp -r $QUAYDIR/static/webfonts $QUAYDIR/config_app/static/webfonts -# Check python dependencies for GPL -# Due to the following bug, pip results must be piped to a file before grepping: -# https://github.com/pypa/pip/pull/3304 -# 'docutils' is a setup dependency of botocore required by s3transfer. It's under -# GPLv3, and so is manually removed. 
-RUN rm -Rf /opt/rh/python27/root/usr/lib/python2.7/site-packages/docutils && \ - scl enable python27 "pip freeze" | grep -v '^-e' | awk -F == '{print $1}' | grep -v docutils > piplist.txt && \ - scl enable python27 "xargs -a piplist.txt pip --disable-pip-version-check show" > pipinfo.txt && \ - test -z "$(cat pipinfo.txt | grep GPL | grep -v LGPL)" && \ - rm -f piplist.txt pipinfo.txt - -# # Front-end RUN curl --silent --location https://rpm.nodesource.com/setup_12.x | bash - && \ yum install -y nodejs && \ curl --silent --location https://dl.yarnpkg.com/rpm/yarn.repo | tee /etc/yum.repos.d/yarn.repo && \ @@ -76,14 +58,11 @@ RUN curl --silent --location https://rpm.nodesource.com/setup_12.x | bash - && \ yarn build && \ yarn build-config-app -# TODO: Build jwtproxy in dist-git -# https://jira.coreos.com/browse/QUAY-1315 + ENV JWTPROXY_VERSION=0.0.3 RUN curl -fsSL -o /usr/local/bin/jwtproxy "https://github.com/coreos/jwtproxy/releases/download/v${JWTPROXY_VERSION}/jwtproxy-${OS}-${ARCH}" && \ chmod +x /usr/local/bin/jwtproxy -# TODO: Build pushgateway in dist-git -# https://jira.coreos.com/browse/QUAY-1324 ENV PUSHGATEWAY_VERSION=1.0.0 RUN curl -fsSL "https://github.com/prometheus/pushgateway/releases/download/v${PUSHGATEWAY_VERSION}/pushgateway-${PUSHGATEWAY_VERSION}.${OS}-${ARCH}.tar.gz" | \ tar xz "pushgateway-${PUSHGATEWAY_VERSION}.${OS}-${ARCH}/pushgateway" && \ mv "pushgateway-${PUSHGATEWAY_VERSION}.${OS}-${ARCH}/pushgateway" /usr/local/bin/pushgateway && \ rm -rf "pushgateway-${PUSHGATEWAY_VERSION}.${OS}-${ARCH}" && \ chmod +x /usr/local/bin/pushgateway @@ -95,16 +74,16 @@ RUN curl -fsSL "https://github.com/prometheus/pushgateway/releases/download/v${P RUN curl -fsSL https://ip-ranges.amazonaws.com/ip-ranges.json -o util/ipresolver/aws-ip-ranges.json RUN ln -s $QUAYCONF /conf && \ - mkdir /var/log/nginx && \ ln -sf /dev/stdout /var/log/nginx/access.log && \ ln -sf /dev/stdout /var/log/nginx/error.log && \ chmod -R a+rwx /var/log/nginx # Cleanup RUN UNINSTALL_PKGS="\ - gcc-c++ \ + gcc-c++ git \ openldap-devel \ gpgme-devel \ + python3-devel \ optipng \ kernel-headers \ " && \ @@ -118,24 +97,12 @@ RUN chgrp -R 0 $QUAYDIR && \ chmod -R g=u $QUAYDIR RUN mkdir /datastorage && chgrp 0 /datastorage && chmod g=u /datastorage && \ - mkdir -p /var/log/nginx && chgrp 0 /var/log/nginx && chmod g=u /var/log/nginx && \ + chgrp 0 /var/log/nginx && chmod g=u /var/log/nginx && \ mkdir -p /conf/stack && chgrp 0 /conf/stack && chmod g=u /conf/stack && \ mkdir -p /tmp && chgrp 0 /tmp && chmod g=u /tmp && \ mkdir /certificates && chgrp 0 /certificates && chmod g=u /certificates && \ chmod g=u /etc/passwd -RUN chgrp 0 /var/opt/rh/rh-nginx112/log/nginx && chmod g=u /var/opt/rh/rh-nginx112/log/nginx - -# Allow TLS certs to be created and installed as non-root user -RUN chgrp -R 0 /etc/pki/ca-trust/extracted && \ - chmod -R g=u /etc/pki/ca-trust/extracted && \ - chgrp -R 0 /etc/pki/ca-trust/source/anchors && \ - chmod -R g=u /etc/pki/ca-trust/source/anchors && \ - chgrp -R 0 /opt/rh/python27/root/usr/lib/python2.7/site-packages/requests && \ - chmod -R g=u /opt/rh/python27/root/usr/lib/python2.7/site-packages/requests && \ - chgrp -R 0 /opt/rh/python27/root/usr/lib/python2.7/site-packages/certifi && \ - chmod -R g=u /opt/rh/python27/root/usr/lib/python2.7/site-packages/certifi - VOLUME ["/var/log", "/datastorage", "/tmp", "/conf/stack"] USER 1001 diff --git a/Dockerfile.centos7.osbs b/Dockerfile.centos7.osbs index 0822841ac6..70a51e4670 100644 --- a/Dockerfile.centos7.osbs +++ b/Dockerfile.centos7.osbs @@ -19,8 +19,8 @@ RUN mkdir $QUAYDIR WORKDIR $QUAYDIR RUN INSTALL_PKGS="\ - python27 \ - python27-python-pip \ + python36 \ + python36-python-pip \ rh-nginx112 rh-nginx112-nginx \ 
openldap \ scl-utils \ @@ -40,7 +40,7 @@ RUN INSTALL_PKGS="\ COPY . . -RUN scl enable python27 "\ +RUN scl enable python36 "\ pip install --upgrade setuptools pip && \ pip install -r requirements.txt --no-cache && \ pip install -r requirements-dev.txt --no-cache && \ @@ -61,8 +61,8 @@ RUN cp -r $QUAYDIR/static/ldn $QUAYDIR/config_app/static/ldn && \ # 'docutils' is a setup dependency of botocore required by s3transfer. It's under # GPLv3, and so is manually removed. RUN rm -Rf /opt/rh/python27/root/usr/lib/python2.7/site-packages/docutils && \ - scl enable python27 "pip freeze" | grep -v '^-e' | awk -F == '{print $1}' | grep -v docutils > piplist.txt && \ - scl enable python27 "xargs -a piplist.txt pip --disable-pip-version-check show" > pipinfo.txt && \ + scl enable python36 "pip freeze" | grep -v '^-e' | awk -F == '{print $1}' | grep -v docutils > piplist.txt && \ + scl enable python36 "xargs -a piplist.txt pip --disable-pip-version-check show" > pipinfo.txt && \ test -z "$(cat pipinfo.txt | grep GPL | grep -v LGPL)" && \ rm -f piplist.txt pipinfo.txt diff --git a/Dockerfile.cirun b/Dockerfile.cirun deleted file mode 100644 index e8fbc50cb6..0000000000 --- a/Dockerfile.cirun +++ /dev/null @@ -1,7 +0,0 @@ -FROM quay-ci-base -RUN mkdir -p conf/stack -RUN rm -rf test/data/test.db -ADD cirun.config.yaml conf/stack/config.yaml -RUN /usr/bin/scl enable python27 rh-nginx112 "LOGGING_LEVEL=INFO python initdb.py" -ENTRYPOINT ["/quay-registry/quay-entrypoint.sh"] -CMD ["registry"] diff --git a/Dockerfile.osbs b/Dockerfile.osbs index 73ec6a52d4..9c29795d47 100644 --- a/Dockerfile.osbs +++ b/Dockerfile.osbs @@ -19,8 +19,8 @@ RUN mkdir $QUAYDIR WORKDIR $QUAYDIR RUN INSTALL_PKGS="\ - python27 \ - python27-python-pip \ + python36 \ + python36-python-pip \ rh-nginx112 rh-nginx112-nginx \ openldap \ scl-utils \ @@ -46,8 +46,8 @@ RUN INSTALL_PKGS="\ COPY . . -RUN scl enable python27 "\ - pip install --upgrade setuptools==44 pip && \ +RUN scl enable python36 "\ + pip install --upgrade setuptools pip && \ pip install -r requirements.txt --no-cache && \ pip freeze && \ mkdir -p $QUAYDIR/static/webfonts && \ @@ -66,8 +66,8 @@ RUN cp -r $QUAYDIR/static/ldn $QUAYDIR/config_app/static/ldn && \ # 'docutils' is a setup dependency of botocore required by s3transfer. It's under # GPLv3, and so is manually removed. RUN rm -Rf /opt/rh/python27/root/usr/lib/python2.7/site-packages/docutils && \ - scl enable python27 "pip freeze" | grep -v '^-e' | awk -F == '{print $1}' | grep -v docutils > piplist.txt && \ - scl enable python27 "xargs -a piplist.txt pip --disable-pip-version-check show" > pipinfo.txt && \ + scl enable python36 "pip freeze" | grep -v '^-e' | awk -F == '{print $1}' | grep -v docutils > piplist.txt && \ + scl enable python36 "xargs -a piplist.txt pip --disable-pip-version-check show" > pipinfo.txt && \ test -z "$(cat pipinfo.txt | grep GPL | grep -v LGPL)" && \ rm -f piplist.txt pipinfo.txt diff --git a/Dockerfile.rhel7 b/Dockerfile.rhel7 index f46784e72e..89dfb20fe6 100644 --- a/Dockerfile.rhel7 +++ b/Dockerfile.rhel7 @@ -19,8 +19,8 @@ RUN mkdir $QUAYDIR WORKDIR $QUAYDIR RUN INSTALL_PKGS="\ - python27 \ - python27-python-pip \ + python36 \ + python36-python-pip \ rh-nginx112 rh-nginx112-nginx \ openldap \ scl-utils \ @@ -46,8 +46,8 @@ RUN INSTALL_PKGS="\ COPY . . 
-RUN scl enable python27 "\ - pip install --upgrade setuptools==44 pip && \ +RUN scl enable python36 "\ + pip install --upgrade setuptools pip && \ pip install -r requirements.txt --no-cache && \ pip freeze && \ mkdir -p $QUAYDIR/static/webfonts && \ @@ -66,8 +66,8 @@ RUN cp -r $QUAYDIR/static/ldn $QUAYDIR/config_app/static/ldn && \ # 'docutils' is a setup dependency of botocore required by s3transfer. It's under # GPLv3, and so is manually removed. RUN rm -Rf /opt/rh/python27/root/usr/lib/python2.7/site-packages/docutils && \ - scl enable python27 "pip freeze" | grep -v '^-e' | awk -F == '{print $1}' | grep -v docutils > piplist.txt && \ - scl enable python27 "xargs -a piplist.txt pip --disable-pip-version-check show" > pipinfo.txt && \ + scl enable python36 "pip freeze" | grep -v '^-e' | awk -F == '{print $1}' | grep -v docutils > piplist.txt && \ + scl enable python36 "xargs -a piplist.txt pip --disable-pip-version-check show" > pipinfo.txt && \ test -z "$(cat pipinfo.txt | grep GPL | grep -v LGPL)" && \ rm -f piplist.txt pipinfo.txt diff --git a/Dockerfile.rhel8 b/Dockerfile.rhel8 new file mode 100644 index 0000000000..cd2bfadeea --- /dev/null +++ b/Dockerfile.rhel8 @@ -0,0 +1,115 @@ +FROM registry.access.redhat.com/ubi8:8.1 +LABEL maintainer "thomasmckay@redhat.com" + +ENV OS=linux \ + ARCH=amd64 \ + PYTHON_VERSION=3.6 \ + PATH=$HOME/.local/bin/:$PATH \ + PYTHONUNBUFFERED=1 \ + PYTHONIOENCODING=UTF-8 \ + LC_ALL=C.UTF-8 \ + LANG=C.UTF-8 \ + PIP_NO_CACHE_DIR=off + +ENV QUAYDIR /quay-registry +ENV QUAYCONF /quay-registry/conf +ENV QUAYPATH "." + +RUN mkdir $QUAYDIR +WORKDIR $QUAYDIR + +RUN INSTALL_PKGS="\ + python3 \ + nginx \ + openldap \ + gcc-c++ git \ + openldap-devel \ + gpgme-devel \ + python3-devel \ + python3-gpg \ + dnsmasq \ + memcached \ + openssl \ + skopeo \ + " && \ + yum -y --setopt=tsflags=nodocs --setopt=skip_missing_names_on_install=False install $INSTALL_PKGS && \ + yum -y update && \ + yum -y clean all + +COPY . . 
+ +RUN alternatives --set python /usr/bin/python3 && \ + python -m pip install --upgrade setuptools pip && \ + python -m pip install -r requirements.txt --no-cache && \ + python -m pip freeze && \ + mkdir -p $QUAYDIR/static/webfonts && \ + mkdir -p $QUAYDIR/static/fonts && \ + mkdir -p $QUAYDIR/static/ldn && \ + PYTHONPATH=$QUAYPATH python -m external_libraries && \ + cp -r $QUAYDIR/static/ldn $QUAYDIR/config_app/static/ldn && \ + cp -r $QUAYDIR/static/fonts $QUAYDIR/config_app/static/fonts && \ + cp -r $QUAYDIR/static/webfonts $QUAYDIR/config_app/static/webfonts + + +RUN curl --silent --location https://rpm.nodesource.com/setup_8.x | bash - && \ + yum install -y nodejs && \ + curl --silent --location https://dl.yarnpkg.com/rpm/yarn.repo | tee /etc/yum.repos.d/yarn.repo && \ + rpm --import https://dl.yarnpkg.com/rpm/pubkey.gpg && \ + yum install -y yarn && \ + yarn install --ignore-engines && \ + yarn build && \ + yarn build-config-app + + +ENV JWTPROXY_VERSION=0.0.3 +RUN curl -fsSL -o /usr/local/bin/jwtproxy "https://github.com/coreos/jwtproxy/releases/download/v${JWTPROXY_VERSION}/jwtproxy-${OS}-${ARCH}" && \ + chmod +x /usr/local/bin/jwtproxy + +ENV PUSHGATEWAY_VERSION=1.0.0 +RUN curl -fsSL "https://github.com/prometheus/pushgateway/releases/download/v${PUSHGATEWAY_VERSION}/pushgateway-${PUSHGATEWAY_VERSION}.${OS}-${ARCH}.tar.gz" | \ + tar xz "pushgateway-${PUSHGATEWAY_VERSION}.${OS}-${ARCH}/pushgateway" && \ + mv "pushgateway-${PUSHGATEWAY_VERSION}.${OS}-${ARCH}/pushgateway" /usr/local/bin/pushgateway && \ + rm -rf "pushgateway-${PUSHGATEWAY_VERSION}.${OS}-${ARCH}" && \ + chmod +x /usr/local/bin/pushgateway + +# Update local copy of AWS IP Ranges. +RUN curl -fsSL https://ip-ranges.amazonaws.com/ip-ranges.json -o util/ipresolver/aws-ip-ranges.json + +RUN ln -s $QUAYCONF /conf && \ + ln -sf /dev/stdout /var/log/nginx/access.log && \ + ln -sf /dev/stdout /var/log/nginx/error.log && \ + chmod -R a+rwx /var/log/nginx + +# Cleanup +RUN UNINSTALL_PKGS="\ + gcc-c++ git \ + openldap-devel \ + gpgme-devel \ + python3-devel \ + optipng \ + kernel-headers \ + " && \ + yum remove -y $UNINSTALL_PKGS && \ + yum clean all && \ + rm -rf /var/cache/yum /tmp/* /var/tmp/* /root/.cache + +EXPOSE 8080 8443 7443 9091 + +RUN chgrp -R 0 $QUAYDIR && \ + chmod -R g=u $QUAYDIR + +RUN mkdir /datastorage && chgrp 0 /datastorage && chmod g=u /datastorage && \ + chgrp 0 /var/log/nginx && chmod g=u /var/log/nginx && \ + mkdir -p /conf/stack && chgrp 0 /conf/stack && chmod g=u /conf/stack && \ + mkdir -p /tmp && chgrp 0 /tmp && chmod g=u /tmp && \ + mkdir /certificates && chgrp 0 /certificates && chmod g=u /certificates && \ + chmod g=u /etc/passwd + +VOLUME ["/var/log", "/datastorage", "/tmp", "/conf/stack"] + +ENTRYPOINT ["/quay-registry/quay-entrypoint.sh"] +CMD ["registry"] + +# root required to create and install certs +# https://jira.coreos.com/browse/QUAY-1468 +# USER 1001 diff --git a/Makefile b/Makefile index df54579b4b..29cf6926e2 100644 --- a/Makefile +++ b/Makefile @@ -173,4 +173,5 @@ yapf-test: black: - black --line-length 100 --target-version py27 . + black --line-length 100 --target-version py36 --exclude "/(\.eggs|\.git|\.hg|\.mypy_cache|\.nox|\.tox|\.venv|_build|buck-out|build|dist|buildman)/" . 
# TODO(kleesc): Re-enable after buildman rewrite + diff --git a/app.py b/app.py index 4c2e5cf01c..69d278ace0 100644 --- a/app.py +++ b/app.py @@ -136,7 +136,7 @@ HELM_CHART_LAYER_TYPES = ["application/tar+gzip"] register_artifact_type(HELM_CHART_CONFIG_TYPE, HELM_CHART_LAYER_TYPES) -CONFIG_DIGEST = hashlib.sha256(json.dumps(app.config, default=str)).hexdigest()[0:8] +CONFIG_DIGEST = hashlib.sha256(json.dumps(app.config, default=str).encode("utf-8")).hexdigest()[0:8] logger.debug("Loaded config", extra={"config": app.config}) diff --git a/auth/auth_context_type.py b/auth/auth_context_type.py index cb425e0e00..174ef44074 100644 --- a/auth/auth_context_type.py +++ b/auth/auth_context_type.py @@ -150,7 +150,7 @@ def __init__( self.signed_data = signed_data def tuple(self): - return vars(self).values() + return list(vars(self).values()) def __eq__(self, other): return self.tuple() == other.tuple() diff --git a/auth/basic.py b/auth/basic.py index f99f8a81e9..0d49bc7864 100644 --- a/auth/basic.py +++ b/auth/basic.py @@ -33,7 +33,7 @@ def validate_basic_auth(auth_header): logger.debug("Attempt to process basic auth header") # Parse the basic auth header. - assert isinstance(auth_header, basestring) + assert isinstance(auth_header, str) credentials, err = _parse_basic_auth_header(auth_header) if err is not None: logger.debug("Got invalid basic auth header: %s", auth_header) @@ -53,7 +53,7 @@ def _parse_basic_auth_header(auth): return None, "Invalid basic auth header" try: - credentials = [part.decode("utf-8") for part in b64decode(normalized[1]).split(":", 1)] + credentials = [part.decode("utf-8") for part in b64decode(normalized[1]).split(b":", 1)] except (TypeError, UnicodeDecodeError, ValueError): logger.exception("Exception when parsing basic auth header: %s", auth) return None, "Could not parse basic auth header" diff --git a/auth/registry_jwt_auth.py b/auth/registry_jwt_auth.py index 62244e40f1..5c817a48e7 100644 --- a/auth/registry_jwt_auth.py +++ b/auth/registry_jwt_auth.py @@ -155,7 +155,7 @@ def wrapper(*args, **kwargs): abort( 401, - message=ije.message, + message=str(ije), headers=get_auth_headers(repository=repository, scopes=scopes), ) else: diff --git a/auth/test/test_basic.py b/auth/test/test_basic.py index 8b312689a6..08975f530d 100644 --- a/auth/test/test_basic.py +++ b/auth/test/test_basic.py @@ -17,9 +17,10 @@ def _token(username, password): - assert isinstance(username, basestring) - assert isinstance(password, basestring) - return "basic " + b64encode("%s:%s" % (username, password)) + assert isinstance(username, str) + assert isinstance(password, str) + token_bytes = b"%s:%s" % (username.encode("utf-8"), password.encode("utf-8")) + return "basic " + b64encode(token_bytes).decode("ascii") @pytest.mark.parametrize( @@ -62,6 +63,10 @@ def _token(username, password): error_message="This user has been disabled. 
Please contact your administrator.", ), ), + ( + _token("usér", "passwôrd"), + ValidateResult(AuthKind.basic, error_message="Invalid Username or Password"), + ), ], ) def test_validate_basic_auth_token(token, expected_result, app): @@ -110,15 +115,15 @@ def test_valid_app_specific_token(app): def test_invalid_unicode(app): - token = "\xebOH" - header = "basic " + b64encode(token) + token = b"\xebOH" + header = "basic " + b64encode(token).decode("ascii") result = validate_basic_auth(header) assert result == ValidateResult(AuthKind.basic, missing=True) def test_invalid_unicode_2(app): - token = "“4JPCOLIVMAY32Q3XGVPHC4CBF8SKII5FWNYMASOFDIVSXTC5I5NBU”" - header = "basic " + b64encode("devtable+somerobot:%s" % token) + token = "“4JPCOLIVMAY32Q3XGVPHC4CBF8SKII5FWNYMASOFDIVSXTC5I5NBU”".encode("utf-8") + header = "basic " + b64encode(b"devtable+somerobot:%s" % token).decode("ascii") result = validate_basic_auth(header) assert result == ValidateResult( AuthKind.basic, @@ -128,7 +133,9 @@ def test_invalid_unicode_2(app): def test_invalid_unicode_3(app): token = "sometoken" - header = "basic " + b64encode("“devtable+somerobot”:%s" % token) + auth = "“devtable+somerobot”:" + token + auth = auth.encode("utf-8") + header = "basic " + b64encode(auth).decode("ascii") result = validate_basic_auth(header) assert result == ValidateResult( AuthKind.basic, error_message="Could not find robot with specified username", diff --git a/auth/test/test_credentials.py b/auth/test/test_credentials.py index f3d1225ab2..ea92f57cba 100644 --- a/auth/test/test_credentials.py +++ b/auth/test/test_credentials.py @@ -156,7 +156,7 @@ def test_invalid_unicode_robot(app): result, kind = validate_credentials("devtable+somerobot", token) assert kind == CredentialKind.robot assert not result.auth_valid - msg = "Could not find robot with specified username" + msg = "Could not find robot with username: devtable+somerobot and supplied password." assert result == ValidateResult(AuthKind.credentials, error_message=msg) diff --git a/auth/test/test_registry_jwt.py b/auth/test/test_registry_jwt.py index 53b09ecfeb..5665587201 100644 --- a/auth/test/test_registry_jwt.py +++ b/auth/test/test_registry_jwt.py @@ -73,7 +73,7 @@ def _token(token_data, key_id=None, private_key=None, skip_header=False, alg=Non token_headers = {} token_data = jwt.encode(token_data, private_key, alg or "RS256", headers=token_headers) - return "Bearer {0}".format(token_data) + return "Bearer {0}".format(token_data.decode("ascii")) def _parse_token(token): @@ -228,7 +228,7 @@ def test_mixing_keys_e2e(initialized_db): _parse_token(deleted_key_token) -@pytest.mark.parametrize("token", [u"someunicodetoken✡", u"\xc9\xad\xbd",]) +@pytest.mark.parametrize("token", ["someunicodetoken✡", "\xc9\xad\xbd",]) def test_unicode_token(token): with pytest.raises(InvalidJWTException): _parse_token(token) diff --git a/avatars/avatars.py b/avatars/avatars.py index ca7e323fd7..c139b811f4 100644 --- a/avatars/avatars.py +++ b/avatars/avatars.py @@ -4,6 +4,8 @@ from requests.exceptions import RequestException +from util.bytes import Bytes + logger = logging.getLogger(__name__) @@ -104,7 +106,8 @@ def get_data(self, name, email_or_id, kind="user"): # Note: email_or_id may be None if gotten from external auth when email is disabled, # so use the username in that case. 
username_email_or_id = email_or_id or name - hash_value = hashlib.md5(username_email_or_id.strip().lower()).hexdigest() + username_email_or_id = Bytes.for_string_or_unicode(username_email_or_id).as_unicode() + hash_value = hashlib.md5(username_email_or_id.strip().lower().encode("utf-8")).hexdigest() byte_count = int(math.ceil(math.log(len(colors), 16))) byte_data = hash_value[0:byte_count] diff --git a/bill-of-materials.json b/bill-of-materials.json index 9f552239f1..9f52a0c956 100644 --- a/bill-of-materials.json +++ b/bill-of-materials.json @@ -194,16 +194,6 @@ "license": "Unlicense", "project": "furl" }, - { - "format": "Python", - "license": "MIT License", - "project": "future" - }, - { - "format": "Python", - "license": "PSF License", - "project": "futures" - }, { "format": "Python", "license": "Apache Software License 2.0", @@ -629,11 +619,6 @@ "license": "Apache Software License 2.0", "project": "toposort" }, - { - "format": "Python", - "license": "Apache Software License 2.0", - "project": "trollius" - }, { "format": "Python", "license": "MIT License", @@ -3854,4 +3839,4 @@ "license": "MIT License", "project": "zeroclipboard" } -] \ No newline at end of file +] diff --git a/boot.py b/boot.py index 3d5ccc2e5f..25b4950ece 100755 --- a/boot.py +++ b/boot.py @@ -1,7 +1,7 @@ #!/usr/bin/env python from datetime import datetime, timedelta -from urlparse import urlunparse +from urllib.parse import urlunparse from jinja2 import Template from cachetools.func import lru_cache @@ -96,7 +96,7 @@ def setup_jwt_proxy(): with open(app.config["INSTANCE_SERVICE_KEY_LOCATION"], mode="w") as f: f.truncate(0) - f.write(quay_key.exportKey()) + f.write(quay_key.exportKey().decode("utf-8")) # Generate the JWT proxy configuration. audience = get_audience() diff --git a/buildman/asyncutil.py b/buildman/asyncutil.py index 39a5b8c73a..31d687adc0 100644 --- a/buildman/asyncutil.py +++ b/buildman/asyncutil.py @@ -1,12 +1,12 @@ +import asyncio + from concurrent.futures import ThreadPoolExecutor from functools import partial -from trollius import get_event_loop, coroutine - def wrap_with_threadpool(obj, worker_threads=1): """ - Wraps a class in an async executor so that it can be safely used in an event loop like trollius. + Wraps a class in an async executor so that it can be safely used in an event loop like asyncio. """ async_executor = ThreadPoolExecutor(worker_threads) return AsyncWrapper(obj, executor=async_executor), async_executor @@ -14,12 +14,12 @@ def wrap_with_threadpool(obj, worker_threads=1): class AsyncWrapper(object): """ - Wrapper class which will transform a syncronous library to one that can be used with trollius + Wrapper class which will transform a syncronous library to one that can be used with asyncio coroutines. 
""" def __init__(self, delegate, loop=None, executor=None): - self._loop = loop if loop is not None else get_event_loop() + self._loop = loop if loop is not None else asyncio.get_event_loop() self._delegate = delegate self._executor = executor @@ -39,7 +39,6 @@ def wrapper(*args, **kwargs): return wrapper - @coroutine - def __call__(self, *args, **kwargs): + async def __call__(self, *args, **kwargs): callable_delegate_attr = partial(self._delegate, *args, **kwargs) return self._loop.run_in_executor(self._executor, callable_delegate_attr) diff --git a/buildman/builder.py b/buildman/builder.py index 7a36b5ee9a..ce08ce1baf 100644 --- a/buildman/builder.py +++ b/buildman/builder.py @@ -12,7 +12,7 @@ from buildman.manager.ephemeral import EphemeralBuilderManager from buildman.server import BuilderServer -from trollius import SSLContext +from ssl import SSLContext from raven.handlers.logging import SentryHandler from raven.conf import setup_logging diff --git a/buildman/component/buildcomponent.py b/buildman/component/buildcomponent.py index 76b575debf..4cf5ab6765 100644 --- a/buildman/component/buildcomponent.py +++ b/buildman/component/buildcomponent.py @@ -3,10 +3,9 @@ import time import logging import json -import trollius +import asyncio from autobahn.wamp.exception import ApplicationError -from trollius import From, Return from buildman.server import BuildJobResult from buildman.component.basecomponent import BaseComponent @@ -73,22 +72,18 @@ def kind(self): def onConnect(self): self.join(self.builder_realm) - @trollius.coroutine - def onJoin(self, details): + async def onJoin(self, details): logger.debug("Registering methods and listeners for component %s", self.builder_realm) - yield From(self.register(self._on_ready, u"io.quay.buildworker.ready")) - yield From( - self.register(self._determine_cache_tag, u"io.quay.buildworker.determinecachetag") - ) - yield From(self.register(self._ping, u"io.quay.buildworker.ping")) - yield From(self.register(self._on_log_message, u"io.quay.builder.logmessagesynchronously")) + await self.register(self._on_ready, "io.quay.buildworker.ready") + await (self.register(self._determine_cache_tag, "io.quay.buildworker.determinecachetag")) + await self.register(self._ping, "io.quay.buildworker.ping") + await self.register(self._on_log_message, "io.quay.builder.logmessagesynchronously") - yield From(self.subscribe(self._on_heartbeat, u"io.quay.builder.heartbeat")) + await self.subscribe(self._on_heartbeat, "io.quay.builder.heartbeat") - yield From(self._set_status(ComponentStatus.WAITING)) + await self._set_status(ComponentStatus.WAITING) - @trollius.coroutine - def start_build(self, build_job): + async def start_build(self, build_job): """ Starts a build. """ @@ -100,7 +95,7 @@ def start_build(self, build_job): self._worker_version, self._component_status, ) - raise Return() + return logger.debug( "Starting build for component %s (build %s, worker version: %s)", @@ -113,7 +108,7 @@ def start_build(self, build_job): self._build_status = StatusHandler(self.build_logs, build_job.repo_build.uuid) self._image_info = {} - yield From(self._set_status(ComponentStatus.BUILDING)) + await self._set_status(ComponentStatus.BUILDING) # Send the notification that the build has started. 
build_job.send_notification("build_start") @@ -122,8 +117,8 @@ def start_build(self, build_job): try: build_config = build_job.build_config except BuildJobLoadException as irbe: - yield From(self._build_failure("Could not load build job information", irbe)) - raise Return() + await self._build_failure("Could not load build job information", irbe) + return base_image_information = {} @@ -189,8 +184,8 @@ def start_build(self, build_job): self._current_job.repo_build.uuid, build_arguments, ) - yield From(self._build_failure("Insufficient build arguments. No buildpack available.")) - raise Return() + await self._build_failure("Insufficient build arguments. No buildpack available.") + return # Invoke the build. logger.debug("Invoking build: %s", self.builder_realm) @@ -200,7 +195,7 @@ def build_complete_callback(result): """ This function is used to execute a coroutine as the callback. """ - trollius.ensure_future(self._build_complete(result)) + asyncio.create_task(self._build_complete(result)) self.call("io.quay.builder.build", **build_arguments).add_done_callback( build_complete_callback @@ -285,8 +280,7 @@ def _process_pushpull_status(status_dict, current_phase, docker_data, images): images, max(len(images), num_images) ) - @trollius.coroutine - def _on_log_message(self, phase, json_data): + async def _on_log_message(self, phase, json_data): """ Tails log messages and updates the build status. """ @@ -320,7 +314,7 @@ def _on_log_message(self, phase, json_data): # the pull/push progress, as well as the current step index. with self._build_status as status_dict: try: - changed_phase = yield From( + changed_phase = await ( self._build_status.set_phase(phase, log_data.get("status_data")) ) if changed_phase: @@ -330,11 +324,11 @@ def _on_log_message(self, phase, json_data): logger.debug( "Trying to move cancelled build into phase: %s with id: %s", phase, build_id ) - raise Return(False) + return False except InvalidRepositoryBuildException: build_id = self._current_job.repo_build.uuid logger.warning("Build %s was not found; repo was probably deleted", build_id) - raise Return(False) + return False BuildComponent._process_pushpull_status(status_dict, phase, log_data, self._image_info) @@ -345,16 +339,15 @@ def _on_log_message(self, phase, json_data): # If the json data contains an error, then something went wrong with a push or pull. if "error" in log_data: - yield From(self._build_status.set_error(log_data["error"])) + await self._build_status.set_error(log_data["error"]) if current_step is not None: - yield From(self._build_status.set_command(current_status_string)) + await self._build_status.set_command(current_status_string) elif phase == BUILD_PHASE.BUILDING: - yield From(self._build_status.append_log(current_status_string)) - raise Return(True) + await self._build_status.append_log(current_status_string) + return True - @trollius.coroutine - def _determine_cache_tag( + async def _determine_cache_tag( self, command_comments, base_image_name, base_image_tag, base_image_id ): with self._build_status as status_dict: @@ -369,14 +362,13 @@ def _determine_cache_tag( ) tag_found = self._current_job.determine_cached_tag(base_image_id, command_comments) - raise Return(tag_found or "") + return tag_found or "" - @trollius.coroutine - def _build_failure(self, error_message, exception=None): + async def _build_failure(self, error_message, exception=None): """ Handles and logs a failed build. 
""" - yield From( + await ( self._build_status.set_error( error_message, {"internal_error": str(exception) if exception else None} ) @@ -386,10 +378,9 @@ def _build_failure(self, error_message, exception=None): logger.warning("Build %s failed with message: %s", build_id, error_message) # Mark that the build has finished (in an error state) - yield From(self._build_finished(BuildJobResult.ERROR)) + await self._build_finished(BuildJobResult.ERROR) - @trollius.coroutine - def _build_complete(self, result): + async def _build_complete(self, result): """ Wraps up a completed build. @@ -411,12 +402,12 @@ def _build_complete(self, result): pass try: - yield From(self._build_status.set_phase(BUILD_PHASE.COMPLETE)) + await self._build_status.set_phase(BUILD_PHASE.COMPLETE) except InvalidRepositoryBuildException: logger.warning("Build %s was not found; repo was probably deleted", build_id) - raise Return() + return - yield From(self._build_finished(BuildJobResult.COMPLETE)) + await self._build_finished(BuildJobResult.COMPLETE) # Label the pushed manifests with the build metadata. manifest_digests = kwargs.get("digests") or [] @@ -444,7 +435,7 @@ def _build_complete(self, result): worker_error = WorkerError(aex.error, aex.kwargs.get("base_error")) # Write the error to the log. - yield From( + await ( self._build_status.set_error( worker_error.public_message(), worker_error.extra_data(), @@ -465,23 +456,22 @@ def _build_complete(self, result): build_id, worker_error.public_message(), ) - yield From(self._build_finished(BuildJobResult.INCOMPLETE)) + await self._build_finished(BuildJobResult.INCOMPLETE) else: logger.debug("Got remote failure exception for build %s: %s", build_id, aex) - yield From(self._build_finished(BuildJobResult.ERROR)) + await self._build_finished(BuildJobResult.ERROR) # Remove the current job. self._current_job = None - @trollius.coroutine - def _build_finished(self, job_status): + async def _build_finished(self, job_status): """ Alerts the parent that a build has completed and sets the status back to running. """ - yield From(self.parent_manager.job_completed(self._current_job, job_status, self)) + await self.parent_manager.job_completed(self._current_job, job_status, self) # Set the component back to a running state. - yield From(self._set_status(ComponentStatus.RUNNING)) + await self._set_status(ComponentStatus.RUNNING) @staticmethod def _ping(): @@ -490,8 +480,7 @@ def _ping(): """ return "pong" - @trollius.coroutine - def _on_ready(self, token, version): + async def _on_ready(self, token, version): logger.debug('On ready called (token "%s")', token) self._worker_version = version @@ -499,30 +488,29 @@ def _on_ready(self, token, version): logger.warning( 'Build component (token "%s") is running an out-of-date version: %s', token, version ) - raise Return(False) + return False if self._component_status != ComponentStatus.WAITING: logger.warning('Build component (token "%s") is already connected', self.expected_token) - raise Return(False) + return False if token != self.expected_token: logger.warning( 'Builder token mismatch. Expected: "%s". Found: "%s"', self.expected_token, token ) - raise Return(False) + return False - yield From(self._set_status(ComponentStatus.RUNNING)) + await self._set_status(ComponentStatus.RUNNING) # Start the heartbeat check and updating loop. 
- loop = trollius.get_event_loop() + loop = asyncio.get_event_loop() loop.create_task(self._heartbeat()) logger.debug("Build worker %s is connected and ready", self.builder_realm) - raise Return(True) + return True - @trollius.coroutine - def _set_status(self, phase): + async def _set_status(self, phase): if phase == ComponentStatus.RUNNING: - yield From(self.parent_manager.build_component_ready(self)) + await self.parent_manager.build_component_ready(self) self._component_status = phase @@ -536,15 +524,14 @@ def _on_heartbeat(self): logger.debug("Got heartbeat on realm %s", self.builder_realm) self._last_heartbeat = datetime.datetime.utcnow() - @trollius.coroutine - def _heartbeat(self): + async def _heartbeat(self): """ Coroutine that runs every HEARTBEAT_TIMEOUT seconds, both checking the worker's heartbeat and updating the heartbeat in the build status dictionary (if applicable). This allows the build system to catch crashes from either end. """ - yield From(trollius.sleep(INITIAL_TIMEOUT)) + await asyncio.sleep(INITIAL_TIMEOUT) while True: # If the component is no longer running or actively building, nothing more to do. @@ -552,7 +539,7 @@ def _heartbeat(self): self._component_status != ComponentStatus.RUNNING and self._component_status != ComponentStatus.BUILDING ): - raise Return() + return # If there is an active build, write the heartbeat to its status. if self._build_status is not None: @@ -562,7 +549,7 @@ def _heartbeat(self): # Mark the build item. current_job = self._current_job if current_job is not None: - yield From(self.parent_manager.job_heartbeat(current_job)) + await self.parent_manager.job_heartbeat(current_job) # Check the heartbeat from the worker. logger.debug("Checking heartbeat on realm %s", self.builder_realm) @@ -576,8 +563,8 @@ def _heartbeat(self): self._last_heartbeat, ) - yield From(self._timeout()) - raise Return() + await self._timeout() + return logger.debug( "Heartbeat on realm %s is valid: %s (%s).", @@ -586,20 +573,19 @@ def _heartbeat(self): self._component_status, ) - yield From(trollius.sleep(HEARTBEAT_TIMEOUT)) + await asyncio.sleep(HEARTBEAT_TIMEOUT) - @trollius.coroutine - def _timeout(self): + async def _timeout(self): if self._component_status == ComponentStatus.TIMED_OUT: - raise Return() + return - yield From(self._set_status(ComponentStatus.TIMED_OUT)) + await self._set_status(ComponentStatus.TIMED_OUT) logger.warning("Build component with realm %s has timed out", self.builder_realm) # If we still have a running job, then it has not completed and we need to tell the parent # manager. if self._current_job is not None: - yield From( + await ( self._build_status.set_error( "Build worker timed out", internal_error=True, @@ -609,7 +595,7 @@ def _timeout(self): build_id = self._current_job.build_uuid logger.error("[BUILD INTERNAL ERROR: Timeout] Build ID: %s", build_id) - yield From( + await ( self.parent_manager.job_completed( self._current_job, BuildJobResult.INCOMPLETE, self ) @@ -621,8 +607,7 @@ def _timeout(self): # Remove the job reference. 
self._current_job = None - @trollius.coroutine - def cancel_build(self): + async def cancel_build(self): self.parent_manager.build_component_disposed(self, True) self._current_job = None - yield From(self._set_status(ComponentStatus.RUNNING)) + await self._set_status(ComponentStatus.RUNNING) diff --git a/buildman/jobutil/buildstatus.py b/buildman/jobutil/buildstatus.py index 416495cff0..5ed8a6eddd 100644 --- a/buildman/jobutil/buildstatus.py +++ b/buildman/jobutil/buildstatus.py @@ -2,7 +2,6 @@ import logging from redis import RedisError -from trollius import From, Return, coroutine from data.database import BUILD_PHASE from data import model @@ -35,54 +34,47 @@ def __init__(self, build_logs, repository_build_uuid): # Write the initial status. self.__exit__(None, None, None) - @coroutine - def _append_log_message(self, log_message, log_type=None, log_data=None): + async def _append_log_message(self, log_message, log_type=None, log_data=None): log_data = log_data or {} log_data["datetime"] = str(datetime.datetime.now()) try: - yield From( - self._build_logs.append_log_message(self._uuid, log_message, log_type, log_data) - ) + await (self._build_logs.append_log_message(self._uuid, log_message, log_type, log_data)) except RedisError: logger.exception("Could not save build log for build %s: %s", self._uuid, log_message) - @coroutine - def append_log(self, log_message, extra_data=None): + async def append_log(self, log_message, extra_data=None): if log_message is None: return - yield From(self._append_log_message(log_message, log_data=extra_data)) + await self._append_log_message(log_message, log_data=extra_data) - @coroutine - def set_command(self, command, extra_data=None): + async def set_command(self, command, extra_data=None): if self._current_command == command: - raise Return() + return self._current_command = command - yield From(self._append_log_message(command, self._build_logs.COMMAND, extra_data)) + await self._append_log_message(command, self._build_logs.COMMAND, extra_data) - @coroutine - def set_error(self, error_message, extra_data=None, internal_error=False, requeued=False): + async def set_error(self, error_message, extra_data=None, internal_error=False, requeued=False): error_phase = ( BUILD_PHASE.INTERNAL_ERROR if internal_error and requeued else BUILD_PHASE.ERROR ) - yield From(self.set_phase(error_phase)) + await self.set_phase(error_phase) extra_data = extra_data or {} extra_data["internal_error"] = internal_error - yield From(self._append_log_message(error_message, self._build_logs.ERROR, extra_data)) + await self._append_log_message(error_message, self._build_logs.ERROR, extra_data) - @coroutine - def set_phase(self, phase, extra_data=None): + async def set_phase(self, phase, extra_data=None): if phase == self._current_phase: - raise Return(False) + return False self._current_phase = phase - yield From(self._append_log_message(phase, self._build_logs.PHASE, extra_data)) + await self._append_log_message(phase, self._build_logs.PHASE, extra_data) # Update the repository build with the new phase - raise Return(self._build_model.update_phase_then_close(self._uuid, phase)) + return self._build_model.update_phase_then_close(self._uuid, phase) def __enter__(self): return self._status diff --git a/buildman/manager/basemanager.py b/buildman/manager/basemanager.py index eea969797d..7ab6d628e1 100644 --- a/buildman/manager/basemanager.py +++ b/buildman/manager/basemanager.py @@ -1,11 +1,22 @@ -from trollius import coroutine +from abc import abstractmethod, ABC +import inspect 
-class BaseManager(object): +class BaseManager(ABC): """ Base for all worker managers. """ + def __new__(cls, *args, **kwargs): + """Hack to ensure method defined as async are implemented as such. """ + coroutines = inspect.getmembers(BaseManager, predicate=inspect.iscoroutinefunction) + for coroutine in coroutines: + implemented_method = getattr(cls, coroutine[0]) + if not inspect.iscoroutinefunction(implemented_method): + raise RuntimeError("The method %s must be a coroutine" % implemented_method) + + return super().__new__(cls, *args, **kwargs) + def __init__( self, register_component, @@ -22,8 +33,7 @@ def __init__( self.manager_hostname = manager_hostname self.heartbeat_period_sec = heartbeat_period_sec - @coroutine - def job_heartbeat(self, build_job): + async def job_heartbeat(self, build_job): """ Method invoked to tell the manager that a job is still running. @@ -31,13 +41,15 @@ def job_heartbeat(self, build_job): """ self.job_heartbeat_callback(build_job) + @abstractmethod def overall_setup_time(self): """ Returns the number of seconds that the build system should wait before allowing the job to be picked up again after called 'schedule'. """ - raise NotImplementedError + pass + @abstractmethod def shutdown(self): """ Indicates that the build controller server is in a shutdown state and that no new jobs or @@ -45,43 +57,45 @@ def shutdown(self): Existing workers should be cleaned up once their jobs have completed """ - raise NotImplementedError + pass - @coroutine - def schedule(self, build_job): + @abstractmethod + async def schedule(self, build_job): """ Schedules a queue item to be built. Returns a 2-tuple with (True, None) if the item was properly scheduled and (False, a retry timeout in seconds) if all workers are busy or an error occurs. """ - raise NotImplementedError + pass + @abstractmethod def initialize(self, manager_config): """ Runs any initialization code for the manager. Called once the server is in a ready state. """ - raise NotImplementedError + pass - @coroutine - def build_component_ready(self, build_component): + @abstractmethod + async def build_component_ready(self, build_component): """ Method invoked whenever a build component announces itself as ready. """ - raise NotImplementedError + pass + @abstractmethod def build_component_disposed(self, build_component, timed_out): """ Method invoked whenever a build component has been disposed. The timed_out boolean indicates whether the component's heartbeat timed out. """ - raise NotImplementedError + pass - @coroutine - def job_completed(self, build_job, job_status, build_component): + @abstractmethod + async def job_completed(self, build_job, job_status, build_component): """ Method invoked once a job_item has completed, in some manner. @@ -89,12 +103,13 @@ def job_completed(self, build_job, job_status, build_component): should call coroutine self.job_complete_callback with a status of Incomplete if they wish for the job to be automatically requeued. """ - raise NotImplementedError + pass + @abstractmethod def num_workers(self): """ Returns the number of active build workers currently registered. This includes those that are currently busy and awaiting more work. 
""" - raise NotImplementedError + pass diff --git a/buildman/manager/enterprise.py b/buildman/manager/enterprise.py index efd73db47f..ac5d799ff4 100644 --- a/buildman/manager/enterprise.py +++ b/buildman/manager/enterprise.py @@ -5,8 +5,6 @@ from buildman.component.buildcomponent import BuildComponent from buildman.manager.basemanager import BaseManager -from trollius import From, Return, coroutine - REGISTRATION_REALM = "registration" RETRY_TIMEOUT = 5 logger = logging.getLogger(__name__) @@ -20,9 +18,9 @@ class DynamicRegistrationComponent(BaseComponent): def onConnect(self): self.join(REGISTRATION_REALM) - def onJoin(self, details): + async def onJoin(self, details): logger.debug("Registering registration method") - yield From(self.register(self._worker_register, u"io.quay.buildworker.register")) + await self.register(self._worker_register, "io.quay.buildworker.register") def _worker_register(self): realm = self.parent_manager.add_build_component() @@ -65,30 +63,27 @@ def add_build_component(self): self.all_components.add(new_component) return realm - @coroutine - def schedule(self, build_job): + async def schedule(self, build_job): """ Schedules a build for an Enterprise Registry. """ if self.shutting_down or not self.ready_components: - raise Return(False, RETRY_TIMEOUT) + return False, RETRY_TIMEOUT component = self.ready_components.pop() - yield From(component.start_build(build_job)) + await component.start_build(build_job) - raise Return(True, None) + return True, None - @coroutine - def build_component_ready(self, build_component): + async def build_component_ready(self, build_component): self.ready_components.add(build_component) def shutdown(self): self.shutting_down = True - @coroutine - def job_completed(self, build_job, job_status, build_component): - yield From(self.job_complete_callback(build_job, job_status)) + async def job_completed(self, build_job, job_status, build_component): + await self.job_complete_callback(build_job, job_status) def build_component_disposed(self, build_component, timed_out): self.all_components.remove(build_component) diff --git a/buildman/manager/ephemeral.py b/buildman/manager/ephemeral.py index 9135033ab0..0e5cd36b4d 100644 --- a/buildman/manager/ephemeral.py +++ b/buildman/manager/ephemeral.py @@ -1,3 +1,4 @@ +import asyncio import logging import uuid import calendar @@ -9,7 +10,6 @@ from six import iteritems from prometheus_client import Counter, Histogram -from trollius import From, coroutine, Return, async, sleep from buildman.orchestrator import ( orchestrator_from_config, @@ -98,8 +98,7 @@ def __init__(self, *args, **kwargs): def overall_setup_time(self): return EPHEMERAL_SETUP_TIMEOUT - @coroutine - def _mark_job_incomplete(self, build_job, build_info): + async def _mark_job_incomplete(self, build_job, build_info): """ Marks a job as incomplete, in response to a failure to start or a timeout. """ @@ -113,11 +112,11 @@ def _mark_job_incomplete(self, build_job, build_info): # Take a lock to ensure that only one manager reports the build as incomplete for this # execution. lock_key = slash_join(self._expired_lock_prefix, build_job.build_uuid, execution_id) - acquired_lock = yield From(self._orchestrator.lock(lock_key)) + acquired_lock = await self._orchestrator.lock(lock_key) if acquired_lock: try: # Clean up the bookkeeping for the job. 
- yield From(self._orchestrator.delete_key(self._job_key(build_job))) + await self._orchestrator.delete_key(self._job_key(build_job)) except KeyError: logger.debug( "Could not delete job key %s; might have been removed already", @@ -130,7 +129,7 @@ def _mark_job_incomplete(self, build_job, build_info): executor_name, execution_id, ) - yield From( + await ( self.job_complete_callback( build_job, BuildJobResult.INCOMPLETE, executor_name, update_phase=True ) @@ -138,8 +137,7 @@ def _mark_job_incomplete(self, build_job, build_info): else: logger.debug("Did not get lock for job-expiration for job %s", build_job.build_uuid) - @coroutine - def _job_callback(self, key_change): + async def _job_callback(self, key_change): """ This is the callback invoked when keys related to jobs are changed. It ignores all events related to the creation of new jobs. Deletes or expirations cause checks to ensure they've @@ -149,7 +147,7 @@ def _job_callback(self, key_change): :type key_change: :class:`KeyChange` """ if key_change.event in (KeyEvent.CREATE, KeyEvent.SET): - raise Return() + return elif key_change.event in (KeyEvent.DELETE, KeyEvent.EXPIRE): # Handle the expiration/deletion. @@ -166,13 +164,13 @@ def _job_callback(self, key_change): build_job.build_uuid, job_metadata, ) - raise Return() + return if key_change.event != KeyEvent.EXPIRE: # If the etcd action was not an expiration, then it was already deleted by some manager and # the execution was therefore already shutdown. All that's left is to remove the build info. self._build_uuid_to_info.pop(build_job.build_uuid, None) - raise Return() + return logger.debug( "got expiration for job %s with metadata: %s", build_job.build_uuid, job_metadata @@ -181,7 +179,7 @@ def _job_callback(self, key_change): if not job_metadata.get("had_heartbeat", False): # If we have not yet received a heartbeat, then the node failed to boot in some way. # We mark the job as incomplete here. - yield From(self._mark_job_incomplete(build_job, build_info)) + await self._mark_job_incomplete(build_job, build_info) # Finally, we terminate the build execution for the job. We don't do this under a lock as # terminating a node is an atomic operation; better to make sure it is terminated than not. @@ -190,14 +188,13 @@ def _job_callback(self, key_change): build_job.build_uuid, build_info.execution_id, ) - yield From(self.kill_builder_executor(build_job.build_uuid)) + await self.kill_builder_executor(build_job.build_uuid) else: logger.warning( "Unexpected KeyEvent (%s) on job key: %s", key_change.event, key_change.key ) - @coroutine - def _realm_callback(self, key_change): + async def _realm_callback(self, key_change): logger.debug("realm callback for key: %s", key_change.key) if key_change.event == KeyEvent.CREATE: # Listen on the realm created by ourselves or another worker. @@ -231,7 +228,7 @@ def _realm_callback(self, key_change): # Cleanup the job, since it never started. logger.debug("Job %s for incomplete marking: %s", build_uuid, build_info) if build_info is not None: - yield From(self._mark_job_incomplete(build_job, build_info)) + await self._mark_job_incomplete(build_job, build_info) # Cleanup the executor. 
logger.info( @@ -241,7 +238,7 @@ def _realm_callback(self, key_change): executor_name, execution_id, ) - yield From(self.terminate_executor(executor_name, execution_id)) + await self.terminate_executor(executor_name, execution_id) else: logger.warning( @@ -278,10 +275,9 @@ def _register_realm(self, realm_spec): def registered_executors(self): return self._ordered_executors - @coroutine - def _register_existing_realms(self): + async def _register_existing_realms(self): try: - all_realms = yield From(self._orchestrator.get_prefixed_keys(self._realm_prefix)) + all_realms = await self._orchestrator.get_prefixed_keys(self._realm_prefix) # Register all existing realms found. encountered = { @@ -400,22 +396,21 @@ def initialize(self, manager_config): ) # Load components for all realms currently known to the cluster - async(self._register_existing_realms()) + asyncio.create_task(self._register_existing_realms()) def shutdown(self): logger.debug("Shutting down worker.") if self._orchestrator is not None: self._orchestrator.shutdown() - @coroutine - def schedule(self, build_job): + async def schedule(self, build_job): build_uuid = build_job.job_details["build_uuid"] logger.debug("Calling schedule with job: %s", build_uuid) # Check if there are worker slots available by checking the number of jobs in the orchestrator allowed_worker_count = self._manager_config.get("ALLOWED_WORKER_COUNT", 1) try: - active_jobs = yield From(self._orchestrator.get_prefixed_keys(self._job_prefix)) + active_jobs = await self._orchestrator.get_prefixed_keys(self._job_prefix) workers_alive = len(active_jobs) except KeyError: workers_alive = 0 @@ -423,12 +418,12 @@ def schedule(self, build_job): logger.exception( "Could not read job count from orchestrator for job due to orchestrator being down" ) - raise Return(False, ORCHESTRATOR_UNAVAILABLE_SLEEP_DURATION) + return False, ORCHESTRATOR_UNAVAILABLE_SLEEP_DURATION except OrchestratorError: logger.exception( "Exception when reading job count from orchestrator for job: %s", build_uuid ) - raise Return(False, RETRY_IMMEDIATELY_SLEEP_DURATION) + return False, RETRY_IMMEDIATELY_SLEEP_DURATION logger.debug("Total jobs (scheduling job %s): %s", build_uuid, workers_alive) @@ -439,7 +434,7 @@ def schedule(self, build_job): workers_alive, allowed_worker_count, ) - raise Return(False, TOO_MANY_WORKERS_SLEEP_DURATION) + return False, TOO_MANY_WORKERS_SLEEP_DURATION job_key = self._job_key(build_job) @@ -466,7 +461,7 @@ def schedule(self, build_job): ) try: - yield From( + await ( self._orchestrator.set_key( job_key, lock_payload, overwrite=False, expiration=EPHEMERAL_SETUP_TIMEOUT ) @@ -475,15 +470,15 @@ def schedule(self, build_job): logger.warning( "Job: %s already exists in orchestrator, timeout may be misconfigured", build_uuid ) - raise Return(False, EPHEMERAL_API_TIMEOUT) + return False, EPHEMERAL_API_TIMEOUT except OrchestratorConnectionError: logger.exception( "Exception when writing job %s to orchestrator; could not connect", build_uuid ) - raise Return(False, ORCHESTRATOR_UNAVAILABLE_SLEEP_DURATION) + return False, ORCHESTRATOR_UNAVAILABLE_SLEEP_DURATION except OrchestratorError: logger.exception("Exception when writing job %s to orchestrator", build_uuid) - raise Return(False, RETRY_IMMEDIATELY_SLEEP_DURATION) + return False, RETRY_IMMEDIATELY_SLEEP_DURATION # Got a lock, now lets boot the job via one of the registered executors. 
started_with_executor = None @@ -519,7 +514,7 @@ def schedule(self, build_job): ) try: - execution_id = yield From(executor.start_builder(realm, token, build_uuid)) + execution_id = await executor.start_builder(realm, token, build_uuid) except: logger.exception("Exception when starting builder for job: %s", build_uuid) continue @@ -534,8 +529,8 @@ def schedule(self, build_job): logger.error("Could not start ephemeral worker for build %s", build_uuid) # Delete the associated build job record. - yield From(self._orchestrator.delete_key(job_key)) - raise Return(False, EPHEMERAL_API_TIMEOUT) + await self._orchestrator.delete_key(job_key) + return False, EPHEMERAL_API_TIMEOUT # Job was started! logger.debug( @@ -551,7 +546,7 @@ def schedule(self, build_job): ) try: - yield From( + await ( self._orchestrator.set_key( self._metric_key(realm), metric_spec, @@ -591,7 +586,7 @@ def schedule(self, build_job): execution_id, setup_time, ) - yield From( + await ( self._orchestrator.set_key( self._realm_key(realm), realm_spec, expiration=setup_time ) @@ -600,12 +595,12 @@ def schedule(self, build_job): logger.exception( "Exception when writing realm %s to orchestrator for job %s", realm, build_uuid ) - raise Return(False, ORCHESTRATOR_UNAVAILABLE_SLEEP_DURATION) + return False, ORCHESTRATOR_UNAVAILABLE_SLEEP_DURATION except OrchestratorError: logger.exception( "Exception when writing realm %s to orchestrator for job %s", realm, build_uuid ) - raise Return(False, setup_time) + return False, setup_time logger.debug( "Builder spawn complete for job %s using executor %s with ID %s ", @@ -613,10 +608,9 @@ def schedule(self, build_job): started_with_executor.name, execution_id, ) - raise Return(True, None) + return True, None - @coroutine - def build_component_ready(self, build_component): + async def build_component_ready(self, build_component): logger.debug( "Got component ready for component with realm %s", build_component.builder_realm ) @@ -631,7 +625,7 @@ def build_component_ready(self, build_component): "Could not find job for the build component on realm %s; component is ready", build_component.builder_realm, ) - raise Return() + return # Start the build job. logger.debug( @@ -639,15 +633,13 @@ def build_component_ready(self, build_component): job.build_uuid, build_component.builder_realm, ) - yield From(build_component.start_build(job)) + await build_component.start_build(job) - yield From(self._write_duration_metric(build_ack_duration, build_component.builder_realm)) + await self._write_duration_metric(build_ack_duration, build_component.builder_realm) # Clean up the bookkeeping for allowing any manager to take the job. 
try: - yield From( - self._orchestrator.delete_key(self._realm_key(build_component.builder_realm)) - ) + await (self._orchestrator.delete_key(self._realm_key(build_component.builder_realm))) except KeyError: logger.warning("Could not delete realm key %s", build_component.builder_realm) @@ -655,13 +647,12 @@ def build_component_disposed(self, build_component, timed_out): logger.debug("Calling build_component_disposed.") self.unregister_component(build_component) - @coroutine - def job_completed(self, build_job, job_status, build_component): + async def job_completed(self, build_job, job_status, build_component): logger.debug( "Calling job_completed for job %s with status: %s", build_job.build_uuid, job_status ) - yield From( + await ( self._write_duration_metric( build_duration, build_component.builder_realm, job_status=job_status ) @@ -671,66 +662,61 @@ def job_completed(self, build_job, job_status, build_component): # to ask for the phase to be updated as well. build_info = self._build_uuid_to_info.get(build_job.build_uuid, None) executor_name = build_info.executor_name if build_info else None - yield From( - self.job_complete_callback(build_job, job_status, executor_name, update_phase=False) - ) + await (self.job_complete_callback(build_job, job_status, executor_name, update_phase=False)) # Kill the ephemeral builder. - yield From(self.kill_builder_executor(build_job.build_uuid)) + await self.kill_builder_executor(build_job.build_uuid) # Delete the build job from the orchestrator. try: job_key = self._job_key(build_job) - yield From(self._orchestrator.delete_key(job_key)) + await self._orchestrator.delete_key(job_key) except KeyError: logger.debug("Builder is asking for job to be removed, but work already completed") except OrchestratorConnectionError: logger.exception("Could not remove job key as orchestrator is not available") - yield From(sleep(ORCHESTRATOR_UNAVAILABLE_SLEEP_DURATION)) - raise Return() + await asyncio.sleep(ORCHESTRATOR_UNAVAILABLE_SLEEP_DURATION) + return # Delete the metric from the orchestrator. try: metric_key = self._metric_key(build_component.builder_realm) - yield From(self._orchestrator.delete_key(metric_key)) + await self._orchestrator.delete_key(metric_key) except KeyError: logger.debug("Builder is asking for metric to be removed, but key not found") except OrchestratorConnectionError: logger.exception("Could not remove metric key as orchestrator is not available") - yield From(sleep(ORCHESTRATOR_UNAVAILABLE_SLEEP_DURATION)) - raise Return() + await asyncio.sleep(ORCHESTRATOR_UNAVAILABLE_SLEEP_DURATION) + return logger.debug("job_completed for job %s with status: %s", build_job.build_uuid, job_status) - @coroutine - def kill_builder_executor(self, build_uuid): + async def kill_builder_executor(self, build_uuid): logger.info("Starting termination of executor for job %s", build_uuid) build_info = self._build_uuid_to_info.pop(build_uuid, None) if build_info is None: logger.debug( "Build information not found for build %s; skipping termination", build_uuid ) - raise Return() + return # Remove the build's component. self._component_to_job.pop(build_info.component, None) # Stop the build node/executor itself. 
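# The error handling above swaps trollius' sleep() for asyncio.sleep(), which
# suspends only the current coroutine rather than blocking the whole loop.
# A rough sketch of that backoff shape, with a made-up error type and duration:
import asyncio

ORCHESTRATOR_UNAVAILABLE_SLEEP_DURATION = 0.01  # illustrative value


class OrchestratorConnectionError(Exception):
    pass


async def remove_job_key(delete_key):
    try:
        await delete_key("job/1234")
    except OrchestratorConnectionError:
        # Back off without stalling other builds serviced by the same loop.
        await asyncio.sleep(ORCHESTRATOR_UNAVAILABLE_SLEEP_DURATION)
        return False
    return True


async def unavailable_delete(key):
    raise OrchestratorConnectionError(key)


assert asyncio.get_event_loop().run_until_complete(remove_job_key(unavailable_delete)) is False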
- yield From(self.terminate_executor(build_info.executor_name, build_info.execution_id)) + await self.terminate_executor(build_info.executor_name, build_info.execution_id) - @coroutine - def terminate_executor(self, executor_name, execution_id): + async def terminate_executor(self, executor_name, execution_id): executor = self._executor_name_to_executor.get(executor_name) if executor is None: logger.error("Could not find registered executor %s", executor_name) - raise Return() + return # Terminate the executor's execution. logger.info("Terminating executor %s with execution id %s", executor_name, execution_id) - yield From(executor.stop_builder(execution_id)) + await executor.stop_builder(execution_id) - @coroutine - def job_heartbeat(self, build_job): + async def job_heartbeat(self, build_job): """ :param build_job: the identifier for the build :type build_job: str @@ -738,13 +724,12 @@ def job_heartbeat(self, build_job): self.job_heartbeat_callback(build_job) self._extend_job_in_orchestrator(build_job) - @coroutine - def _extend_job_in_orchestrator(self, build_job): + async def _extend_job_in_orchestrator(self, build_job): try: - job_data = yield From(self._orchestrator.get_key(self._job_key(build_job))) + job_data = await self._orchestrator.get_key(self._job_key(build_job)) except KeyError: logger.info("Job %s no longer exists in the orchestrator", build_job.build_uuid) - raise Return() + return except OrchestratorConnectionError: logger.exception("failed to connect when attempted to extend job") @@ -762,7 +747,7 @@ def _extend_job_in_orchestrator(self, build_job): } try: - yield From( + await ( self._orchestrator.set_key( self._job_key(build_job), json.dumps(payload), expiration=ttl ) @@ -771,15 +756,14 @@ def _extend_job_in_orchestrator(self, build_job): logger.exception( "Could not update heartbeat for job as the orchestrator is not available" ) - yield From(sleep(ORCHESTRATOR_UNAVAILABLE_SLEEP_DURATION)) + await asyncio.sleep(ORCHESTRATOR_UNAVAILABLE_SLEEP_DURATION) - @coroutine - def _write_duration_metric(self, metric, realm, job_status=None): + async def _write_duration_metric(self, metric, realm, job_status=None): """ :returns: True if the metric was written, otherwise False :rtype: bool """ try: - metric_data = yield From(self._orchestrator.get_key(self._metric_key(realm))) + metric_data = await self._orchestrator.get_key(self._metric_key(realm)) parsed_metric_data = json.loads(metric_data) start_time = parsed_metric_data["start_time"] executor = parsed_metric_data.get("executor_name", "unknown") @@ -799,25 +783,24 @@ def num_workers(self): """ return len(self._component_to_job) - @coroutine - def _cancel_callback(self, key_change): + async def _cancel_callback(self, key_change): if key_change.event not in (KeyEvent.CREATE, KeyEvent.SET): - raise Return() + return build_uuid = key_change.value build_info = self._build_uuid_to_info.get(build_uuid, None) if build_info is None: logger.debug('No build info for "%s" job %s', key_change.event, build_uuid) - raise Return(False) + return False lock_key = slash_join(self._canceled_lock_prefix, build_uuid, build_info.execution_id) - lock_acquired = yield From(self._orchestrator.lock(lock_key)) + lock_acquired = await self._orchestrator.lock(lock_key) if lock_acquired: builder_realm = build_info.component.builder_realm - yield From(self.kill_builder_executor(build_uuid)) - yield From(self._orchestrator.delete_key(self._realm_key(builder_realm))) - yield From(self._orchestrator.delete_key(self._metric_key(builder_realm))) - yield 
From(self._orchestrator.delete_key(slash_join(self._job_prefix, build_uuid))) + await self.kill_builder_executor(build_uuid) + await self._orchestrator.delete_key(self._realm_key(builder_realm)) + await self._orchestrator.delete_key(self._metric_key(builder_realm)) + await self._orchestrator.delete_key(slash_join(self._job_prefix, build_uuid)) # This is outside the lock so we can un-register the component wherever it is registered to. - yield From(build_info.component.cancel_build()) + await build_info.component.cancel_build() diff --git a/buildman/manager/executor.py b/buildman/manager/executor.py index 199b538d3c..febd1aef6e 100644 --- a/buildman/manager/executor.py +++ b/buildman/manager/executor.py @@ -1,3 +1,4 @@ +import asyncio import datetime import hashlib import logging @@ -16,7 +17,6 @@ from container_cloud_config import CloudConfigContext from jinja2 import FileSystemLoader, Environment -from trollius import coroutine, sleep, From, Return, get_event_loop from prometheus_client import Histogram import release @@ -99,8 +99,7 @@ def setup_time(self): """ return self.executor_config.get("SETUP_TIME") - @coroutine - def start_builder(self, realm, token, build_uuid): + async def start_builder(self, realm, token, build_uuid): """ Create a builder with the specified config. @@ -108,8 +107,7 @@ def start_builder(self, realm, token, build_uuid): """ raise NotImplementedError - @coroutine - def stop_builder(self, builder_id): + async def stop_builder(self, builder_id): """ Stop a builder which is currently running. """ @@ -129,7 +127,7 @@ def allowed_for_namespace(self, namespace): # in the first X% of the character space, we allow this executor to be used. staged_rollout = self.executor_config.get("STAGED_ROLLOUT") if staged_rollout is not None: - bucket = int(hashlib.sha256(namespace).hexdigest()[-2:], 16) + bucket = int(hashlib.sha256(namespace.encode("utf-8")).hexdigest()[-2:], 16) return bucket < (256 * staged_rollout) # If there are no restrictions in place, we are free to use this executor. 
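# The staged-rollout check above now encodes the namespace before hashing,
# because Python 3's hashlib only accepts bytes. A small sketch of that
# bucketing, using an illustrative 25% rollout:
import hashlib


def allowed_for_namespace(namespace, staged_rollout=0.25):
    # hashlib.sha256("text") raises TypeError on Python 3; encode to bytes first.
    bucket = int(hashlib.sha256(namespace.encode("utf-8")).hexdigest()[-2:], 16)
    return bucket < (256 * staged_rollout)


print(allowed_for_namespace("buildorg"))  # deterministic per namespace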
@@ -189,7 +187,7 @@ class EC2Executor(BuilderExecutor): ) def __init__(self, *args, **kwargs): - self._loop = get_event_loop() + self._loop = asyncio.get_event_loop() super(EC2Executor, self).__init__(*args, **kwargs) def _get_conn(self): @@ -214,16 +212,15 @@ def _get_coreos_ami(cls, ec2_region, coreos_channel): stack_amis = dict([stack.split("=") for stack in stack_list_string.split("|")]) return stack_amis[ec2_region] - @coroutine @async_observe(build_start_duration, "ec2") - def start_builder(self, realm, token, build_uuid): + async def start_builder(self, realm, token, build_uuid): region = self.executor_config["EC2_REGION"] channel = self.executor_config.get("COREOS_CHANNEL", "stable") coreos_ami = self.executor_config.get("COREOS_AMI", None) if coreos_ami is None: get_ami_callable = partial(self._get_coreos_ami, region, channel) - coreos_ami = yield From(self._loop.run_in_executor(None, get_ami_callable)) + coreos_ami = await self._loop.run_in_executor(None, get_ami_callable) user_data = self.generate_cloud_config( realm, token, build_uuid, channel, self.manager_hostname @@ -250,7 +247,7 @@ def start_builder(self, realm, token, build_uuid): interfaces = boto.ec2.networkinterface.NetworkInterfaceCollection(interface) try: - reservation = yield From( + reservation = await ( ec2_conn.run_instances( coreos_ami, instance_type=self.executor_config["EC2_INSTANCE_TYPE"], @@ -273,12 +270,12 @@ def start_builder(self, realm, token, build_uuid): launched = AsyncWrapper(reservation.instances[0]) # Sleep a few seconds to wait for AWS to spawn the instance. - yield From(sleep(_TAG_RETRY_SLEEP)) + await asyncio.sleep(_TAG_RETRY_SLEEP) # Tag the instance with its metadata. for i in range(0, _TAG_RETRY_COUNT): try: - yield From( + await ( launched.add_tags( { "Name": "Quay Ephemeral Builder", @@ -297,7 +294,7 @@ def start_builder(self, realm, token, build_uuid): build_uuid, i, ) - yield From(sleep(_TAG_RETRY_SLEEP)) + await asyncio.sleep(_TAG_RETRY_SLEEP) continue raise ExecutorException("Unable to find builder instance.") @@ -305,13 +302,12 @@ def start_builder(self, realm, token, build_uuid): logger.exception("Failed to write EC2 tags (attempt #%s)", i) logger.debug("Machine with ID %s started for build %s", launched.id, build_uuid) - raise Return(launched.id) + return launched.id - @coroutine - def stop_builder(self, builder_id): + async def stop_builder(self, builder_id): try: ec2_conn = self._get_conn() - terminated_instances = yield From(ec2_conn.terminate_instances([builder_id])) + terminated_instances = await ec2_conn.terminate_instances([builder_id]) except boto.exception.EC2ResponseError as ec2e: if ec2e.error_code == "InvalidInstanceID.NotFound": logger.debug("Instance %s already terminated", builder_id) @@ -333,9 +329,8 @@ def __init__(self, executor_config, manager_hostname): self._jobs = {} super(PopenExecutor, self).__init__(executor_config, manager_hostname) - @coroutine @async_observe(build_start_duration, "fork") - def start_builder(self, realm, token, build_uuid): + async def start_builder(self, realm, token, build_uuid): # Now start a machine for this job, adding the machine id to the etcd information logger.debug("Forking process for build") @@ -362,10 +357,9 @@ def start_builder(self, realm, token, build_uuid): builder_id = str(uuid.uuid4()) self._jobs[builder_id] = (spawned, logpipe) logger.debug("Builder spawned with id: %s", builder_id) - raise Return(builder_id) + return builder_id - @coroutine - def stop_builder(self, builder_id): + async def stop_builder(self, 
builder_id): if builder_id not in self._jobs: raise ExecutorException("Builder id not being tracked by executor.") @@ -384,14 +378,13 @@ class KubernetesExecutor(BuilderExecutor): def __init__(self, *args, **kwargs): super(KubernetesExecutor, self).__init__(*args, **kwargs) - self._loop = get_event_loop() + self._loop = asyncio.get_event_loop() self.namespace = self.executor_config.get("BUILDER_NAMESPACE", "builder") self.image = self.executor_config.get( "BUILDER_VM_CONTAINER_IMAGE", "quay.io/quay/quay-builder-qemu-coreos:stable" ) - @coroutine - def _request(self, method, path, **kwargs): + async def _request(self, method, path, **kwargs): request_options = dict(kwargs) tls_cert = self.executor_config.get("K8S_API_TLS_CERT") @@ -422,7 +415,7 @@ def _request(self, method, path, **kwargs): logger.debug("Kubernetes request: %s %s: %s", method, url, request_options) res = requests.request(method, url, **request_options) logger.debug("Kubernetes response: %s: %s", res.status_code, res.text) - raise Return(res) + return res def _jobs_path(self): return "/apis/batch/v1/namespaces/%s/jobs" % self.namespace @@ -566,9 +559,8 @@ def _job_resource(self, build_uuid, user_data, coreos_channel="stable"): return job_resource - @coroutine @async_observe(build_start_duration, "k8s") - def start_builder(self, realm, token, build_uuid): + async def start_builder(self, realm, token, build_uuid): # generate resource channel = self.executor_config.get("COREOS_CHANNEL", "stable") user_data = self.generate_cloud_config( @@ -579,7 +571,7 @@ def start_builder(self, realm, token, build_uuid): logger.debug("Generated kubernetes resource:\n%s", resource) # schedule - create_job = yield From(self._request("POST", self._jobs_path(), json=resource)) + create_job = await self._request("POST", self._jobs_path(), json=resource) if int(create_job.status_code / 100) != 2: raise ExecutorException( "Failed to create job: %s: %s: %s" @@ -587,24 +579,21 @@ def start_builder(self, realm, token, build_uuid): ) job = create_job.json() - raise Return(job["metadata"]["name"]) + return job["metadata"]["name"] - @coroutine - def stop_builder(self, builder_id): + async def stop_builder(self, builder_id): pods_path = "/api/v1/namespaces/%s/pods" % self.namespace # Delete the job itself. try: - yield From(self._request("DELETE", self._job_path(builder_id))) + await self._request("DELETE", self._job_path(builder_id)) except: logger.exception("Failed to send delete job call for job %s", builder_id) # Delete the pod(s) for the job. selectorString = "job-name=%s" % builder_id try: - yield From( - self._request("DELETE", pods_path, params=dict(labelSelector=selectorString)) - ) + await (self._request("DELETE", pods_path, params=dict(labelSelector=selectorString))) except: logger.exception("Failed to send delete pod call for job %s", builder_id) diff --git a/buildman/orchestrator.py b/buildman/orchestrator.py index 6b1b25f657..e210de19af 100644 --- a/buildman/orchestrator.py +++ b/buildman/orchestrator.py @@ -1,6 +1,7 @@ from abc import ABCMeta, abstractmethod from collections import namedtuple +import asyncio import datetime import json import logging @@ -9,7 +10,6 @@ from enum import IntEnum, unique from six import add_metaclass, iteritems -from trollius import async, coroutine, From, Return from urllib3.exceptions import ReadTimeoutError, ProtocolError import etcd @@ -62,18 +62,18 @@ def orchestrator_from_config(manager_config, canceller_only=False): } # Sanity check that legacy prefixes are no longer being used. 
- for key in manager_config["ORCHESTRATOR"].keys(): + for key in list(manager_config["ORCHESTRATOR"].keys()): words = key.split("_") if len(words) > 1 and words[-1].lower() == "prefix": raise AssertionError("legacy prefix used, use ORCHESTRATOR_PREFIX instead") def _dict_key_prefix(d): """ - :param d: the dict that has keys prefixed with underscore - :type d: {str: any} - :rtype: str - """ - return d.keys()[0].split("_", 1)[0].lower() + :param d: the dict that has keys prefixed with underscore + :type d: {str: any} + :rtype: str + """ + return list(d.keys())[0].split("_", 1)[0].lower() orchestrator_name = _dict_key_prefix(manager_config["ORCHESTRATOR"]) @@ -153,40 +153,37 @@ def on_key_change(self, key, callback, restarter=None): @abstractmethod def get_prefixed_keys(self, prefix): """ - - :returns: a dict of key value pairs beginning with prefix - :rtype: {str: str} - """ + :returns: a dict of key value pairs beginning with prefix + :rtype: {str: str} + """ pass @abstractmethod def get_key(self, key): """ - - :returns: the value stored at the provided key - :rtype: str - """ + :returns: the value stored at the provided key + :rtype: str + """ pass @abstractmethod def set_key(self, key, value, overwrite=False, expiration=None): """ - - :param key: the identifier for the value - :type key: str - :param value: the value being stored - :type value: str - :param overwrite: whether or not a KeyError is thrown if the key already exists - :type overwrite: bool - :param expiration: the duration in seconds that a key should be available - :type expiration: int - """ + :param key: the identifier for the value + :type key: str + :param value: the value being stored + :type value: str + :param overwrite: whether or not a KeyError is thrown if the key already exists + :type overwrite: bool + :param expiration: the duration in seconds that a key should be available + :type expiration: int + """ pass @abstractmethod def set_key_sync(self, key, value, overwrite=False, expiration=None): """ - set_key, but without trollius coroutines. + set_key, but without asyncio coroutines. """ pass @@ -224,8 +221,8 @@ def shutdown(): def _sleep_orchestrator(): """ - This function blocks the trollius event loop by sleeping in order to backoff if a failure such - as a ConnectionError has occurred. + This function blocks the asyncio event loop by sleeping in order to backoff if a failure + such as a ConnectionError has occurred. """ logger.exception( "Connecting to etcd failed; sleeping for %s and then trying again", @@ -262,7 +259,7 @@ def __init__( ca_cert=None, client_threads=5, canceller_only=False, - **kwargs + **kwargs, ): self.is_canceller_only = canceller_only @@ -322,7 +319,7 @@ def callback_wrapper(changed_key_future): logger.debug("Etcd moved forward too quickly. Restarting watch cycle.") new_index = None if restarter is not None: - async(restarter()) + asyncio.create_task(restarter()) except (KeyError, etcd.EtcdKeyError): logger.debug("Etcd key already cleared: %s", key) @@ -334,7 +331,7 @@ def callback_wrapper(changed_key_future): except etcd.EtcdException as eex: # TODO: This is a quick and dirty hack and should be replaced with a proper # exception check. 
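# The list(...) wrappers above are Python 3 fixes: dict.keys() now returns a
# view object, which iterates fine but cannot be indexed. A tiny sketch of the
# prefix extraction with a made-up orchestrator config:
def dict_key_prefix(d):
    # list() materializes the view so [0] works on Python 3 as it did on Python 2.
    return list(d.keys())[0].split("_", 1)[0].lower()


config = {"REDIS_HOST": "localhost", "REDIS_PORT": 6379}
assert dict_key_prefix(config) == "redis"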
- if str(eex.message).find("Read timed out") >= 0: + if str(eex).find("Read timed out") >= 0: logger.debug("Read-timeout on etcd watch %s, rescheduling", key) else: logger.exception("Exception on etcd watch: %s", key) @@ -346,7 +343,7 @@ def callback_wrapper(changed_key_future): self._watch_etcd(key, callback, start_index=new_index, restarter=restarter) if etcd_result and etcd_result.value is not None: - async(callback(self._etcd_result_to_keychange(etcd_result))) + asyncio.create_task(callback(self._etcd_result_to_keychange(etcd_result))) if not self._shutting_down: logger.debug("Scheduling watch of key: %s at start index %s", key, start_index) @@ -355,7 +352,7 @@ def callback_wrapper(changed_key_future): ) watch_future.add_done_callback(callback_wrapper) - self._watch_tasks[key] = async(watch_future) + self._watch_tasks[key] = asyncio.create_task(watch_future) @staticmethod def _etcd_result_to_keychange(etcd_result): @@ -384,13 +381,12 @@ def on_key_change(self, key, callback, restarter=None): logger.debug("creating watch on %s", key) self._watch_etcd(key, callback, restarter=restarter) - @coroutine - def get_prefixed_keys(self, prefix): + async def get_prefixed_keys(self, prefix): assert not self.is_canceller_only try: - etcd_result = yield From(self._etcd_client.read(prefix, recursive=True)) - raise Return({leaf.key: leaf.value for leaf in etcd_result.leaves}) + etcd_result = await self._etcd_client.read(prefix, recursive=True) + return {leaf.key: leaf.value for leaf in etcd_result.leaves} except etcd.EtcdKeyError: raise KeyError except etcd.EtcdConnectionFailed as ex: @@ -398,14 +394,13 @@ def get_prefixed_keys(self, prefix): except etcd.EtcdException as ex: raise OrchestratorError(ex) - @coroutine - def get_key(self, key): + async def get_key(self, key): assert not self.is_canceller_only try: # Ignore pylint: the value property on EtcdResult is added dynamically using setattr. 
- etcd_result = yield From(self._etcd_client.read(key)) - raise Return(etcd_result.value) + etcd_result = await self._etcd_client.read(key) + return etcd_result.value except etcd.EtcdKeyError: raise KeyError except etcd.EtcdConnectionFailed as ex: @@ -413,11 +408,10 @@ def get_key(self, key): except etcd.EtcdException as ex: raise OrchestratorError(ex) - @coroutine - def set_key(self, key, value, overwrite=False, expiration=None): + async def set_key(self, key, value, overwrite=False, expiration=None): assert not self.is_canceller_only - yield From( + await ( self._etcd_client.write( key, value, prevExists=overwrite, ttl=self._sanity_check_ttl(expiration) ) @@ -428,12 +422,11 @@ def set_key_sync(self, key, value, overwrite=False, expiration=None): key, value, prevExists=overwrite, ttl=self._sanity_check_ttl(expiration) ) - @coroutine - def delete_key(self, key): + async def delete_key(self, key): assert not self.is_canceller_only try: - yield From(self._etcd_client.delete(key)) + await self._etcd_client.delete(key) except etcd.EtcdKeyError: raise KeyError except etcd.EtcdConnectionFailed as ex: @@ -441,22 +434,21 @@ def delete_key(self, key): except etcd.EtcdException as ex: raise OrchestratorError(ex) - @coroutine - def lock(self, key, expiration=DEFAULT_LOCK_EXPIRATION): + async def lock(self, key, expiration=DEFAULT_LOCK_EXPIRATION): assert not self.is_canceller_only try: - yield From( + await ( self._etcd_client.write( key, {}, prevExist=False, ttl=self._sanity_check_ttl(expiration) ) ) - raise Return(True) + return True except (KeyError, etcd.EtcdKeyError): - raise Return(False) + return False except etcd.EtcdConnectionFailed: logger.exception("Could not get etcd atomic lock as etcd is down") - raise Return(False) + return False except etcd.EtcdException as ex: raise OrchestratorError(ex) @@ -467,7 +459,7 @@ def shutdown(self): if self.is_canceller_only: return - for (key, _), task in self._watch_tasks.items(): + for (key, _), task in list(self._watch_tasks.items()): if not task.done(): logger.debug("Canceling watch task for %s", key) task.cancel() @@ -487,16 +479,13 @@ def _callbacks_prefixed(self, prefix): def on_key_change(self, key, callback, restarter=None): self.callbacks[key] = callback - @coroutine - def get_prefixed_keys(self, prefix): - raise Return({k: value for (k, value) in self.state.items() if k.startswith(prefix)}) + async def get_prefixed_keys(self, prefix): + return {k: value for (k, value) in list(self.state.items()) if k.startswith(prefix)} - @coroutine - def get_key(self, key): - raise Return(self.state[key]) + async def get_key(self, key): + return self.state[key] - @coroutine - def set_key(self, key, value, overwrite=False, expiration=None): + async def set_key(self, key, value, overwrite=False, expiration=None): preexisting_key = "key" in self.state if preexisting_key and not overwrite: raise KeyError @@ -509,11 +498,11 @@ def set_key(self, key, value, overwrite=False, expiration=None): event = KeyEvent.CREATE if not preexisting_key else KeyEvent.SET for callback in self._callbacks_prefixed(key): - yield From(callback(KeyChange(event, key, value))) + await callback(KeyChange(event, key, value)) def set_key_sync(self, key, value, overwrite=False, expiration=None): """ - set_key, but without trollius coroutines. + set_key, but without asyncio coroutines. 
""" preexisting_key = "key" in self.state if preexisting_key and not overwrite: @@ -529,20 +518,18 @@ def set_key_sync(self, key, value, overwrite=False, expiration=None): for callback in self._callbacks_prefixed(key): callback(KeyChange(event, key, value)) - @coroutine - def delete_key(self, key): + async def delete_key(self, key): value = self.state[key] del self.state[key] for callback in self._callbacks_prefixed(key): - yield From(callback(KeyChange(KeyEvent.DELETE, key, value))) + await callback(KeyChange(KeyEvent.DELETE, key, value)) - @coroutine - def lock(self, key, expiration=DEFAULT_LOCK_EXPIRATION): + async def lock(self, key, expiration=DEFAULT_LOCK_EXPIRATION): if key in self.state: - raise Return(False) + return False self.state.set(key, None, expires=expiration) - raise Return(True) + return True def shutdown(self): self.state = None @@ -562,7 +549,7 @@ def __init__( ssl=False, skip_keyspace_event_setup=False, canceller_only=False, - **kwargs + **kwargs, ): self.is_canceller_only = canceller_only (cert, key) = tuple(cert_and_key) if cert_and_key is not None else (None, None) @@ -632,16 +619,16 @@ def published_callback_wrapper(event_future): keychange = self._publish_to_keychange(event_value) for watched_key, callback in iteritems(self._watched_keys): if keychange.key.startswith(watched_key): - async(callback(keychange)) + asyncio.create_task(callback(keychange)) if not self._shutting_down: logger.debug("Scheduling watch of publish stream") watch_future = self._pubsub.parse_response() watch_future.add_done_callback(published_callback_wrapper) - self._tasks["pub"] = async(watch_future) + self._tasks["pub"] = asyncio.create_task(watch_future) def _watch_expiring_key(self): - def expiring_callback_wrapper(event_future): + async def expiring_callback_wrapper(event_future): logger.debug("expiring callback called") event_result = None @@ -651,11 +638,11 @@ def expiring_callback_wrapper(event_future): if self._is_expired_keyspace_event(event_result): # Get the value of the original key before the expiration happened. key = self._key_from_expiration(event_future) - expired_value = yield From(self._client.get(key)) + expired_value = await self._client.get(key) # $KEY/expiring is gone, but the original key still remains, set an expiration for it # so that other managers have time to get the event and still read the expired value. 
- yield From(self._client.expire(key, ONE_DAY)) + await self._client.expire(key, ONE_DAY) except redis.ConnectionError: _sleep_orchestrator() except redis.RedisError: @@ -668,13 +655,15 @@ def expiring_callback_wrapper(event_future): if self._is_expired_keyspace_event(event_result) and expired_value is not None: for watched_key, callback in iteritems(self._watched_keys): if key.startswith(watched_key): - async(callback(KeyChange(KeyEvent.EXPIRE, key, expired_value))) + asyncio.create_task( + callback(KeyChange(KeyEvent.EXPIRE, key, expired_value)) + ) if not self._shutting_down: logger.debug("Scheduling watch of expiration") watch_future = self._pubsub_expiring.parse_response() watch_future.add_done_callback(expiring_callback_wrapper) - self._tasks["expire"] = async(watch_future) + self._tasks["expire"] = asyncio.create_task(watch_future) def on_key_change(self, key, callback, restarter=None): assert not self.is_canceller_only @@ -709,49 +698,46 @@ def _publish_to_keychange(event_value): e = json.loads(event_value) return KeyChange(KeyEvent(e["event"]), e["key"], e["value"]) - @coroutine - def get_prefixed_keys(self, prefix): + async def get_prefixed_keys(self, prefix): assert not self.is_canceller_only # TODO: This can probably be done with redis pipelines to make it transactional. - keys = yield From(self._client.keys(prefix + "*")) + keys = await self._client.keys(prefix + "*") # Yielding to the event loop is required, thus this cannot be written as a dict comprehension. results = {} for key in keys: if key.endswith(REDIS_EXPIRING_SUFFIX): continue - ttl = yield From(self._client.ttl(key)) + ttl = await self._client.ttl(key) if ttl != REDIS_NONEXPIRING_KEY: # Only redis keys without expirations are live build manager keys. - value = yield From(self._client.get(key)) + value = await self._client.get(key) results.update({key: value}) - raise Return(results) + return results - @coroutine - def get_key(self, key): + async def get_key(self, key): assert not self.is_canceller_only - value = yield From(self._client.get(key)) - raise Return(value) + value = await self._client.get(key) + return value - @coroutine - def set_key(self, key, value, overwrite=False, expiration=None): + async def set_key(self, key, value, overwrite=False, expiration=None): assert not self.is_canceller_only - already_exists = yield From(self._client.exists(key)) + already_exists = await self._client.exists(key) - yield From(self._client.set(key, value, xx=overwrite)) + await self._client.set(key, value, xx=overwrite) if expiration is not None: - yield From( + await ( self._client.set( slash_join(key, REDIS_EXPIRING_SUFFIX), value, xx=overwrite, ex=expiration ) ) key_event = KeyEvent.SET if already_exists else KeyEvent.CREATE - yield From(self._publish(event=key_event, key=key, value=value)) + await self._publish(event=key_event, key=key, value=value) def set_key_sync(self, key, value, overwrite=False, expiration=None): already_exists = self._sync_client.exists(key) @@ -773,31 +759,27 @@ def set_key_sync(self, key, value, overwrite=False, expiration=None): ), ) - @coroutine - def _publish(self, **kwargs): + async def _publish(self, **kwargs): kwargs["event"] = int(kwargs["event"]) event_json = json.dumps(kwargs) logger.debug("publishing event: %s", event_json) - yield From(self._client.publish(self._pubsub_key, event_json)) + await self._client.publish(self._pubsub_key, event_json) - @coroutine - def delete_key(self, key): + async def delete_key(self, key): assert not self.is_canceller_only - value = yield 
From(self._client.get(key)) - yield From(self._client.delete(key)) - yield From(self._client.delete(slash_join(key, REDIS_EXPIRING_SUFFIX))) - yield From(self._publish(event=KeyEvent.DELETE, key=key, value=value)) + value = await self._client.get(key) + await self._client.delete(key) + await self._client.delete(slash_join(key, REDIS_EXPIRING_SUFFIX)) + await self._publish(event=KeyEvent.DELETE, key=key, value=value) - @coroutine - def lock(self, key, expiration=DEFAULT_LOCK_EXPIRATION): + async def lock(self, key, expiration=DEFAULT_LOCK_EXPIRATION): assert not self.is_canceller_only - yield From(self.set_key(key, "", ex=expiration)) - raise Return(True) + await self.set_key(key, "", ex=expiration) + return True - @coroutine - def shutdown(self): + async def shutdown(self): logger.debug("Shutting down redis client.") self._shutting_down = True diff --git a/buildman/server.py b/buildman/server.py index 9a66efdd98..ef2e53b722 100644 --- a/buildman/server.py +++ b/buildman/server.py @@ -4,14 +4,13 @@ from datetime import timedelta from threading import Event -import trollius +import asyncio from aiowsgi import create_server as create_wsgi_server from autobahn.asyncio.wamp import RouterFactory, RouterSessionFactory from autobahn.asyncio.websocket import WampWebSocketServerFactory from autobahn.wamp import types from flask import Flask -from trollius.coroutines import From from app import app from buildman.enums import BuildJobResult, BuildServerStatus, RESULT_PHASES @@ -108,7 +107,7 @@ def run(self, host, websocket_port, controller_port, ssl=None): self._lifecycle_manager.initialize(self._lifecycle_manager_config) logger.debug("Initializing all members of the event loop") - loop = trollius.get_event_loop() + loop = asyncio.get_event_loop() logger.debug( "Starting server on port %s, with controller on port %s", @@ -175,8 +174,7 @@ def _job_heartbeat(self, build_job): minimum_extension=MINIMUM_JOB_EXTENSION, ) - @trollius.coroutine - def _job_complete(self, build_job, job_status, executor_name=None, update_phase=False): + async def _job_complete(self, build_job, job_status, executor_name=None, update_phase=False): if job_status == BuildJobResult.INCOMPLETE: logger.warning( "[BUILD INCOMPLETE: job complete] Build ID: %s. No retry restore.", @@ -194,19 +192,18 @@ def _job_complete(self, build_job, job_status, executor_name=None, update_phase= if update_phase: status_handler = StatusHandler(self._build_logs, build_job.repo_build.uuid) - yield From(status_handler.set_phase(RESULT_PHASES[job_status])) + await status_handler.set_phase(RESULT_PHASES[job_status]) self._job_count = self._job_count - 1 if self._current_status == BuildServerStatus.SHUTDOWN and not self._job_count: self._shutdown_event.set() - @trollius.coroutine - def _work_checker(self): + async def _work_checker(self): logger.debug("Initializing work checker") while self._current_status == BuildServerStatus.RUNNING: with database.CloseForLongOperation(app.config): - yield From(trollius.sleep(WORK_CHECK_TIMEOUT)) + await asyncio.sleep(WORK_CHECK_TIMEOUT) logger.debug( "Checking for more work for %d active workers", @@ -237,9 +234,7 @@ def _work_checker(self): ) try: - schedule_success, retry_timeout = yield From( - self._lifecycle_manager.schedule(build_job) - ) + schedule_success, retry_timeout = await self._lifecycle_manager.schedule(build_job) except: logger.warning( "[BUILD INCOMPLETE: scheduling] Build ID: %s. 
Retry restored.", @@ -253,7 +248,7 @@ def _work_checker(self): if schedule_success: logger.debug("Marking build %s as scheduled", build_job.repo_build.uuid) status_handler = StatusHandler(self._build_logs, build_job.repo_build.uuid) - yield From(status_handler.set_phase(database.BUILD_PHASE.BUILD_SCHEDULED)) + await status_handler.set_phase(database.BUILD_PHASE.BUILD_SCHEDULED) self._job_count = self._job_count + 1 logger.debug( @@ -273,18 +268,16 @@ def _work_checker(self): ) self._queue.incomplete(job_item, restore_retry=True, retry_after=retry_timeout) - @trollius.coroutine - def _queue_metrics_updater(self): + async def _queue_metrics_updater(self): logger.debug("Initializing queue metrics updater") while self._current_status == BuildServerStatus.RUNNING: logger.debug("Writing metrics") self._queue.update_metrics() logger.debug("Metrics going to sleep for 30 seconds") - yield From(trollius.sleep(30)) + await asyncio.sleep(30) - @trollius.coroutine - def _initialize(self, loop, host, websocket_port, controller_port, ssl=None): + async def _initialize(self, loop, host, websocket_port, controller_port, ssl=None): self._loop = loop # Create the WAMP server. @@ -295,10 +288,10 @@ def _initialize(self, loop, host, websocket_port, controller_port, ssl=None): create_wsgi_server( self._controller_app, loop=loop, host=host, port=controller_port, ssl=ssl ) - yield From(loop.create_server(transport_factory, host, websocket_port, ssl=ssl)) + await loop.create_server(transport_factory, host, websocket_port, ssl=ssl) # Initialize the metrics updater - trollius.async(self._queue_metrics_updater()) + asyncio.create_task(self._queue_metrics_updater()) # Initialize the work queue checker. - yield From(self._work_checker()) + await self._work_checker() diff --git a/buildman/test/test_buildman.py b/buildman/test/test_buildman.py index c5a9976f3c..dfaba1b549 100644 --- a/buildman/test/test_buildman.py +++ b/buildman/test/test_buildman.py @@ -1,10 +1,10 @@ +import asyncio import unittest import json import uuid from mock import Mock, ANY from six import iteritems -from trollius import coroutine, get_event_loop, From, Future, Return from buildman.asyncutil import AsyncWrapper from buildman.component.buildcomponent import BuildComponent @@ -21,9 +21,9 @@ def async_test(f): def wrapper(*args, **kwargs): - coro = coroutine(f) + coro = asyncio.coroutine(f) future = coro(*args, **kwargs) - loop = get_event_loop() + loop = asyncio.get_event_loop() loop.run_until_complete(future) return wrapper @@ -33,19 +33,16 @@ class TestExecutor(BuilderExecutor): job_started = None job_stopped = None - @coroutine - def start_builder(self, realm, token, build_uuid): + async def start_builder(self, realm, token, build_uuid): self.job_started = str(uuid.uuid4()) - raise Return(self.job_started) + return self.job_started - @coroutine - def stop_builder(self, execution_id): + async def stop_builder(self, execution_id): self.job_stopped = execution_id class BadExecutor(BuilderExecutor): - @coroutine - def start_builder(self, realm, token, build_uuid): + async def start_builder(self, realm, token, build_uuid): raise ExecutorException("raised on purpose!") @@ -57,7 +54,7 @@ def __init__(self, *args, **kwargs): @staticmethod def _create_completed_future(result=None): def inner(*args, **kwargs): - new_future = Future() + new_future = asyncio.Future() new_future.set_result(result) return new_future @@ -69,9 +66,8 @@ def setUp(self): def tearDown(self): EphemeralBuilderManager.EXECUTORS = self._existing_executors - @coroutine - def 
_register_component(self, realm_spec, build_component, token): - raise Return("hello") + async def _register_component(self, realm_spec, build_component, token): + return "hello" def _create_build_job(self, namespace="namespace", retries=3): mock_job = Mock() @@ -99,7 +95,7 @@ def __init__(self, *args, **kwargs): def _create_completed_future(self, result=None): def inner(*args, **kwargs): - new_future = Future() + new_future = asyncio.Future() new_future.set_result(result) return new_future @@ -149,14 +145,13 @@ def tearDown(self): super(TestEphemeralLifecycle, self).tearDown() self.manager.shutdown() - @coroutine - def _setup_job_for_managers(self): + async def _setup_job_for_managers(self): test_component = Mock(spec=BuildComponent) test_component.builder_realm = REALM_ID test_component.start_build = Mock(side_effect=self._create_completed_future()) self.register_component_callback.return_value = test_component - is_scheduled = yield From(self.manager.schedule(self.mock_job)) + is_scheduled = await self.manager.schedule(self.mock_job) self.assertTrue(is_scheduled) self.assertEqual(self.test_executor.start_builder.call_count, 1) @@ -168,7 +163,7 @@ def _setup_job_for_managers(self): realm_for_build = self._find_realm_key(self.manager._orchestrator, BUILD_UUID) - raw_realm_data = yield From( + raw_realm_data = await ( self.manager._orchestrator.get_key(slash_join("realm", realm_for_build)) ) realm_data = json.loads(raw_realm_data) @@ -178,7 +173,7 @@ def _setup_job_for_managers(self): self.assertEqual(self.register_component_callback.call_count, 0) # Fire off a realm changed with the same data. - yield From( + await ( self.manager._realm_callback( KeyChange( KeyEvent.CREATE, slash_join(REALM_PREFIX, REALM_ID), json.dumps(realm_data) @@ -193,7 +188,7 @@ def _setup_job_for_managers(self): # Ensure that the build info exists. self.assertIsNotNone(self.manager._build_uuid_to_info.get(BUILD_UUID)) - raise Return(test_component) + return test_component @staticmethod def _find_realm_key(orchestrator, build_uuid): @@ -209,15 +204,15 @@ def _find_realm_key(orchestrator, build_uuid): @async_test def test_schedule_and_complete(self): # Test that a job is properly registered with all of the managers - test_component = yield From(self._setup_job_for_managers()) + test_component = await self._setup_job_for_managers() # Take the job ourselves - yield From(self.manager.build_component_ready(test_component)) + await self.manager.build_component_ready(test_component) self.assertIsNotNone(self.manager._build_uuid_to_info.get(BUILD_UUID)) # Finish the job - yield From( + await ( self.manager.job_completed(self.mock_job, BuildJobResult.COMPLETE, test_component) ) @@ -231,9 +226,9 @@ def test_schedule_and_complete(self): @async_test def test_another_manager_takes_job(self): # Prepare a job to be taken by another manager - test_component = yield From(self._setup_job_for_managers()) + test_component = await self._setup_job_for_managers() - yield From( + await ( self.manager._realm_callback( KeyChange( KeyEvent.DELETE, @@ -260,7 +255,7 @@ def test_another_manager_takes_job(self): self.assertIsNotNone(self.manager._build_uuid_to_info.get(BUILD_UUID)) # Delete the job once it has "completed". - yield From( + await ( self.manager._job_callback( KeyChange( KeyEvent.DELETE, @@ -281,7 +276,7 @@ def test_job_started_by_other_manager(self): self.assertIn(JOB_PREFIX, callback_keys) # Send a signal to the callback that the job has been created. 
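# The tests above drive coroutines to completion on the event loop via the
# patch's async_test wrapper. A minimal sketch of that shape, with a
# hypothetical helper and executor stub:
import asyncio


def run_async_test(coro_func, *args, **kwargs):
    loop = asyncio.new_event_loop()
    try:
        return loop.run_until_complete(coro_func(*args, **kwargs))
    finally:
        loop.close()


async def start_builder(realm, token, build_uuid):
    await asyncio.sleep(0)
    return "execution-id-for-%s" % build_uuid


assert run_async_test(start_builder, "realm", "token", "1234").endswith("1234")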
- yield From( + await ( self.manager._job_callback( KeyChange( KeyEvent.CREATE, @@ -301,7 +296,7 @@ def test_expiring_worker_not_started(self): self.assertIn(JOB_PREFIX, callback_keys) # Send a signal to the callback that a worker has expired - yield From( + await ( self.manager._job_callback( KeyChange( KeyEvent.EXPIRE, @@ -316,13 +311,13 @@ def test_expiring_worker_not_started(self): @async_test def test_expiring_worker_started(self): - test_component = yield From(self._setup_job_for_managers()) + test_component = await self._setup_job_for_managers() # Ensure that that the building callbacks have been registered callback_keys = [key for key in self.manager._orchestrator.callbacks] self.assertIn(JOB_PREFIX, callback_keys) - yield From( + await ( self.manager._job_callback( KeyChange( KeyEvent.EXPIRE, @@ -337,14 +332,14 @@ def test_expiring_worker_started(self): @async_test def test_buildjob_deleted(self): - test_component = yield From(self._setup_job_for_managers()) + test_component = await self._setup_job_for_managers() # Ensure that that the building callbacks have been registered callback_keys = [key for key in self.manager._orchestrator.callbacks] self.assertIn(JOB_PREFIX, callback_keys) # Send a signal to the callback that a worker has expired - yield From( + await ( self.manager._job_callback( KeyChange( KeyEvent.DELETE, @@ -360,14 +355,14 @@ def test_buildjob_deleted(self): @async_test def test_builder_never_starts(self): - test_component = yield From(self._setup_job_for_managers()) + test_component = await self._setup_job_for_managers() # Ensure that that the building callbacks have been registered callback_keys = [key for key in self.manager._orchestrator.callbacks] self.assertIn(JOB_PREFIX, callback_keys) # Send a signal to the callback that a worker has expired - yield From( + await ( self.manager._job_callback( KeyChange( KeyEvent.EXPIRE, @@ -382,7 +377,7 @@ def test_builder_never_starts(self): # Ensure the job was marked as incomplete, with an update_phase to True (so the DB record and # logs are updated as well) - yield From( + await ( self.job_complete_callback.assert_called_once_with( ANY, BuildJobResult.INCOMPLETE, "MockExecutor", update_phase=True ) @@ -396,10 +391,10 @@ def test_change_worker(self): @async_test def test_realm_expired(self): - test_component = yield From(self._setup_job_for_managers()) + test_component = await self._setup_job_for_managers() # Send a signal to the callback that a realm has expired - yield From( + await ( self.manager._realm_callback( KeyChange( KeyEvent.EXPIRE, @@ -433,9 +428,8 @@ def setUp(self): unregister_component_callback = Mock() job_heartbeat_callback = Mock() - @coroutine - def job_complete_callback(*args, **kwargs): - raise Return() + async def job_complete_callback(*args, **kwargs): + return self.manager = EphemeralBuilderManager( self._register_component, @@ -542,12 +536,12 @@ def test_schedule_job_namespace_filter(self): # Try with a build job in an invalid namespace. build_job = self._create_build_job(namespace="somethingelse") - result = yield From(self.manager.schedule(build_job)) + result = await self.manager.schedule(build_job) self.assertFalse(result[0]) # Try with a valid namespace. build_job = self._create_build_job(namespace="something") - result = yield From(self.manager.schedule(build_job)) + result = await self.manager.schedule(build_job) self.assertTrue(result[0]) @async_test @@ -562,12 +556,12 @@ def test_schedule_job_retries_filter(self): # Try with a build job that has too few retries. 
build_job = self._create_build_job(retries=1) - result = yield From(self.manager.schedule(build_job)) + result = await self.manager.schedule(build_job) self.assertFalse(result[0]) # Try with a valid job. build_job = self._create_build_job(retries=2) - result = yield From(self.manager.schedule(build_job)) + result = await self.manager.schedule(build_job) self.assertTrue(result[0]) @async_test @@ -593,7 +587,7 @@ def test_schedule_job_executor_fallback(self): # Try a job not matching the primary's namespace filter. Should schedule on secondary. build_job = self._create_build_job(namespace="somethingelse") - result = yield From(self.manager.schedule(build_job)) + result = await self.manager.schedule(build_job) self.assertTrue(result[0]) self.assertIsNone(self.manager.registered_executors[0].job_started) @@ -604,7 +598,7 @@ def test_schedule_job_executor_fallback(self): # Try a job not matching the primary's retry minimum. Should schedule on secondary. build_job = self._create_build_job(namespace="something", retries=2) - result = yield From(self.manager.schedule(build_job)) + result = await self.manager.schedule(build_job) self.assertTrue(result[0]) self.assertIsNone(self.manager.registered_executors[0].job_started) @@ -615,7 +609,7 @@ def test_schedule_job_executor_fallback(self): # Try a job matching the primary. Should schedule on the primary. build_job = self._create_build_job(namespace="something", retries=3) - result = yield From(self.manager.schedule(build_job)) + result = await self.manager.schedule(build_job) self.assertTrue(result[0]) self.assertIsNotNone(self.manager.registered_executors[0].job_started) @@ -626,7 +620,7 @@ def test_schedule_job_executor_fallback(self): # Try a job not matching either's restrictions. build_job = self._create_build_job(namespace="somethingelse", retries=1) - result = yield From(self.manager.schedule(build_job)) + result = await self.manager.schedule(build_job) self.assertFalse(result[0]) self.assertIsNone(self.manager.registered_executors[0].job_started) @@ -649,14 +643,14 @@ def test_schedule_job_single_executor(self): ) build_job = self._create_build_job(namespace="something", retries=3) - result = yield From(self.manager.schedule(build_job)) + result = await self.manager.schedule(build_job) self.assertTrue(result[0]) self.assertIsNotNone(self.manager.registered_executors[0].job_started) self.manager.registered_executors[0].job_started = None build_job = self._create_build_job(namespace="something", retries=0) - result = yield From(self.manager.schedule(build_job)) + result = await self.manager.schedule(build_job) self.assertTrue(result[0]) self.assertIsNotNone(self.manager.registered_executors[0].job_started) @@ -671,7 +665,7 @@ def test_executor_exception(self): ) build_job = self._create_build_job(namespace="something", retries=3) - result = yield From(self.manager.schedule(build_job)) + result = await self.manager.schedule(build_job) self.assertFalse(result[0]) @async_test @@ -684,14 +678,14 @@ def test_schedule_and_stop(self): # Start the build job. build_job = self._create_build_job(namespace="something", retries=3) - result = yield From(self.manager.schedule(build_job)) + result = await self.manager.schedule(build_job) self.assertTrue(result[0]) executor = self.manager.registered_executors[0] self.assertIsNotNone(executor.job_started) # Register the realm so the build information is added. 
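# The fallback tests above exercise a first-match loop over the registered
# executors: skip any whose namespace or minimum-retries filter rejects the
# job, and stop at the first one that starts a builder. A rough sketch with
# made-up executors and filters:
import asyncio


class FakeExecutor:
    def __init__(self, name, namespace=None, minimum_retry_threshold=0):
        self.name = name
        self.namespace = namespace
        self.minimum_retry_threshold = minimum_retry_threshold

    def allowed_for_namespace(self, namespace):
        return self.namespace is None or namespace == self.namespace

    async def start_builder(self, realm, token, build_uuid):
        await asyncio.sleep(0)
        return "%s-%s" % (self.name, build_uuid)


async def schedule(executors, namespace, retries_remaining, build_uuid):
    for executor in executors:
        if not executor.allowed_for_namespace(namespace):
            continue
        if retries_remaining < executor.minimum_retry_threshold:
            continue
        return await executor.start_builder("realm", "token", build_uuid)
    return None


executors = [
    FakeExecutor("primary", namespace="something", minimum_retry_threshold=3),
    FakeExecutor("secondary"),
]
loop = asyncio.get_event_loop()
assert loop.run_until_complete(schedule(executors, "somethingelse", 1, "b1")) == "secondary-b1"
assert loop.run_until_complete(schedule(executors, "something", 3, "b2")) == "primary-b2"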
- yield From( + await ( self.manager._register_realm( { "realm": str(uuid.uuid4()), @@ -705,7 +699,7 @@ def test_schedule_and_stop(self): ) # Stop the build job. - yield From(self.manager.kill_builder_executor(build_job.build_uuid)) + await self.manager.kill_builder_executor(build_job.build_uuid) self.assertEqual(executor.job_stopped, executor.job_started) diff --git a/buildtrigger/basehandler.py b/buildtrigger/basehandler.py index 5263cbd523..35878167a5 100644 --- a/buildtrigger/basehandler.py +++ b/buildtrigger/basehandler.py @@ -277,7 +277,7 @@ def filename_is_dockerfile(cls, file_name): """ Returns whether the file is named Dockerfile or follows the convention .Dockerfile. """ - return file_name.endswith(".Dockerfile") or u"Dockerfile" == file_name + return file_name.endswith(".Dockerfile") or "Dockerfile" == file_name @classmethod def service_name(cls): diff --git a/buildtrigger/customhandler.py b/buildtrigger/customhandler.py index 010d4107d8..bd4d0450a8 100644 --- a/buildtrigger/customhandler.py +++ b/buildtrigger/customhandler.py @@ -147,7 +147,7 @@ def service_name(cls): return "custom-git" def is_active(self): - return self.config.has_key("credentials") + return "credentials" in self.config def _metadata_from_payload(self, payload, git_url): # Parse the JSON payload. diff --git a/buildtrigger/githubhandler.py b/buildtrigger/githubhandler.py index c0b03e6b3c..48760fef13 100644 --- a/buildtrigger/githubhandler.py +++ b/buildtrigger/githubhandler.py @@ -352,8 +352,7 @@ def list_build_subdirs(self): elem.path for elem in commit_tree.tree if ( - elem.type == u"blob" - and self.filename_is_dockerfile(os.path.basename(elem.path)) + elem.type == "blob" and self.filename_is_dockerfile(os.path.basename(elem.path)) ) ] except GithubException as ghe: diff --git a/buildtrigger/gitlabhandler.py b/buildtrigger/gitlabhandler.py index a333dd20a7..d375040ee1 100644 --- a/buildtrigger/gitlabhandler.py +++ b/buildtrigger/gitlabhandler.py @@ -231,7 +231,10 @@ def activate(self, standard_webhook_url): ] key = gl_project.keys.create( - {"title": "%s Builder" % app.config["REGISTRY_TITLE"], "key": public_key,} + { + "title": "%s Builder" % app.config["REGISTRY_TITLE"], + "key": public_key.decode("ascii"), + } ) if not key: diff --git a/buildtrigger/test/gitlabmock.py b/buildtrigger/test/gitlabmock.py index 6156043f85..e4bc86e88e 100644 --- a/buildtrigger/test/gitlabmock.py +++ b/buildtrigger/test/gitlabmock.py @@ -342,7 +342,7 @@ def dockerfile_handler(_, request): "file_path": "Dockerfile", "size": 10, "encoding": "base64", - "content": base64.b64encode("hello world"), + "content": base64.b64encode(b"hello world").decode("ascii"), "ref": "master", "blob_id": "79f7bbd25901e8334750839545a9bd021f0e4c83", "commit_id": "d5a3ff139356ce33e37e73add446f16869741b50", @@ -368,7 +368,7 @@ def sub_dockerfile_handler(_, request): "file_path": "somesubdir/Dockerfile", "size": 10, "encoding": "base64", - "content": base64.b64encode("hi universe"), + "content": base64.b64encode(b"hi universe").decode("ascii"), "ref": "master", "blob_id": "79f7bbd25901e8334750839545a9bd021f0e4c83", "commit_id": "d5a3ff139356ce33e37e73add446f16869741b50", diff --git a/buildtrigger/test/test_basehandler.py b/buildtrigger/test/test_basehandler.py index fca413831c..ee9c413c1c 100644 --- a/buildtrigger/test/test_basehandler.py +++ b/buildtrigger/test/test_basehandler.py @@ -8,10 +8,10 @@ [ ("Dockerfile", True), ("server.Dockerfile", True), - (u"Dockerfile", True), - (u"server.Dockerfile", True), + ("Dockerfile", True), + 
("server.Dockerfile", True), + ("bad file name", False), ("bad file name", False), - (u"bad file name", False), ], ) def test_path_is_dockerfile(input, output): diff --git a/buildtrigger/test/test_customhandler.py b/buildtrigger/test/test_customhandler.py index 984eb27cea..50a8cd3bb4 100644 --- a/buildtrigger/test/test_customhandler.py +++ b/buildtrigger/test/test_customhandler.py @@ -18,7 +18,7 @@ ( '{"commit": "foo", "ref": "refs/heads/something", "default_branch": "baz"}', InvalidPayloadException, - "u'foo' does not match '^([A-Fa-f0-9]{7,})$'", + "'foo' does not match '^([A-Fa-f0-9]{7,})$'", ), ( '{"commit": "11d6fbc", "ref": "refs/heads/something", "default_branch": "baz"}', @@ -48,6 +48,7 @@ def test_handle_trigger_request(payload, expected_error, expected_message): if expected_error is not None: with pytest.raises(expected_error) as ipe: trigger.handle_trigger_request(request) + assert str(ipe.value) == expected_message else: assert isinstance(trigger.handle_trigger_request(request), PreparedBuild) diff --git a/buildtrigger/test/test_githosthandler.py b/buildtrigger/test/test_githosthandler.py index 33ddd755cf..6137c1ddef 100644 --- a/buildtrigger/test/test_githosthandler.py +++ b/buildtrigger/test/test_githosthandler.py @@ -93,9 +93,9 @@ def test_list_build_source_namespaces(): ] found = get_bitbucket_trigger().list_build_source_namespaces() - found.sort() + found = sorted(found, key=lambda d: sorted(d.items())) - namespaces_expected.sort() + namespaces_expected = sorted(namespaces_expected, key=lambda d: sorted(d.items())) assert found == namespaces_expected diff --git a/buildtrigger/test/test_githubhandler.py b/buildtrigger/test/test_githubhandler.py index 399cd22f4e..04e37b4b17 100644 --- a/buildtrigger/test/test_githubhandler.py +++ b/buildtrigger/test/test_githubhandler.py @@ -129,7 +129,7 @@ def test_list_build_source_namespaces(github_trigger): ] found = github_trigger.list_build_source_namespaces() - found.sort() + sorted(found, key=lambda d: sorted(d.items())) - namespaces_expected.sort() + sorted(namespaces_expected, key=lambda d: sorted(d.items())) assert found == namespaces_expected diff --git a/buildtrigger/test/test_gitlabhandler.py b/buildtrigger/test/test_gitlabhandler.py index 5372443ca9..d6eaca1132 100644 --- a/buildtrigger/test/test_gitlabhandler.py +++ b/buildtrigger/test/test_gitlabhandler.py @@ -27,8 +27,8 @@ def test_list_build_subdirs(gitlab_trigger): @pytest.mark.parametrize( "dockerfile_path, contents", [ - ("/Dockerfile", "hello world"), - ("somesubdir/Dockerfile", "hi universe"), + ("/Dockerfile", b"hello world"), + ("somesubdir/Dockerfile", b"hi universe"), ("unknownpath", None), ], ) @@ -68,19 +68,19 @@ def test_list_build_sources(): assert sources == [ { "last_updated": 1380548762, - "name": u"someproject", - "url": u"http://example.com/someorg/someproject", + "name": "someproject", + "url": "http://example.com/someorg/someproject", "private": True, - "full_name": u"someorg/someproject", + "full_name": "someorg/someproject", "has_admin_permissions": False, "description": "", }, { "last_updated": 1380548762, - "name": u"anotherproject", - "url": u"http://example.com/someorg/anotherproject", + "name": "anotherproject", + "url": "http://example.com/someorg/anotherproject", "private": False, - "full_name": u"someorg/anotherproject", + "full_name": "someorg/anotherproject", "has_admin_permissions": True, "description": "", }, @@ -93,8 +93,8 @@ def test_null_avatar(): expected = { "avatar_url": None, "personal": False, - "title": u"someorg", - "url": 
u"http://gitlab.com/groups/someorg", + "title": "someorg", + "url": "http://gitlab.com/groups/someorg", "score": 1, "id": "2", } @@ -239,10 +239,10 @@ def test_list_field_values(name, expected, gitlab_trigger): [ { "last_updated": 1380548762, - "name": u"anotherproject", - "url": u"http://example.com/knownuser/anotherproject", + "name": "anotherproject", + "url": "http://example.com/knownuser/anotherproject", "private": False, - "full_name": u"knownuser/anotherproject", + "full_name": "knownuser/anotherproject", "has_admin_permissions": True, "description": "", }, @@ -253,19 +253,19 @@ def test_list_field_values(name, expected, gitlab_trigger): [ { "last_updated": 1380548762, - "name": u"someproject", - "url": u"http://example.com/someorg/someproject", + "name": "someproject", + "url": "http://example.com/someorg/someproject", "private": True, - "full_name": u"someorg/someproject", + "full_name": "someorg/someproject", "has_admin_permissions": False, "description": "", }, { "last_updated": 1380548762, - "name": u"anotherproject", - "url": u"http://example.com/someorg/anotherproject", + "name": "anotherproject", + "url": "http://example.com/someorg/anotherproject", "private": False, - "full_name": u"someorg/anotherproject", + "full_name": "someorg/anotherproject", "has_admin_permissions": True, "description": "", }, diff --git a/buildtrigger/test/test_prepare_trigger.py b/buildtrigger/test/test_prepare_trigger.py index 14c72775fd..63d6aa31c8 100644 --- a/buildtrigger/test/test_prepare_trigger.py +++ b/buildtrigger/test/test_prepare_trigger.py @@ -38,25 +38,25 @@ def assertSchema(filename, expected, processor, *args, **kwargs): def test_custom_custom(): expected = { - u"commit": u"1c002dd", - u"commit_info": { - u"url": u"gitsoftware.com/repository/commits/1234567", - u"date": u"timestamp", - u"message": u"initial commit", - u"committer": { - u"username": u"user", - u"url": u"gitsoftware.com/users/user", - u"avatar_url": u"gravatar.com/user.png", + "commit": "1c002dd", + "commit_info": { + "url": "gitsoftware.com/repository/commits/1234567", + "date": "timestamp", + "message": "initial commit", + "committer": { + "username": "user", + "url": "gitsoftware.com/users/user", + "avatar_url": "gravatar.com/user.png", }, - u"author": { - u"username": u"user", - u"url": u"gitsoftware.com/users/user", - u"avatar_url": u"gravatar.com/user.png", + "author": { + "username": "user", + "url": "gitsoftware.com/users/user", + "avatar_url": "gravatar.com/user.png", }, }, - u"ref": u"refs/heads/master", - u"default_branch": u"master", - u"git_url": u"foobar", + "ref": "refs/heads/master", + "default_branch": "master", + "git_url": "foobar", } assertSchema("custom_webhook", expected, custom_trigger_payload, git_url="foobar") @@ -64,13 +64,13 @@ def test_custom_custom(): def test_custom_gitlab(): expected = { - "commit": u"fb88379ee45de28a0a4590fddcbd8eff8b36026e", - "ref": u"refs/heads/master", - "git_url": u"git@gitlab.com:jsmith/somerepo.git", + "commit": "fb88379ee45de28a0a4590fddcbd8eff8b36026e", + "ref": "refs/heads/master", + "git_url": "git@gitlab.com:jsmith/somerepo.git", "commit_info": { - "url": u"https://gitlab.com/jsmith/somerepo/commit/fb88379ee45de28a0a4590fddcbd8eff8b36026e", - "date": u"2015-08-13T19:33:18+00:00", - "message": u"Fix link\n", + "url": "https://gitlab.com/jsmith/somerepo/commit/fb88379ee45de28a0a4590fddcbd8eff8b36026e", + "date": "2015-08-13T19:33:18+00:00", + "message": "Fix link\n", }, } @@ -84,16 +84,16 @@ def test_custom_gitlab(): def test_custom_github(): expected = { - 
"commit": u"410f4cdf8ff09b87f245b13845e8497f90b90a4c", - "ref": u"refs/heads/master", - "default_branch": u"master", - "git_url": u"git@github.com:jsmith/anothertest.git", + "commit": "410f4cdf8ff09b87f245b13845e8497f90b90a4c", + "ref": "refs/heads/master", + "default_branch": "master", + "git_url": "git@github.com:jsmith/anothertest.git", "commit_info": { - "url": u"https://github.com/jsmith/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c", - "date": u"2015-09-11T14:26:16-04:00", - "message": u"Update Dockerfile", - "committer": {"username": u"jsmith",}, - "author": {"username": u"jsmith",}, + "url": "https://github.com/jsmith/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c", + "date": "2015-09-11T14:26:16-04:00", + "message": "Update Dockerfile", + "committer": {"username": "jsmith",}, + "author": {"username": "jsmith",}, }, } @@ -107,20 +107,20 @@ def test_custom_github(): def test_custom_bitbucket(): expected = { - "commit": u"af64ae7188685f8424040b4735ad12941b980d75", - "ref": u"refs/heads/master", - "git_url": u"git@bitbucket.org:jsmith/another-repo.git", + "commit": "af64ae7188685f8424040b4735ad12941b980d75", + "ref": "refs/heads/master", + "git_url": "git@bitbucket.org:jsmith/another-repo.git", "commit_info": { - "url": u"https://bitbucket.org/jsmith/another-repo/commits/af64ae7188685f8424040b4735ad12941b980d75", - "date": u"2015-09-10T20:40:54+00:00", - "message": u"Dockerfile edited online with Bitbucket", + "url": "https://bitbucket.org/jsmith/another-repo/commits/af64ae7188685f8424040b4735ad12941b980d75", + "date": "2015-09-10T20:40:54+00:00", + "message": "Dockerfile edited online with Bitbucket", "author": { - "username": u"John Smith", - "avatar_url": u"https://bitbucket.org/account/jsmith/avatar/32/", + "username": "John Smith", + "avatar_url": "https://bitbucket.org/account/jsmith/avatar/32/", }, "committer": { - "username": u"John Smith", - "avatar_url": u"https://bitbucket.org/account/jsmith/avatar/32/", + "username": "John Smith", + "avatar_url": "https://bitbucket.org/account/jsmith/avatar/32/", }, }, } @@ -180,15 +180,15 @@ def lookup_author(_): return {"user": {"display_name": "cooluser", "avatar": "http://some/avatar/url"}} expected = { - "commit": u"abdeaf1b2b4a6b9ddf742c1e1754236380435a62", - "ref": u"refs/heads/somebranch", - "git_url": u"git@bitbucket.org:foo/bar.git", - "default_branch": u"somebranch", + "commit": "abdeaf1b2b4a6b9ddf742c1e1754236380435a62", + "ref": "refs/heads/somebranch", + "git_url": "git@bitbucket.org:foo/bar.git", + "default_branch": "somebranch", "commit_info": { - "url": u"https://bitbucket.org/foo/bar/commits/abdeaf1b2b4a6b9ddf742c1e1754236380435a62", - "date": u"2012-07-24 00:26:36", - "message": u"making some changes\n", - "author": {"avatar_url": u"http://some/avatar/url", "username": u"cooluser",}, + "url": "https://bitbucket.org/foo/bar/commits/abdeaf1b2b4a6b9ddf742c1e1754236380435a62", + "date": "2012-07-24 00:26:36", + "message": "making some changes\n", + "author": {"avatar_url": "http://some/avatar/url", "username": "cooluser",}, }, } @@ -199,20 +199,20 @@ def lookup_author(_): def test_bitbucket_webhook_payload(): expected = { - "commit": u"af64ae7188685f8424040b4735ad12941b980d75", - "ref": u"refs/heads/master", - "git_url": u"git@bitbucket.org:jsmith/another-repo.git", + "commit": "af64ae7188685f8424040b4735ad12941b980d75", + "ref": "refs/heads/master", + "git_url": "git@bitbucket.org:jsmith/another-repo.git", "commit_info": { - "url": 
u"https://bitbucket.org/jsmith/another-repo/commits/af64ae7188685f8424040b4735ad12941b980d75", - "date": u"2015-09-10T20:40:54+00:00", - "message": u"Dockerfile edited online with Bitbucket", + "url": "https://bitbucket.org/jsmith/another-repo/commits/af64ae7188685f8424040b4735ad12941b980d75", + "date": "2015-09-10T20:40:54+00:00", + "message": "Dockerfile edited online with Bitbucket", "author": { - "username": u"John Smith", - "avatar_url": u"https://bitbucket.org/account/jsmith/avatar/32/", + "username": "John Smith", + "avatar_url": "https://bitbucket.org/account/jsmith/avatar/32/", }, "committer": { - "username": u"John Smith", - "avatar_url": u"https://bitbucket.org/account/jsmith/avatar/32/", + "username": "John Smith", + "avatar_url": "https://bitbucket.org/account/jsmith/avatar/32/", }, }, } @@ -222,16 +222,16 @@ def test_bitbucket_webhook_payload(): def test_github_webhook_payload_slash_branch(): expected = { - "commit": u"410f4cdf8ff09b87f245b13845e8497f90b90a4c", - "ref": u"refs/heads/slash/branch", - "default_branch": u"master", - "git_url": u"git@github.com:jsmith/anothertest.git", + "commit": "410f4cdf8ff09b87f245b13845e8497f90b90a4c", + "ref": "refs/heads/slash/branch", + "default_branch": "master", + "git_url": "git@github.com:jsmith/anothertest.git", "commit_info": { - "url": u"https://github.com/jsmith/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c", - "date": u"2015-09-11T14:26:16-04:00", - "message": u"Update Dockerfile", - "committer": {"username": u"jsmith",}, - "author": {"username": u"jsmith",}, + "url": "https://github.com/jsmith/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c", + "date": "2015-09-11T14:26:16-04:00", + "message": "Update Dockerfile", + "committer": {"username": "jsmith",}, + "author": {"username": "jsmith",}, }, } @@ -240,16 +240,16 @@ def test_github_webhook_payload_slash_branch(): def test_github_webhook_payload(): expected = { - "commit": u"410f4cdf8ff09b87f245b13845e8497f90b90a4c", - "ref": u"refs/heads/master", - "default_branch": u"master", - "git_url": u"git@github.com:jsmith/anothertest.git", + "commit": "410f4cdf8ff09b87f245b13845e8497f90b90a4c", + "ref": "refs/heads/master", + "default_branch": "master", + "git_url": "git@github.com:jsmith/anothertest.git", "commit_info": { - "url": u"https://github.com/jsmith/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c", - "date": u"2015-09-11T14:26:16-04:00", - "message": u"Update Dockerfile", - "committer": {"username": u"jsmith",}, - "author": {"username": u"jsmith",}, + "url": "https://github.com/jsmith/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c", + "date": "2015-09-11T14:26:16-04:00", + "message": "Update Dockerfile", + "committer": {"username": "jsmith",}, + "author": {"username": "jsmith",}, }, } @@ -258,23 +258,23 @@ def test_github_webhook_payload(): def test_github_webhook_payload_with_lookup(): expected = { - "commit": u"410f4cdf8ff09b87f245b13845e8497f90b90a4c", - "ref": u"refs/heads/master", - "default_branch": u"master", - "git_url": u"git@github.com:jsmith/anothertest.git", + "commit": "410f4cdf8ff09b87f245b13845e8497f90b90a4c", + "ref": "refs/heads/master", + "default_branch": "master", + "git_url": "git@github.com:jsmith/anothertest.git", "commit_info": { - "url": u"https://github.com/jsmith/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c", - "date": u"2015-09-11T14:26:16-04:00", - "message": u"Update Dockerfile", + "url": 
"https://github.com/jsmith/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c", + "date": "2015-09-11T14:26:16-04:00", + "message": "Update Dockerfile", "committer": { - "username": u"jsmith", - "url": u"http://github.com/jsmith", - "avatar_url": u"http://some/avatar/url", + "username": "jsmith", + "url": "http://github.com/jsmith", + "avatar_url": "http://some/avatar/url", }, "author": { - "username": u"jsmith", - "url": u"http://github.com/jsmith", - "avatar_url": u"http://some/avatar/url", + "username": "jsmith", + "url": "http://github.com/jsmith", + "avatar_url": "http://some/avatar/url", }, }, } @@ -287,14 +287,14 @@ def lookup_user(_): def test_github_webhook_payload_missing_fields_with_lookup(): expected = { - "commit": u"410f4cdf8ff09b87f245b13845e8497f90b90a4c", - "ref": u"refs/heads/master", - "default_branch": u"master", - "git_url": u"git@github.com:jsmith/anothertest.git", + "commit": "410f4cdf8ff09b87f245b13845e8497f90b90a4c", + "ref": "refs/heads/master", + "default_branch": "master", + "git_url": "git@github.com:jsmith/anothertest.git", "commit_info": { - "url": u"https://github.com/jsmith/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c", - "date": u"2015-09-11T14:26:16-04:00", - "message": u"Update Dockerfile", + "url": "https://github.com/jsmith/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c", + "date": "2015-09-11T14:26:16-04:00", + "message": "Update Dockerfile", }, } @@ -309,13 +309,13 @@ def lookup_user(username): def test_gitlab_webhook_payload(): expected = { - "commit": u"fb88379ee45de28a0a4590fddcbd8eff8b36026e", - "ref": u"refs/heads/master", - "git_url": u"git@gitlab.com:jsmith/somerepo.git", + "commit": "fb88379ee45de28a0a4590fddcbd8eff8b36026e", + "ref": "refs/heads/master", + "git_url": "git@gitlab.com:jsmith/somerepo.git", "commit_info": { - "url": u"https://gitlab.com/jsmith/somerepo/commit/fb88379ee45de28a0a4590fddcbd8eff8b36026e", - "date": u"2015-08-13T19:33:18+00:00", - "message": u"Fix link\n", + "url": "https://gitlab.com/jsmith/somerepo/commit/fb88379ee45de28a0a4590fddcbd8eff8b36026e", + "date": "2015-08-13T19:33:18+00:00", + "message": "Fix link\n", }, } @@ -340,14 +340,14 @@ def test_github_webhook_payload_known_issue(): def test_github_webhook_payload_missing_fields(): expected = { - "commit": u"410f4cdf8ff09b87f245b13845e8497f90b90a4c", - "ref": u"refs/heads/master", - "default_branch": u"master", - "git_url": u"git@github.com:jsmith/anothertest.git", + "commit": "410f4cdf8ff09b87f245b13845e8497f90b90a4c", + "ref": "refs/heads/master", + "default_branch": "master", + "git_url": "git@github.com:jsmith/anothertest.git", "commit_info": { - "url": u"https://github.com/jsmith/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c", - "date": u"2015-09-11T14:26:16-04:00", - "message": u"Update Dockerfile", + "url": "https://github.com/jsmith/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c", + "date": "2015-09-11T14:26:16-04:00", + "message": "Update Dockerfile", }, } @@ -360,13 +360,13 @@ def test_gitlab_webhook_nocommit_payload(): def test_gitlab_webhook_multiple_commits(): expected = { - "commit": u"9a052a0b2fbe01d4a1a88638dd9fe31c1c56ef53", - "ref": u"refs/heads/master", - "git_url": u"git@gitlab.com:jsmith/some-test-project.git", + "commit": "9a052a0b2fbe01d4a1a88638dd9fe31c1c56ef53", + "ref": "refs/heads/master", + "git_url": "git@gitlab.com:jsmith/some-test-project.git", "commit_info": { - "url": 
u"https://gitlab.com/jsmith/some-test-project/commit/9a052a0b2fbe01d4a1a88638dd9fe31c1c56ef53", - "date": u"2016-09-29T15:02:41+00:00", - "message": u"Merge branch 'foobar' into 'master'\r\n\r\nAdd changelog\r\n\r\nSome merge thing\r\n\r\nSee merge request !1", + "url": "https://gitlab.com/jsmith/some-test-project/commit/9a052a0b2fbe01d4a1a88638dd9fe31c1c56ef53", + "date": "2016-09-29T15:02:41+00:00", + "message": "Merge branch 'foobar' into 'master'\r\n\r\nAdd changelog\r\n\r\nSome merge thing\r\n\r\nSee merge request !1", "author": { "username": "jsmith", "url": "http://gitlab.com/jsmith", @@ -387,7 +387,7 @@ def lookup_user(_): def test_gitlab_webhook_for_tag(): expected = { - "commit": u"82b3d5ae55f7080f1e6022629cdb57bfae7cccc7", + "commit": "82b3d5ae55f7080f1e6022629cdb57bfae7cccc7", "commit_info": { "author": { "avatar_url": "http://some/avatar/url", @@ -398,8 +398,8 @@ def test_gitlab_webhook_for_tag(): "message": "Fix link\n", "url": "https://some/url", }, - "git_url": u"git@example.com:jsmith/example.git", - "ref": u"refs/tags/v1.0.0", + "git_url": "git@example.com:jsmith/example.git", + "ref": "refs/tags/v1.0.0", } def lookup_user(_): @@ -441,13 +441,13 @@ def test_gitlab_webhook_for_tag_commit_sha_null(): def test_gitlab_webhook_for_tag_known_issue(): expected = { - "commit": u"770830e7ca132856991e6db4f7fc0f4dbe20bd5f", - "ref": u"refs/tags/thirdtag", - "git_url": u"git@gitlab.com:someuser/some-test-project.git", + "commit": "770830e7ca132856991e6db4f7fc0f4dbe20bd5f", + "ref": "refs/tags/thirdtag", + "git_url": "git@gitlab.com:someuser/some-test-project.git", "commit_info": { - "url": u"https://gitlab.com/someuser/some-test-project/commit/770830e7ca132856991e6db4f7fc0f4dbe20bd5f", - "date": u"2019-10-17T18:07:48Z", - "message": u"Update Dockerfile", + "url": "https://gitlab.com/someuser/some-test-project/commit/770830e7ca132856991e6db4f7fc0f4dbe20bd5f", + "date": "2019-10-17T18:07:48Z", + "message": "Update Dockerfile", "author": { "username": "someuser", "url": "http://gitlab.com/someuser", @@ -468,13 +468,13 @@ def lookup_user(_): def test_gitlab_webhook_payload_known_issue(): expected = { - "commit": u"770830e7ca132856991e6db4f7fc0f4dbe20bd5f", - "ref": u"refs/tags/fourthtag", - "git_url": u"git@gitlab.com:someuser/some-test-project.git", + "commit": "770830e7ca132856991e6db4f7fc0f4dbe20bd5f", + "ref": "refs/tags/fourthtag", + "git_url": "git@gitlab.com:someuser/some-test-project.git", "commit_info": { - "url": u"https://gitlab.com/someuser/some-test-project/commit/770830e7ca132856991e6db4f7fc0f4dbe20bd5f", - "date": u"2019-10-17T18:07:48Z", - "message": u"Update Dockerfile", + "url": "https://gitlab.com/someuser/some-test-project/commit/770830e7ca132856991e6db4f7fc0f4dbe20bd5f", + "date": "2019-10-17T18:07:48Z", + "message": "Update Dockerfile", }, } @@ -501,13 +501,13 @@ def test_gitlab_webhook_for_other(): def test_gitlab_webhook_payload_with_lookup(): expected = { - "commit": u"fb88379ee45de28a0a4590fddcbd8eff8b36026e", - "ref": u"refs/heads/master", - "git_url": u"git@gitlab.com:jsmith/somerepo.git", + "commit": "fb88379ee45de28a0a4590fddcbd8eff8b36026e", + "ref": "refs/heads/master", + "git_url": "git@gitlab.com:jsmith/somerepo.git", "commit_info": { - "url": u"https://gitlab.com/jsmith/somerepo/commit/fb88379ee45de28a0a4590fddcbd8eff8b36026e", - "date": u"2015-08-13T19:33:18+00:00", - "message": u"Fix link\n", + "url": "https://gitlab.com/jsmith/somerepo/commit/fb88379ee45de28a0a4590fddcbd8eff8b36026e", + "date": "2015-08-13T19:33:18+00:00", + "message": "Fix link\n", 
"author": { "username": "jsmith", "url": "http://gitlab.com/jsmith", @@ -528,20 +528,20 @@ def lookup_user(_): def test_github_webhook_payload_deleted_commit(): expected = { - "commit": u"456806b662cb903a0febbaed8344f3ed42f27bab", + "commit": "456806b662cb903a0febbaed8344f3ed42f27bab", "commit_info": { - "author": {"username": u"jsmith"}, - "committer": {"username": u"jsmith"}, - "date": u"2015-12-08T18:07:03-05:00", + "author": {"username": "jsmith"}, + "committer": {"username": "jsmith"}, + "date": "2015-12-08T18:07:03-05:00", "message": ( - u"Merge pull request #1044 from jsmith/errerror\n\n" + "Merge pull request #1044 from jsmith/errerror\n\n" + "Assign the exception to a variable to log it" ), - "url": u"https://github.com/jsmith/somerepo/commit/456806b662cb903a0febbaed8344f3ed42f27bab", + "url": "https://github.com/jsmith/somerepo/commit/456806b662cb903a0febbaed8344f3ed42f27bab", }, - "git_url": u"git@github.com:jsmith/somerepo.git", - "ref": u"refs/heads/master", - "default_branch": u"master", + "git_url": "git@github.com:jsmith/somerepo.git", + "ref": "refs/heads/master", + "default_branch": "master", } def lookup_user(_): diff --git a/conf/gunicorn_registry.py b/conf/gunicorn_registry.py index 2202fdc33f..72313e0eae 100644 --- a/conf/gunicorn_registry.py +++ b/conf/gunicorn_registry.py @@ -1,3 +1,8 @@ +# NOTE: Must be before we import or call anything that may be synchronous. +from gevent import monkey + +monkey.patch_all() + import sys import os diff --git a/conf/gunicorn_secscan.py b/conf/gunicorn_secscan.py index 8070834837..f7e84fcb40 100644 --- a/conf/gunicorn_secscan.py +++ b/conf/gunicorn_secscan.py @@ -1,3 +1,8 @@ +# NOTE: Must be before we import or call anything that may be synchronous. +from gevent import monkey + +monkey.patch_all() + import sys import os diff --git a/conf/gunicorn_web.py b/conf/gunicorn_web.py index a6d0a55c18..a4db6836ab 100644 --- a/conf/gunicorn_web.py +++ b/conf/gunicorn_web.py @@ -1,3 +1,8 @@ +# NOTE: Must be before we import or call anything that may be synchronous. 
+from gevent import monkey + +monkey.patch_all() + import sys import os diff --git a/conf/init/02_get_kube_certs.py b/conf/init/02_get_kube_certs.py index 4f78437639..e57a8249ab 100644 --- a/conf/init/02_get_kube_certs.py +++ b/conf/init/02_get_kube_certs.py @@ -63,7 +63,7 @@ def main(): service_token = f.read() secret_data = _lookup_secret(service_token).get("data", {}) - cert_keys = filter(is_extra_cert, secret_data.keys()) + cert_keys = list(filter(is_extra_cert, list(secret_data.keys()))) for cert_key in cert_keys: if not os.path.exists(KUBE_EXTRA_CA_CERTDIR): @@ -71,7 +71,7 @@ def main(): cert_value = base64.b64decode(secret_data[cert_key]) cert_filename = cert_key.replace(EXTRA_CA_DIRECTORY_PREFIX, "") - print "Found an extra cert %s in config-secret, copying to kube ca dir" + print("Found an extra cert %s in config-secret, copying to kube ca dir") with open(os.path.join(KUBE_EXTRA_CA_CERTDIR, cert_filename), "w") as f: f.write(cert_value) diff --git a/conf/init/certs_install.sh b/conf/init/certs_install.sh index 12a97c26f9..bec50b2f9d 100755 --- a/conf/init/certs_install.sh +++ b/conf/init/certs_install.sh @@ -6,7 +6,7 @@ QUAYCONFIG=${QUAYCONFIG:-"$QUAYCONF/stack"} CERTDIR=${CERTDIR:-"$QUAYCONFIG/extra_ca_certs"} SYSTEM_CERTDIR=${SYSTEM_CERTDIR:-"/etc/pki/ca-trust/source/anchors"} -PYTHON_ROOT=${PYTHON_ROOT:-"/opt/rh/python27/root/usr/lib/python2.7"} +PYTHON_ROOT=${PYTHON_ROOT:-"/usr/local/lib/python3.6"} # If we're running under kube, the previous script (02_get_kube_certs.sh) will put the certs in a different location if [[ "$KUBERNETES_SERVICE_HOST" != "" ]];then diff --git a/conf/init/supervisord_conf_create.py b/conf/init/supervisord_conf_create.py index 79eda8a802..8b1ab1ac9c 100644 --- a/conf/init/supervisord_conf_create.py +++ b/conf/init/supervisord_conf_create.py @@ -61,7 +61,7 @@ def limit_services(config, enabled_services): if enabled_services == []: return - for service in config.keys(): + for service in list(config.keys()): if service in enabled_services: config[service]["autostart"] = "true" else: @@ -72,7 +72,7 @@ def override_services(config, override_services): if override_services == []: return - for service in config.keys(): + for service in list(config.keys()): if service + "=true" in override_services: config[service]["autostart"] = "true" elif service + "=false" in override_services: diff --git a/conf/init/test/test_supervisord_conf_create.py b/conf/init/test/test_supervisord_conf_create.py index b6709fed1c..33064044e2 100644 --- a/conf/init/test/test_supervisord_conf_create.py +++ b/conf/init/test/test_supervisord_conf_create.py @@ -50,7 +50,7 @@ def test_supervisord_conf_create_defaults(): ): opts = ServerOptions() - with tempfile.NamedTemporaryFile() as f: + with tempfile.NamedTemporaryFile(mode="w") as f: f.write(rendered_config_file) f.flush() diff --git a/conf/nginx/http-base.conf b/conf/nginx/http-base.conf index 672118bef5..7b4f0ef281 100644 --- a/conf/nginx/http-base.conf +++ b/conf/nginx/http-base.conf @@ -9,7 +9,7 @@ log_format lb_logs '$remote_addr ($proxy_protocol_addr) ' '($request_time $request_length $upstream_response_time)'; types_hash_max_size 2048; -include /etc/opt/rh/rh-nginx112/nginx/mime.types; +include /etc/nginx/mime.types; default_type application/octet-stream; diff --git a/config_app/conf/http-base.conf b/config_app/conf/http-base.conf index ad7409008d..94e3f59f31 100644 --- a/config_app/conf/http-base.conf +++ b/config_app/conf/http-base.conf @@ -9,7 +9,7 @@ log_format lb_logs '$remote_addr ($proxy_protocol_addr) ' '($request_time 
$request_length $upstream_response_time)'; types_hash_max_size 2048; -include /etc/opt/rh/rh-nginx112/nginx/mime.types; +include /etc/nginx/mime.types; default_type application/octet-stream; diff --git a/config_app/config_application.py b/config_app/config_application.py index 04cedcff6d..7756562c70 100644 --- a/config_app/config_application.py +++ b/config_app/config_application.py @@ -1,10 +1,9 @@ import logging + +from config_app import config_web from config_app.c_app import app as application from util.log import logfile_path -# Bind all of the blueprints -import config_web - if __name__ == "__main__": logging.config.fileConfig(logfile_path(debug=True), disable_existing_loggers=False) diff --git a/config_app/config_endpoints/api/__init__.py b/config_app/config_endpoints/api/__init__.py index 722e8066b0..ab09661bcd 100644 --- a/config_app/config_endpoints/api/__init__.py +++ b/config_app/config_endpoints/api/__init__.py @@ -117,7 +117,7 @@ def wrapped(self, *args, **kwargs): try: validate(resp, schema) except ValidationError as ex: - raise InvalidResponse(ex.message) + raise InvalidResponse(str(ex)) return resp @@ -141,7 +141,7 @@ def wrapped(self, *args, **kwargs): validate(json_data, schema) return func(self, *args, **kwargs) except ValidationError as ex: - raise InvalidRequest(ex.message) + raise InvalidRequest(str(ex)) return wrapped diff --git a/config_app/config_endpoints/api/discovery.py b/config_app/config_endpoints/api/discovery.py index 749fb46ef1..9d01608165 100644 --- a/config_app/config_endpoints/api/discovery.py +++ b/config_app/config_endpoints/api/discovery.py @@ -196,7 +196,7 @@ def swagger_parameter( "404": {"description": "Not found",}, } - for _, body in responses.items(): + for _, body in list(responses.items()): body["schema"] = {"$ref": "#/definitions/ApiError"} if method_name == "DELETE": @@ -229,7 +229,7 @@ def swagger_parameter( path_swagger[method_name.lower()] = operation_swagger tags.sort(key=lambda t: t["name"]) - paths = OrderedDict(sorted(paths.items(), key=lambda p: p[1]["x-tag"])) + paths = OrderedDict(sorted(list(paths.items()), key=lambda p: p[1]["x-tag"])) if compact: return {"paths": paths} diff --git a/config_app/config_endpoints/api/kube_endpoints.py b/config_app/config_endpoints/api/kube_endpoints.py index e621af5218..52d0fa56d9 100644 --- a/config_app/config_endpoints/api/kube_endpoints.py +++ b/config_app/config_endpoints/api/kube_endpoints.py @@ -108,7 +108,7 @@ def post(self): kube_accessor.rollback_deployment(name) except K8sApiException as e: logger.exception("Failed to rollback deployment.") - return make_response(e.message, 503) + return make_response(str(e), 503) return make_response("Ok", 204) @@ -127,7 +127,7 @@ def post(self): KubernetesAccessorSingleton.get_instance().replace_qe_secret(new_secret) except K8sApiException as e: logger.exception("Failed to deploy qe config secret to kubernetes.") - return make_response(e.message, 503) + return make_response(str(e), 503) return make_response("Ok", 201) diff --git a/config_app/config_endpoints/api/superuser.py b/config_app/config_endpoints/api/superuser.py index a0beef8fcd..0d5ab3e65b 100644 --- a/config_app/config_endpoints/api/superuser.py +++ b/config_app/config_endpoints/api/superuser.py @@ -138,11 +138,11 @@ def get(self): ) except CertInvalidException as cie: cert_views.append( - {"path": extra_cert_path, "error": cie.message,} + {"path": extra_cert_path, "error": str(cie),} ) except IOError as ioe: cert_views.append( - {"path": extra_cert_path, "error": ioe.message,} + {"path": 
extra_cert_path, "error": str(ioe),} ) return { diff --git a/config_app/config_endpoints/common.py b/config_app/config_endpoints/common.py index fc29e9053e..1c515b24ed 100644 --- a/config_app/config_endpoints/common.py +++ b/config_app/config_endpoints/common.py @@ -23,7 +23,7 @@ def truthy_bool(param): TYPE_CONVERTER = { truthy_bool: "boolean", str: "string", - basestring: "string", + str: "string", reqparse.text_type: "string", int: "integer", } @@ -67,7 +67,7 @@ def render_page_template(name, route_data=None, js_bundle_name=DEFAULT_JS_BUNDLE external_scripts=external_scripts, config_set=frontend_visible_config(app.config), kubernetes_namespace=IS_KUBERNETES and get_k8s_namespace(), - **kwargs + **kwargs, ) resp = make_response(contents) diff --git a/config_app/config_test/__init__.py b/config_app/config_test/__init__.py index 7e10fda3f4..d58d431827 100644 --- a/config_app/config_test/__init__.py +++ b/config_app/config_test/__init__.py @@ -1,8 +1,8 @@ import json as py_json import unittest from contextlib import contextmanager -from urllib import urlencode -from urlparse import urlparse, parse_qs, urlunparse +from urllib.parse import urlencode +from urllib.parse import urlparse, parse_qs, urlunparse from config_app.c_app import app, config_provider from config_app.config_endpoints.api import api @@ -64,7 +64,7 @@ def toggleFeature(self, name, enabled): def getJsonResponse(self, resource_name, params={}, expected_code=200): rv = self.app.get(api.url_for(resource_name, **params)) - self.assertEquals(expected_code, rv.status_code) + self.assertEqual(expected_code, rv.status_code) data = rv.data parsed = py_json.loads(data) return parsed @@ -82,12 +82,12 @@ def postResponse( headers = None rv = self.app.post(self.url_for(resource_name, params), data=data, headers=headers) - self.assertEquals(rv.status_code, expected_code) + self.assertEqual(rv.status_code, expected_code) return rv.data def getResponse(self, resource_name, params={}, expected_code=200): rv = self.app.get(api.url_for(resource_name, **params)) - self.assertEquals(rv.status_code, expected_code) + self.assertEqual(rv.status_code, expected_code) return rv.data def putResponse(self, resource_name, params={}, data={}, expected_code=200): @@ -96,22 +96,22 @@ def putResponse(self, resource_name, params={}, data={}, expected_code=200): data=py_json.dumps(data), headers={"Content-Type": "application/json"}, ) - self.assertEquals(rv.status_code, expected_code) + self.assertEqual(rv.status_code, expected_code) return rv.data def deleteResponse(self, resource_name, params={}, expected_code=204): rv = self.app.delete(self.url_for(resource_name, params)) if rv.status_code != expected_code: - print "Mismatch data for resource DELETE %s: %s" % (resource_name, rv.data) + print("Mismatch data for resource DELETE %s: %s" % (resource_name, rv.data)) - self.assertEquals(rv.status_code, expected_code) + self.assertEqual(rv.status_code, expected_code) return rv.data def deleteEmptyResponse(self, resource_name, params={}, expected_code=204): rv = self.app.delete(self.url_for(resource_name, params)) - self.assertEquals(rv.status_code, expected_code) - self.assertEquals(rv.data, "") # ensure response body empty + self.assertEqual(rv.status_code, expected_code) + self.assertEqual(rv.data, "") # ensure response body empty return def postJsonResponse(self, resource_name, params={}, data={}, expected_code=200): @@ -122,9 +122,9 @@ def postJsonResponse(self, resource_name, params={}, data={}, expected_code=200) ) if rv.status_code != expected_code: - print 
"Mismatch data for resource POST %s: %s" % (resource_name, rv.data) + print("Mismatch data for resource POST %s: %s" % (resource_name, rv.data)) - self.assertEquals(rv.status_code, expected_code) + self.assertEqual(rv.status_code, expected_code) data = rv.data parsed = py_json.loads(data) return parsed @@ -139,9 +139,9 @@ def putJsonResponse( ) if rv.status_code != expected_code: - print "Mismatch data for resource PUT %s: %s" % (resource_name, rv.data) + print("Mismatch data for resource PUT %s: %s" % (resource_name, rv.data)) - self.assertEquals(rv.status_code, expected_code) + self.assertEqual(rv.status_code, expected_code) data = rv.data parsed = py_json.loads(data) return parsed diff --git a/config_app/config_test/test_api_usage.py b/config_app/config_test/test_api_usage.py index 05423c1aad..ec4ad01637 100644 --- a/config_app/config_test/test_api_usage.py +++ b/config_app/config_test/test_api_usage.py @@ -1,4 +1,4 @@ -from StringIO import StringIO +from io import StringIO from mockldap import MockLdap from data import database, model @@ -56,7 +56,7 @@ def test_create_superuser(self): # Ensure that the current user is a superuser in the config. json = self.getJsonResponse(SuperUserConfig) - self.assertEquals(["newsuper"], json["config"]["SUPER_USERS"]) + self.assertEqual(["newsuper"], json["config"]["SUPER_USERS"]) # Ensure that the current user is a superuser in memory by trying to call an API # that will fail otherwise. @@ -67,7 +67,7 @@ class TestSuperUserConfig(ApiTestCase): def test_get_status_update_config(self): # With no config the status should be 'config-db'. json = self.getJsonResponse(SuperUserRegistryStatus) - self.assertEquals("config-db", json["status"]) + self.assertEqual("config-db", json["status"]) # Add some fake config. fake_config = { @@ -78,9 +78,9 @@ def test_get_status_update_config(self): json = self.putJsonResponse( SuperUserConfig, data=dict(config=fake_config, hostname="fakehost") ) - self.assertEquals("fakekey", json["config"]["SECRET_KEY"]) - self.assertEquals("fakehost", json["config"]["SERVER_HOSTNAME"]) - self.assertEquals("Database", json["config"]["AUTHENTICATION_TYPE"]) + self.assertEqual("fakekey", json["config"]["SECRET_KEY"]) + self.assertEqual("fakehost", json["config"]["SERVER_HOSTNAME"]) + self.assertEqual("Database", json["config"]["AUTHENTICATION_TYPE"]) # With config the status should be 'setup-db'. # TODO: fix this test @@ -167,12 +167,12 @@ def test_custom_certificates(self): # Make sure it is present. json = self.getJsonResponse(SuperUserCustomCertificates) - self.assertEquals(1, len(json["certs"])) + self.assertEqual(1, len(json["certs"])) cert_info = json["certs"][0] - self.assertEquals("testcert.crt", cert_info["path"]) + self.assertEqual("testcert.crt", cert_info["path"]) - self.assertEquals(set(["somecoolhost", "bar", "baz"]), set(cert_info["names"])) + self.assertEqual(set(["somecoolhost", "bar", "baz"]), set(cert_info["names"])) self.assertFalse(cert_info["expired"]) # Remove the certificate. @@ -180,7 +180,7 @@ def test_custom_certificates(self): # Make sure it is gone. json = self.getJsonResponse(SuperUserCustomCertificates) - self.assertEquals(0, len(json["certs"])) + self.assertEqual(0, len(json["certs"])) def test_expired_custom_certificate(self): # Upload a certificate. @@ -194,12 +194,12 @@ def test_expired_custom_certificate(self): # Make sure it is present. 
json = self.getJsonResponse(SuperUserCustomCertificates) - self.assertEquals(1, len(json["certs"])) + self.assertEqual(1, len(json["certs"])) cert_info = json["certs"][0] - self.assertEquals("testcert.crt", cert_info["path"]) + self.assertEqual("testcert.crt", cert_info["path"]) - self.assertEquals(set(["somecoolhost"]), set(cert_info["names"])) + self.assertEqual(set(["somecoolhost"]), set(cert_info["names"])) self.assertTrue(cert_info["expired"]) def test_invalid_custom_certificate(self): @@ -213,11 +213,11 @@ def test_invalid_custom_certificate(self): # Make sure it is present but invalid. json = self.getJsonResponse(SuperUserCustomCertificates) - self.assertEquals(1, len(json["certs"])) + self.assertEqual(1, len(json["certs"])) cert_info = json["certs"][0] - self.assertEquals("testcert.crt", cert_info["path"]) - self.assertEquals("no start line", cert_info["error"]) + self.assertEqual("testcert.crt", cert_info["path"]) + self.assertEqual("no start line", cert_info["error"]) def test_path_sanitization(self): # Upload a certificate. @@ -231,7 +231,7 @@ def test_path_sanitization(self): # Make sure it is present. json = self.getJsonResponse(SuperUserCustomCertificates) - self.assertEquals(1, len(json["certs"])) + self.assertEqual(1, len(json["certs"])) cert_info = json["certs"][0] - self.assertEquals("foobar.crt", cert_info["path"]) + self.assertEqual("foobar.crt", cert_info["path"]) diff --git a/config_app/config_test/test_suconfig_api.py b/config_app/config_test/test_suconfig_api.py index 77ac915641..fd79ea4be2 100644 --- a/config_app/config_test/test_suconfig_api.py +++ b/config_app/config_test/test_suconfig_api.py @@ -38,7 +38,7 @@ class TestSuperUserRegistryStatus(ApiTestCase): def test_registry_status_no_config(self): with FreshConfigProvider(): json = self.getJsonResponse(SuperUserRegistryStatus) - self.assertEquals("config-db", json["status"]) + self.assertEqual("config-db", json["status"]) @mock.patch( "config_app.config_endpoints.api.suconfig.database_is_valid", mock.Mock(return_value=False) @@ -47,7 +47,7 @@ def test_registry_status_no_database(self): with FreshConfigProvider(): config_provider.save_config({"key": "value"}) json = self.getJsonResponse(SuperUserRegistryStatus) - self.assertEquals("setup-db", json["status"]) + self.assertEqual("setup-db", json["status"]) @mock.patch( "config_app.config_endpoints.api.suconfig.database_is_valid", mock.Mock(return_value=True) @@ -56,7 +56,7 @@ def test_registry_status_db_has_superuser(self): with FreshConfigProvider(): config_provider.save_config({"key": "value"}) json = self.getJsonResponse(SuperUserRegistryStatus) - self.assertEquals("config", json["status"]) + self.assertEqual("config", json["status"]) @mock.patch( "config_app.config_endpoints.api.suconfig.database_is_valid", mock.Mock(return_value=True) @@ -68,7 +68,7 @@ def test_registry_status_db_no_superuser(self): with FreshConfigProvider(): config_provider.save_config({"key": "value"}) json = self.getJsonResponse(SuperUserRegistryStatus) - self.assertEquals("create-superuser", json["status"]) + self.assertEqual("create-superuser", json["status"]) @mock.patch( "config_app.config_endpoints.api.suconfig.database_is_valid", mock.Mock(return_value=True) @@ -80,7 +80,7 @@ def test_registry_status_setup_complete(self): with FreshConfigProvider(): config_provider.save_config({"key": "value", "SETUP_COMPLETE": True}) json = self.getJsonResponse(SuperUserRegistryStatus) - self.assertEquals("config", json["status"]) + self.assertEqual("config", json["status"]) class 
TestSuperUserConfigFile(ApiTestCase): @@ -151,7 +151,7 @@ def test_config_file_with_no_db_users(self): # Verify the superuser was placed into the config. result = self.getJsonResponse(SuperUserConfig) - self.assertEquals(["cooluser"], result["config"]["SUPER_USERS"]) + self.assertEqual(["cooluser"], result["config"]["SUPER_USERS"]) class TestSuperUserConfigValidate(ApiTestCase): diff --git a/config_app/config_util/config/__init__.py b/config_app/config_util/config/__init__.py index ddee86803d..16ccac1af3 100644 --- a/config_app/config_util/config/__init__.py +++ b/config_app/config_util/config/__init__.py @@ -28,13 +28,14 @@ def get_config_as_kube_secret(config_path): certs_dir = os.path.join(config_path, EXTRA_CA_DIRECTORY) if os.path.exists(certs_dir): for extra_cert in os.listdir(certs_dir): - with open(os.path.join(certs_dir, extra_cert)) as f: - data[EXTRA_CA_DIRECTORY_PREFIX + extra_cert] = base64.b64encode(f.read()) + file_path = os.path.join(certs_dir, extra_cert) + with open(file_path, "rb") as f: + data[EXTRA_CA_DIRECTORY_PREFIX + extra_cert] = base64.b64encode(f.read()).decode() for name in os.listdir(config_path): file_path = os.path.join(config_path, name) if not os.path.isdir(file_path): - with open(file_path) as f: - data[name] = base64.b64encode(f.read()) + with open(file_path, "rb") as f: + data[name] = base64.b64encode(f.read()).decode() return data diff --git a/config_app/config_util/config/baseprovider.py b/config_app/config_util/config/baseprovider.py index 5c85e8fb04..8a5a8f078d 100644 --- a/config_app/config_util/config/baseprovider.py +++ b/config_app/config_util/config/baseprovider.py @@ -37,7 +37,7 @@ def import_yaml(config_obj, config_file): if isinstance(c, str): raise Exception("Invalid YAML config file: " + str(c)) - for key in c.iterkeys(): + for key in c.keys(): if key.isupper(): config_obj[key] = c[key] @@ -54,7 +54,7 @@ def import_yaml(config_obj, config_file): def get_yaml(config_obj): - return yaml.safe_dump(config_obj, encoding="utf-8", allow_unicode=True) + return yaml.safe_dump(config_obj, allow_unicode=True) def export_yaml(config_obj, config_file): diff --git a/config_app/config_util/config/test/test_helpers.py b/config_app/config_util/config/test/test_helpers.py index c372fea9ff..8c2c29f4b7 100644 --- a/config_app/config_util/config/test/test_helpers.py +++ b/config_app/config_util/config/test/test_helpers.py @@ -11,7 +11,7 @@ def _create_temp_file_structure(file_structure): temp_dir = TemporaryDirectory() - for filename, data in file_structure.iteritems(): + for filename, data in file_structure.items(): if filename == EXTRA_CA_DIRECTORY: extra_ca_dir_path = os.path.join(temp_dir.name, EXTRA_CA_DIRECTORY) os.mkdir(extra_ca_dir_path) @@ -36,14 +36,17 @@ def _create_temp_file_structure(file_structure): ), pytest.param( {"config.yaml": "test:true", "otherfile.ext": "im a file"}, - {"config.yaml": "dGVzdDp0cnVl", "otherfile.ext": base64.b64encode("im a file")}, + { + "config.yaml": "dGVzdDp0cnVl", + "otherfile.ext": base64.b64encode(b"im a file").decode("ascii"), + }, id="config and another file", ), pytest.param( {"config.yaml": "test:true", "extra_ca_certs": [("cert.crt", "im a cert!"),]}, { "config.yaml": "dGVzdDp0cnVl", - "extra_ca_certs_cert.crt": base64.b64encode("im a cert!"), + "extra_ca_certs_cert.crt": base64.b64encode(b"im a cert!").decode("ascii"), }, id="config and an extra cert", ), @@ -58,12 +61,19 @@ def _create_temp_file_structure(file_structure): }, { "config.yaml": "dGVzdDp0cnVl", - "otherfile.ext": base64.b64encode("im a file"), - 
"extra_ca_certs_cert.crt": base64.b64encode("im a cert!"), - "extra_ca_certs_another.crt": base64.b64encode("im a different cert!"), + "otherfile.ext": base64.b64encode(b"im a file").decode("ascii"), + "extra_ca_certs_cert.crt": base64.b64encode(b"im a cert!").decode("ascii"), + "extra_ca_certs_another.crt": base64.b64encode(b"im a different cert!").decode( + "ascii" + ), }, id="config, files, and extra certs!", ), + pytest.param( + {"config.yaml": "First line\nSecond line"}, + {"config.yaml": "Rmlyc3QgbGluZQpTZWNvbmQgbGluZQ=="}, + id="certificate includes newline characters", + ), ], ) def test_get_config_as_kube_secret(file_structure, expected_secret): diff --git a/config_app/config_util/config/test/test_transient_dir_provider.py b/config_app/config_util/config/test/test_transient_dir_provider.py index 4dbef70c40..c2c0dbe9f7 100644 --- a/config_app/config_util/config/test/test_transient_dir_provider.py +++ b/config_app/config_util/config/test/test_transient_dir_provider.py @@ -36,7 +36,7 @@ def test_transient_dir_copy_config_dir(files_to_write, operations, expected_new_dir): config_provider = TransientDirectoryProvider("", "", "") - for name, data in files_to_write.iteritems(): + for name, data in files_to_write.items(): config_provider.write_volume_file(name, data) config_provider.create_copy_of_config_dir() @@ -53,7 +53,7 @@ def test_transient_dir_copy_config_dir(files_to_write, operations, expected_new_ config_provider.remove_volume_file(delete) # check that the new directory matches expected state - for filename, data in expected_new_dir.iteritems(): + for filename, data in expected_new_dir.items(): with open(os.path.join(config_provider.get_config_dir_path(), filename)) as f: new_data = f.read() assert new_data == data @@ -61,7 +61,7 @@ def test_transient_dir_copy_config_dir(files_to_write, operations, expected_new_ # Now check that the old dir matches the original state saved = config_provider.get_old_config_dir() - for filename, data in files_to_write.iteritems(): + for filename, data in files_to_write.items(): with open(os.path.join(saved, filename)) as f: new_data = f.read() assert new_data == data diff --git a/config_app/config_util/k8saccessor.py b/config_app/config_util/k8saccessor.py index 17028beddf..81dafb787f 100644 --- a/config_app/config_util/k8saccessor.py +++ b/config_app/config_util/k8saccessor.py @@ -129,7 +129,7 @@ def save_secret_to_directory(self, dir_path): extra_ca_dir_path = os.path.join(dir_path, EXTRA_CA_DIRECTORY) os.mkdir(extra_ca_dir_path) - for secret_filename, data in secret_data.iteritems(): + for secret_filename, data in secret_data.items(): write_path = os.path.join(dir_path, secret_filename) if EXTRA_CA_DIRECTORY_PREFIX in secret_filename: diff --git a/config_app/config_util/ssl.py b/config_app/config_util/ssl.py index cce4416864..ceb3b6d64b 100644 --- a/config_app/config_util/ssl.py +++ b/config_app/config_util/ssl.py @@ -28,7 +28,7 @@ def load_certificate(cert_contents): cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, cert_contents) return SSLCertificate(cert) except OpenSSL.crypto.Error as ex: - raise CertInvalidException(ex.message[0][2]) + raise CertInvalidException(str(ex)) _SUBJECT_ALT_NAME = "subjectAltName" @@ -55,7 +55,7 @@ def validate_private_key(self, private_key_path): context.use_privatekey_file(private_key_path) context.check_privatekey() except OpenSSL.SSL.Error as ex: - raise KeyInvalidException(ex.message[0][2]) + raise KeyInvalidException(str(ex)) def matches_name(self, check_name): """ diff --git 
a/data/appr_model/manifest.py b/data/appr_model/manifest.py index 3c61ce5408..3ac95c33b1 100644 --- a/data/appr_model/manifest.py +++ b/data/appr_model/manifest.py @@ -19,7 +19,7 @@ def _ensure_sha256_header(digest): def _digest(manifestjson): return _ensure_sha256_header( - hashlib.sha256(json.dumps(manifestjson, sort_keys=True)).hexdigest() + hashlib.sha256(json.dumps(manifestjson, sort_keys=True).encode("utf-8")).hexdigest() ) diff --git a/data/appr_model/manifest_list.py b/data/appr_model/manifest_list.py index 950ccb9adb..bbf4dd25ab 100644 --- a/data/appr_model/manifest_list.py +++ b/data/appr_model/manifest_list.py @@ -16,7 +16,7 @@ def _ensure_sha256_header(digest): def _digest(manifestjson): return _ensure_sha256_header( - hashlib.sha256(json.dumps(manifestjson, sort_keys=True)).hexdigest() + hashlib.sha256(json.dumps(manifestjson, sort_keys=True).encode("utf-8")).hexdigest() ) @@ -49,7 +49,7 @@ def create_manifestlistmanifest(manifestlist, manifest_ids, manifest_list_json, From a manifestlist, manifests, and the manifest list blob, create if doesn't exist the manfiestlistmanifest for each manifest. """ - for pos in xrange(len(manifest_ids)): + for pos in range(len(manifest_ids)): manifest_id = manifest_ids[pos] manifest_json = manifest_list_json[pos] get_or_create_manifestlistmanifest( diff --git a/data/appr_model/tag.py b/data/appr_model/tag.py index b93f1ce007..877c1db49c 100644 --- a/data/appr_model/tag.py +++ b/data/appr_model/tag.py @@ -144,6 +144,6 @@ def get_most_recent_tag_lifetime_start(repository_ids, models_ref, tag_kind="rel ), Tag, ) - to_seconds = lambda ms: ms / 1000 if ms is not None else None + to_seconds = lambda ms: ms // 1000 if ms is not None else None return {t.repository.id: to_seconds(t.lifetime_start) for t in tags} diff --git a/data/buildlogs.py b/data/buildlogs.py index a896656f4c..df8e1d4188 100644 --- a/data/buildlogs.py +++ b/data/buildlogs.py @@ -139,7 +139,7 @@ def check_health(self): connection.get(self._health_key()) return (True, None) except redis.RedisError as re: - return (False, "Could not connect to redis: %s" % re.message) + return (False, "Could not connect to redis: %s" % str(re)) class BuildLogs(object): diff --git a/data/database.py b/data/database.py index 62d6079a10..d522e71d78 100644 --- a/data/database.py +++ b/data/database.py @@ -22,7 +22,7 @@ from sqlalchemy.engine.url import make_url -import resumablehashlib +import rehash from cachetools.func import lru_cache from data.fields import ( @@ -405,7 +405,7 @@ def _db_from_url( db_kwargs.pop("timeout", None) db_kwargs.pop("max_connections", None) - for key, value in _EXTRA_ARGS.get(parsed_url.drivername, {}).iteritems(): + for key, value in _EXTRA_ARGS.get(parsed_url.drivername, {}).items(): if key not in db_kwargs: db_kwargs[key] = value @@ -1112,7 +1112,7 @@ def ancestor_id_list(self): """ Returns an integer list of ancestor ids, ordered chronologically from root to direct parent. 
""" - return map(int, self.ancestors.split("/")[1:-1]) + return list(map(int, self.ancestors.split("/")[1:-1])) class DerivedStorageForImage(BaseModel): @@ -1418,7 +1418,8 @@ class BlobUpload(BaseModel): repository = ForeignKeyField(Repository) uuid = CharField(index=True, unique=True) byte_count = BigIntegerField(default=0) - sha_state = ResumableSHA256Field(null=True, default=resumablehashlib.sha256) + # TODO(kleesc): Verify that this is backward compatible with resumablehashlib + sha_state = ResumableSHA256Field(null=True, default=rehash.sha256) location = ForeignKeyField(ImageStorageLocation) storage_metadata = JSONField(null=True, default={}) chunk_count = IntegerField(default=0) diff --git a/data/encryption.py b/data/encryption.py index b719960964..916b87d3cf 100644 --- a/data/encryption.py +++ b/data/encryption.py @@ -27,7 +27,7 @@ def _encrypt_ccm(secret_key, value, field_max_length=None): aesccm = AESCCM(secret_key) nonce = os.urandom(AES_CCM_NONCE_LENGTH) ct = aesccm.encrypt(nonce, value.encode("utf-8"), None) - encrypted = base64.b64encode(nonce + ct) + encrypted = base64.b64encode(nonce + ct).decode("utf-8") if field_max_length: msg = "Tried to encode a value too large for this field" assert (len(encrypted) + _RESERVED_FIELD_SPACE) <= field_max_length, msg @@ -54,7 +54,7 @@ def _decrypt_ccm(secret_key, value): "v0": EncryptionVersion("v0", _encrypt_ccm, _decrypt_ccm), } -_RESERVED_FIELD_SPACE = len(_SEPARATOR) + max([len(k) for k in _VERSIONS.keys()]) +_RESERVED_FIELD_SPACE = len(_SEPARATOR) + max([len(k) for k in list(_VERSIONS.keys())]) class FieldEncrypter(object): diff --git a/data/fields.py b/data/fields.py index c3a638fcb9..3ac65b48b4 100644 --- a/data/fields.py +++ b/data/fields.py @@ -1,14 +1,16 @@ import base64 +import pickle import string import json from random import SystemRandom import bcrypt -import resumablehashlib +import rehash from peewee import TextField, CharField, SmallIntegerField from data.text import prefix_search +from util.bytes import Bytes def random_string(length=16): @@ -17,42 +19,44 @@ def random_string(length=16): class _ResumableSHAField(TextField): + """ + Base Class used to store the state of an in-progress hash in the database. This is particularly + useful for working with large byte streams and allows the hashing to be paused and resumed + as needed. + """ + def _create_sha(self): raise NotImplementedError def db_value(self, value): + """ + Serialize the Hasher's state for storage in the database as plain-text. + """ if value is None: return None - sha_state = value.state() - - # One of the fields is a byte string, let's base64 encode it to make sure - # we can store and fetch it regardless of default collocation. - sha_state[3] = base64.b64encode(sha_state[3]) - - return json.dumps(sha_state) + serialized_state = base64.b64encode(pickle.dumps(value)).decode("ascii") + return serialized_state def python_value(self, value): + """ + Restore the Hasher from its state stored in the database. + """ if value is None: return None - sha_state = json.loads(value) - - # We need to base64 decode the data bytestring. 
- sha_state[3] = base64.b64decode(sha_state[3]) - to_resume = self._create_sha() - to_resume.set_state(sha_state) - return to_resume + hasher = pickle.loads(base64.b64decode(value.encode("ascii"))) + return hasher class ResumableSHA256Field(_ResumableSHAField): def _create_sha(self): - return resumablehashlib.sha256() + return rehash.sha256() class ResumableSHA1Field(_ResumableSHAField): def _create_sha(self): - return resumablehashlib.sha1() + return rehash.sha1() class JSONField(TextField): @@ -69,12 +73,12 @@ class Base64BinaryField(TextField): def db_value(self, value): if value is None: return None - return base64.b64encode(value) + return base64.b64encode(value).decode("ascii") def python_value(self, value): if value is None: return None - return base64.b64decode(value) + return base64.b64decode(value.encode("ascii")) class DecryptedValue(object): @@ -84,7 +88,6 @@ class DecryptedValue(object): def __init__(self, decrypted_value): assert decrypted_value is not None - assert isinstance(decrypted_value, basestring) self.value = decrypted_value def decrypt(self): @@ -180,6 +183,9 @@ def python_value(self, value): return LazyEncryptedValue(value, self) + def __hash__(self): + return field_class.__hash__(self) + def __eq__(self, _): raise Exception("Disallowed operation; use `matches`") @@ -322,15 +328,15 @@ def db_value(self, value): if value is None: return None - if isinstance(value, basestring): + if isinstance(value, str): raise Exception( "A string cannot be given to a CredentialField; please wrap in a Credential" ) - return value.hashed + return Bytes.for_string_or_unicode(value.hashed).as_unicode() def python_value(self, value): if value is None: return None - return Credential(value) + return Credential(Bytes.for_string_or_unicode(value).as_encoded_str()) diff --git a/data/logs_model/combined_model.py b/data/logs_model/combined_model.py index a359a7b6ea..9b20737f4e 100644 --- a/data/logs_model/combined_model.py +++ b/data/logs_model/combined_model.py @@ -30,7 +30,8 @@ def canonical_key_from_kind_date_tuple(kind_id, dt): matching_keys[kind_date_key] = (kind_id, dt, count) return [ - AggregatedLogCount(kind_id, count, dt) for (kind_id, dt, count) in matching_keys.values() + AggregatedLogCount(kind_id, count, dt) + for (kind_id, dt, count) in list(matching_keys.values()) ] diff --git a/data/logs_model/elastic_logs.py b/data/logs_model/elastic_logs.py index 81e5489664..56fc845b11 100644 --- a/data/logs_model/elastic_logs.py +++ b/data/logs_model/elastic_logs.py @@ -231,7 +231,7 @@ def can_delete_index(self, index, cutoff_date): def list_indices(self): self._initialize() try: - return self._client.indices.get(self._index_prefix + "*").keys() + return list(self._client.indices.get(self._index_prefix + "*").keys()) except NotFoundError as nfe: logger.exception("`%s` indices not found: %s", self._index_prefix, nfe.info) return [] diff --git a/data/logs_model/inmemory_model.py b/data/logs_model/inmemory_model.py index 8a3fd8a990..8f3b70da6e 100644 --- a/data/logs_model/inmemory_model.py +++ b/data/logs_model/inmemory_model.py @@ -177,7 +177,7 @@ def get_aggregated_log_counts( else: entries[key] = AggregatedLogCount(entry.kind_id, 1, synthetic_date) - return entries.values() + return list(entries.values()) def count_repository_actions(self, repository, day): count = 0 diff --git a/data/logs_model/logs_producer/kinesis_stream_logs_producer.py b/data/logs_model/logs_producer/kinesis_stream_logs_producer.py index f656cf056d..86cee075bf 100644 --- 
a/data/logs_model/logs_producer/kinesis_stream_logs_producer.py +++ b/data/logs_model/logs_producer/kinesis_stream_logs_producer.py @@ -30,9 +30,13 @@ def _partition_key(number_of_shards=None): key = None if number_of_shards is not None: shard_number = random.randrange(0, number_of_shards) - key = hashlib.sha1(KINESIS_PARTITION_KEY_PREFIX + str(shard_number)).hexdigest() + key = hashlib.sha1( + (KINESIS_PARTITION_KEY_PREFIX + str(shard_number)).encode("utf-8") + ).hexdigest() else: - key = hashlib.sha1(KINESIS_PARTITION_KEY_PREFIX + str(random.getrandbits(256))).hexdigest() + key = hashlib.sha1( + (KINESIS_PARTITION_KEY_PREFIX + str(random.getrandbits(256))).encode("utf-8") + ).hexdigest() return key diff --git a/data/logs_model/logs_producer/test/test_json_logs_serializer.py b/data/logs_model/logs_producer/test/test_json_logs_serializer.py index d4e0c82ffb..555844bd64 100644 --- a/data/logs_model/logs_producer/test/test_json_logs_serializer.py +++ b/data/logs_model/logs_producer/test/test_json_logs_serializer.py @@ -15,7 +15,7 @@ TEST_DATETIME = datetime.utcnow() TEST_JSON_STRING = '{"a": "b", "c": "d"}' -TEST_JSON_STRING_WITH_UNICODE = u'{"éëê": "îôû"}' +TEST_JSON_STRING_WITH_UNICODE = '{"éëê": "îôû"}' VALID_LOGENTRY = LogEntry( random_id="123-45", ip="0.0.0.0", metadata_json=TEST_JSON_STRING, datetime=TEST_DATETIME @@ -30,11 +30,11 @@ VALID_LOGENTRY_EXPECTED_OUTPUT = ( '{"datetime": "%s", "ip": "0.0.0.0", "metadata_json": "{\\"a\\": \\"b\\", \\"c\\": \\"d\\"}", "random_id": "123-45"}' % TEST_DATETIME.isoformat() -) +).encode("ascii") VALID_LOGENTRY_WITH_UNICODE_EXPECTED_OUTPUT = ( '{"datetime": "%s", "ip": "0.0.0.0", "metadata_json": "{\\"\\u00e9\\u00eb\\u00ea\\": \\"\\u00ee\\u00f4\\u00fb\\"}", "random_id": "123-45"}' % TEST_DATETIME.isoformat() -) +).encode("ascii") @pytest.mark.parametrize( diff --git a/data/logs_model/shared.py b/data/logs_model/shared.py index 5cda67c60e..baf139b5b4 100644 --- a/data/logs_model/shared.py +++ b/data/logs_model/shared.py @@ -57,7 +57,7 @@ def queue_logs_export( def epoch_ms(dt): - return (timegm(dt.timetuple()) * 1000) + (dt.microsecond / 1000) + return (timegm(dt.timetuple()) * 1000) + (dt.microsecond // 1000) def get_kinds_filter(kinds): diff --git a/data/logs_model/table_logs_model.py b/data/logs_model/table_logs_model.py index 1230afa5d4..8a95eeee15 100644 --- a/data/logs_model/table_logs_model.py +++ b/data/logs_model/table_logs_model.py @@ -199,7 +199,7 @@ def get_aggregated_log_counts( else: entries[key] = AggregatedLogCount(entry.kind_id, entry.count, synthetic_date) - return entries.values() + return list(entries.values()) def count_repository_actions(self, repository, day): return model.repositoryactioncount.count_repository_actions(repository, day) diff --git a/data/logs_model/test/fake_elasticsearch.py b/data/logs_model/test/fake_elasticsearch.py index b22f1a1338..a9b7aeab1d 100644 --- a/data/logs_model/test/fake_elasticsearch.py +++ b/data/logs_model/test/fake_elasticsearch.py @@ -37,7 +37,7 @@ def transform(value, field_name): # fields here. 
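Several hunks in this area switch `/` to `//` for millisecond timestamps. In Python 3, `/` is always true division and yields a float, so integer epoch arithmetic has to use floor division. A short sketch of the difference (the timestamp value is arbitrary):

```python
from datetime import datetime

ms = 1_600_000_000_123                         # arbitrary epoch in milliseconds
assert isinstance(ms / 1000, float)            # true division: 1600000000.123
assert ms // 1000 == 1_600_000_000             # floor division keeps an int

print(datetime.utcfromtimestamp(ms // 1000))   # 2020-09-13 12:26:40
```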
if field_name == "datetime": if isinstance(value, int): - return datetime.utcfromtimestamp(value / 1000) + return datetime.utcfromtimestamp(value // 1000) parsed = dateutil.parser.parse(value) return parsed @@ -75,7 +75,7 @@ def post_doc(url, request): def index_delete(url, request): index_name_or_pattern = url.path[1:] to_delete = [] - for index_name in docs.keys(): + for index_name in list(docs.keys()): if not fnmatch.fnmatch(index_name, index_name_or_pattern): continue @@ -94,7 +94,7 @@ def index_delete(url, request): def index_lookup(url, request): index_name_or_pattern = url.path[1:] found = {} - for index_name in docs.keys(): + for index_name in list(docs.keys()): if not fnmatch.fnmatch(index_name, index_name_or_pattern): continue @@ -115,7 +115,7 @@ def _match_query(index_name_or_pattern, query): found = [] found_index = False - for index_name in docs.keys(): + for index_name in list(docs.keys()): if not allow_wildcard and index_name_or_pattern.find("*") >= 0: break @@ -128,8 +128,8 @@ def _is_match(doc, current_query): if current_query is None: return True - for filter_type, filter_params in current_query.iteritems(): - for field_name, filter_props in filter_params.iteritems(): + for filter_type, filter_params in current_query.items(): + for field_name, filter_props in filter_params.items(): if filter_type == "range": lt = transform(filter_props["lt"], field_name) gte = transform(filter_props["gte"], field_name) @@ -244,7 +244,7 @@ def get_sort_key(item): source = item["_source"] key = "" for sort_config in sort: - for sort_key, direction in sort_config.iteritems(): + for sort_key, direction in sort_config.items(): assert direction == "desc" sort_key = sort_key.replace(".keyword", "") key += str(transform(source[sort_key], sort_key)) @@ -258,11 +258,11 @@ def get_sort_key(item): if search_after: sort_fields = [] for sort_config in sort: - if isinstance(sort_config, unicode): + if isinstance(sort_config, str): sort_fields.append(sort_config) continue - for sort_key, _ in sort_config.iteritems(): + for sort_key, _ in sort_config.items(): sort_key = sort_key.replace(".keyword", "") sort_fields.append(sort_key) @@ -304,7 +304,7 @@ def get_sort_key(item): def _by_field(agg_field_params, results): aggregated_by_field = defaultdict(list) - for agg_means, agg_means_params in agg_field_params.iteritems(): + for agg_means, agg_means_params in agg_field_params.items(): if agg_means == "terms": field_name = agg_means_params["field"] for result in results: @@ -324,7 +324,7 @@ def _by_field(agg_field_params, results): # Invoke the aggregation recursively. 
buckets = [] - for field_value, field_results in aggregated_by_field.iteritems(): + for field_value, field_results in aggregated_by_field.items(): aggregated = _aggregate(agg_field_params, field_results) if isinstance(aggregated, list): aggregated = {"doc_count": len(aggregated)} @@ -335,12 +335,12 @@ def _by_field(agg_field_params, results): return {"buckets": buckets} def _aggregate(query_config, results): - agg_params = query_config.get(u"aggs") + agg_params = query_config.get("aggs") if not agg_params: return results by_field_name = {} - for agg_field_name, agg_field_params in agg_params.iteritems(): + for agg_field_name, agg_field_params in agg_params.items(): by_field_name[agg_field_name] = _by_field(agg_field_params, results) return by_field_name @@ -364,10 +364,7 @@ def _aggregate(query_config, results): @urlmatch(netloc=FAKE_ES_HOST) def catchall_handler(url, request): - print "Unsupported URL: %s %s" % ( - request.method, - url, - ) + print("Unsupported URL: %s %s" % (request.method, url,)) return {"status_code": 501} handlers = [ diff --git a/data/logs_model/test/test_elasticsearch.py b/data/logs_model/test/test_elasticsearch.py index fb4024c866..c66cb4b726 100644 --- a/data/logs_model/test/test_elasticsearch.py +++ b/data/logs_model/test/test_elasticsearch.py @@ -14,7 +14,7 @@ from data.model.log import _json_serialize from data.logs_model.elastic_logs import ElasticsearchLogs, INDEX_NAME_PREFIX, INDEX_DATE_FORMAT from data.logs_model import configure, LogsModelProxy -from mock_elasticsearch import * +from .mock_elasticsearch import * FAKE_ES_HOST = "fakees" FAKE_ES_HOST_PATTERN = r"fakees.*" @@ -195,7 +195,7 @@ def search(url, req): window_size = query["scroll"] maximum_result_size = int(query["size"]) return mock.search_scroll_create(window_size, maximum_result_size, json.loads(req.body)) - elif "aggs" in req.body: + elif b"aggs" in req.body: return mock.search_aggs(json.loads(req.body)) else: return mock.search_after(json.loads(req.body)) diff --git a/data/logs_model/test/test_logs_producer.py b/data/logs_model/test/test_logs_producer.py index f96a753656..16103dc974 100644 --- a/data/logs_model/test/test_logs_producer.py +++ b/data/logs_model/test/test_logs_producer.py @@ -7,14 +7,14 @@ from data.logs_model import configure -from test_elasticsearch import ( +from .test_elasticsearch import ( app_config, logs_model_config, logs_model, mock_elasticsearch, mock_db_model, ) -from mock_elasticsearch import * +from .mock_elasticsearch import * logger = logging.getLogger(__name__) @@ -65,7 +65,7 @@ def test_kafka_logs_producers( producer_config = kafka_logs_producer_config with patch("kafka.client_async.KafkaClient.check_version"), patch( "kafka.KafkaProducer.send" - ) as mock_send: + ) as mock_send, patch("kafka.KafkaProducer._max_usable_produce_magic"): configure(producer_config) logs_model.log_action( "pull_repo", @@ -104,4 +104,4 @@ def test_kinesis_logs_producers( # Check that a PutRecord api call is made. 
# NOTE: The second arg of _make_api_call uses a randomized PartitionKey - mock_send.assert_called_once_with(u"PutRecord", mock_send.call_args_list[0][0][1]) + mock_send.assert_called_once_with("PutRecord", mock_send.call_args_list[0][0][1]) diff --git a/data/migrations/env.py b/data/migrations/env.py index a2c1b1b2bb..161b7d9de1 100644 --- a/data/migrations/env.py +++ b/data/migrations/env.py @@ -2,8 +2,8 @@ import os from logging.config import fileConfig -from urllib import unquote from functools import partial +from urllib.parse import unquote from alembic import context, op as alembic_op from alembic.script.revision import ResolutionError @@ -81,7 +81,7 @@ def _process_label_key(label_key): labels = { _process_label_key(k): v - for k, v in os.environ.items() + for k, v in list(os.environ.items()) if k.startswith(PROM_LABEL_PREFIX) } @@ -130,7 +130,7 @@ def run_migrations_online(): """ if isinstance(db.obj, SqliteDatabase) and not "DB_URI" in os.environ: - print "Skipping Sqlite migration!" + print("Skipping Sqlite migration!") return progress_reporter = get_progress_reporter() diff --git a/data/migrations/tester.py b/data/migrations/tester.py index afd82f3055..56c1a53e8e 100644 --- a/data/migrations/tester.py +++ b/data/migrations/tester.py @@ -142,7 +142,7 @@ def populate_table(self, table_name, fields): "INSERT INTO %s (%s) VALUES (%s)" % (table_name, ", ".join(field_names), ", ".join(field_name_vars)) ) - logger.info("Executing test query %s with values %s", query, columns.values()) + logger.info("Executing test query %s with values %s", query, list(columns.values())) op.get_bind().execute(query, **columns) def populate_column(self, table_name, col_name, field_type): diff --git a/data/migrations/versions/5248ddf35167_repository_mirror.py b/data/migrations/versions/5248ddf35167_repository_mirror.py index db8093df3e..8c174fa5e6 100644 --- a/data/migrations/versions/5248ddf35167_repository_mirror.py +++ b/data/migrations/versions/5248ddf35167_repository_mirror.py @@ -117,7 +117,7 @@ def upgrade(op, tables, tester): ) op.add_column( - u"repository", sa.Column("state", sa.Integer(), nullable=False, server_default="0") + "repository", sa.Column("state", sa.Integer(), nullable=False, server_default="0") ) op.create_index("repository_state", "repository", ["state"], unique=False) @@ -176,7 +176,7 @@ def upgrade(op, tables, tester): def downgrade(op, tables, tester): - op.drop_column(u"repository", "state") + op.drop_column("repository", "state") op.drop_table("repomirrorconfig") diff --git a/data/migrations/versions/61cadbacb9fc_add_ability_for_build_triggers_to_be_.py b/data/migrations/versions/61cadbacb9fc_add_ability_for_build_triggers_to_be_.py index 27a6caf930..a2515090bf 100644 --- a/data/migrations/versions/61cadbacb9fc_add_ability_for_build_triggers_to_be_.py +++ b/data/migrations/versions/61cadbacb9fc_add_ability_for_build_triggers_to_be_.py @@ -31,10 +31,10 @@ def upgrade(op, tables, tester): op.bulk_insert(tables.logentrykind, [{"name": "toggle_repo_trigger"},]) op.add_column( - u"repositorybuildtrigger", sa.Column("disabled_reason_id", sa.Integer(), nullable=True) + "repositorybuildtrigger", sa.Column("disabled_reason_id", sa.Integer(), nullable=True) ) op.add_column( - u"repositorybuildtrigger", + "repositorybuildtrigger", sa.Column("enabled", sa.Boolean(), nullable=False, server_default=sa.sql.expression.true()), ) op.create_index( @@ -68,8 +68,8 @@ def downgrade(op, tables, tester): type_="foreignkey", ) op.drop_index("repositorybuildtrigger_disabled_reason_id", 
table_name="repositorybuildtrigger") - op.drop_column(u"repositorybuildtrigger", "enabled") - op.drop_column(u"repositorybuildtrigger", "disabled_reason_id") + op.drop_column("repositorybuildtrigger", "enabled") + op.drop_column("repositorybuildtrigger", "disabled_reason_id") op.drop_table("disablereason") # ### end Alembic commands ### diff --git a/data/migrations/versions/703298a825c2_backfill_new_encrypted_fields.py b/data/migrations/versions/703298a825c2_backfill_new_encrypted_fields.py index d270b355c5..fe37424607 100644 --- a/data/migrations/versions/703298a825c2_backfill_new_encrypted_fields.py +++ b/data/migrations/versions/703298a825c2_backfill_new_encrypted_fields.py @@ -72,7 +72,7 @@ def _decrypted(value): if value is None: return None - assert isinstance(value, basestring) + assert isinstance(value, str) return DecryptedValue(value) diff --git a/data/migrations/versions/b4df55dea4b3_add_repository_kind.py b/data/migrations/versions/b4df55dea4b3_add_repository_kind.py index 64ef33382f..7631383297 100644 --- a/data/migrations/versions/b4df55dea4b3_add_repository_kind.py +++ b/data/migrations/versions/b4df55dea4b3_add_repository_kind.py @@ -28,7 +28,7 @@ def upgrade(op, tables, tester): ) op.add_column( - u"repository", sa.Column("kind_id", sa.Integer(), nullable=False, server_default="1") + "repository", sa.Column("kind_id", sa.Integer(), nullable=False, server_default="1") ) op.create_index("repository_kind_id", "repository", ["kind_id"], unique=False) op.create_foreign_key( @@ -49,5 +49,5 @@ def downgrade(op, tables, tester): op.f("fk_repository_kind_id_repositorykind"), "repository", type_="foreignkey" ) op.drop_index("repository_kind_id", table_name="repository") - op.drop_column(u"repository", "kind_id") + op.drop_column("repository", "kind_id") op.drop_table("repositorykind") diff --git a/data/migrations/versions/c059b952ed76_remove_unencrypted_fields_and_data.py b/data/migrations/versions/c059b952ed76_remove_unencrypted_fields_and_data.py index b1609b83b1..74e46b0bd6 100644 --- a/data/migrations/versions/c059b952ed76_remove_unencrypted_fields_and_data.py +++ b/data/migrations/versions/c059b952ed76_remove_unencrypted_fields_and_data.py @@ -24,7 +24,7 @@ def upgrade(op, tables, tester): # ### commands auto generated by Alembic - please adjust! ### op.drop_index("oauthaccesstoken_refresh_token", table_name="oauthaccesstoken") - op.drop_column(u"oauthaccesstoken", "refresh_token") + op.drop_column("oauthaccesstoken", "refresh_token") op.drop_column("accesstoken", "code") @@ -82,7 +82,7 @@ def upgrade(op, tables, tester): def downgrade(op, tables, tester): # ### commands auto generated by Alembic - please adjust! 
### op.add_column( - u"oauthaccesstoken", sa.Column("refresh_token", sa.String(length=255), nullable=True) + "oauthaccesstoken", sa.Column("refresh_token", sa.String(length=255), nullable=True) ) op.create_index( "oauthaccesstoken_refresh_token", "oauthaccesstoken", ["refresh_token"], unique=False diff --git a/data/migrations/versions/c13c8052f7a6_add_new_fields_and_tables_for_encrypted_.py b/data/migrations/versions/c13c8052f7a6_add_new_fields_and_tables_for_encrypted_.py index b81be19cea..58bc635384 100644 --- a/data/migrations/versions/c13c8052f7a6_add_new_fields_and_tables_for_encrypted_.py +++ b/data/migrations/versions/c13c8052f7a6_add_new_fields_and_tables_for_encrypted_.py @@ -32,46 +32,42 @@ def upgrade(op, tables, tester): "robotaccounttoken_robot_account_id", "robotaccounttoken", ["robot_account_id"], unique=True ) - op.add_column(u"accesstoken", sa.Column("token_code", sa.String(length=255), nullable=True)) - op.add_column(u"accesstoken", sa.Column("token_name", sa.String(length=255), nullable=True)) + op.add_column("accesstoken", sa.Column("token_code", sa.String(length=255), nullable=True)) + op.add_column("accesstoken", sa.Column("token_name", sa.String(length=255), nullable=True)) op.create_index("accesstoken_token_name", "accesstoken", ["token_name"], unique=True) op.add_column( - u"appspecificauthtoken", sa.Column("token_name", sa.String(length=255), nullable=True) + "appspecificauthtoken", sa.Column("token_name", sa.String(length=255), nullable=True) ) op.add_column( - u"appspecificauthtoken", sa.Column("token_secret", sa.String(length=255), nullable=True) + "appspecificauthtoken", sa.Column("token_secret", sa.String(length=255), nullable=True) ) op.create_index( "appspecificauthtoken_token_name", "appspecificauthtoken", ["token_name"], unique=True ) op.add_column( - u"emailconfirmation", sa.Column("verification_code", sa.String(length=255), nullable=True) + "emailconfirmation", sa.Column("verification_code", sa.String(length=255), nullable=True) ) - op.add_column( - u"oauthaccesstoken", sa.Column("token_code", sa.String(length=255), nullable=True) - ) - op.add_column( - u"oauthaccesstoken", sa.Column("token_name", sa.String(length=255), nullable=True) - ) + op.add_column("oauthaccesstoken", sa.Column("token_code", sa.String(length=255), nullable=True)) + op.add_column("oauthaccesstoken", sa.Column("token_name", sa.String(length=255), nullable=True)) op.create_index("oauthaccesstoken_token_name", "oauthaccesstoken", ["token_name"], unique=True) op.add_column( - u"oauthapplication", sa.Column("secure_client_secret", sa.String(length=255), nullable=True) + "oauthapplication", sa.Column("secure_client_secret", sa.String(length=255), nullable=True) ) op.add_column( - u"oauthapplication", + "oauthapplication", sa.Column("fully_migrated", sa.Boolean(), server_default="0", nullable=False), ) op.add_column( - u"oauthauthorizationcode", + "oauthauthorizationcode", sa.Column("code_credential", sa.String(length=255), nullable=True), ) op.add_column( - u"oauthauthorizationcode", sa.Column("code_name", sa.String(length=255), nullable=True) + "oauthauthorizationcode", sa.Column("code_name", sa.String(length=255), nullable=True) ) op.create_index( "oauthauthorizationcode_code_name", "oauthauthorizationcode", ["code_name"], unique=True @@ -80,14 +76,14 @@ def upgrade(op, tables, tester): op.create_index("oauthauthorizationcode_code", "oauthauthorizationcode", ["code"], unique=True) op.add_column( - u"repositorybuildtrigger", + "repositorybuildtrigger", sa.Column("secure_auth_token", 
sa.String(length=255), nullable=True), ) op.add_column( - u"repositorybuildtrigger", sa.Column("secure_private_key", sa.Text(), nullable=True) + "repositorybuildtrigger", sa.Column("secure_private_key", sa.Text(), nullable=True) ) op.add_column( - u"repositorybuildtrigger", + "repositorybuildtrigger", sa.Column("fully_migrated", sa.Boolean(), server_default="0", nullable=False), ) # ### end Alembic commands ### @@ -114,30 +110,30 @@ def upgrade(op, tables, tester): def downgrade(op, tables, tester): # ### commands auto generated by Alembic - please adjust! ### - op.drop_column(u"repositorybuildtrigger", "secure_private_key") - op.drop_column(u"repositorybuildtrigger", "secure_auth_token") + op.drop_column("repositorybuildtrigger", "secure_private_key") + op.drop_column("repositorybuildtrigger", "secure_auth_token") op.drop_index("oauthauthorizationcode_code", table_name="oauthauthorizationcode") op.create_index("oauthauthorizationcode_code", "oauthauthorizationcode", ["code"], unique=False) op.drop_index("oauthauthorizationcode_code_name", table_name="oauthauthorizationcode") - op.drop_column(u"oauthauthorizationcode", "code_name") - op.drop_column(u"oauthauthorizationcode", "code_credential") + op.drop_column("oauthauthorizationcode", "code_name") + op.drop_column("oauthauthorizationcode", "code_credential") - op.drop_column(u"oauthapplication", "secure_client_secret") + op.drop_column("oauthapplication", "secure_client_secret") op.drop_index("oauthaccesstoken_token_name", table_name="oauthaccesstoken") - op.drop_column(u"oauthaccesstoken", "token_name") - op.drop_column(u"oauthaccesstoken", "token_code") + op.drop_column("oauthaccesstoken", "token_name") + op.drop_column("oauthaccesstoken", "token_code") - op.drop_column(u"emailconfirmation", "verification_code") + op.drop_column("emailconfirmation", "verification_code") op.drop_index("appspecificauthtoken_token_name", table_name="appspecificauthtoken") - op.drop_column(u"appspecificauthtoken", "token_secret") - op.drop_column(u"appspecificauthtoken", "token_name") + op.drop_column("appspecificauthtoken", "token_secret") + op.drop_column("appspecificauthtoken", "token_name") op.drop_index("accesstoken_token_name", table_name="accesstoken") - op.drop_column(u"accesstoken", "token_name") - op.drop_column(u"accesstoken", "token_code") + op.drop_column("accesstoken", "token_name") + op.drop_column("accesstoken", "token_code") op.drop_table("robotaccounttoken") # ### end Alembic commands ### diff --git a/data/migrationutil.py b/data/migrationutil.py index 5d64e44c36..f79ddac2f9 100644 --- a/data/migrationutil.py +++ b/data/migrationutil.py @@ -65,7 +65,7 @@ def __init__(self, name, env_var, phases): @property def _error_suffix(self): - message = "Available values for this migration: %s. " % (self.phases.keys()) + message = "Available values for this migration: %s. " % (list(self.phases.keys())) message += "If this is a new installation, please use `new-installation`." return message diff --git a/data/model/_basequery.py b/data/model/_basequery.py index 5f9159e744..0b3fd990b2 100644 --- a/data/model/_basequery.py +++ b/data/model/_basequery.py @@ -24,6 +24,7 @@ db_count_estimator, db, ) +from functools import reduce logger = logging.getLogger(__name__) @@ -36,7 +37,8 @@ def reduce_as_tree(queries_to_reduce): This works around a bug in peewee SQL generation where reducing linearly generates a chain of queries that will exceed the recursion depth limit when it has around 80 queries. 
""" - mid = len(queries_to_reduce) / 2 + mid = len(queries_to_reduce) // 2 + left = queries_to_reduce[:mid] right = queries_to_reduce[mid:] diff --git a/data/model/appspecifictoken.py b/data/model/appspecifictoken.py index 842fb3cd84..59bbb9ddf3 100644 --- a/data/model/appspecifictoken.py +++ b/data/model/appspecifictoken.py @@ -8,6 +8,7 @@ from data.fields import DecryptedValue from util.timedeltastring import convert_to_timedelta from util.unicode import remove_unicode +from util.bytes import Bytes logger = logging.getLogger(__name__) @@ -132,7 +133,7 @@ def access_valid_token(token_code): If found, the token's last_accessed field is set to now and the token is returned. If not found, returns None. """ - token_code = remove_unicode(token_code) + token_code = remove_unicode(Bytes.for_string_or_unicode(token_code).as_encoded_str()) prefix = token_code[:TOKEN_NAME_PREFIX_LENGTH] if len(prefix) != TOKEN_NAME_PREFIX_LENGTH: diff --git a/data/model/blob.py b/data/model/blob.py index 220424284d..53a1816c89 100644 --- a/data/model/blob.py +++ b/data/model/blob.py @@ -261,7 +261,7 @@ def get_or_create_shared_blob(digest, byte_data, storage): special empty gzipped tar layer that Docker no longer pushes to us. """ assert digest - assert byte_data is not None + assert byte_data is not None and isinstance(byte_data, bytes) assert storage try: diff --git a/data/model/gc.py b/data/model/gc.py index 92dcc7e5cd..f459bedc4d 100644 --- a/data/model/gc.py +++ b/data/model/gc.py @@ -166,7 +166,7 @@ def _chunk_iterate_for_deletion(query, chunk_size=10): while True: results = list(query.limit(chunk_size)) if not results: - raise StopIteration + return yield results diff --git a/data/model/health.py b/data/model/health.py index 1f0471d19c..c5185018d7 100644 --- a/data/model/health.py +++ b/data/model/health.py @@ -13,11 +13,11 @@ def check_health(app_config): try: validate_database_url(app_config["DB_URI"], {}, connect_timeout=3) except Exception as ex: - return (False, "Could not connect to the database: %s" % ex.message) + return (False, "Could not connect to the database: %s" % str(ex)) # We will connect to the db, check that it contains some team role kinds try: okay = bool(list(TeamRole.select().limit(1))) return (okay, "Could not connect to the database" if not okay else None) except Exception as ex: - return (False, "Could not connect to the database: %s" % ex.message) + return (False, "Could not connect to the database: %s" % str(ex)) diff --git a/data/model/image.py b/data/model/image.py index 48d74f02d5..2a6f8115e2 100644 --- a/data/model/image.py +++ b/data/model/image.py @@ -76,7 +76,7 @@ def filter_to_parents(query): parents = _get_repository_images_and_storages( namespace_name, repository_name, filter_to_parents ) - id_to_image = {unicode(image.id): image for image in parents} + id_to_image = {str(image.id): image for image in parents} try: return [id_to_image[parent_id] for parent_id in reversed(parent_db_ids)] except KeyError as ke: @@ -560,7 +560,7 @@ def _get_uniqueness_hash(varying_metadata): if not varying_metadata: return None - return hashlib.sha256(json.dumps(canonicalize(varying_metadata))).hexdigest() + return hashlib.sha256(json.dumps(canonicalize(varying_metadata)).encode("utf-8")).hexdigest() def find_or_create_derived_storage( diff --git a/data/model/notification.py b/data/model/notification.py index 95bd5aa949..8f409342e2 100644 --- a/data/model/notification.py +++ b/data/model/notification.py @@ -132,7 +132,7 @@ def delete_matching_notifications(target, kind_name, **kwargs): 
except: continue - for (key, value) in kwargs.iteritems(): + for (key, value) in kwargs.items(): if not key in metadata or metadata[key] != value: matches = False break diff --git a/data/model/oauth.py b/data/model/oauth.py index 8b4c34be39..faec5ec5cd 100644 --- a/data/model/oauth.py +++ b/data/model/oauth.py @@ -3,8 +3,8 @@ from flask import url_for from datetime import datetime, timedelta -from oauth2lib.provider import AuthorizationProvider -from oauth2lib import utils +from oauth.provider import AuthorizationProvider +from oauth import utils from data.database import ( OAuthApplication, @@ -281,12 +281,12 @@ def create_application(org, name, application_uri, redirect_uri, **kwargs): application_uri=application_uri, redirect_uri=redirect_uri, secure_client_secret=DecryptedValue(client_secret), - **kwargs + **kwargs, ) def validate_access_token(access_token): - assert isinstance(access_token, basestring) + assert isinstance(access_token, str) token_name = access_token[:ACCESS_TOKEN_PREFIX_LENGTH] if not token_name: return None diff --git a/data/model/oci/manifest.py b/data/model/oci/manifest.py index 4ed8679cac..de99b1bf5b 100644 --- a/data/model/oci/manifest.py +++ b/data/model/oci/manifest.py @@ -294,7 +294,7 @@ def _create_manifest( # Create the manifest and its blobs. media_type = Manifest.media_type.get_id(manifest_interface_instance.media_type) - storage_ids = {storage.id for storage in blob_map.values()} + storage_ids = {storage.id for storage in list(blob_map.values())} with db_transaction(): # Check for the manifest. This is necessary because Postgres doesn't handle IntegrityErrors @@ -349,7 +349,7 @@ def _create_manifest( if child_manifest_rows: children_to_insert = [ dict(manifest=manifest, child_manifest=child_manifest, repository=repository_id) - for child_manifest in child_manifest_rows.values() + for child_manifest in list(child_manifest_rows.values()) ] ManifestChild.insert_many(children_to_insert).execute() @@ -366,7 +366,7 @@ def _create_manifest( # application to the manifest occur under the transaction. labels = manifest_interface_instance.get_manifest_labels(retriever) if labels: - for key, value in labels.iteritems(): + for key, value in labels.items(): # NOTE: There can technically be empty label keys via Dockerfile's. We ignore any # such `labels`, as they don't really mean anything. if not key: @@ -381,11 +381,11 @@ def _create_manifest( # to ensure that any action performed is defined in all manifests. labels_to_apply = labels or {} if child_manifest_label_dicts: - labels_to_apply = child_manifest_label_dicts[0].viewitems() + labels_to_apply = child_manifest_label_dicts[0].items() for child_manifest_label_dict in child_manifest_label_dicts[1:]: # Intersect the key+values of the labels to ensure we get the exact same result # for all the child manifests. 
- labels_to_apply = labels_to_apply & child_manifest_label_dict.viewitems() + labels_to_apply = labels_to_apply & child_manifest_label_dict.items() labels_to_apply = dict(labels_to_apply) diff --git a/data/model/oci/retriever.py b/data/model/oci/retriever.py index a0aedef56a..7fd88dc1d8 100644 --- a/data/model/oci/retriever.py +++ b/data/model/oci/retriever.py @@ -4,6 +4,7 @@ from data.database import Manifest from data.model.oci.blob import get_repository_blob_by_digest from data.model.storage import get_layer_path +from util.bytes import Bytes RETRY_COUNT = 5 RETRY_DELAY = 0.3 # seconds @@ -34,7 +35,7 @@ def get_manifest_bytes_with_digest(self, digest): ) try: - return query.get().manifest_bytes + return Bytes.for_string_or_unicode(query.get().manifest_bytes).as_encoded_str() except Manifest.DoesNotExist: return None diff --git a/data/model/oci/tag.py b/data/model/oci/tag.py index df694344e2..3a560c497b 100644 --- a/data/model/oci/tag.py +++ b/data/model/oci/tag.py @@ -337,7 +337,7 @@ def retarget_tag( legacy_image = get_legacy_image_for_manifest(manifest) now_ms = now_ms or get_epoch_timestamp_ms() - now_ts = int(now_ms / 1000) + now_ts = int(now_ms // 1000) with db_transaction(): # Lookup an existing tag in the repository with the same name and, if present, mark it @@ -381,7 +381,7 @@ def _delete_tag(tag, now_ms): """ Deletes the given tag by marking it as expired. """ - now_ts = int(now_ms / 1000) + now_ts = int(now_ms // 1000) with db_transaction(): updated = ( @@ -498,7 +498,7 @@ def change_tag_expiration(tag_id, expiration_datetime): ) if expiration_datetime is not None: - lifetime_start_ts = int(tag.lifetime_start_ms / 1000) + lifetime_start_ts = int(tag.lifetime_start_ms // 1000) offset = timegm(expiration_datetime.utctimetuple()) - lifetime_start_ts offset = min(max(offset, min_expire_sec.total_seconds()), max_expire_sec.total_seconds()) @@ -550,7 +550,7 @@ def set_tag_end_ms(tag, end_ms): .get() ).repository_tag - old_style_tag.lifetime_end_ts = end_ms / 1000 if end_ms is not None else None + old_style_tag.lifetime_end_ts = end_ms // 1000 if end_ms is not None else None old_style_tag.save() except TagToRepositoryTag.DoesNotExist: pass diff --git a/data/model/oci/test/test_oci_manifest.py b/data/model/oci/test/test_oci_manifest.py index b502033ac1..ec71d2ddd4 100644 --- a/data/model/oci/test/test_oci_manifest.py +++ b/data/model/oci/test/test_oci_manifest.py @@ -74,7 +74,7 @@ def create_manifest_for_testing(repository, differentiation_field="1"): remote_digest = sha256_digest("something") builder = DockerSchema2ManifestBuilder() - builder.set_config_digest(config_digest, len(layer_json)) + builder.set_config_digest(config_digest, len(layer_json.encode("utf-8"))) builder.add_layer(remote_digest, 1234, urls=["http://hello/world" + differentiation_field]) manifest = builder.build() @@ -115,6 +115,7 @@ def test_lookup_manifest_child_tag(initialized_db): def _populate_blob(content): + content = Bytes.for_string_or_unicode(content).as_encoded_str() digest = str(sha256_digest(content)) location = ImageStorageLocation.get(name="local_us") blob = store_blob_record_and_temp_link( @@ -161,8 +162,8 @@ def test_get_or_create_manifest(schema_version, initialized_db): sample_manifest_instance = builder.build(docker_v2_signing_key) elif schema_version == 2: builder = DockerSchema2ManifestBuilder() - builder.set_config_digest(config_digest, len(layer_json)) - builder.add_layer(random_digest, len(random_data)) + builder.set_config_digest(config_digest, len(layer_json.encode("utf-8"))) + 
builder.add_layer(random_digest, len(random_data.encode("utf-8"))) sample_manifest_instance = builder.build() # Create a new manifest. @@ -267,8 +268,8 @@ def test_get_or_create_manifest_list(initialized_db): v1_manifest = v1_builder.build(docker_v2_signing_key).unsigned() v2_builder = DockerSchema2ManifestBuilder() - v2_builder.set_config_digest(config_digest, len(layer_json)) - v2_builder.add_layer(random_digest, len(random_data)) + v2_builder.set_config_digest(config_digest, len(layer_json.encode("utf-8"))) + v2_builder.add_layer(random_digest, len(random_data.encode("utf-8"))) v2_manifest = v2_builder.build() # Write the manifests. @@ -339,8 +340,8 @@ def test_get_or_create_manifest_list_duplicate_child_manifest(initialized_db): # Build the manifest. v2_builder = DockerSchema2ManifestBuilder() - v2_builder.set_config_digest(config_digest, len(layer_json)) - v2_builder.add_layer(random_digest, len(random_data)) + v2_builder.set_config_digest(config_digest, len(layer_json.encode("utf-8"))) + v2_builder.add_layer(random_digest, len(random_data.encode("utf-8"))) v2_manifest = v2_builder.build() # Write the manifest. @@ -399,12 +400,12 @@ def test_get_or_create_manifest_with_remote_layers(initialized_db): random_data = "hello world" _, random_digest = _populate_blob(random_data) - remote_digest = sha256_digest("something") + remote_digest = sha256_digest(b"something") builder = DockerSchema2ManifestBuilder() - builder.set_config_digest(config_digest, len(layer_json)) + builder.set_config_digest(config_digest, len(layer_json.encode("utf-8"))) builder.add_layer(remote_digest, 1234, urls=["http://hello/world"]) - builder.add_layer(random_digest, len(random_data)) + builder.add_layer(random_digest, len(random_data.encode("utf-8"))) manifest = builder.build() assert remote_digest in manifest.blob_digests @@ -447,9 +448,9 @@ def create_manifest_for_testing(repository, differentiation_field="1", include_s # Add a blob containing the config. 
_, config_digest = _populate_blob(layer_json) - remote_digest = sha256_digest("something") + remote_digest = sha256_digest(b"something") builder = DockerSchema2ManifestBuilder() - builder.set_config_digest(config_digest, len(layer_json)) + builder.set_config_digest(config_digest, len(layer_json.encode("utf-8"))) builder.add_layer(remote_digest, 1234, urls=["http://hello/world" + differentiation_field]) if include_shared_blob: @@ -488,12 +489,12 @@ def test_retriever(initialized_db): other_random_data = "hi place" _, other_random_digest = _populate_blob(other_random_data) - remote_digest = sha256_digest("something") + remote_digest = sha256_digest(b"something") builder = DockerSchema2ManifestBuilder() - builder.set_config_digest(config_digest, len(layer_json)) - builder.add_layer(other_random_digest, len(other_random_data)) - builder.add_layer(random_digest, len(random_data)) + builder.set_config_digest(config_digest, len(layer_json.encode("utf-8"))) + builder.add_layer(other_random_digest, len(other_random_data.encode("utf-8"))) + builder.add_layer(random_digest, len(random_data.encode("utf-8"))) manifest = builder.build() assert config_digest in manifest.blob_digests @@ -573,11 +574,11 @@ def test_create_manifest_cannot_load_config_blob(initialized_db): random_data = "hello world" _, random_digest = _populate_blob(random_data) - remote_digest = sha256_digest("something") + remote_digest = sha256_digest(b"something") builder = DockerSchema2ManifestBuilder() - builder.set_config_digest(config_digest, len(layer_json)) - builder.add_layer(random_digest, len(random_data)) + builder.set_config_digest(config_digest, len(layer_json.encode("utf-8"))) + builder.add_layer(random_digest, len(random_data.encode("utf-8"))) manifest = builder.build() broken_retriever = BrokenRetriever() diff --git a/data/model/organization.py b/data/model/organization.py index 5a55c1d19a..196128b602 100644 --- a/data/model/organization.py +++ b/data/model/organization.py @@ -38,7 +38,7 @@ def create_organization(name, email, creating_user, email_required=True, is_poss return new_org except InvalidUsernameException as iue: - raise InvalidOrganizationException(iue.message) + raise InvalidOrganizationException(str(iue)) def get_organization(name): diff --git a/data/model/permission.py b/data/model/permission.py index 697e742f99..83e6731da0 100644 --- a/data/model/permission.py +++ b/data/model/permission.py @@ -233,7 +233,7 @@ def __apply_permission_list(repo, proto_query, name_property, create_permission_ # proto yet, so we can safely assume it applies final_protos[name] = (applies_to, proto.role) - for delegate, role in final_protos.values(): + for delegate, role in list(final_protos.values()): create_permission_func(delegate, repo, role) diff --git a/data/model/team.py b/data/model/team.py index 853524cc10..8020cc61e5 100644 --- a/data/model/team.py +++ b/data/model/team.py @@ -259,7 +259,7 @@ def _team_view(team): # Add repository permissions count. permission_tuples = ( RepositoryPermission.select(RepositoryPermission.team, fn.Count(RepositoryPermission.id)) - .where(RepositoryPermission.team << teams.keys()) + .where(RepositoryPermission.team << list(teams.keys())) .group_by(RepositoryPermission.team) .tuples() ) @@ -270,7 +270,7 @@ def _team_view(team): # Add the member count. 
members_tuples = ( TeamMember.select(TeamMember.team, fn.Count(TeamMember.id)) - .where(TeamMember.team << teams.keys()) + .where(TeamMember.team << list(teams.keys())) .group_by(TeamMember.team) .tuples() ) @@ -280,11 +280,11 @@ def _team_view(team): # Add syncing information. if has_external_auth: - sync_query = TeamSync.select(TeamSync.team).where(TeamSync.team << teams.keys()) + sync_query = TeamSync.select(TeamSync.team).where(TeamSync.team << list(teams.keys())) for team_sync in sync_query: teams[team_sync.team_id]["is_synced"] = True - return [AttrDict(team_info) for team_info in teams.values()] + return [AttrDict(team_info) for team_info in list(teams.values())] def get_user_teams_within_org(username, organization): diff --git a/data/model/test/test_gc.py b/data/model/test/test_gc.py index 3d4d3d2436..eb91341df8 100644 --- a/data/model/test/test_gc.py +++ b/data/model/test/test_gc.py @@ -69,9 +69,10 @@ def _delete_temp_links(repo): def _populate_blob(repo, content): - digest = str(sha256_digest(content)) + assert isinstance(content, bytes) + digest = sha256_digest(content) location = ImageStorageLocation.get(name="local_us") - storage.put_content(["local_us"], storage.blob_path(digest), "somecontent") + storage.put_content(["local_us"], storage.blob_path(digest), content) blob = model.blob.store_blob_record_and_temp_link_in_repo( repo, digest, location, len(content), 120 ) @@ -83,7 +84,7 @@ def create_repository(namespace=ADMIN_ACCESS_USER, name=REPO, **kwargs): repo = model.repository.create_repository(namespace, name, user) # Populate the repository with the tags. - for tag_name, image_ids in kwargs.iteritems(): + for tag_name, image_ids in kwargs.items(): move_tag(repo, tag_name, image_ids, expect_gc=False) return repo @@ -116,7 +117,7 @@ def move_tag(repository, tag, image_ids, expect_gc=True): config["parent"] = parent_id # Create a storage row for the layer blob. - _, layer_blob_digest = _populate_blob(repository, image_id) + _, layer_blob_digest = _populate_blob(repository, image_id.encode("ascii")) builder.insert_layer(layer_blob_digest, json.dumps(config)) parent_id = image_id @@ -584,7 +585,7 @@ def test_image_with_cas(default_tag_policy, initialized_db): repository = create_repository() # Create an image storage record under CAS. - content = "hello world" + content = b"hello world" digest = "sha256:" + hashlib.sha256(content).hexdigest() preferred = storage.preferred_locations[0] storage.put_content({preferred}, storage.blob_path(digest), content) @@ -638,7 +639,7 @@ def test_images_shared_cas(default_tag_policy, initialized_db): repository = create_repository() # Create two image storage records with the same content checksum. 
- content = "hello world" + content = b"hello world" digest = "sha256:" + hashlib.sha256(content).hexdigest() preferred = storage.preferred_locations[0] storage.put_content({preferred}, storage.blob_path(digest), content) diff --git a/data/model/test/test_model_blob.py b/data/model/test/test_model_blob.py index 7cf348218d..d472143207 100644 --- a/data/model/test/test_model_blob.py +++ b/data/model/test/test_model_blob.py @@ -40,10 +40,10 @@ def test_store_blob(initialized_db): def test_get_or_create_shared_blob(initialized_db): - shared = model.blob.get_or_create_shared_blob("sha256:abcdef", "somecontent", storage) + shared = model.blob.get_or_create_shared_blob("sha256:abcdef", b"somecontent", storage) assert shared.content_checksum == "sha256:abcdef" - again = model.blob.get_or_create_shared_blob("sha256:abcdef", "somecontent", storage) + again = model.blob.get_or_create_shared_blob("sha256:abcdef", b"somecontent", storage) assert shared == again diff --git a/data/model/test/test_repo_mirroring.py b/data/model/test/test_repo_mirroring.py index 97b12673ec..d73efa1690 100644 --- a/data/model/test/test_repo_mirroring.py +++ b/data/model/test/test_repo_mirroring.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -from __future__ import absolute_import + from jsonschema import ValidationError from data.database import RepoMirrorConfig, RepoMirrorStatus, User @@ -26,7 +26,9 @@ def create_mirror_repo_robot(rules, repo_name="repo", external_registry_config=N except model.InvalidRobotException: robot, _ = create_robot("robot", user) - repo = create_repository("mirror", repo_name, None, repo_kind="image", visibility="public") + repo = model.repository.create_repository( + "mirror", repo_name, None, repo_kind="image", visibility="public" + ) repo.save() rule = model.repo_mirror.create_mirroring_rule(repo, rules) @@ -39,7 +41,7 @@ def create_mirror_repo_robot(rules, repo_name="repo", external_registry_config=N "sync_interval": timedelta(days=1).total_seconds(), "external_registry_config": external_registry_config, } - mirror = enable_mirroring_for_repository(**mirror_kwargs) + mirror = model.repo_mirror.enable_mirroring_for_repository(**mirror_kwargs) mirror.sync_status = RepoMirrorStatus.NEVER_RUN mirror.sync_start_date = datetime.utcnow() - timedelta(days=1) mirror.sync_retries_remaining = 3 diff --git a/data/model/user.py b/data/model/user.py index 33611a19a1..a862492c91 100644 --- a/data/model/user.py +++ b/data/model/user.py @@ -65,6 +65,7 @@ ) from util.backoff import exponential_backoff from util.timedeltastring import convert_to_timedelta +from util.bytes import Bytes from util.unicode import remove_unicode from util.security.token import decode_public_private_token, encode_public_private_token @@ -77,6 +78,7 @@ def hash_password(password, salt=None): salt = salt or bcrypt.gensalt() + salt = Bytes.for_string_or_unicode(salt).as_encoded_str() return bcrypt.hashpw(password.encode("utf-8"), salt) @@ -102,7 +104,7 @@ def create_user( prompts=prompts, is_possible_abuser=is_possible_abuser, ) - created.password_hash = hash_password(password) + created.password_hash = hash_password(password).decode("ascii") created.verified = auto_verify created.save() @@ -166,7 +168,7 @@ def create_user_noverify( return new_user except Exception as ex: - raise DataModelException(ex.message) + raise DataModelException(ex) def increase_maximum_build_count(user, maximum_queued_builds_count): @@ -196,7 +198,7 @@ def change_password(user, new_password): pw_hash = hash_password(new_password) user.invalid_login_attempts = 0 - 
user.password_hash = pw_hash + user.password_hash = pw_hash.decode("ascii") invalidate_all_sessions(user) # Remove any password required notifications for the user. @@ -354,7 +356,7 @@ def create_robot(robot_shortname, parent, description="", unstructured_metadata= ) return created, token except Exception as ex: - raise DataModelException(ex.message) + raise DataModelException(ex) def get_or_create_robot_metadata(robot): @@ -426,7 +428,7 @@ def get_matching_robots(name_prefix, username, limit=10): def verify_robot(robot_username, password): try: - password = remove_unicode(password) + password.encode("ascii") except UnicodeEncodeError: msg = "Could not find robot with username: %s and supplied password." % robot_username raise InvalidRobotException(msg) @@ -679,7 +681,7 @@ def create_confirm_email_code(user, new_email=None): code = EmailConfirmation.create( user=user, email_confirm=True, new_email=new_email, verification_code=verification_code ) - return encode_public_private_token(code.code, unhashed) + return encode_public_private_token(code.code, unhashed).decode("ascii") def confirm_user_email(token): @@ -955,8 +957,8 @@ def verify_user(username_or_email, password): # Make sure we didn't get any unicode for the username. try: - str(username_or_email) - except ValueError: + username_or_email.encode("ascii") + except UnicodeEncodeError: return None # Fetch the user with the matching username or e-mail address. @@ -983,9 +985,8 @@ def verify_user(username_or_email, password): # Hash the given password and compare it to the specified password. if ( fetched.password_hash - and hash_password(password, fetched.password_hash) == fetched.password_hash + and hash_password(password, fetched.password_hash).decode("ascii") == fetched.password_hash ): - # If the user previously had any invalid login attempts, clear them out now. 
if fetched.invalid_login_attempts > 0: try: @@ -1370,4 +1371,4 @@ def is_active(self): return self.db_user() and self.db_user().verified def get_id(self): - return unicode(self._uuid) + return str(self._uuid) diff --git a/data/queue.py b/data/queue.py index f7edd37034..893ecc2ea9 100644 --- a/data/queue.py +++ b/data/queue.py @@ -104,7 +104,7 @@ def num_alive_jobs(self, canonical_name_list): def strip_slash(name): return name.lstrip("/") - canonical_name_list = map(strip_slash, canonical_name_list) + canonical_name_list = list(map(strip_slash, canonical_name_list)) canonical_name_query = "/".join([self._queue_name] + canonical_name_list) + "%" return ( @@ -125,7 +125,7 @@ def num_available_jobs_between( def strip_slash(name): return name.lstrip("/") - canonical_name_list = map(strip_slash, canonical_name_list) + canonical_name_list = list(map(strip_slash, canonical_name_list)) available = self._available_jobs( available_max_time, "/".join([self._queue_name] + canonical_name_list) + "%" diff --git a/data/registry_model/blobuploader.py b/data/registry_model/blobuploader.py index d11c9b4c28..6c7fe66b76 100644 --- a/data/registry_model/blobuploader.py +++ b/data/registry_model/blobuploader.py @@ -5,7 +5,7 @@ from collections import namedtuple import bitmath -import resumablehashlib +import rehash from prometheus_client import Counter, Histogram diff --git a/data/registry_model/datatype.py b/data/registry_model/datatype.py index 6525f8c5f3..37e3c06e8d 100644 --- a/data/registry_model/datatype.py +++ b/data/registry_model/datatype.py @@ -42,6 +42,9 @@ def __getattr__(self, name): def __repr__(self): return "<%s> #%s" % (name, self._db_id) + def __hash__(self): + return hash((self.__name__, self._db_id)) + @classmethod def from_dict(cls, dict_data): try: diff --git a/data/registry_model/datatypes.py b/data/registry_model/datatypes.py index 3f2e5bb964..fcec97953c 100644 --- a/data/registry_model/datatypes.py +++ b/data/registry_model/datatypes.py @@ -233,8 +233,8 @@ def for_tag(cls, tag, legacy_image=None): reversion=tag.reversion, lifetime_start_ms=tag.lifetime_start_ms, lifetime_end_ms=tag.lifetime_end_ms, - lifetime_start_ts=tag.lifetime_start_ms / 1000, - lifetime_end_ts=tag.lifetime_end_ms / 1000 if tag.lifetime_end_ms else None, + lifetime_start_ts=tag.lifetime_start_ms // 1000, + lifetime_end_ts=tag.lifetime_end_ms // 1000 if tag.lifetime_end_ms else None, manifest_digest=tag.manifest.digest, inputs=dict( legacy_image=legacy_image, @@ -609,7 +609,7 @@ def unique_id(self): This call will consistently produce the same unique ID across calls in the same code base. 
""" - return hashlib.sha256("%s:%s" % (self.verb, self._db_id)).hexdigest() + return hashlib.sha256(("%s:%s" % (self.verb, self._db_id)).encode("utf-8")).hexdigest() class BlobUpload( diff --git a/data/registry_model/manifestbuilder.py b/data/registry_model/manifestbuilder.py index 58fa737f29..4946974b2a 100644 --- a/data/registry_model/manifestbuilder.py +++ b/data/registry_model/manifestbuilder.py @@ -86,7 +86,7 @@ def committed_tags(self): """ return [ registry_model.get_repo_tag(self._repository_ref, tag_name, include_legacy_image=True) - for tag_name in self._builder_state.tags.keys() + for tag_name in list(self._builder_state.tags.keys()) ] def start_layer( diff --git a/data/registry_model/registry_oci_model.py b/data/registry_model/registry_oci_model.py index fafa270c17..cfa4c314e4 100644 --- a/data/registry_model/registry_oci_model.py +++ b/data/registry_model/registry_oci_model.py @@ -337,17 +337,17 @@ def get_most_recent_tag_lifetime_start(self, repository_refs): if not repository_refs: return {} - toSeconds = lambda ms: ms / 1000 if ms is not None else None + toSeconds = lambda ms: ms // 1000 if ms is not None else None last_modified = oci.tag.get_most_recent_tag_lifetime_start([r.id for r in repository_refs]) - return {repo_id: toSeconds(ms) for repo_id, ms in last_modified.items()} + return {repo_id: toSeconds(ms) for repo_id, ms in list(last_modified.items())} def get_repo_tag(self, repository_ref, tag_name, include_legacy_image=False): """ Returns the latest, *active* tag found in the repository, with the matching name or None if none. """ - assert isinstance(tag_name, basestring) + assert isinstance(tag_name, str) tag = oci.tag.get_tag(repository_ref._db_id, tag_name) if tag is None: @@ -401,7 +401,7 @@ def create_manifest_and_retarget_tag( # Apply any labels that should modify the created tag. if created_manifest.labels_to_apply: - for key, value in created_manifest.labels_to_apply.iteritems(): + for key, value in created_manifest.labels_to_apply.items(): apply_label_to_manifest(dict(key=key, value=value), wrapped_manifest, self) # Reload the tag in case any updates were applied. diff --git a/data/registry_model/test/test_blobuploader.py b/data/registry_model/test/test_blobuploader.py index 964a356b23..ba71024b54 100644 --- a/data/registry_model/test/test_blobuploader.py +++ b/data/registry_model/test/test_blobuploader.py @@ -35,7 +35,7 @@ def test_basic_upload_blob(chunk_count, subchunk, registry_model): settings = BlobUploadSettings("2M", 3600) app_config = {"TESTING": True} - data = "" + data = b"" with upload_blob(repository_ref, storage, settings) as manager: assert manager assert manager.blob_upload_id @@ -72,7 +72,7 @@ def test_cancel_upload(registry_model): blob_upload_id = manager.blob_upload_id assert registry_model.lookup_blob_upload(repository_ref, blob_upload_id) is not None - manager.upload_chunk(app_config, BytesIO("hello world")) + manager.upload_chunk(app_config, BytesIO(b"hello world")) # Since the blob was not comitted, the upload should be deleted. 
assert blob_upload_id @@ -94,11 +94,11 @@ def test_extra_blob_stream_handlers(registry_model): handler1_result = [] handler2_result = [] - def handler1(bytes): - handler1_result.append(bytes) + def handler1(bytes_data): + handler1_result.append(bytes_data) - def handler2(bytes): - handler2_result.append(bytes) + def handler2(bytes_data): + handler2_result.append(bytes_data) repository_ref = registry_model.lookup_repository("devtable", "complex") storage = DistributedStorage({"local_us": FakeStorage(None)}, ["local_us"]) @@ -108,14 +108,15 @@ def handler2(bytes): with upload_blob( repository_ref, storage, settings, extra_blob_stream_handlers=[handler1, handler2] ) as manager: - manager.upload_chunk(app_config, BytesIO("hello ")) - manager.upload_chunk(app_config, BytesIO("world")) + manager.upload_chunk(app_config, BytesIO(b"hello ")) + manager.upload_chunk(app_config, BytesIO(b"world")) - assert "".join(handler1_result) == "hello world" - assert "".join(handler2_result) == "hello world" + assert b"".join(handler1_result) == b"hello world" + assert b"".join(handler2_result) == b"hello world" def valid_tar_gz(contents): + assert isinstance(contents, bytes) with closing(BytesIO()) as layer_data: with closing(tarfile.open(fileobj=layer_data, mode="w|gz")) as tar_file: tar_file_info = tarfile.TarInfo(name="somefile") @@ -135,7 +136,7 @@ def test_uncompressed_size(registry_model): app_config = {"TESTING": True} with upload_blob(repository_ref, storage, settings) as manager: - manager.upload_chunk(app_config, BytesIO(valid_tar_gz("hello world"))) + manager.upload_chunk(app_config, BytesIO(valid_tar_gz(b"hello world"))) blob = manager.commit_to_blob(app_config) diff --git a/data/registry_model/test/test_interface.py b/data/registry_model/test/test_interface.py index 54dcdf6b40..16c894e8b1 100644 --- a/data/registry_model/test/test_interface.py +++ b/data/registry_model/test/test_interface.py @@ -288,9 +288,9 @@ def test_get_most_recent_tag_lifetime_start(repositories, expected_tag_count, re ) assert len(last_modified_map) == expected_tag_count - for repo_id, last_modified in last_modified_map.items(): + for repo_id, last_modified in list(last_modified_map.items()): tag = registry_model.get_most_recent_tag(RepositoryReference.for_id(repo_id)) - assert last_modified == tag.lifetime_start_ms / 1000 + assert last_modified == tag.lifetime_start_ms // 1000 @pytest.mark.parametrize( @@ -481,7 +481,7 @@ def test_manifest_remote_layers(oci_model): app_config = {"TESTING": True} repository_ref = oci_model.lookup_repository("devtable", "simple") with upload_blob(repository_ref, storage, BlobUploadSettings(500, 500)) as upload: - upload.upload_chunk(app_config, BytesIO(config_json)) + upload.upload_chunk(app_config, BytesIO(config_json.encode("utf-8"))) blob = upload.commit_to_blob(app_config) # Create the manifest in the repo. @@ -602,7 +602,7 @@ def test_derived_image_for_manifest_list(manifest_builder, list_builder, oci_mod app_config = {"TESTING": True} repository_ref = oci_model.lookup_repository("devtable", "simple") with upload_blob(repository_ref, storage, BlobUploadSettings(500, 500)) as upload: - upload.upload_chunk(app_config, BytesIO(config_json)) + upload.upload_chunk(app_config, BytesIO(config_json.encode("utf-8"))) blob = upload.commit_to_blob(app_config) # Create the manifest in the repo. @@ -682,7 +682,7 @@ def test_commit_blob_upload(registry_model): ) # Commit the blob upload and make sure it is written as a blob. 
- digest = "sha256:" + hashlib.sha256("hello").hexdigest() + digest = "sha256:" + hashlib.sha256(b"hello").hexdigest() blob = registry_model.commit_blob_upload(blob_upload, digest, 60) assert blob.digest == digest @@ -873,7 +873,7 @@ def test_known_issue_schema1(registry_model): def test_unicode_emoji(registry_model): builder = DockerSchema1ManifestBuilder("devtable", "simple", "latest") builder.add_layer( - "sha256:abcde", json.dumps({"id": "someid", "author": u"😱",}, ensure_ascii=False) + "sha256:abcde", json.dumps({"id": "someid", "author": "😱",}, ensure_ascii=False) ) manifest = builder.build(ensure_ascii=False) diff --git a/data/registry_model/test/test_manifestbuilder.py b/data/registry_model/test/test_manifestbuilder.py index 97cd90dc45..b5a4ffa8b2 100644 --- a/data/registry_model/test/test_manifestbuilder.py +++ b/data/registry_model/test/test_manifestbuilder.py @@ -32,9 +32,9 @@ def fake_session(): @pytest.mark.parametrize( "layers", [ - pytest.param([("someid", None, "some data")], id="Single layer"), + pytest.param([("someid", None, b"some data")], id="Single layer"), pytest.param( - [("parentid", None, "some parent data"), ("someid", "parentid", "some data")], + [("parentid", None, b"some parent data"), ("someid", "parentid", b"some data")], id="Multi layer", ), ], diff --git a/data/secscan_model/test/test_secscan_v4_model.py b/data/secscan_model/test/test_secscan_v4_model.py index a0552f23d6..91084113d4 100644 --- a/data/secscan_model/test/test_secscan_v4_model.py +++ b/data/secscan_model/test/test_secscan_v4_model.py @@ -372,19 +372,24 @@ def test_features_for(): with open(vuln_report_filename) as vuln_report_file: vuln_report = json.load(vuln_report_file) - with open(security_info_filename) as security_info_file: - security_info = json.load(security_info_file) - - features_for_sec_info = SecurityInformation( - Layer( - "sha256:b05ac1eeec8635442fa5d3e55d6ef4ad287b9c66055a552c2fd309c334563b0a", - "", - "", - 4, - features_for(vuln_report), - ) - ).to_dict() - - assert json.dumps( - canonicalize(features_for_sec_info, preserve_sequence_order=False) - ) == json.dumps(canonicalize(security_info["data"], preserve_sequence_order=False)) + with open(security_info_filename) as security_info_file: + security_info = json.load(security_info_file) + + expected = security_info["data"] + expected["Layer"]["Features"].sort(key=lambda d: d["Name"]) + generated = SecurityInformation( + Layer( + "sha256:b05ac1eeec8635442fa5d3e55d6ef4ad287b9c66055a552c2fd309c334563b0a", + "", + "", + 4, + features_for(vuln_report), + ) + ).to_dict() + + # Sort the Features' list so that the following assertion holds even if they are out of order + # (Ordering of the dicts' key iteration is different from Python 2 to 3) + expected["Layer"]["Features"].sort(key=lambda d: d["Name"]) + generated["Layer"]["Features"].sort(key=lambda d: d["Name"]) + + assert generated == expected diff --git a/data/test/test_encryption.py b/data/test/test_encryption.py index 4167d64064..6844934822 100644 --- a/data/test/test_encryption.py +++ b/data/test/test_encryption.py @@ -20,18 +20,16 @@ "a" * 32, "a" * 33, "a" * 150, - u"😇", + "😇", ], ) -@pytest.mark.parametrize("version", _VERSIONS.keys()) +@pytest.mark.parametrize("version", list(_VERSIONS.keys())) @pytest.mark.parametrize( "secret_key", [ - u"test1234", "test1234", "thisisanothercoolsecretkeyhere", "107383705745765174750346070528443780244192102846031525796571939503548634055845", - bytes("test1234"), ], ) @pytest.mark.parametrize("use_valid_key", [True, False,]) @@ -55,15 
+53,15 @@ def test_encryption(test_data, version, secret_key, use_valid_key): @pytest.mark.parametrize( "secret_key, encrypted_value, expected_decrypted_value", [ - (u"test1234", "v0$$iE+87Qefu/2i+5zC87nlUtOskypk8MUUDS/QZPs=", ""), + ("test1234", "v0$$iE+87Qefu/2i+5zC87nlUtOskypk8MUUDS/QZPs=", ""), ("test1234", "v0$$XTxqlz/Kw8s9WKw+GaSvXFEKgpO/a2cGNhvnozzkaUh4C+FgHqZqnA==", "hello world"), ( - bytes("test1234"), + "test1234", "v0$$9LadVsSvfAr9r1OvghSYcJqrJpv46t+U6NgLKrcFY6y2bQsASIN36g==", "hello world", ), ( - bytes("\1\2\3\4\5\6"), + "\1\2\3\4\5\6", "v0$$2wwWX8IhUYzuh4cyMgSXF3MEVDlEhrf0CNimTghlHgCuK6E4+bLJb1xJOKxsXMs=", "hello world, again", ), diff --git a/data/test/test_userfiles.py b/data/test/test_userfiles.py index 1c0e80d781..b61f53f2f5 100644 --- a/data/test/test_userfiles.py +++ b/data/test/test_userfiles.py @@ -32,7 +32,7 @@ def test_lookup_userfile(app, client): def _stream_read_file(locations, path): if path.find(uuid) > 0 or path.find(upper_uuid) > 0: - return BytesIO("hello world") + return BytesIO(b"hello world") raise IOError("Not found!") diff --git a/data/userevent.py b/data/userevent.py index 4ad2a5916d..eb339b7451 100644 --- a/data/userevent.py +++ b/data/userevent.py @@ -124,7 +124,7 @@ def event_stream(self): while True: pubsub = self._pubsub if pubsub is None: - raise StopIteration + return try: item = pubsub.get_message(ignore_subscribe_messages=True, timeout=5) diff --git a/data/userfiles.py b/data/userfiles.py index 9e77977e0a..a42c7ffaa9 100644 --- a/data/userfiles.py +++ b/data/userfiles.py @@ -1,6 +1,6 @@ import os import logging -import urlparse +import urllib.parse from uuid import uuid4 from _pyio import BufferedReader @@ -100,7 +100,7 @@ def prepare_for_drop(self, mime_type, requires_cors=True): with self._app.app_context() as ctx: ctx.url_adapter = self._build_url_adapter() file_relative_url = url_for(self._handler_name, file_id=file_id) - file_url = urlparse.urljoin(get_app_url(self._app.config), file_relative_url) + file_url = urllib.parse.urljoin(get_app_url(self._app.config), file_relative_url) return (file_url, file_id) return (url, file_id) @@ -128,7 +128,7 @@ def get_file_url(self, file_id, remote_ip, expires_in=300, requires_cors=False): with self._app.app_context() as ctx: ctx.url_adapter = self._build_url_adapter() file_relative_url = url_for(self._handler_name, file_id=file_id) - return urlparse.urljoin(get_app_url(self._app.config), file_relative_url) + return urllib.parse.urljoin(get_app_url(self._app.config), file_relative_url) return url diff --git a/data/users/__init__.py b/data/users/__init__.py index 3ae4ea9fe5..a7556fa6b2 100644 --- a/data/users/__init__.py +++ b/data/users/__init__.py @@ -154,7 +154,7 @@ def encrypt_user_password(self, password): """ data = {"password": password} - message = json.dumps(data) + message = json.dumps(data).encode("utf-8") cipher = AESCipher(self.secret_key) return cipher.encrypt(message) diff --git a/data/users/apptoken.py b/data/users/apptoken.py index 57e966f112..e19b123a61 100644 --- a/data/users/apptoken.py +++ b/data/users/apptoken.py @@ -3,7 +3,6 @@ from data import model from oauth.loginmanager import OAuthLoginManager from oauth.oidc import PublicKeyLoadException -from util.security.jwtutil import InvalidTokenError logger = logging.getLogger(__name__) diff --git a/data/users/externaljwt.py b/data/users/externaljwt.py index 1cb93956b1..11324c4c8a 100644 --- a/data/users/externaljwt.py +++ b/data/users/externaljwt.py @@ -47,7 +47,7 @@ def __init__( self.public_key_path = public_key_path - with 
open(public_key_path) as public_key_file: + with open(public_key_path, mode="rb") as public_key_file: self.public_key = public_key_file.read() def has_password_set(self, username): diff --git a/data/users/externalldap.py b/data/users/externalldap.py index 4477860694..3023182660 100644 --- a/data/users/externalldap.py +++ b/data/users/externalldap.py @@ -157,24 +157,24 @@ def _add_user_filter(self, query): assert user_filter.startswith("(") and user_filter.endswith(")") assert query.startswith("(") and query.endswith(")") - return u"(&{0}{1})".format(query, user_filter) + return "(&{0}{1})".format(query, user_filter) def _ldap_user_search_with_rdn(self, conn, username_or_email, user_search_dn, suffix=""): - query = u"(|({0}={2}{3})({1}={2}{3}))".format( + query = "(|({0}={2}{3})({1}={2}{3}))".format( self._uid_attr, self._email_attr, escape_filter_chars(username_or_email), suffix ) query = self._add_user_filter(query) logger.debug("Conducting user search: %s under %s", query, user_search_dn) try: - return (conn.search_s(user_search_dn, ldap.SCOPE_SUBTREE, query.encode("utf-8")), None) + return (conn.search_s(user_search_dn, ldap.SCOPE_SUBTREE, query), None) except ldap.REFERRAL as re: referral_dn = self._get_ldap_referral_dn(re) if not referral_dn: return (None, "Failed to follow referral when looking up username") try: - subquery = u"(%s=%s)" % (self._uid_attr, username_or_email) + subquery = "(%s=%s)" % (self._uid_attr, username_or_email) subquery = self._add_user_filter(subquery) return (conn.search_s(referral_dn, ldap.SCOPE_BASE, subquery), None) except ldap.LDAPError: @@ -242,7 +242,7 @@ def _build_user_information(self, response): if self._requires_email and not response.get(self._email_attr): return (None, 'Missing mail field "%s" in user record' % self._email_attr) - username = response[self._uid_attr][0].decode("utf-8") + username = response[self._uid_attr][0] email = response.get(self._email_attr, [None])[0] return (UserInformation(username=username, email=email, id=username), None) @@ -254,7 +254,7 @@ def ping(self): return (False, "LDAP Admin dn or password is invalid") except ldap.LDAPError as lde: logger.exception("Exception when trying to health check LDAP") - return (False, lde.message) + return (False, str(lde)) return (True, None) @@ -289,7 +289,7 @@ def at_least_one_user_exists(self): return (True, None) except ldap.LDAPError as lde: - return (False, lde.message or "Could not find DN %s" % user_search_dn) + return (False, str(lde) or "Could not find DN %s" % user_search_dn) return (False, None) @@ -347,9 +347,7 @@ def verify_credentials(self, username_or_email, password): # First validate the password by binding as the user try: - with LDAPConnection( - self._ldap_uri, found_dn, password.encode("utf-8"), self._allow_tls_fallback - ): + with LDAPConnection(self._ldap_uri, found_dn, password, self._allow_tls_fallback): pass except ldap.REFERRAL as re: referral_dn = self._get_ldap_referral_dn(re) @@ -358,7 +356,7 @@ def verify_credentials(self, username_or_email, password): try: with LDAPConnection( - self._ldap_uri, referral_dn, password.encode("utf-8"), self._allow_tls_fallback + self._ldap_uri, referral_dn, password, self._allow_tls_fallback ): pass except ldap.INVALID_CREDENTIALS: @@ -437,7 +435,7 @@ def _iterate_members(self, group_dn, page_size, disable_pagination): "Got error when trying to search %s with filter %s: %s", user_search_dn, search_flt, - lde.message, + str(lde), ) break @@ -465,14 +463,14 @@ def _iterate_members(self, group_dn, page_size, 
disable_pagination): "NSO when trying to lookup results of search %s with filter %s: %s", user_search_dn, search_flt, - nsoe.message, + str(nsoe), ) except ldap.LDAPError as lde: logger.exception( "Error when trying to lookup results of search %s with filter %s: %s", user_search_dn, search_flt, - lde.message, + str(lde), ) break diff --git a/data/users/keystone.py b/data/users/keystone.py index 12c61382d0..b7fc12822c 100644 --- a/data/users/keystone.py +++ b/data/users/keystone.py @@ -83,10 +83,10 @@ def ping(self): assert sess.get_user_id() # Make sure we loaded a valid user. except KeystoneUnauthorized as kut: logger.exception("Keystone unauthorized admin") - return (False, "Keystone admin credentials are invalid: %s" % kut.message) + return (False, "Keystone admin credentials are invalid: %s" % str(kut)) except ClientException as e: logger.exception("Keystone unauthorized admin") - return (False, "Keystone ping check failed: %s" % e.message) + return (False, "Keystone ping check failed: %s" % str(e)) return (True, None) @@ -105,7 +105,7 @@ def at_least_one_user_exists(self): except ClientException as e: # Catch exceptions to give the user our custom error message logger.exception("Unable to list users in Keystone") - return (False, e.message) + return (False, str(e)) def verify_credentials(self, username_or_email, password): try: @@ -131,7 +131,7 @@ def verify_credentials(self, username_or_email, password): user = admin_client.users.get(user_id) except KeystoneUnauthorized as kut: logger.exception("Keystone unauthorized admin") - return (None, "Keystone admin credentials are invalid: %s" % kut.message) + return (None, "Keystone admin credentials are invalid: %s" % str(kut)) if self.requires_email and not hasattr(user, "email"): return (None, "Missing email field for user %s" % user_id) @@ -201,10 +201,10 @@ def ping(self): assert sess.get_user_id() # Make sure we loaded a valid user. 
except KeystoneUnauthorized as kut: logger.exception("Keystone unauthorized admin") - return (False, "Keystone admin credentials are invalid: %s" % kut.message) + return (False, "Keystone admin credentials are invalid: %s" % str(kut)) except ClientException as cle: logger.exception("Keystone unauthorized admin") - return (False, "Keystone ping check failed: %s" % cle.message) + return (False, "Keystone ping check failed: %s" % str(cle)) return (True, None) @@ -217,7 +217,7 @@ def at_least_one_user_exists(self): except ClientException as cle: # Catch exceptions to give the user our custom error message logger.exception("Unable to list users in Keystone") - return (False, cle.message) + return (False, str(cle)) def verify_credentials(self, username_or_email, password): try: @@ -274,13 +274,13 @@ def _check_group(self, group_id): return (False, "Group not found") except KeystoneAuthorizationFailure as kaf: logger.exception("Keystone auth failure for admin user for group lookup %s", group_id) - return (False, kaf.message or "Invalid admin username or password") + return (False, str(kaf) or "Invalid admin username or password") except KeystoneUnauthorized as kut: logger.exception("Keystone unauthorized for admin user for group lookup %s", group_id) - return (False, kut.message or "Invalid admin username or password") + return (False, str(kut) or "Invalid admin username or password") except ClientException as cle: logger.exception("Keystone unauthorized for admin user for group lookup %s", group_id) - return (False, cle.message or "Invalid admin username or password") + return (False, str(cle) or "Invalid admin username or password") def iterate_group_members(self, group_lookup_args, page_size=None, disable_pagination=False): group_id = group_lookup_args["group_id"] @@ -302,13 +302,13 @@ def iterator(): return (iterator(), None) except KeystoneAuthorizationFailure as kaf: logger.exception("Keystone auth failure for admin user for group lookup %s", group_id) - return (False, kaf.message or "Invalid admin username or password") + return (False, str(kaf) or "Invalid admin username or password") except KeystoneUnauthorized as kut: logger.exception("Keystone unauthorized for admin user for group lookup %s", group_id) - return (False, kut.message or "Invalid admin username or password") + return (False, str(kut) or "Invalid admin username or password") except ClientException as cle: logger.exception("Keystone unauthorized for admin user for group lookup %s", group_id) - return (False, cle.message or "Invalid admin username or password") + return (False, str(cle) or "Invalid admin username or password") @staticmethod def _user_info(user): @@ -335,19 +335,19 @@ def query_users(self, query, limit=20): return ( None, self.federated_service, - kaf.message or "Invalid admin username or password", + str(kaf) or "Invalid admin username or password", ) except KeystoneUnauthorized as kut: logger.exception("Keystone unauthorized for admin user for query %s", query) return ( None, self.federated_service, - kut.message or "Invalid admin username or password", + str(kut) or "Invalid admin username or password", ) except ClientException as cle: logger.exception("Keystone unauthorized for admin user for query %s", query) return ( None, self.federated_service, - cle.message or "Invalid admin username or password", + str(cle) or "Invalid admin username or password", ) diff --git a/data/users/test/test_shared.py b/data/users/test/test_shared.py index 28b4657f9d..f00ae01aed 100644 --- a/data/users/test/test_shared.py +++ 
b/data/users/test/test_shared.py @@ -2,7 +2,7 @@ from mock import patch -from data.database import model +from data import model from data.users.shared import can_create_user from test.fixtures import * @@ -28,7 +28,7 @@ (True, False, "foo@blacklisted.com", False, False), (True, False, "foo@blacklisted.org", False, False), (True, False, "foo@BlAcKlIsTeD.CoM", False, False), # Verify Capitalization - (True, False, u"foo@mail.bLacklisted.Com", False, False), # Verify unicode + (True, False, "foo@mail.bLacklisted.Com", False, False), # Verify unicode (True, False, "foo@blacklisted.net", False, True), # Avoid False Positives (True, False, "foo@myblacklisted.com", False, True), # Avoid partial domain matches (True, False, "fooATblacklisted.com", False, True), # Ignore invalid email addresses diff --git a/data/users/test/test_users.py b/data/users/test/test_users.py index 4f14636458..a1e9ce340b 100644 --- a/data/users/test/test_users.py +++ b/data/users/test/test_users.py @@ -3,7 +3,7 @@ from contextlib import contextmanager from mock import patch -from data.database import model +from data import model from data.users.federated import DISABLED_MESSAGE from test.test_ldap import mock_ldap from test.test_keystone_auth import fake_keystone diff --git a/digest/checksums.py b/digest/checksums.py index 8626805e8e..7b8842892b 100644 --- a/digest/checksums.py +++ b/digest/checksums.py @@ -8,7 +8,7 @@ def sha256_file(fp, data=None): - h = hashlib.sha256(data or "") + h = hashlib.sha256(data.encode("utf-8") if data else "".encode("utf-8")) if not fp: return h.hexdigest() while True: @@ -20,7 +20,7 @@ def sha256_file(fp, data=None): def sha256_string(s): - return hashlib.sha256(s).hexdigest() + return hashlib.sha256(s.encode("utf-8")).hexdigest() def compute_tarsum(fp, json_data): @@ -64,7 +64,8 @@ def compute_tarsum(fp, json_data): hashes.append(h) hashes.sort() except tarfile.ReadError as e: - if e.message != "empty file": + # TODO (kleesc): Need to add a test for empty tarfile + if str(e) != "empty file": # NOTE(samalba): ignore empty tarfiles but still let the tarsum # compute with json data raise @@ -78,7 +79,7 @@ def compute_tarsum(fp, json_data): def simple_checksum_handler(json_data): - h = hashlib.sha256(json_data.encode("utf8") + "\n") + h = hashlib.sha256(json_data.encode("utf8") + b"\n") def fn(buf): h.update(buf) @@ -90,7 +91,7 @@ def content_checksum_handler(): h = hashlib.sha256() def fn(buf): - h.update(buf) + h.update(buf.encode("utf-8")) return h, fn @@ -104,9 +105,9 @@ def compute_simple(fp, json_data): import sys if len(sys.argv) < 3: - print "Usage: {0} json_file layer_file".format(sys.argv[0]) + print("Usage: {0} json_file layer_file".format(sys.argv[0])) sys.exit(1) - json_data = file(sys.argv[1]).read() + json_data = open(sys.argv[1]).read() fp = open(sys.argv[2]) - print compute_simple(fp, json_data) - print compute_tarsum(fp, json_data) + print(compute_simple(fp, json_data)) + print(compute_tarsum(fp, json_data)) diff --git a/digest/digest_tools.py b/digest/digest_tools.py index baa510f2c7..1099885842 100644 --- a/digest/digest_tools.py +++ b/digest/digest_tools.py @@ -25,6 +25,9 @@ def __str__(self): def __eq__(self, rhs): return isinstance(rhs, Digest) and str(self) == str(rhs) + def __hash__(self): + return hash((self._hash_alg, self._hash_bytes)) + @staticmethod def parse_digest(digest): """ @@ -65,6 +68,7 @@ def sha256_digest(content): """ Returns a sha256 hash of the content bytes in digest form. 
""" + assert isinstance(content, bytes) def single_chunk_generator(): yield content diff --git a/digest/test/test_digest_tools.py b/digest/test/test_digest_tools.py index 1e3792ff3e..9d285ab0ee 100644 --- a/digest/test/test_digest_tools.py +++ b/digest/test/test_digest_tools.py @@ -28,6 +28,7 @@ def test_parse_good(digest, output_args): "sha256123123", "tarsum.v1+", "tarsum.v1123+sha1:", + "sha256:👌", ], ) def test_parse_fail(bad_digest): diff --git a/displayversion.py b/displayversion.py index 69e74a1099..3108652a91 100644 --- a/displayversion.py +++ b/displayversion.py @@ -4,10 +4,10 @@ def displayversion(): version_string = ("Quay %s (%s)" % (__version__, __gitrev__.strip())).strip() - print "=" * (len(version_string) + 4) - print "= " + version_string + " =" - print "=" * (len(version_string) + 4) - print "" + print("=" * (len(version_string) + 4)) + print("= " + version_string + " =") + print("=" * (len(version_string) + 4)) + print("") if __name__ == "__main__": diff --git a/docs/development-container.md b/docs/development-container.md index 665dc01821..c950a8379e 100644 --- a/docs/development-container.md +++ b/docs/development-container.md @@ -59,7 +59,7 @@ This will start the quay container and be fully running. The web UI is available When exec'ing into the development container, it is best to run under the [SCLs](https://www.softwarecollections.org) used during production. This will provide the correct paths to python and other executables. ``` -docker exec --rm -it quay scl enable python27 rh-nginx112 bash +docker exec --rm -it quay bash ``` The following sections are perhaps easiest to run in separate `docker exec` sessions, which is how they will be described. Some or all could be run in the background and managed differently than described here. 
diff --git a/endpoints/api/__init__.py b/endpoints/api/__init__.py index ae1a9f1120..2ec5b0d31a 100644 --- a/endpoints/api/__init__.py +++ b/endpoints/api/__init__.py @@ -44,7 +44,7 @@ from util.request import get_request_ip from util.timedeltastring import convert_to_timedelta -from __init__models_pre_oci import pre_oci_model as model +from .__init__models_pre_oci import pre_oci_model as model logger = logging.getLogger(__name__) diff --git a/endpoints/api/__init__models_pre_oci.py b/endpoints/api/__init__models_pre_oci.py index 06521978ff..e34f683f85 100644 --- a/endpoints/api/__init__models_pre_oci.py +++ b/endpoints/api/__init__models_pre_oci.py @@ -1,4 +1,4 @@ -from __init__models_interface import InitDataInterface +from .__init__models_interface import InitDataInterface from data import model from data.logs_model import logs_model diff --git a/endpoints/api/build.py b/endpoints/api/build.py index bb0408eb5f..86912f9770 100644 --- a/endpoints/api/build.py +++ b/endpoints/api/build.py @@ -8,7 +8,7 @@ import os from flask import request -from urlparse import urlparse +from urllib.parse import urlparse import features @@ -328,7 +328,7 @@ def post(self, namespace, repository): build_name = ( user_files.get_file_checksum(dockerfile_id) if dockerfile_id - else hashlib.sha224(archive_url).hexdigest()[0:7] + else hashlib.sha224(archive_url.encode("ascii")).hexdigest()[0:7] ) except IOError: raise InvalidRequest("File %s could not be found or is invalid" % dockerfile_id) diff --git a/endpoints/api/discovery.py b/endpoints/api/discovery.py index 6c0a52fa05..73d893300a 100644 --- a/endpoints/api/discovery.py +++ b/endpoints/api/discovery.py @@ -34,7 +34,6 @@ TYPE_CONVERTER = { truthy_bool: "boolean", str: "string", - basestring: "string", reqparse.text_type: "string", int: "integer", } @@ -242,7 +241,7 @@ def swagger_parameter( "404": {"description": "Not found",}, } - for _, body in responses.items(): + for _, body in list(responses.items()): body["schema"] = {"$ref": "#/definitions/ApiError"} if method_name == "DELETE": @@ -275,7 +274,7 @@ def swagger_parameter( path_swagger[method_name.lower()] = operation_swagger tags.sort(key=lambda t: t["name"]) - paths = OrderedDict(sorted(paths.items(), key=lambda p: p[1]["x-tag"])) + paths = OrderedDict(sorted(list(paths.items()), key=lambda p: p[1]["x-tag"])) if compact: return {"paths": paths} @@ -304,7 +303,7 @@ def swagger_parameter( % (PREFERRED_URL_SCHEME, SERVER_HOSTNAME), "scopes": { scope.scope: scope.description - for scope in scopes.app_scopes(app.config).values() + for scope in list(scopes.app_scopes(app.config).values()) }, }, }, diff --git a/endpoints/api/globalmessages.py b/endpoints/api/globalmessages.py index bf050e9a92..26f1dbb7df 100644 --- a/endpoints/api/globalmessages.py +++ b/endpoints/api/globalmessages.py @@ -18,7 +18,7 @@ require_scope, show_if, ) -from globalmessages_models_pre_oci import pre_oci_model as model +from .globalmessages_models_pre_oci import pre_oci_model as model @resource("/v1/messages") diff --git a/endpoints/api/globalmessages_models_pre_oci.py b/endpoints/api/globalmessages_models_pre_oci.py index faa5e7a3eb..52529f9911 100644 --- a/endpoints/api/globalmessages_models_pre_oci.py +++ b/endpoints/api/globalmessages_models_pre_oci.py @@ -1,4 +1,4 @@ -from globalmessages_models_interface import GlobalMessageDataInterface, GlobalMessage +from .globalmessages_models_interface import GlobalMessageDataInterface, GlobalMessage from data import model diff --git a/endpoints/api/mirror.py b/endpoints/api/mirror.py index 
f80f703322..02e718884b 100644 --- a/endpoints/api/mirror.py +++ b/endpoints/api/mirror.py @@ -540,7 +540,7 @@ def _string_to_dt(self, string): dt = datetime.fromtimestamp(ts, pytz.UTC) return dt """ - assert isinstance(string, (str, unicode)) + assert isinstance(string, str) dt = datetime.strptime(string, "%Y-%m-%dT%H:%M:%SZ") return dt diff --git a/endpoints/api/organization.py b/endpoints/api/organization.py index c575414abc..a51319cc8c 100644 --- a/endpoints/api/organization.py +++ b/endpoints/api/organization.py @@ -364,7 +364,7 @@ def get(self, orgname): collaborators[username]["repositories"].append(perm.repository.name) - return {"collaborators": collaborators.values()} + return {"collaborators": list(collaborators.values())} @resource("/v1/organization//members") @@ -419,7 +419,7 @@ def get(self, orgname): members_dict[username]["repositories"].append(permission.repository.name) - return {"members": members_dict.values()} + return {"members": list(members_dict.values())} raise Unauthorized() diff --git a/endpoints/api/permission.py b/endpoints/api/permission.py index ed5421adc0..5ab7c15240 100644 --- a/endpoints/api/permission.py +++ b/endpoints/api/permission.py @@ -17,8 +17,8 @@ path_param, ) from endpoints.exception import NotFound -from permission_models_pre_oci import pre_oci_model as model -from permission_models_interface import DeleteException, SaveException +from .permission_models_pre_oci import pre_oci_model as model +from .permission_models_interface import DeleteException, SaveException logger = logging.getLogger(__name__) diff --git a/endpoints/api/permission_models_pre_oci.py b/endpoints/api/permission_models_pre_oci.py index 921cb818fa..0ed72c28f0 100644 --- a/endpoints/api/permission_models_pre_oci.py +++ b/endpoints/api/permission_models_pre_oci.py @@ -1,6 +1,6 @@ from app import avatar from data import model -from permission_models_interface import ( +from .permission_models_interface import ( PermissionDataInterface, UserPermission, TeamPermission, diff --git a/endpoints/api/prototype.py b/endpoints/api/prototype.py index bfd35325c7..30256b0526 100644 --- a/endpoints/api/prototype.py +++ b/endpoints/api/prototype.py @@ -62,7 +62,7 @@ def log_prototype_action(action_kind, orgname, prototype, **kwargs): "role": prototype.role.name, } - for key, value in kwargs.items(): + for key, value in list(kwargs.items()): log_params[key] = value if prototype.delegate_user: diff --git a/endpoints/api/repository_models_interface.py b/endpoints/api/repository_models_interface.py index 784e46bcfd..779fd24c02 100644 --- a/endpoints/api/repository_models_interface.py +++ b/endpoints/api/repository_models_interface.py @@ -163,7 +163,7 @@ def to_dict(self): "name": self.name, "release": self.linked_tag_name, "last_modified": format_date( - datetime.fromtimestamp(self.linked_tag_lifetime_start / 1000) + datetime.fromtimestamp(self.linked_tag_lifetime_start // 1000) ), } @@ -180,7 +180,7 @@ class Release(namedtuple("Channel", ["name", "lifetime_start", "releases_channel def to_dict(self): return { "name": self.name, - "last_modified": format_date(datetime.fromtimestamp(self.lifetime_start / 1000)), + "last_modified": format_date(datetime.fromtimestamp(self.lifetime_start // 1000)), "channels": self.releases_channels_map[self.name], } diff --git a/endpoints/api/repositorynotification.py b/endpoints/api/repositorynotification.py index 2759bd0488..d68c4c591d 100644 --- a/endpoints/api/repositorynotification.py +++ b/endpoints/api/repositorynotification.py @@ -77,7 +77,7 @@ def post(self, 
namespace_name, repository_name): try: method_handler.validate(namespace_name, repository_name, parsed["config"]) except CannotValidateNotificationMethodException as ex: - raise request_error(message=ex.message) + raise request_error(message=str(ex)) new_notification = model.create_repo_notification( namespace_name, diff --git a/endpoints/api/robot_models_pre_oci.py b/endpoints/api/robot_models_pre_oci.py index 2d4c3ba2f2..54e9a4dea4 100644 --- a/endpoints/api/robot_models_pre_oci.py +++ b/endpoints/api/robot_models_pre_oci.py @@ -100,7 +100,7 @@ def list_entity_robot_permission_teams( robot_dict["description"], ) - return robots.values() + return list(robots.values()) def regenerate_user_robot_token(self, robot_shortname, owning_user): robot, password, metadata = model.user.regenerate_robot_token(robot_shortname, owning_user) diff --git a/endpoints/api/search.py b/endpoints/api/search.py index 966589dba8..e3fbcfd9f2 100644 --- a/endpoints/api/search.py +++ b/endpoints/api/search.py @@ -36,7 +36,7 @@ from util.names import parse_robot_username from util.parsing import truthy_bool -import anunidecode # Don't listen to pylint's lies. This import is required. +from text_unidecode import unidecode import math @@ -97,7 +97,7 @@ def get(self, prefix, parsed_args): # Ensure we don't have any unicode characters in the search, as it breaks the search. Nothing # being searched can have unicode in it anyway, so this is a safe operation. - prefix = prefix.encode("unidecode", "ignore").replace(" ", "").lower() + prefix = unidecode(prefix).replace(" ", "").lower() teams = [] org_data = [] diff --git a/endpoints/api/subscribe.py b/endpoints/api/subscribe.py index cb70ab0979..f24f5f362a 100644 --- a/endpoints/api/subscribe.py +++ b/endpoints/api/subscribe.py @@ -30,7 +30,7 @@ def check_repository_usage(user_or_org, plan_found): def carderror_response(exc): - return {"carderror": exc.message}, 402 + return {"carderror": str(exc)}, 402 def connection_response(exc): diff --git a/endpoints/api/superuser.py b/endpoints/api/superuser.py index 38d9e3b49f..60f0845fe6 100644 --- a/endpoints/api/superuser.py +++ b/endpoints/api/superuser.py @@ -159,7 +159,9 @@ def user_view(user, password=None): } if password is not None: - user_data["encrypted_password"] = authentication.encrypt_user_password(password) + user_data["encrypted_password"] = authentication.encrypt_user_password(password).decode( + "ascii" + ) return user_data @@ -288,7 +290,9 @@ def post(self): "username": username, "email": email, "password": password, - "encrypted_password": authentication.encrypt_user_password(password), + "encrypted_password": authentication.encrypt_user_password(password).decode( + "ascii" + ), } raise Unauthorized() @@ -438,7 +442,9 @@ def put(self, username): return_value = user.to_dict() if user_data.get("password") is not None: password = user_data.get("password") - return_value["encrypted_password"] = authentication.encrypt_user_password(password) + return_value["encrypted_password"] = authentication.encrypt_user_password( + password + ).decode("ascii") if user_data.get("email") is not None: return_value["email"] = user_data.get("email") @@ -670,8 +676,8 @@ def post(self): "kid": key_id, "name": key_name, "service": body["service"], - "public_key": private_key.publickey().exportKey("PEM"), - "private_key": private_key.exportKey("PEM"), + "public_key": private_key.publickey().exportKey("PEM").decode("ascii"), + "private_key": private_key.exportKey("PEM").decode("ascii"), } ) diff --git a/endpoints/api/tag.py 
b/endpoints/api/tag.py index f015397f78..cb6da0f307 100644 --- a/endpoints/api/tag.py +++ b/endpoints/api/tag.py @@ -34,10 +34,10 @@ def _tag_dict(tag): "reversion": tag.reversion, } - if tag.lifetime_start_ts > 0: + if tag.lifetime_start_ts and tag.lifetime_start_ts > 0: tag_info["start_ts"] = tag.lifetime_start_ts - if tag.lifetime_end_ts > 0: + if tag.lifetime_end_ts and tag.lifetime_end_ts > 0: tag_info["end_ts"] = tag.lifetime_end_ts # TODO: Remove this once fully on OCI data model. @@ -53,7 +53,7 @@ def _tag_dict(tag): if tag.manifest: tag_info["is_manifest_list"] = tag.manifest.is_manifest_list - if tag.lifetime_start_ts > 0: + if tag.lifetime_start_ts and tag.lifetime_start_ts > 0: last_modified = format_date(datetime.utcfromtimestamp(tag.lifetime_start_ts)) tag_info["last_modified"] = last_modified diff --git a/endpoints/api/test/test_mirror.py b/endpoints/api/test/test_mirror.py index e3755ff64c..7e91a2cddf 100644 --- a/endpoints/api/test/test_mirror.py +++ b/endpoints/api/test/test_mirror.py @@ -21,7 +21,7 @@ def _setup_mirror(): "is_enabled": True, "external_reference": "quay.io/redhat/quay", "sync_interval": 5000, - "sync_start_date": datetime(2020, 01, 02, 6, 30, 0), + "sync_start_date": datetime(2020, 0o1, 0o2, 6, 30, 0), "external_registry_username": "fakeUsername", "external_registry_password": "fakePassword", "external_registry_config": { diff --git a/endpoints/api/test/test_secscan.py b/endpoints/api/test/test_secscan.py index 6aaf4812cd..561a177224 100644 --- a/endpoints/api/test/test_secscan.py +++ b/endpoints/api/test/test_secscan.py @@ -3,6 +3,7 @@ import pytest from data.registry_model import registry_model +from endpoints.test.shared import gen_basic_auth from endpoints.api.test.shared import conduct_api_call from endpoints.api.secscan import RepositoryImageSecurity, RepositoryManifestSecurity @@ -22,7 +23,7 @@ def test_get_security_info_with_pull_secret(endpoint, client): } headers = { - "Authorization": "Basic %s" % base64.b64encode("devtable:password"), + "Authorization": gen_basic_auth("devtable", "password"), } conduct_api_call(client, endpoint, "GET", params, None, headers=headers, expected_code=200) diff --git a/endpoints/api/test/test_security.py b/endpoints/api/test/test_security.py index 98e276243e..e4d941075c 100644 --- a/endpoints/api/test/test_security.py +++ b/endpoints/api/test/test_security.py @@ -246,7 +246,7 @@ StarredRepositoryList, "POST", None, - {u"namespace": "public", u"repository": "publicrepo"}, + {"namespace": "public", "repository": "publicrepo"}, None, 401, ), @@ -254,7 +254,7 @@ StarredRepositoryList, "POST", None, - {u"namespace": "public", u"repository": "publicrepo"}, + {"namespace": "public", "repository": "publicrepo"}, "devtable", 201, ), @@ -262,7 +262,7 @@ StarredRepositoryList, "POST", None, - {u"namespace": "public", u"repository": "publicrepo"}, + {"namespace": "public", "repository": "publicrepo"}, "freshuser", 201, ), @@ -270,7 +270,7 @@ StarredRepositoryList, "POST", None, - {u"namespace": "public", u"repository": "publicrepo"}, + {"namespace": "public", "repository": "publicrepo"}, "reader", 201, ), @@ -298,7 +298,7 @@ ConvertToOrganization, "POST", None, - {u"adminPassword": "IQTM", u"plan": "1RB4", u"adminUser": "44E8"}, + {"adminPassword": "IQTM", "plan": "1RB4", "adminUser": "44E8"}, None, 401, ), @@ -306,7 +306,7 @@ ConvertToOrganization, "POST", None, - {u"adminPassword": "IQTM", u"plan": "1RB4", u"adminUser": "44E8"}, + {"adminPassword": "IQTM", "plan": "1RB4", "adminUser": "44E8"}, "devtable", 400, ), @@ -314,7 
+314,7 @@ ConvertToOrganization, "POST", None, - {u"adminPassword": "IQTM", u"plan": "1RB4", u"adminUser": "44E8"}, + {"adminPassword": "IQTM", "plan": "1RB4", "adminUser": "44E8"}, "freshuser", 400, ), @@ -322,7 +322,7 @@ ConvertToOrganization, "POST", None, - {u"adminPassword": "IQTM", u"plan": "1RB4", u"adminUser": "44E8"}, + {"adminPassword": "IQTM", "plan": "1RB4", "adminUser": "44E8"}, "reader", 400, ), @@ -334,20 +334,20 @@ (UserCard, "GET", None, None, "devtable", 200), (UserCard, "GET", None, None, "freshuser", 200), (UserCard, "GET", None, None, "reader", 200), - (UserCard, "POST", None, {u"token": "ORH4"}, None, 401), + (UserCard, "POST", None, {"token": "ORH4"}, None, 401), (UserPlan, "GET", None, None, None, 401), (UserPlan, "GET", None, None, "devtable", 200), (UserPlan, "GET", None, None, "freshuser", 200), (UserPlan, "GET", None, None, "reader", 200), - (UserPlan, "PUT", None, {u"plan": "1QIK"}, None, 401), + (UserPlan, "PUT", None, {"plan": "1QIK"}, None, 401), (UserLogs, "GET", None, None, None, 401), (UserLogs, "GET", None, None, "devtable", 200), (UserLogs, "GET", None, None, "freshuser", 200), (UserLogs, "GET", None, None, "reader", 200), - (OrganizationList, "POST", None, {u"name": "KSIS", u"email": "DHVZ"}, None, 401), - (OrganizationList, "POST", None, {u"name": "KSIS", u"email": "DHVZ"}, "devtable", 400), - (OrganizationList, "POST", None, {u"name": "KSIS", u"email": "DHVZ"}, "freshuser", 400), - (OrganizationList, "POST", None, {u"name": "KSIS", u"email": "DHVZ"}, "reader", 400), + (OrganizationList, "POST", None, {"name": "KSIS", "email": "DHVZ"}, None, 401), + (OrganizationList, "POST", None, {"name": "KSIS", "email": "DHVZ"}, "devtable", 400), + (OrganizationList, "POST", None, {"name": "KSIS", "email": "DHVZ"}, "freshuser", 400), + (OrganizationList, "POST", None, {"name": "KSIS", "email": "DHVZ"}, "reader", 400), (Repository, "GET", {"repository": "public/publicrepo"}, None, None, 200), (Repository, "GET", {"repository": "public/publicrepo"}, None, "devtable", 200), (Repository, "GET", {"repository": "public/publicrepo"}, None, "freshuser", 200), @@ -360,7 +360,7 @@ RepositoryList, "POST", None, - {u"repository": "xzgb", u"visibility": u"public", u"description": "0O8U"}, + {"repository": "xzgb", "visibility": "public", "description": "0O8U"}, None, 400, ), @@ -368,7 +368,7 @@ RepositoryList, "POST", None, - {u"repository": "xzgb", u"visibility": u"public", u"description": "0O8U"}, + {"repository": "xzgb", "visibility": "public", "description": "0O8U"}, "devtable", 201, ), @@ -376,7 +376,7 @@ RepositoryList, "POST", None, - {u"repository": "xzgb", u"visibility": u"public", u"description": "0O8U"}, + {"repository": "xzgb", "visibility": "public", "description": "0O8U"}, "freshuser", 201, ), @@ -384,7 +384,7 @@ RepositoryList, "POST", None, - {u"repository": "xzgb", u"visibility": u"public", u"description": "0O8U"}, + {"repository": "xzgb", "visibility": "public", "description": "0O8U"}, "reader", 201, ), @@ -392,22 +392,22 @@ (DiscoveryResource, "GET", None, None, "devtable", 200), (DiscoveryResource, "GET", None, None, "freshuser", 200), (DiscoveryResource, "GET", None, None, "reader", 200), - (FileDropResource, "POST", None, {u"mimeType": "TKBX"}, None, 200), - (FileDropResource, "POST", None, {u"mimeType": "TKBX"}, "devtable", 200), - (FileDropResource, "POST", None, {u"mimeType": "TKBX"}, "freshuser", 200), - (FileDropResource, "POST", None, {u"mimeType": "TKBX"}, "reader", 200), - (Recovery, "POST", None, {u"email": "826S"}, None, 200), - (Recovery, 
"POST", None, {u"email": "826S"}, "devtable", 200), - (Recovery, "POST", None, {u"email": "826S"}, "freshuser", 200), - (Recovery, "POST", None, {u"email": "826S"}, "reader", 200), + (FileDropResource, "POST", None, {"mimeType": "TKBX"}, None, 200), + (FileDropResource, "POST", None, {"mimeType": "TKBX"}, "devtable", 200), + (FileDropResource, "POST", None, {"mimeType": "TKBX"}, "freshuser", 200), + (FileDropResource, "POST", None, {"mimeType": "TKBX"}, "reader", 200), + (Recovery, "POST", None, {"email": "826S"}, None, 200), + (Recovery, "POST", None, {"email": "826S"}, "devtable", 200), + (Recovery, "POST", None, {"email": "826S"}, "freshuser", 200), + (Recovery, "POST", None, {"email": "826S"}, "reader", 200), (Signout, "POST", None, None, None, 200), (Signout, "POST", None, None, "devtable", 200), (Signout, "POST", None, None, "freshuser", 200), (Signout, "POST", None, None, "reader", 200), - (Signin, "POST", None, {u"username": "E9RY", u"password": "LQ0N"}, None, 403), - (Signin, "POST", None, {u"username": "E9RY", u"password": "LQ0N"}, "devtable", 403), - (Signin, "POST", None, {u"username": "E9RY", u"password": "LQ0N"}, "freshuser", 403), - (Signin, "POST", None, {u"username": "E9RY", u"password": "LQ0N"}, "reader", 403), + (Signin, "POST", None, {"username": "E9RY", "password": "LQ0N"}, None, 403), + (Signin, "POST", None, {"username": "E9RY", "password": "LQ0N"}, "devtable", 403), + (Signin, "POST", None, {"username": "E9RY", "password": "LQ0N"}, "freshuser", 403), + (Signin, "POST", None, {"username": "E9RY", "password": "LQ0N"}, "reader", 403), (ExternalLoginInformation, "POST", {"service_id": "someservice"}, {}, None, 400), (ExternalLoginInformation, "POST", {"service_id": "someservice"}, {}, "devtable", 400), (ExternalLoginInformation, "POST", {"service_id": "someservice"}, {}, "freshuser", 400), @@ -416,14 +416,14 @@ (DetachExternal, "POST", {"service_id": "someservice"}, {}, "devtable", 200), (DetachExternal, "POST", {"service_id": "someservice"}, {}, "freshuser", 200), (DetachExternal, "POST", {"service_id": "someservice"}, {}, "reader", 200), - (VerifyUser, "POST", None, {u"password": "LQ0N"}, None, 401), - (VerifyUser, "POST", None, {u"password": "password"}, "devtable", 200), - (VerifyUser, "POST", None, {u"password": "LQ0N"}, "freshuser", 403), - (VerifyUser, "POST", None, {u"password": "LQ0N"}, "reader", 403), - (ClientKey, "POST", None, {u"password": "LQ0N"}, None, 401), - (ClientKey, "POST", None, {u"password": "password"}, "devtable", 200), - (ClientKey, "POST", None, {u"password": "LQ0N"}, "freshuser", 400), - (ClientKey, "POST", None, {u"password": "password"}, "reader", 200), + (VerifyUser, "POST", None, {"password": "LQ0N"}, None, 401), + (VerifyUser, "POST", None, {"password": "password"}, "devtable", 200), + (VerifyUser, "POST", None, {"password": "LQ0N"}, "freshuser", 403), + (VerifyUser, "POST", None, {"password": "LQ0N"}, "reader", 403), + (ClientKey, "POST", None, {"password": "LQ0N"}, None, 401), + (ClientKey, "POST", None, {"password": "password"}, "devtable", 200), + (ClientKey, "POST", None, {"password": "LQ0N"}, "freshuser", 400), + (ClientKey, "POST", None, {"password": "password"}, "reader", 200), (ListPlans, "GET", None, None, None, 200), (ListPlans, "GET", None, None, "devtable", 200), (ListPlans, "GET", None, None, "freshuser", 200), @@ -432,12 +432,12 @@ (User, "GET", None, None, "devtable", 200), (User, "GET", None, None, "freshuser", 200), (User, "GET", None, None, "reader", 200), - (User, "POST", None, {u"username": "T946", u"password": 
"0SG4", u"email": "MENT"}, None, 400), + (User, "POST", None, {"username": "T946", "password": "0SG4", "email": "MENT"}, None, 400), ( User, "POST", None, - {u"username": "T946", u"password": "0SG4", u"email": "MENT"}, + {"username": "T946", "password": "0SG4", "email": "MENT"}, "devtable", 400, ), @@ -445,18 +445,11 @@ User, "POST", None, - {u"username": "T946", u"password": "0SG4", u"email": "MENT"}, + {"username": "T946", "password": "0SG4", "email": "MENT"}, "freshuser", 400, ), - ( - User, - "POST", - None, - {u"username": "T946", u"password": "0SG4", u"email": "MENT"}, - "reader", - 400, - ), + (User, "POST", None, {"username": "T946", "password": "0SG4", "email": "MENT"}, "reader", 400,), (User, "PUT", None, {}, None, 401), (User, "PUT", None, {}, "devtable", 200), (User, "PUT", None, {}, "freshuser", 200), @@ -694,7 +687,7 @@ RepositoryUserPermission, "PUT", {"username": "A2O9", "repository": "public/publicrepo"}, - {u"role": u"read"}, + {"role": "read"}, None, 401, ), @@ -702,7 +695,7 @@ RepositoryUserPermission, "PUT", {"username": "A2O9", "repository": "public/publicrepo"}, - {u"role": u"read"}, + {"role": "read"}, "devtable", 403, ), @@ -710,7 +703,7 @@ RepositoryUserPermission, "PUT", {"username": "A2O9", "repository": "public/publicrepo"}, - {u"role": u"read"}, + {"role": "read"}, "freshuser", 403, ), @@ -718,7 +711,7 @@ RepositoryUserPermission, "PUT", {"username": "A2O9", "repository": "public/publicrepo"}, - {u"role": u"read"}, + {"role": "read"}, "reader", 403, ), @@ -790,7 +783,7 @@ RepositoryUserPermission, "PUT", {"username": "A2O9", "repository": "devtable/shared"}, - {u"role": u"read"}, + {"role": "read"}, None, 401, ), @@ -798,7 +791,7 @@ RepositoryUserPermission, "PUT", {"username": "A2O9", "repository": "devtable/shared"}, - {u"role": u"read"}, + {"role": "read"}, "devtable", 400, ), @@ -806,7 +799,7 @@ RepositoryUserPermission, "PUT", {"username": "A2O9", "repository": "devtable/shared"}, - {u"role": u"read"}, + {"role": "read"}, "freshuser", 403, ), @@ -814,7 +807,7 @@ RepositoryUserPermission, "PUT", {"username": "A2O9", "repository": "devtable/shared"}, - {u"role": u"read"}, + {"role": "read"}, "reader", 403, ), @@ -886,7 +879,7 @@ RepositoryUserPermission, "PUT", {"username": "A2O9", "repository": "buynlarge/orgrepo"}, - {u"role": u"read"}, + {"role": "read"}, None, 401, ), @@ -894,7 +887,7 @@ RepositoryUserPermission, "PUT", {"username": "A2O9", "repository": "buynlarge/orgrepo"}, - {u"role": u"read"}, + {"role": "read"}, "devtable", 400, ), @@ -902,7 +895,7 @@ RepositoryUserPermission, "PUT", {"username": "A2O9", "repository": "buynlarge/orgrepo"}, - {u"role": u"read"}, + {"role": "read"}, "freshuser", 403, ), @@ -910,7 +903,7 @@ RepositoryUserPermission, "PUT", {"username": "A2O9", "repository": "buynlarge/orgrepo"}, - {u"role": u"read"}, + {"role": "read"}, "reader", 403, ), @@ -982,7 +975,7 @@ RepositoryTeamPermission, "PUT", {"repository": "public/publicrepo", "teamname": "readers"}, - {u"role": u"read"}, + {"role": "read"}, None, 401, ), @@ -990,7 +983,7 @@ RepositoryTeamPermission, "PUT", {"repository": "public/publicrepo", "teamname": "readers"}, - {u"role": u"read"}, + {"role": "read"}, "devtable", 403, ), @@ -998,7 +991,7 @@ RepositoryTeamPermission, "PUT", {"repository": "public/publicrepo", "teamname": "readers"}, - {u"role": u"read"}, + {"role": "read"}, "freshuser", 403, ), @@ -1006,7 +999,7 @@ RepositoryTeamPermission, "PUT", {"repository": "public/publicrepo", "teamname": "readers"}, - {u"role": u"read"}, + {"role": "read"}, "reader", 403, ), 
@@ -1078,7 +1071,7 @@ RepositoryTeamPermission, "PUT", {"repository": "devtable/shared", "teamname": "readers"}, - {u"role": u"read"}, + {"role": "read"}, None, 401, ), @@ -1086,7 +1079,7 @@ RepositoryTeamPermission, "PUT", {"repository": "devtable/shared", "teamname": "readers"}, - {u"role": u"read"}, + {"role": "read"}, "devtable", 400, ), @@ -1094,7 +1087,7 @@ RepositoryTeamPermission, "PUT", {"repository": "devtable/shared", "teamname": "readers"}, - {u"role": u"read"}, + {"role": "read"}, "freshuser", 403, ), @@ -1102,7 +1095,7 @@ RepositoryTeamPermission, "PUT", {"repository": "devtable/shared", "teamname": "readers"}, - {u"role": u"read"}, + {"role": "read"}, "reader", 403, ), @@ -1174,7 +1167,7 @@ RepositoryTeamPermission, "PUT", {"repository": "buynlarge/orgrepo", "teamname": "readers"}, - {u"role": u"read"}, + {"role": "read"}, None, 401, ), @@ -1182,7 +1175,7 @@ RepositoryTeamPermission, "PUT", {"repository": "buynlarge/orgrepo", "teamname": "readers"}, - {u"role": u"read"}, + {"role": "read"}, "devtable", 200, ), @@ -1190,7 +1183,7 @@ RepositoryTeamPermission, "PUT", {"repository": "buynlarge/orgrepo", "teamname": "readers"}, - {u"role": u"read"}, + {"role": "read"}, "freshuser", 403, ), @@ -1198,7 +1191,7 @@ RepositoryTeamPermission, "PUT", {"repository": "buynlarge/orgrepo", "teamname": "readers"}, - {u"role": u"read"}, + {"role": "read"}, "reader", 403, ), @@ -1270,7 +1263,7 @@ RepositoryTeamPermission, "PUT", {"repository": "public/publicrepo", "teamname": "owners"}, - {u"role": u"read"}, + {"role": "read"}, None, 401, ), @@ -1278,7 +1271,7 @@ RepositoryTeamPermission, "PUT", {"repository": "public/publicrepo", "teamname": "owners"}, - {u"role": u"read"}, + {"role": "read"}, "devtable", 403, ), @@ -1286,7 +1279,7 @@ RepositoryTeamPermission, "PUT", {"repository": "public/publicrepo", "teamname": "owners"}, - {u"role": u"read"}, + {"role": "read"}, "freshuser", 403, ), @@ -1294,7 +1287,7 @@ RepositoryTeamPermission, "PUT", {"repository": "public/publicrepo", "teamname": "owners"}, - {u"role": u"read"}, + {"role": "read"}, "reader", 403, ), @@ -1366,7 +1359,7 @@ RepositoryTeamPermission, "PUT", {"repository": "devtable/shared", "teamname": "owners"}, - {u"role": u"read"}, + {"role": "read"}, None, 401, ), @@ -1374,7 +1367,7 @@ RepositoryTeamPermission, "PUT", {"repository": "devtable/shared", "teamname": "owners"}, - {u"role": u"read"}, + {"role": "read"}, "devtable", 400, ), @@ -1382,7 +1375,7 @@ RepositoryTeamPermission, "PUT", {"repository": "devtable/shared", "teamname": "owners"}, - {u"role": u"read"}, + {"role": "read"}, "freshuser", 403, ), @@ -1390,7 +1383,7 @@ RepositoryTeamPermission, "PUT", {"repository": "devtable/shared", "teamname": "owners"}, - {u"role": u"read"}, + {"role": "read"}, "reader", 403, ), @@ -1462,7 +1455,7 @@ RepositoryTeamPermission, "PUT", {"repository": "buynlarge/orgrepo", "teamname": "owners"}, - {u"role": u"read"}, + {"role": "read"}, None, 401, ), @@ -1470,7 +1463,7 @@ RepositoryTeamPermission, "PUT", {"repository": "buynlarge/orgrepo", "teamname": "owners"}, - {u"role": u"read"}, + {"role": "read"}, "devtable", 200, ), @@ -1478,7 +1471,7 @@ RepositoryTeamPermission, "PUT", {"repository": "buynlarge/orgrepo", "teamname": "owners"}, - {u"role": u"read"}, + {"role": "read"}, "freshuser", 403, ), @@ -1486,7 +1479,7 @@ RepositoryTeamPermission, "PUT", {"repository": "buynlarge/orgrepo", "teamname": "owners"}, - {u"role": u"read"}, + {"role": "read"}, "reader", 403, ), @@ -2760,7 +2753,7 @@ PermissionPrototype, "PUT", {"orgname": "buynlarge", 
"prototypeid": "L24B"}, - {u"role": u"read"}, + {"role": "read"}, None, 401, ), @@ -2768,7 +2761,7 @@ PermissionPrototype, "PUT", {"orgname": "buynlarge", "prototypeid": "L24B"}, - {u"role": u"read"}, + {"role": "read"}, "devtable", 404, ), @@ -2776,7 +2769,7 @@ PermissionPrototype, "PUT", {"orgname": "buynlarge", "prototypeid": "L24B"}, - {u"role": u"read"}, + {"role": "read"}, "freshuser", 403, ), @@ -2784,7 +2777,7 @@ PermissionPrototype, "PUT", {"orgname": "buynlarge", "prototypeid": "L24B"}, - {u"role": u"read"}, + {"role": "read"}, "reader", 403, ), @@ -2907,7 +2900,7 @@ OrganizationTeam, "PUT", {"orgname": "buynlarge", "teamname": "readers"}, - {u"role": u"member"}, + {"role": "member"}, None, 401, ), @@ -2915,7 +2908,7 @@ OrganizationTeam, "PUT", {"orgname": "buynlarge", "teamname": "readers"}, - {u"role": u"member"}, + {"role": "member"}, "devtable", 200, ), @@ -2923,7 +2916,7 @@ OrganizationTeam, "PUT", {"orgname": "buynlarge", "teamname": "readers"}, - {u"role": u"member"}, + {"role": "member"}, "freshuser", 403, ), @@ -2931,7 +2924,7 @@ OrganizationTeam, "PUT", {"orgname": "buynlarge", "teamname": "readers"}, - {u"role": u"member"}, + {"role": "member"}, "reader", 403, ), @@ -2964,7 +2957,7 @@ OrganizationTeam, "PUT", {"orgname": "buynlarge", "teamname": "owners"}, - {u"role": u"member"}, + {"role": "member"}, None, 401, ), @@ -2972,7 +2965,7 @@ OrganizationTeam, "PUT", {"orgname": "buynlarge", "teamname": "owners"}, - {u"role": u"member"}, + {"role": "member"}, "devtable", 400, ), @@ -2980,7 +2973,7 @@ OrganizationTeam, "PUT", {"orgname": "buynlarge", "teamname": "owners"}, - {u"role": u"member"}, + {"role": "member"}, "freshuser", 403, ), @@ -2988,7 +2981,7 @@ OrganizationTeam, "PUT", {"orgname": "buynlarge", "teamname": "owners"}, - {u"role": u"member"}, + {"role": "member"}, "reader", 403, ), @@ -3531,7 +3524,7 @@ RepositoryToken, "PUT", {"code": "UJQB", "repository": "public/publicrepo"}, - {u"role": u"read"}, + {"role": "read"}, None, 401, ), @@ -3539,7 +3532,7 @@ RepositoryToken, "PUT", {"code": "UJQB", "repository": "public/publicrepo"}, - {u"role": u"read"}, + {"role": "read"}, "devtable", 403, ), @@ -3547,7 +3540,7 @@ RepositoryToken, "PUT", {"code": "UJQB", "repository": "public/publicrepo"}, - {u"role": u"read"}, + {"role": "read"}, "freshuser", 403, ), @@ -3555,7 +3548,7 @@ RepositoryToken, "PUT", {"code": "UJQB", "repository": "public/publicrepo"}, - {u"role": u"read"}, + {"role": "read"}, "reader", 403, ), @@ -3613,7 +3606,7 @@ RepositoryToken, "PUT", {"code": "UJQB", "repository": "devtable/shared"}, - {u"role": u"read"}, + {"role": "read"}, None, 401, ), @@ -3621,7 +3614,7 @@ RepositoryToken, "PUT", {"code": "UJQB", "repository": "devtable/shared"}, - {u"role": u"read"}, + {"role": "read"}, "devtable", 410, ), @@ -3629,7 +3622,7 @@ RepositoryToken, "PUT", {"code": "UJQB", "repository": "devtable/shared"}, - {u"role": u"read"}, + {"role": "read"}, "freshuser", 403, ), @@ -3637,7 +3630,7 @@ RepositoryToken, "PUT", {"code": "UJQB", "repository": "devtable/shared"}, - {u"role": u"read"}, + {"role": "read"}, "reader", 403, ), @@ -3702,7 +3695,7 @@ RepositoryToken, "PUT", {"code": "UJQB", "repository": "buynlarge/orgrepo"}, - {u"role": u"read"}, + {"role": "read"}, None, 401, ), @@ -3710,7 +3703,7 @@ RepositoryToken, "PUT", {"code": "UJQB", "repository": "buynlarge/orgrepo"}, - {u"role": u"read"}, + {"role": "read"}, "devtable", 410, ), @@ -3718,7 +3711,7 @@ RepositoryToken, "PUT", {"code": "UJQB", "repository": "buynlarge/orgrepo"}, - {u"role": u"read"}, + {"role": 
"read"}, "freshuser", 403, ), @@ -3726,7 +3719,7 @@ RepositoryToken, "PUT", {"code": "UJQB", "repository": "buynlarge/orgrepo"}, - {u"role": u"read"}, + {"role": "read"}, "reader", 403, ), @@ -3830,7 +3823,7 @@ RestoreTag, "POST", {"tag": "HP8R", "repository": "public/publicrepo"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, None, 401, ), @@ -3838,7 +3831,7 @@ RestoreTag, "POST", {"tag": "HP8R", "repository": "public/publicrepo"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, "devtable", 403, ), @@ -3846,7 +3839,7 @@ RestoreTag, "POST", {"tag": "HP8R", "repository": "public/publicrepo"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, "freshuser", 403, ), @@ -3854,7 +3847,7 @@ RestoreTag, "POST", {"tag": "HP8R", "repository": "public/publicrepo"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, "reader", 403, ), @@ -3862,7 +3855,7 @@ RestoreTag, "POST", {"tag": "HP8R", "repository": "devtable/shared"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, None, 401, ), @@ -3870,7 +3863,7 @@ RestoreTag, "POST", {"tag": "HP8R", "repository": "devtable/shared"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, "devtable", 404, ), @@ -3878,7 +3871,7 @@ RestoreTag, "POST", {"tag": "HP8R", "repository": "devtable/shared"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, "freshuser", 403, ), @@ -3886,7 +3879,7 @@ RestoreTag, "POST", {"tag": "HP8R", "repository": "devtable/shared"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, "reader", 403, ), @@ -3894,7 +3887,7 @@ RestoreTag, "POST", {"tag": "HP8R", "repository": "buynlarge/orgrepo"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, None, 401, ), @@ -3902,7 +3895,7 @@ RestoreTag, "POST", {"tag": "HP8R", "repository": "buynlarge/orgrepo"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, "devtable", 404, ), @@ -3910,7 +3903,7 @@ RestoreTag, "POST", {"tag": "HP8R", "repository": "buynlarge/orgrepo"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, "freshuser", 403, ), @@ -3918,7 +3911,7 @@ RestoreTag, "POST", {"tag": "HP8R", "repository": "buynlarge/orgrepo"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, "reader", 403, ), @@ -3951,7 +3944,7 @@ RepositoryTag, "PUT", {"tag": "HP8R", "repository": "public/publicrepo"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, None, 401, ), @@ -3959,7 +3952,7 @@ RepositoryTag, "PUT", {"tag": "HP8R", "repository": "public/publicrepo"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, "devtable", 403, ), @@ -3967,7 +3960,7 @@ RepositoryTag, "PUT", {"tag": "HP8R", "repository": "public/publicrepo"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, "freshuser", 403, ), @@ -3975,7 +3968,7 @@ RepositoryTag, "PUT", {"tag": "HP8R", "repository": "public/publicrepo"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, "reader", 403, ), @@ -4008,7 +4001,7 @@ RepositoryTag, "PUT", {"tag": "HP8R", "repository": "devtable/shared"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, None, 401, ), @@ -4016,7 +4009,7 @@ RepositoryTag, "PUT", {"tag": "HP8R", "repository": "devtable/shared"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, "devtable", 404, ), @@ -4024,7 +4017,7 @@ RepositoryTag, "PUT", {"tag": "HP8R", "repository": "devtable/shared"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, "freshuser", 403, ), @@ -4032,7 +4025,7 @@ RepositoryTag, "PUT", {"tag": "HP8R", "repository": "devtable/shared"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, "reader", 403, ), @@ -4065,7 +4058,7 @@ RepositoryTag, "PUT", {"tag": "HP8R", "repository": "buynlarge/orgrepo"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, None, 401, ), @@ -4073,7 +4066,7 @@ RepositoryTag, "PUT", {"tag": "HP8R", "repository": "buynlarge/orgrepo"}, - 
{u"image": "WXNG"}, + {"image": "WXNG"}, "devtable", 404, ), @@ -4081,7 +4074,7 @@ RepositoryTag, "PUT", {"tag": "HP8R", "repository": "buynlarge/orgrepo"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, "freshuser", 403, ), @@ -4089,7 +4082,7 @@ RepositoryTag, "PUT", {"tag": "HP8R", "repository": "buynlarge/orgrepo"}, - {u"image": "WXNG"}, + {"image": "WXNG"}, "reader", 403, ), @@ -4101,7 +4094,7 @@ PermissionPrototypeList, "POST", {"orgname": "buynlarge"}, - {u"role": u"read", u"delegate": {u"kind": u"user", u"name": "7DGP"}}, + {"role": "read", "delegate": {"kind": "user", "name": "7DGP"}}, None, 401, ), @@ -4109,7 +4102,7 @@ PermissionPrototypeList, "POST", {"orgname": "buynlarge"}, - {u"role": u"read", u"delegate": {u"kind": u"user", u"name": "7DGP"}}, + {"role": "read", "delegate": {"kind": "user", "name": "7DGP"}}, "devtable", 400, ), @@ -4117,7 +4110,7 @@ PermissionPrototypeList, "POST", {"orgname": "buynlarge"}, - {u"role": u"read", u"delegate": {u"kind": u"user", u"name": "7DGP"}}, + {"role": "read", "delegate": {"kind": "user", "name": "7DGP"}}, "freshuser", 403, ), @@ -4125,7 +4118,7 @@ PermissionPrototypeList, "POST", {"orgname": "buynlarge"}, - {u"role": u"read", u"delegate": {u"kind": u"user", u"name": "7DGP"}}, + {"role": "read", "delegate": {"kind": "user", "name": "7DGP"}}, "reader", 403, ), @@ -4149,16 +4142,16 @@ (OrganizationCard, "GET", {"orgname": "buynlarge"}, None, "devtable", 200), (OrganizationCard, "GET", {"orgname": "buynlarge"}, None, "freshuser", 403), (OrganizationCard, "GET", {"orgname": "buynlarge"}, None, "reader", 403), - (OrganizationCard, "POST", {"orgname": "buynlarge"}, {u"token": "4VFR"}, None, 401), - (OrganizationCard, "POST", {"orgname": "buynlarge"}, {u"token": "4VFR"}, "freshuser", 403), - (OrganizationCard, "POST", {"orgname": "buynlarge"}, {u"token": "4VFR"}, "reader", 403), + (OrganizationCard, "POST", {"orgname": "buynlarge"}, {"token": "4VFR"}, None, 401), + (OrganizationCard, "POST", {"orgname": "buynlarge"}, {"token": "4VFR"}, "freshuser", 403), + (OrganizationCard, "POST", {"orgname": "buynlarge"}, {"token": "4VFR"}, "reader", 403), (OrganizationPlan, "GET", {"orgname": "buynlarge"}, None, None, 401), (OrganizationPlan, "GET", {"orgname": "buynlarge"}, None, "devtable", 200), (OrganizationPlan, "GET", {"orgname": "buynlarge"}, None, "freshuser", 403), (OrganizationPlan, "GET", {"orgname": "buynlarge"}, None, "reader", 403), - (OrganizationPlan, "PUT", {"orgname": "buynlarge"}, {u"plan": "WWEI"}, None, 401), - (OrganizationPlan, "PUT", {"orgname": "buynlarge"}, {u"plan": "WWEI"}, "freshuser", 403), - (OrganizationPlan, "PUT", {"orgname": "buynlarge"}, {u"plan": "WWEI"}, "reader", 403), + (OrganizationPlan, "PUT", {"orgname": "buynlarge"}, {"plan": "WWEI"}, None, 401), + (OrganizationPlan, "PUT", {"orgname": "buynlarge"}, {"plan": "WWEI"}, "freshuser", 403), + (OrganizationPlan, "PUT", {"orgname": "buynlarge"}, {"plan": "WWEI"}, "reader", 403), (OrgLogs, "GET", {"orgname": "buynlarge"}, None, None, 401), (OrgLogs, "GET", {"orgname": "buynlarge"}, None, "devtable", 200), (OrgLogs, "GET", {"orgname": "buynlarge"}, None, "freshuser", 403), @@ -4167,7 +4160,7 @@ RepositoryVisibility, "POST", {"repository": "public/publicrepo"}, - {u"visibility": u"public"}, + {"visibility": "public"}, None, 401, ), @@ -4175,7 +4168,7 @@ RepositoryVisibility, "POST", {"repository": "public/publicrepo"}, - {u"visibility": u"public"}, + {"visibility": "public"}, "devtable", 403, ), @@ -4183,7 +4176,7 @@ RepositoryVisibility, "POST", {"repository": 
"public/publicrepo"}, - {u"visibility": u"public"}, + {"visibility": "public"}, "freshuser", 403, ), @@ -4191,7 +4184,7 @@ RepositoryVisibility, "POST", {"repository": "public/publicrepo"}, - {u"visibility": u"public"}, + {"visibility": "public"}, "reader", 403, ), @@ -4199,7 +4192,7 @@ RepositoryVisibility, "POST", {"repository": "devtable/shared"}, - {u"visibility": u"public"}, + {"visibility": "public"}, None, 401, ), @@ -4207,7 +4200,7 @@ RepositoryVisibility, "POST", {"repository": "devtable/shared"}, - {u"visibility": u"public"}, + {"visibility": "public"}, "devtable", 200, ), @@ -4215,7 +4208,7 @@ RepositoryVisibility, "POST", {"repository": "devtable/shared"}, - {u"visibility": u"public"}, + {"visibility": "public"}, "freshuser", 403, ), @@ -4223,7 +4216,7 @@ RepositoryVisibility, "POST", {"repository": "devtable/shared"}, - {u"visibility": u"public"}, + {"visibility": "public"}, "reader", 403, ), @@ -4231,7 +4224,7 @@ RepositoryVisibility, "POST", {"repository": "buynlarge/orgrepo"}, - {u"visibility": u"public"}, + {"visibility": "public"}, None, 401, ), @@ -4239,7 +4232,7 @@ RepositoryVisibility, "POST", {"repository": "buynlarge/orgrepo"}, - {u"visibility": u"public"}, + {"visibility": "public"}, "devtable", 200, ), @@ -4247,7 +4240,7 @@ RepositoryVisibility, "POST", {"repository": "buynlarge/orgrepo"}, - {u"visibility": u"public"}, + {"visibility": "public"}, "freshuser", 403, ), @@ -4255,7 +4248,7 @@ RepositoryVisibility, "POST", {"repository": "buynlarge/orgrepo"}, - {u"visibility": u"public"}, + {"visibility": "public"}, "reader", 403, ), @@ -4523,7 +4516,7 @@ RepositoryTokenList, "POST", {"repository": "public/publicrepo"}, - {u"friendlyName": "R1CN"}, + {"friendlyName": "R1CN"}, None, 401, ), @@ -4531,7 +4524,7 @@ RepositoryTokenList, "POST", {"repository": "public/publicrepo"}, - {u"friendlyName": "R1CN"}, + {"friendlyName": "R1CN"}, "devtable", 403, ), @@ -4539,7 +4532,7 @@ RepositoryTokenList, "POST", {"repository": "public/publicrepo"}, - {u"friendlyName": "R1CN"}, + {"friendlyName": "R1CN"}, "freshuser", 403, ), @@ -4547,7 +4540,7 @@ RepositoryTokenList, "POST", {"repository": "public/publicrepo"}, - {u"friendlyName": "R1CN"}, + {"friendlyName": "R1CN"}, "reader", 403, ), @@ -4559,7 +4552,7 @@ RepositoryTokenList, "POST", {"repository": "devtable/shared"}, - {u"friendlyName": "R1CN"}, + {"friendlyName": "R1CN"}, None, 401, ), @@ -4567,7 +4560,7 @@ RepositoryTokenList, "POST", {"repository": "devtable/shared"}, - {u"friendlyName": "R1CN"}, + {"friendlyName": "R1CN"}, "devtable", 410, ), @@ -4575,7 +4568,7 @@ RepositoryTokenList, "POST", {"repository": "devtable/shared"}, - {u"friendlyName": "R1CN"}, + {"friendlyName": "R1CN"}, "freshuser", 403, ), @@ -4583,7 +4576,7 @@ RepositoryTokenList, "POST", {"repository": "devtable/shared"}, - {u"friendlyName": "R1CN"}, + {"friendlyName": "R1CN"}, "reader", 403, ), @@ -4595,7 +4588,7 @@ RepositoryTokenList, "POST", {"repository": "buynlarge/orgrepo"}, - {u"friendlyName": "R1CN"}, + {"friendlyName": "R1CN"}, None, 401, ), @@ -4603,7 +4596,7 @@ RepositoryTokenList, "POST", {"repository": "buynlarge/orgrepo"}, - {u"friendlyName": "R1CN"}, + {"friendlyName": "R1CN"}, "devtable", 410, ), @@ -4611,7 +4604,7 @@ RepositoryTokenList, "POST", {"repository": "buynlarge/orgrepo"}, - {u"friendlyName": "R1CN"}, + {"friendlyName": "R1CN"}, "freshuser", 403, ), @@ -4619,7 +4612,7 @@ RepositoryTokenList, "POST", {"repository": "buynlarge/orgrepo"}, - {u"friendlyName": "R1CN"}, + {"friendlyName": "R1CN"}, "reader", 403, ), @@ -4631,7 +4624,7 @@ 
RepositoryBuildList, "POST", {"repository": "public/publicrepo"}, - {u"file_id": "UX7K"}, + {"file_id": "UX7K"}, None, 401, ), @@ -4639,7 +4632,7 @@ RepositoryBuildList, "POST", {"repository": "public/publicrepo"}, - {u"file_id": "UX7K"}, + {"file_id": "UX7K"}, "devtable", 403, ), @@ -4647,7 +4640,7 @@ RepositoryBuildList, "POST", {"repository": "public/publicrepo"}, - {u"file_id": "UX7K"}, + {"file_id": "UX7K"}, "freshuser", 403, ), @@ -4655,7 +4648,7 @@ RepositoryBuildList, "POST", {"repository": "public/publicrepo"}, - {u"file_id": "UX7K"}, + {"file_id": "UX7K"}, "reader", 403, ), @@ -4667,7 +4660,7 @@ RepositoryBuildList, "POST", {"repository": "devtable/shared"}, - {u"file_id": "UX7K"}, + {"file_id": "UX7K"}, None, 401, ), @@ -4675,7 +4668,7 @@ RepositoryBuildList, "POST", {"repository": "devtable/shared"}, - {u"file_id": "UX7K"}, + {"file_id": "UX7K"}, "devtable", 201, ), @@ -4683,7 +4676,7 @@ RepositoryBuildList, "POST", {"repository": "devtable/shared"}, - {u"file_id": "UX7K"}, + {"file_id": "UX7K"}, "freshuser", 403, ), @@ -4691,7 +4684,7 @@ RepositoryBuildList, "POST", {"repository": "devtable/shared"}, - {u"file_id": "UX7K"}, + {"file_id": "UX7K"}, "reader", 403, ), @@ -4703,7 +4696,7 @@ RepositoryBuildList, "POST", {"repository": "buynlarge/orgrepo"}, - {u"file_id": "UX7K"}, + {"file_id": "UX7K"}, None, 401, ), @@ -4711,7 +4704,7 @@ RepositoryBuildList, "POST", {"repository": "buynlarge/orgrepo"}, - {u"file_id": "UX7K"}, + {"file_id": "UX7K"}, "devtable", 201, ), @@ -4719,7 +4712,7 @@ RepositoryBuildList, "POST", {"repository": "buynlarge/orgrepo"}, - {u"file_id": "UX7K"}, + {"file_id": "UX7K"}, "freshuser", 403, ), @@ -4727,7 +4720,7 @@ RepositoryBuildList, "POST", {"repository": "buynlarge/orgrepo"}, - {u"file_id": "UX7K"}, + {"file_id": "UX7K"}, "reader", 403, ), @@ -4859,12 +4852,12 @@ (Repository, "GET", {"repository": "public/publicrepo"}, None, "devtable", 200), (Repository, "GET", {"repository": "public/publicrepo"}, None, "freshuser", 200), (Repository, "GET", {"repository": "public/publicrepo"}, None, "reader", 200), - (Repository, "PUT", {"repository": "public/publicrepo"}, {u"description": "WXNG"}, None, 401), + (Repository, "PUT", {"repository": "public/publicrepo"}, {"description": "WXNG"}, None, 401), ( Repository, "PUT", {"repository": "public/publicrepo"}, - {u"description": "WXNG"}, + {"description": "WXNG"}, "devtable", 403, ), @@ -4872,7 +4865,7 @@ Repository, "PUT", {"repository": "public/publicrepo"}, - {u"description": "WXNG"}, + {"description": "WXNG"}, "freshuser", 403, ), @@ -4880,7 +4873,7 @@ Repository, "PUT", {"repository": "public/publicrepo"}, - {u"description": "WXNG"}, + {"description": "WXNG"}, "reader", 403, ), @@ -4892,12 +4885,12 @@ (Repository, "GET", {"repository": "devtable/shared"}, None, "devtable", 200), (Repository, "GET", {"repository": "devtable/shared"}, None, "freshuser", 403), (Repository, "GET", {"repository": "devtable/shared"}, None, "reader", 200), - (Repository, "PUT", {"repository": "devtable/shared"}, {u"description": "WXNG"}, None, 401), + (Repository, "PUT", {"repository": "devtable/shared"}, {"description": "WXNG"}, None, 401), ( Repository, "PUT", {"repository": "devtable/shared"}, - {u"description": "WXNG"}, + {"description": "WXNG"}, "devtable", 200, ), @@ -4905,11 +4898,11 @@ Repository, "PUT", {"repository": "devtable/shared"}, - {u"description": "WXNG"}, + {"description": "WXNG"}, "freshuser", 403, ), - (Repository, "PUT", {"repository": "devtable/shared"}, {u"description": "WXNG"}, "reader", 403), + (Repository, 
"PUT", {"repository": "devtable/shared"}, {"description": "WXNG"}, "reader", 403), (Repository, "DELETE", {"repository": "buynlarge/orgrepo"}, None, None, 401), (Repository, "DELETE", {"repository": "buynlarge/orgrepo"}, None, "devtable", 204), (Repository, "DELETE", {"repository": "buynlarge/orgrepo"}, None, "freshuser", 403), @@ -4918,12 +4911,12 @@ (Repository, "GET", {"repository": "buynlarge/orgrepo"}, None, "devtable", 200), (Repository, "GET", {"repository": "buynlarge/orgrepo"}, None, "freshuser", 403), (Repository, "GET", {"repository": "buynlarge/orgrepo"}, None, "reader", 200), - (Repository, "PUT", {"repository": "buynlarge/orgrepo"}, {u"description": "WXNG"}, None, 401), + (Repository, "PUT", {"repository": "buynlarge/orgrepo"}, {"description": "WXNG"}, None, 401), ( Repository, "PUT", {"repository": "buynlarge/orgrepo"}, - {u"description": "WXNG"}, + {"description": "WXNG"}, "devtable", 200, ), @@ -4931,7 +4924,7 @@ Repository, "PUT", {"repository": "buynlarge/orgrepo"}, - {u"description": "WXNG"}, + {"description": "WXNG"}, "freshuser", 403, ), @@ -4939,7 +4932,7 @@ Repository, "PUT", {"repository": "buynlarge/orgrepo"}, - {u"description": "WXNG"}, + {"description": "WXNG"}, "reader", 403, ), @@ -4955,17 +4948,17 @@ (OrganizationApplications, "GET", {"orgname": "buynlarge"}, None, "devtable", 200), (OrganizationApplications, "GET", {"orgname": "buynlarge"}, None, "freshuser", 403), (OrganizationApplications, "GET", {"orgname": "buynlarge"}, None, "reader", 403), - (OrganizationApplications, "POST", {"orgname": "buynlarge"}, {u"name": "foo"}, None, 401), - (OrganizationApplications, "POST", {"orgname": "buynlarge"}, {u"name": "foo"}, "devtable", 200), + (OrganizationApplications, "POST", {"orgname": "buynlarge"}, {"name": "foo"}, None, 401), + (OrganizationApplications, "POST", {"orgname": "buynlarge"}, {"name": "foo"}, "devtable", 200), ( OrganizationApplications, "POST", {"orgname": "buynlarge"}, - {u"name": "foo"}, + {"name": "foo"}, "freshuser", 403, ), - (OrganizationApplications, "POST", {"orgname": "buynlarge"}, {u"name": "foo"}, "reader", 403), + (OrganizationApplications, "POST", {"orgname": "buynlarge"}, {"name": "foo"}, "reader", 403), ( OrganizationApplicationResource, "DELETE", @@ -5034,7 +5027,7 @@ OrganizationApplicationResource, "PUT", {"orgname": "buynlarge", "client_id": "deadbeef"}, - {u"redirect_uri": "foo", u"name": "foo", u"application_uri": "foo"}, + {"redirect_uri": "foo", "name": "foo", "application_uri": "foo"}, None, 401, ), @@ -5042,7 +5035,7 @@ OrganizationApplicationResource, "PUT", {"orgname": "buynlarge", "client_id": "deadbeef"}, - {u"redirect_uri": "foo", u"name": "foo", u"application_uri": "foo"}, + {"redirect_uri": "foo", "name": "foo", "application_uri": "foo"}, "devtable", 200, ), @@ -5050,7 +5043,7 @@ OrganizationApplicationResource, "PUT", {"orgname": "buynlarge", "client_id": "deadbeef"}, - {u"redirect_uri": "foo", u"name": "foo", u"application_uri": "foo"}, + {"redirect_uri": "foo", "name": "foo", "application_uri": "foo"}, "freshuser", 403, ), @@ -5058,7 +5051,7 @@ OrganizationApplicationResource, "PUT", {"orgname": "buynlarge", "client_id": "deadbeef"}, - {u"redirect_uri": "foo", u"name": "foo", u"application_uri": "foo"}, + {"redirect_uri": "foo", "name": "foo", "application_uri": "foo"}, "reader", 403, ), diff --git a/endpoints/api/trigger.py b/endpoints/api/trigger.py index 927847dd28..e839ea9fe8 100644 --- a/endpoints/api/trigger.py +++ b/endpoints/api/trigger.py @@ -3,7 +3,7 @@ """ import logging -from urlparse import 
urlunparse +from urllib.parse import urlunparse from flask import request, url_for @@ -223,7 +223,7 @@ def post(self, namespace_name, repo_name, trigger_uuid): except TriggerException as exc: return { "status": "error", - "message": exc.message, + "message": str(exc), } else: raise Unauthorized() @@ -313,7 +313,7 @@ def post(self, namespace_name, repo_name, trigger_uuid): except TriggerException as exc: write_token.delete_instance() - raise request_error(message=exc.message) + raise request_error(message=str(exc)) # Save the updated config. update_build_trigger(trigger, final_config, write_token=write_token) @@ -389,7 +389,7 @@ def post(self, namespace_name, repo_name, trigger_uuid): except TriggerException as rre: return { "status": "error", - "message": "Could not analyze the repository: %s" % rre.message, + "message": "Could not analyze the repository: %s" % rre, } except NotImplementedError: return { @@ -452,7 +452,7 @@ def post(self, namespace_name, repo_name, trigger_uuid): prepared = handler.manual_start(run_parameters=run_parameters) build_request = start_build(repo, prepared, pull_robot_name=pull_robot_name) except TriggerException as tse: - raise InvalidRequest(tse.message) + raise InvalidRequest(str(tse)) from tse except MaximumBuildsQueuedException: abort(429, message="Maximum queued build rate exceeded.") except BuildTriggerDisabledException: @@ -567,7 +567,7 @@ def post(self, namespace_name, repo_name, trigger_uuid): try: return {"sources": handler.list_build_sources_for_namespace(namespace)} except TriggerException as rre: - raise InvalidRequest(rre.message) + raise InvalidRequest(str(rre)) from rre else: raise Unauthorized() @@ -597,6 +597,6 @@ def get(self, namespace_name, repo_name, trigger_uuid): try: return {"namespaces": handler.list_build_source_namespaces()} except TriggerException as rre: - raise InvalidRequest(rre.message) + raise InvalidRequest(str(rre)) from rre else: raise Unauthorized() diff --git a/endpoints/api/user.py b/endpoints/api/user.py index 6ea05964cd..0649211092 100644 --- a/endpoints/api/user.py +++ b/endpoints/api/user.py @@ -186,7 +186,7 @@ def login_view(login): user_response.update( { "organizations": [ - org_view(o, user_admin=user_admin.can()) for o in organizations.values() + org_view(o, user_admin=user_admin.can()) for o in list(organizations.values()) ], } ) @@ -420,7 +420,7 @@ def put(self): elif confirm_username: model.user.remove_user_prompt(user, "confirm_username") - except model.user.InvalidPasswordException, ex: + except model.user.InvalidPasswordException as ex: raise request_error(exception=ex) return user_view(user, previous_username=previous_username), 200, headers @@ -575,7 +575,7 @@ def post(self): if not result: raise request_error(message=error_message) - return {"key": authentication.encrypt_user_password(password)} + return {"key": authentication.encrypt_user_password(password).decode("ascii")} def conduct_signin(username_or_email, password, invite_code=None): @@ -824,7 +824,7 @@ def post(self, service_id): return {"auth_url": auth_url} except DiscoveryFailureException as dfe: logger.exception("Could not discovery OAuth endpoint information") - raise DownstreamIssue(dfe.message) + raise DownstreamIssue(str(dfe)) @resource("/v1/detachexternal/") @@ -883,7 +883,7 @@ def redact(value): if i < threshold or i >= len(value) - threshold: v = v + value[i] else: - v = v + u"\u2022" + v = v + "\u2022" return v diff --git a/endpoints/appr/decorators.py b/endpoints/appr/decorators.py index 13c543fac5..c9fd1c73bd 100644 --- 
a/endpoints/appr/decorators.py +++ b/endpoints/appr/decorators.py @@ -10,7 +10,7 @@ def _raise_unauthorized(repository, scopes): - raise StandardError("Unauthorized acces to %s", repository) + raise Exception("Unauthorized acces to %s", repository) def _get_reponame_kwargs(*args, **kwargs): diff --git a/endpoints/appr/models_cnr.py b/endpoints/appr/models_cnr.py index b73df233b5..00b0e7233f 100644 --- a/endpoints/appr/models_cnr.py +++ b/endpoints/appr/models_cnr.py @@ -70,7 +70,7 @@ def _join_package_name(ns, name): def _timestamp_to_iso(timestamp, in_ms=True): if in_ms: - timestamp = timestamp / 1000 + timestamp = timestamp // 1000 return datetime.fromtimestamp(timestamp).isoformat() @@ -116,7 +116,7 @@ def log_action( repo, analytics_name=analytics_name, analytics_sample=analytics_sample, - **metadata + **metadata, ) def list_applications( diff --git a/endpoints/appr/models_interface.py b/endpoints/appr/models_interface.py index 808ff9f570..d133a2dd57 100644 --- a/endpoints/appr/models_interface.py +++ b/endpoints/appr/models_interface.py @@ -229,7 +229,7 @@ def log_action( repo_name=None, analytics_name=None, analytics_sample=1, - **kwargs + **kwargs, ): """ Logs an action to the audit log. diff --git a/endpoints/appr/registry.py b/endpoints/appr/registry.py index 378605bc7f..3e10dc5d89 100644 --- a/endpoints/appr/registry.py +++ b/endpoints/appr/registry.py @@ -72,7 +72,8 @@ def login(): if not result.auth_valid: raise UnauthorizedAccess(result.error_message) - return jsonify({"token": "basic " + b64encode("%s:%s" % (username, password))}) + auth = b64encode(b"%s:%s" % (username.encode("ascii"), password.encode("ascii"))) + return jsonify({"token": "basic " + auth.decode("ascii")}) # @TODO: Redirect to S3 url diff --git a/endpoints/appr/test/test_api_security.py b/endpoints/appr/test/test_api_security.py index 3681f5312d..153e0fdb50 100644 --- a/endpoints/appr/test/test_api_security.py +++ b/endpoints/appr/test/test_api_security.py @@ -175,7 +175,8 @@ def test_api_security( url = url_for(resource, **params) headers = {} if identity is not None: - headers["authorization"] = "basic " + base64.b64encode("%s:password" % identity) + auth = base64.b64encode(("%s:password" % identity).encode("ascii")) + headers["authorization"] = "basic " + auth.decode("ascii") rv = cl.open(url, headers=headers, method=method) assert rv.status_code == expected diff --git a/endpoints/appr/test/test_registry.py b/endpoints/appr/test/test_registry.py index aab80982f7..d96cbbda29 100644 --- a/endpoints/appr/test/test_registry.py +++ b/endpoints/appr/test/test_registry.py @@ -45,7 +45,7 @@ def test_invalid_release_name(release_name, app, client): } url = url_for("appr.push", **params) - auth = base64.b64encode("devtable:password") + auth = base64.b64encode(b"devtable:password").decode("ascii") headers = {"Content-Type": "application/json", "Authorization": "Basic " + auth} data = { "release": release_name, @@ -65,7 +65,7 @@ def test_readonly(readonly, expected_status, app, client): } url = url_for("appr.push", **params) - auth = base64.b64encode("devtable:password") + auth = base64.b64encode(b"devtable:password").decode("ascii") headers = {"Content-Type": "application/json", "Authorization": "Basic " + auth} data = { "release": "1.0", diff --git a/endpoints/common.py b/endpoints/common.py index 338faf5147..dd6c5cd862 100644 --- a/endpoints/common.py +++ b/endpoints/common.py @@ -124,7 +124,9 @@ def get_oauth_config(): if not features.BILLING: version_number = "Quay %s" % __version__ - scopes_set = {scope.scope: 
scope._asdict() for scope in scopes.app_scopes(app.config).values()} + scopes_set = { + scope.scope: scope._asdict() for scope in list(scopes.app_scopes(app.config).values()) + } contents = render_template( name, @@ -156,7 +158,7 @@ def get_oauth_config(): version_number=version_number, current_year=datetime.datetime.now().year, kubernetes_namespace=IS_KUBERNETES and QE_NAMESPACE, - **kwargs + **kwargs, ) resp = make_response(contents) diff --git a/endpoints/csrf.py b/endpoints/csrf.py index b96d312ac9..9087a89a23 100644 --- a/endpoints/csrf.py +++ b/endpoints/csrf.py @@ -30,7 +30,7 @@ def generate_csrf_token(session_token_name=_QUAY_CSRF_TOKEN_NAME, force=False): Returns the generated token. """ if session_token_name not in session or force: - session[session_token_name] = base64.b64encode(os.urandom(48)) + session[session_token_name] = base64.b64encode(os.urandom(48)).decode("ascii") return session[session_token_name] diff --git a/endpoints/test/shared.py b/endpoints/test/shared.py index 8431bc49e5..9159333356 100644 --- a/endpoints/test/shared.py +++ b/endpoints/test/shared.py @@ -59,7 +59,11 @@ def gen_basic_auth(username, password): """ Generates a basic auth header. """ - return "Basic " + base64.b64encode("%s:%s" % (username, password)) + encoded_username = username.encode("utf-8") + encoded_password = password.encode("utf-8") + return "Basic " + base64.b64encode(b"%s:%s" % (encoded_username, encoded_password)).decode( + "ascii" + ) def conduct_call( diff --git a/endpoints/test/test_webhooks.py b/endpoints/test/test_webhooks.py index e98dcd401f..825645a7bc 100644 --- a/endpoints/test/test_webhooks.py +++ b/endpoints/test/test_webhooks.py @@ -4,7 +4,7 @@ from flask import url_for from data import model -from endpoints.test.shared import conduct_call +from endpoints.test.shared import conduct_call, gen_basic_auth from test.fixtures import * @@ -18,7 +18,7 @@ def test_start_build_disabled_trigger(app, client): } headers = { - "Authorization": "Basic " + base64.b64encode("devtable:password"), + "Authorization": gen_basic_auth("devtable", "password"), } conduct_call( diff --git a/endpoints/v1/index.py b/endpoints/v1/index.py index d7b53ef026..3ba18377fe 100644 --- a/endpoints/v1/index.py +++ b/endpoints/v1/index.py @@ -1,6 +1,6 @@ import json import logging -import urlparse +import urllib.parse from functools import wraps @@ -68,7 +68,7 @@ def wrapper(namespace_name, repo_name, *args, **kwargs): session["repository"] = repo_name # We run our index and registry on the same hosts for now - registry_server = urlparse.urlparse(request.url).netloc + registry_server = urllib.parse.urlparse(request.url).netloc response.headers["X-Docker-Endpoints"] = registry_server has_token_request = request.headers.get("X-Docker-Token", "") diff --git a/endpoints/v1/registry.py b/endpoints/v1/registry.py index 365e15cb01..ae94642d5b 100644 --- a/endpoints/v1/registry.py +++ b/endpoints/v1/registry.py @@ -421,7 +421,8 @@ def put_image_json(namespace, repository, image_id): logger.debug("Parsing image JSON") try: uploaded_metadata = request.data - data = json.loads(uploaded_metadata.decode("utf8")) + uploaded_metadata_string = uploaded_metadata.decode("utf-8") + data = json.loads(uploaded_metadata_string) except ValueError: pass @@ -455,7 +456,7 @@ def put_image_json(namespace, repository, image_id): username = get_authenticated_user() and get_authenticated_user().username layer = builder.start_layer( image_id, - uploaded_metadata, + uploaded_metadata_string, location_pref, username, 
app.config["PUSH_TEMP_TAG_EXPIRATION_SEC"], diff --git a/endpoints/v2/__init__.py b/endpoints/v2/__init__.py index 9f8438d139..8e0d024280 100644 --- a/endpoints/v2/__init__.py +++ b/endpoints/v2/__init__.py @@ -2,8 +2,8 @@ import os.path from functools import wraps -from urlparse import urlparse -from urllib import urlencode +from urllib.parse import urlparse +from urllib.parse import urlencode from flask import Blueprint, make_response, url_for, request, jsonify from semantic_version import Spec diff --git a/endpoints/v2/test/test_blob.py b/endpoints/v2/test/test_blob.py index 975eaf6407..3d71a407f3 100644 --- a/endpoints/v2/test/test_blob.py +++ b/endpoints/v2/test/test_blob.py @@ -19,7 +19,7 @@ "method, endpoint", [("GET", "download_blob"), ("HEAD", "check_blob_exists"),] ) def test_blob_caching(method, endpoint, client, app): - digest = "sha256:" + hashlib.sha256("a").hexdigest() + digest = "sha256:" + hashlib.sha256(b"a").hexdigest() location = ImageStorageLocation.get(name="local_us") model.blob.store_blob_record_and_temp_link("devtable", "simple", digest, location, 1, 10000000) @@ -37,7 +37,7 @@ def test_blob_caching(method, endpoint, client, app): ) headers = { - "Authorization": "Bearer %s" % token, + "Authorization": "Bearer %s" % token.decode("ascii"), } # Run without caching to make sure the request works. This also preloads some of @@ -71,26 +71,26 @@ def test_blob_caching(method, endpoint, client, app): # Unknown blob. ("sha256:unknown", "devtable/simple", "devtable", False), # Blob not in repo. - ("sha256:" + hashlib.sha256("a").hexdigest(), "devtable/complex", "devtable", False), + ("sha256:" + hashlib.sha256(b"a").hexdigest(), "devtable/complex", "devtable", False), # Blob in repo. - ("sha256:" + hashlib.sha256("b").hexdigest(), "devtable/complex", "devtable", True), + ("sha256:" + hashlib.sha256(b"b").hexdigest(), "devtable/complex", "devtable", True), # No access to repo. - ("sha256:" + hashlib.sha256("b").hexdigest(), "devtable/complex", "public", False), + ("sha256:" + hashlib.sha256(b"b").hexdigest(), "devtable/complex", "public", False), # Public repo. - ("sha256:" + hashlib.sha256("c").hexdigest(), "public/publicrepo", "devtable", True), + ("sha256:" + hashlib.sha256(b"c").hexdigest(), "public/publicrepo", "devtable", True), ], ) def test_blob_mounting(mount_digest, source_repo, username, expect_success, client, app): location = ImageStorageLocation.get(name="local_us") # Store and link some blobs. 
- digest = "sha256:" + hashlib.sha256("a").hexdigest() + digest = "sha256:" + hashlib.sha256(b"a").hexdigest() model.blob.store_blob_record_and_temp_link("devtable", "simple", digest, location, 1, 10000000) - digest = "sha256:" + hashlib.sha256("b").hexdigest() + digest = "sha256:" + hashlib.sha256(b"b").hexdigest() model.blob.store_blob_record_and_temp_link("devtable", "complex", digest, location, 1, 10000000) - digest = "sha256:" + hashlib.sha256("c").hexdigest() + digest = "sha256:" + hashlib.sha256(b"c").hexdigest() model.blob.store_blob_record_and_temp_link( "public", "publicrepo", digest, location, 1, 10000000 ) @@ -115,7 +115,7 @@ def test_blob_mounting(mount_digest, source_repo, username, expect_success, clie ) headers = { - "Authorization": "Bearer %s" % token, + "Authorization": "Bearer %s" % token.decode("ascii"), } expected_code = 201 if expect_success else 202 @@ -147,7 +147,7 @@ def test_blob_upload_offset(client, app): ) headers = { - "Authorization": "Bearer %s" % token, + "Authorization": "Bearer %s" % token.decode("ascii"), } # Create a blob upload request. @@ -167,7 +167,7 @@ def test_blob_upload_offset(client, app): } headers = { - "Authorization": "Bearer %s" % token, + "Authorization": "Bearer %s" % token.decode("ascii"), "Content-Range": "13-50", } diff --git a/endpoints/v2/test/test_manifest.py b/endpoints/v2/test/test_manifest.py index f666af4a7e..4629f30aab 100644 --- a/endpoints/v2/test/test_manifest.py +++ b/endpoints/v2/test/test_manifest.py @@ -35,7 +35,7 @@ def test_e2e_query_count_manifest_norewrite(client, app): ) headers = { - "Authorization": "Bearer %s" % token, + "Authorization": "Bearer %s" % token.decode("ascii"), } # Conduct a call to prime the instance key and other caches. diff --git a/endpoints/v2/test/test_manifest_cornercases.py b/endpoints/v2/test/test_manifest_cornercases.py index 86beb6b4f6..0037292cfa 100644 --- a/endpoints/v2/test/test_manifest_cornercases.py +++ b/endpoints/v2/test/test_manifest_cornercases.py @@ -59,7 +59,7 @@ def test_missing_link(initialized_db): location = database.ImageStorageLocation.get(name=location_name) # Create first blob. - first_blob_sha = "sha256:" + hashlib.sha256("FIRST").hexdigest() + first_blob_sha = "sha256:" + hashlib.sha256(b"FIRST").hexdigest() model.blob.store_blob_record_and_temp_link( ADMIN_ACCESS_USER, REPO, first_blob_sha, location, 0, 0, 0 ) @@ -89,8 +89,8 @@ def test_missing_link(initialized_db): assert found_tag.legacy_image.docker_image_id == "first" # Create the second and third blobs. - second_blob_sha = "sha256:" + hashlib.sha256("SECOND").hexdigest() - third_blob_sha = "sha256:" + hashlib.sha256("THIRD").hexdigest() + second_blob_sha = "sha256:" + hashlib.sha256(b"SECOND").hexdigest() + third_blob_sha = "sha256:" + hashlib.sha256(b"THIRD").hexdigest() model.blob.store_blob_record_and_temp_link( ADMIN_ACCESS_USER, REPO, second_blob_sha, location, 0, 0, 0 @@ -131,7 +131,7 @@ def test_missing_link(initialized_db): assert found_tag.legacy_image.docker_image_id != "second" # Create the fourth blob. 
- fourth_blob_sha = "sha256:" + hashlib.sha256("FOURTH").hexdigest() + fourth_blob_sha = "sha256:" + hashlib.sha256(b"FOURTH").hexdigest() model.blob.store_blob_record_and_temp_link( ADMIN_ACCESS_USER, REPO, fourth_blob_sha, location, 0, 0, 0 ) diff --git a/endpoints/v2/test/test_v2auth.py b/endpoints/v2/test/test_v2auth.py index d7d2689643..d2e41c4f7a 100644 --- a/endpoints/v2/test/test_v2auth.py +++ b/endpoints/v2/test/test_v2auth.py @@ -4,7 +4,7 @@ from app import instance_keys, app as original_app from data.model.user import regenerate_robot_token, get_robot_and_metadata, get_user -from endpoints.test.shared import conduct_call +from endpoints.test.shared import conduct_call, gen_basic_auth from util.security.registry_jwt import decode_bearer_token, CLAIM_TUF_ROOTS from test.fixtures import * @@ -181,7 +181,7 @@ def test_generate_registry_jwt( headers = {} if username and password: - headers["Authorization"] = "Basic %s" % (base64.b64encode("%s:%s" % (username, password))) + headers["Authorization"] = gen_basic_auth(username, password) resp = conduct_call( client, diff --git a/endpoints/v2/v2auth.py b/endpoints/v2/v2auth.py index b6285cd2ee..bd77331b93 100644 --- a/endpoints/v2/v2auth.py +++ b/endpoints/v2/v2auth.py @@ -134,7 +134,7 @@ def generate_registry_jwt(auth_result): token = generate_bearer_token( audience_param, subject, context, access, TOKEN_VALIDITY_LIFETIME_S, instance_keys ) - return jsonify({"token": token}) + return jsonify({"token": token.decode("ascii")}) @lru_cache(maxsize=1) diff --git a/endpoints/verbs/__init__.py b/endpoints/verbs/__init__.py index 5e3f0af6dd..e2bb25f7c4 100644 --- a/endpoints/verbs/__init__.py +++ b/endpoints/verbs/__init__.py @@ -117,8 +117,10 @@ def _sign_derived_image(verb, derived_image, queue_file): signature = None try: signature = signer.detached_sign(queue_file) - except: - logger.exception("Exception when signing %s deriving image %s", verb, derived_image) + except Exception as e: + logger.exception( + "Exception when signing %s deriving image %s: %s", verb, derived_image, str(e) + ) return # Setup the database (since this is a new process) and then disconnect immediately @@ -390,7 +392,7 @@ def _store_metadata_and_cleanup(): unique_id = ( derived_image.unique_id if derived_image is not None - else hashlib.sha256("%s:%s" % (verb, uuid.uuid4())).hexdigest() + else hashlib.sha256(("%s:%s" % (verb, uuid.uuid4())).encode("utf-8")).hexdigest() ) handlers = [hasher.update] reporter = VerbReporter(verb) diff --git a/endpoints/web.py b/endpoints/web.py index 76532f19ff..48072b4363 100644 --- a/endpoints/web.py +++ b/endpoints/web.py @@ -421,7 +421,7 @@ def buildlogs(build_uuid): def exportedlogs(file_id): # Only enable this endpoint if local storage is available.
has_local_storage = False - for storage_type, _ in app.config.get("DISTRIBUTED_STORAGE_CONFIG", {}).values(): + for storage_type, _ in list(app.config.get("DISTRIBUTED_STORAGE_CONFIG", {}).values()): if storage_type == "LocalStorage": has_local_storage = True break @@ -525,7 +525,7 @@ def confirm_repo_email(): try: record = model.repository.confirm_email_authorization_for_repo(code) except model.DataModelException as ex: - return index("", error_info=dict(reason="confirmerror", error_message=ex.message)) + return index("", error_info=dict(reason="confirmerror", error_message=str(ex))) message = """ Your E-mail address has been authorized to receive notifications for repository @@ -553,7 +553,7 @@ def confirm_email(): try: user, new_email, old_email = model.user.confirm_user_email(code) except model.DataModelException as ex: - return index("", error_info=dict(reason="confirmerror", error_message=ex.message)) + return index("", error_info=dict(reason="confirmerror", error_message=str(ex))) if new_email: send_email_changed(user.username, old_email, new_email) diff --git a/external_libraries.py b/external_libraries.py index 172fd3ea3d..ac8252b314 100644 --- a/external_libraries.py +++ b/external_libraries.py @@ -1,6 +1,6 @@ import logging import logging.config -import urllib2 +import urllib.request, urllib.error, urllib.parse import re import os import hashlib @@ -98,7 +98,7 @@ def format_local_name(url): filename = url.split("/")[-1] filename = re.sub(r"[+,?@=:]", "", filename) - url_hash = hashlib.sha256(url).hexdigest()[0:12] + url_hash = hashlib.sha256(url.encode("utf-8")).hexdigest()[0:12] filename += "-" + url_hash if not filename.endswith(".css") and not filename.endswith(".js"): @@ -113,9 +113,9 @@ def format_local_name(url): def _download_url(url): for index in range(0, MAX_RETRY_COUNT): try: - response = urllib2.urlopen(url) + response = urllib.request.urlopen(url) return response.read() - except urllib2.URLError: + except urllib.error.URLError: logger.exception( "Got exception when trying to download URL %s (try #%s)", url, index + 1 ) @@ -141,7 +141,7 @@ def _download_url(url): filename = os.path.basename(url).split("?")[0] path = os.path.join(local_directory, filename) - print "Downloading %s to %s" % (url, path) + print("Downloading %s to %s" % (url, path)) contents = _download_url("https://" + url) with open(path, "wb") as local_file: diff --git a/features/__init__.py b/features/__init__.py index fa20cf2a1a..0242a4667d 100644 --- a/features/__init__.py +++ b/features/__init__.py @@ -2,7 +2,7 @@ def import_features(config_dict): - for feature, feature_val in config_dict.items(): + for feature, feature_val in list(config_dict.items()): if feature.startswith("FEATURE_"): feature_name = feature[8:] _FEATURES[feature_name] = globals()[feature_name] = FeatureNameValue( @@ -28,5 +28,8 @@ def __repr__(self): def __cmp__(self, other): return self.value.__cmp__(other) - def __nonzero__(self): - return self.value.__nonzero__() + def __bool__(self): + if isinstance(self.value, str): + return self.value.lower() == "true" + + return bool(self.value) diff --git a/image/appc/__init__.py b/image/appc/__init__.py index 5db24538da..60c74a4156 100644 --- a/image/appc/__init__.py +++ b/image/appc/__init__.py @@ -40,7 +40,7 @@ def stream_generator( aci_manifest = json.dumps( DockerV1ToACIManifestTranslator.build_manifest(tag, parsed_manifest, synthetic_image_id) ) - yield self.tar_file("manifest", aci_manifest, mtime=image_mtime) + yield self.tar_file("manifest", aci_manifest.encode("utf-8"), 
mtime=image_mtime) # Yield the merged layer dtaa. yield self.tar_folder("rootfs", mtime=image_mtime) @@ -102,7 +102,7 @@ def _build_ports(docker_config): exposed_ports = docker_config["ExposedPorts"] if exposed_ports is not None: - port_list = exposed_ports.keys() + port_list = list(exposed_ports.keys()) else: port_list = docker_config["Ports"] or docker_config["ports"] or [] @@ -144,7 +144,7 @@ def get_name(docker_volume_path): return "volume-%s" % volume_name volume_list = docker_config["Volumes"] or docker_config["volumes"] or {} - for docker_volume_path in volume_list.iterkeys(): + for docker_volume_path in volume_list.keys(): if not docker_volume_path: continue diff --git a/image/common.py b/image/common.py index 207a00cbd5..7efd9731f4 100644 --- a/image/common.py +++ b/image/common.py @@ -46,6 +46,7 @@ def tar_file(self, name, contents, mtime=None): """ Returns the tar binary representation for a file with the given name and file contents. """ + assert isinstance(contents, bytes) length = len(contents) tar_data = self.tar_file_header(name, length, mtime=mtime) tar_data += contents @@ -57,9 +58,9 @@ def tar_file_padding(self, length): Returns tar file padding for file data of the given length. """ if length % 512 != 0: - return "\0" * (512 - (length % 512)) + return b"\0" * (512 - (length % 512)) - return "" + return b"" def tar_file_header(self, name, file_size, mtime=None): """ @@ -84,5 +85,5 @@ def tar_folder(self, name, mtime=None): info.mtime = mtime # allow the directory to be readable by non-root users - info.mode = 0755 + info.mode = 0o755 return info.tobuf() diff --git a/image/docker/schema1.py b/image/docker/schema1.py index 156624f3f7..3d1bde8ec2 100644 --- a/image/docker/schema1.py +++ b/image/docker/schema1.py @@ -197,8 +197,9 @@ def _validate(self): payload_str = self._payload for signature in self._signatures: - bytes_to_verify = "{0}.{1}".format( - signature["protected"], base64url_encode(payload_str) + bytes_to_verify = b"%s.%s" % ( + Bytes.for_string_or_unicode(signature["protected"]).as_encoded_str(), + base64url_encode(payload_str), ) signer = SIGNER_ALGS[signature["header"]["alg"]] key = keyrep(signature["header"]["jwk"]) @@ -413,8 +414,11 @@ def _generate_layers(self, allow_missing_ids=False): starting from the base image and working toward the leaf node. """ for blob_sum_obj, history_obj in reversed( - zip( - self._parsed[DOCKER_SCHEMA1_FS_LAYERS_KEY], self._parsed[DOCKER_SCHEMA1_HISTORY_KEY] + list( + zip( + self._parsed[DOCKER_SCHEMA1_FS_LAYERS_KEY], + self._parsed[DOCKER_SCHEMA1_HISTORY_KEY], + ) ) ): @@ -538,10 +542,10 @@ def rewrite_invalid_image_ids(self, images_map): working_image_id = extracted_v1_metadata.image_id # Update our digest_history hash for the new layer data. - digest_history.update(digest_str) - digest_history.update("@") + digest_history.update(digest_str.encode("utf-8")) + digest_history.update("@".encode("utf-8")) digest_history.update(layer.raw_v1_metadata.encode("utf-8")) - digest_history.update("|") + digest_history.update("|".encode("utf-8")) # Ensure that the v1 image's storage matches the V2 blob. 
If not, we've # found a data inconsistency and need to create a new layer ID for the V1 @@ -693,17 +697,19 @@ def build(self, json_web_key=None, ensure_ascii=True): return DockerSchema1Manifest(Bytes.for_string_or_unicode(payload_str)) payload_str = Bytes.for_string_or_unicode(payload_str).as_encoded_str() - split_point = payload_str.rfind("\n}") + split_point = payload_str.rfind(b"\n}") protected_payload = { - "formatTail": base64url_encode(payload_str[split_point:]), + "formatTail": base64url_encode(payload_str[split_point:]).decode("ascii"), "formatLength": split_point, "time": datetime.utcnow().strftime(_ISO_DATETIME_FORMAT_ZULU), } - protected = base64url_encode(json.dumps(protected_payload, ensure_ascii=ensure_ascii)) + protected = base64url_encode( + json.dumps(protected_payload, ensure_ascii=ensure_ascii).encode("utf-8") + ) logger.debug("Generated protected block: %s", protected) - bytes_to_sign = "{0}.{1}".format(protected, base64url_encode(payload_str)) + bytes_to_sign = b"%s.%s" % (protected, base64url_encode(payload_str)) signer = SIGNER_ALGS[_JWS_SIGNING_ALGORITHM] signature = base64url_encode(signer.sign(bytes_to_sign, json_web_key.get_key())) @@ -711,13 +717,15 @@ def build(self, json_web_key=None, ensure_ascii=True): public_members = set(json_web_key.public_members) public_key = { - comp: value for comp, value in json_web_key.to_dict().items() if comp in public_members + comp: value + for comp, value in list(json_web_key.to_dict().items()) + if comp in public_members } signature_block = { DOCKER_SCHEMA1_HEADER_KEY: {"jwk": public_key, "alg": _JWS_SIGNING_ALGORITHM}, - DOCKER_SCHEMA1_SIGNATURE_KEY: signature, - DOCKER_SCHEMA1_PROTECTED_KEY: protected, + DOCKER_SCHEMA1_SIGNATURE_KEY: signature.decode("ascii"), + DOCKER_SCHEMA1_PROTECTED_KEY: protected.decode("ascii"), } logger.debug("Encoded signature block: %s", json.dumps(signature_block)) diff --git a/image/docker/schema2/__init__.py b/image/docker/schema2/__init__.py index 240c1d6813..cbf2efd54d 100644 --- a/image/docker/schema2/__init__.py +++ b/image/docker/schema2/__init__.py @@ -26,9 +26,8 @@ # https://github.com/docker/distribution/blob/749f6afb4572201e3c37325d0ffedb6f32be8950/manifest/schema1/config_builder.go#L22 EMPTY_LAYER_BLOB_DIGEST = "sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16422d00e8a7c22955b46d4" EMPTY_LAYER_SIZE = 32 -EMPTY_LAYER_BYTES = "".join( - map( - chr, +EMPTY_LAYER_BYTES = bytes( + bytearray( [ 31, 139, @@ -62,6 +61,6 @@ 4, 0, 0, - ], + ] ) ) diff --git a/image/docker/schema2/manifest.py b/image/docker/schema2/manifest.py index f0f0c07987..8716851a45 100644 --- a/image/docker/schema2/manifest.py +++ b/image/docker/schema2/manifest.py @@ -297,12 +297,12 @@ def _manifest_image_layers(self, content_retriever): # Create a new synthesized V1 ID for the history layer by hashing its content and # the blob associated with it. 
- digest_history.update(json.dumps(history_entry.raw_entry or "empty")) - digest_history.update("|") - digest_history.update(str(history_index)) - digest_history.update("|") - digest_history.update(blob_digest) - digest_history.update("||") + digest_history.update(json.dumps(history_entry.raw_entry).encode("utf-8") or b"empty") + digest_history.update("|".encode("utf-8")) + digest_history.update(str(history_index).encode("utf-8")) + digest_history.update("|".encode("utf-8")) + digest_history.update(blob_digest.encode("utf-8")) + digest_history.update("||".encode("utf-8")) v1_layer_id = digest_history.hexdigest() yield DockerV2ManifestImageLayer( diff --git a/image/docker/schema2/test/test_list.py b/image/docker/schema2/test/test_list.py index fb2cbf0795..568a67521a 100644 --- a/image/docker/schema2/test/test_list.py +++ b/image/docker/schema2/test/test_list.py @@ -56,7 +56,7 @@ def test_malformed_manifest_lists(json_data): }, ], } -) +).encode("utf-8") NO_AMD_MANIFESTLIST_BYTES = json.dumps( { @@ -71,7 +71,7 @@ def test_malformed_manifest_lists(json_data): }, ], } -) +).encode("utf-8") retriever = ContentRetrieverForTesting({"sha256:e6": v22_bytes, "sha256:5b": v21_bytes,}) diff --git a/image/docker/schema2/test/test_manifest.py b/image/docker/schema2/test/test_manifest.py index 484216ebbe..ca1d4e7f7a 100644 --- a/image/docker/schema2/test/test_manifest.py +++ b/image/docker/schema2/test/test_manifest.py @@ -71,7 +71,7 @@ def test_malformed_manifests(json_data): }, ], } -) +).encode("utf-8") REMOTE_MANIFEST_BYTES = json.dumps( { @@ -106,7 +106,7 @@ def test_malformed_manifests(json_data): }, ], } -) +).encode("utf-8") def test_valid_manifest(): @@ -374,13 +374,13 @@ def test_unencoded_unicode_manifest(): retriever = ContentRetrieverForTesting.for_config( { - "config": {"author": u"Sômé guy",}, + "config": {"author": "Sômé guy",}, "rootfs": {"type": "layers", "diff_ids": []}, "history": [ { "created": "2018-04-03T18:37:09.284840891Z", "created_by": "base", - "author": u"Sômé guy", + "author": "Sômé guy", }, ], }, @@ -390,19 +390,19 @@ def test_unencoded_unicode_manifest(): ) layers = list(manifest.get_layers(retriever)) - assert layers[0].author == u"Sômé guy" + assert layers[0].author == "Sômé guy" def test_build_unencoded_unicode_manifest(): config_json = json.dumps( { - "config": {"author": u"Sômé guy",}, + "config": {"author": "Sômé guy",}, "rootfs": {"type": "layers", "diff_ids": []}, "history": [ { "created": "2018-04-03T18:37:09.284840891Z", "created_by": "base", - "author": u"Sômé guy", + "author": "Sômé guy", }, ], }, @@ -419,13 +419,13 @@ def test_build_unencoded_unicode_manifest(): def test_load_unicode_manifest(): test_dir = os.path.dirname(os.path.abspath(__file__)) - with open(os.path.join(test_dir, "unicode_manifest_config.json"), "r") as f: + with open(os.path.join(test_dir, "unicode_manifest_config.json"), "rb") as f: retriever = ContentRetrieverForTesting() retriever.add_digest( "sha256:5bdd65cdd055c7f3bbaecdc9fd6c75f155322520f85953aa0e2724cab006d407", f.read() ) - with open(os.path.join(test_dir, "unicode_manifest.json"), "r") as f: + with open(os.path.join(test_dir, "unicode_manifest.json"), "rb") as f: manifest_bytes = f.read() manifest = DockerSchema2Manifest(Bytes.for_string_or_unicode(manifest_bytes)) @@ -434,4 +434,4 @@ def test_load_unicode_manifest(): ) layers = list(manifest.get_layers(retriever)) - assert layers[-1].author == u"Sômé guy" + assert layers[-1].author == "Sômé guy" diff --git a/image/docker/squashed.py b/image/docker/squashed.py index 
79e15ee5b0..f4927f3783 100644 --- a/image/docker/squashed.py +++ b/image/docker/squashed.py @@ -63,7 +63,9 @@ def stream_generator( repository = tag.repository.name repositories[hostname + "/" + namespace + "/" + repository] = synthetic_layer_info - yield self.tar_file("repositories", json.dumps(repositories), mtime=image_mtime) + yield self.tar_file( + "repositories", json.dumps(repositories).encode("utf-8"), mtime=image_mtime + ) # Yield the image ID folder. yield self.tar_folder(synthetic_image_id, mtime=image_mtime) @@ -72,10 +74,12 @@ def stream_generator( layer_json = SquashedDockerImageFormatter._build_layer_json( parsed_manifest, synthetic_image_id ) - yield self.tar_file(synthetic_image_id + "/json", json.dumps(layer_json), mtime=image_mtime) + yield self.tar_file( + synthetic_image_id + "/json", json.dumps(layer_json).encode("utf-8"), mtime=image_mtime + ) # Yield the VERSION file. - yield self.tar_file(synthetic_image_id + "/VERSION", "1.0", mtime=image_mtime) + yield self.tar_file(synthetic_image_id + "/VERSION", b"1.0", mtime=image_mtime) # Yield the merged layer data's header. estimated_file_size = 0 @@ -118,15 +122,15 @@ def stream_generator( to_yield = estimated_file_size - yielded_size while to_yield > 0: yielded = min(to_yield, GZIP_BUFFER_SIZE) - yield "\0" * yielded + yield b"\0" * yielded to_yield -= yielded # Yield any file padding to 512 bytes that is necessary. yield self.tar_file_padding(estimated_file_size) # Last two records are empty in tar spec. - yield "\0" * 512 - yield "\0" * 512 + yield b"\0" * 512 + yield b"\0" * 512 @staticmethod def _build_layer_json(manifest, synthetic_image_id): diff --git a/image/docker/test/test_schema1.py b/image/docker/test/test_schema1.py index 9e3ec65f75..b86270e979 100644 --- a/image/docker/test/test_schema1.py +++ b/image/docker/test/test_schema1.py @@ -148,14 +148,14 @@ def test_validate_manifest_with_unencoded_unicode(): assert manifest.created_datetime layers = list(manifest.get_layers(None)) - assert layers[-1].author == u"Sômé guy" + assert layers[-1].author == "Sômé guy" @pytest.mark.parametrize("with_key", [None, docker_v2_signing_key,]) def test_build_unencoded_unicode_manifest(with_key): builder = DockerSchema1ManifestBuilder("somenamespace", "somerepo", "sometag") builder.add_layer( - "sha256:abcde", json.dumps({"id": "someid", "author": u"Sômé guy",}, ensure_ascii=False) + "sha256:abcde", json.dumps({"id": "someid", "author": "Sômé guy",}, ensure_ascii=False) ) built = builder.build(with_key, ensure_ascii=False) @@ -180,7 +180,7 @@ def test_validate_manifest_known_issue(): def test_validate_manifest_with_emoji(with_key): builder = DockerSchema1ManifestBuilder("somenamespace", "somerepo", "sometag") builder.add_layer( - "sha256:abcde", json.dumps({"id": "someid", "author": u"😱",}, ensure_ascii=False) + "sha256:abcde", json.dumps({"id": "someid", "author": "😱",}, ensure_ascii=False) ) built = builder.build(with_key, ensure_ascii=False) @@ -212,7 +212,7 @@ def test_build_with_metadata_removed(): { "id": "someid", "parent": "someid", - "author": u"😱", + "author": "😱", "comment": "hello world!", "created": "1975-01-02 12:34", "Size": 5678, @@ -225,7 +225,7 @@ def test_build_with_metadata_removed(): json.dumps( { "id": "anotherid", - "author": u"😱", + "author": "😱", "created": "1985-02-03 12:34", "Size": 1234, "container_config": {"Cmd": "barbaz", "more": "stuff", "goes": "here",}, diff --git a/image/oci/manifest.py b/image/oci/manifest.py index 3738a2c097..909c75263f 100644 --- a/image/oci/manifest.py +++ 
b/image/oci/manifest.py @@ -248,7 +248,7 @@ def get_layers(self, content_retriever): not support layers. """ if not self.is_image_manifest: - raise StopIteration() + return for image_layer in self._manifest_image_layers(content_retriever): is_remote = image_layer.blob_layer.is_remote if image_layer.blob_layer else False @@ -288,8 +288,8 @@ def _manifest_image_layers(self, content_retriever): # "images" based on the layer data, with empty config (with exception of the final layer). if not history: for index, filesystem_layer in enumerate(self.filesystem_layers): - digest_history.update(str(filesystem_layer.digest)) - digest_history.update("||") + digest_history.update(str(filesystem_layer.digest).encode("ascii")) + digest_history.update(b"||") v1_layer_parent_id = v1_layer_id v1_layer_id = digest_history.hexdigest() @@ -304,7 +304,7 @@ def _manifest_image_layers(self, content_retriever): v1_parent_id=v1_layer_parent_id, compressed_size=filesystem_layer.compressed_size, ) - raise StopIteration() + return # Make sure we aren't missing any history entries if it was specified. if len(history) < len(self.filesystem_layers): @@ -325,12 +325,12 @@ def _manifest_image_layers(self, content_retriever): # Create a new synthesized V1 ID for the history layer by hashing its content and # the blob associated with it. - digest_history.update(json.dumps(history_entry.raw_entry)) - digest_history.update("|") - digest_history.update(str(history_index)) - digest_history.update("|") - digest_history.update(blob_digest) - digest_history.update("||") + digest_history.update(json.dumps(history_entry.raw_entry).encode("utf-8")) + digest_history.update(b"|") + digest_history.update(b"%d" % history_index) + digest_history.update(b"|") + digest_history.update(blob_digest.encode("ascii")) + digest_history.update(b"||") v1_layer_id = digest_history.hexdigest() yield OCIManifestImageLayer( diff --git a/image/oci/test/test_oci_config.py b/image/oci/test/test_oci_config.py index a5146cc85f..4661ea1721 100644 --- a/image/oci/test/test_oci_config.py +++ b/image/oci/test/test_oci_config.py @@ -75,28 +75,28 @@ def test_parse_basic_config(): expected = [ LayerHistory( - created=u"2015-10-31T22:22:54.690851953Z", + created="2015-10-31T22:22:54.690851953Z", created_datetime=datetime.datetime(2015, 10, 31, 22, 22, 54, 690851, tzinfo=tzutc()), - command=u"/bin/sh -c #(nop) ADD file:a3bc1e842b69636f9df5256c49c5374fb4eef1e281fe3f282c65fb853ee171c5 in /", + command="/bin/sh -c #(nop) ADD file:a3bc1e842b69636f9df5256c49c5374fb4eef1e281fe3f282c65fb853ee171c5 in /", is_empty=False, author=None, comment=None, raw_entry={ - u"created_by": u"/bin/sh -c #(nop) ADD file:a3bc1e842b69636f9df5256c49c5374fb4eef1e281fe3f282c65fb853ee171c5 in /", - u"created": u"2015-10-31T22:22:54.690851953Z", + "created_by": "/bin/sh -c #(nop) ADD file:a3bc1e842b69636f9df5256c49c5374fb4eef1e281fe3f282c65fb853ee171c5 in /", + "created": "2015-10-31T22:22:54.690851953Z", }, ), LayerHistory( - created=u"2015-10-31T22:22:55.613815829Z", + created="2015-10-31T22:22:55.613815829Z", created_datetime=datetime.datetime(2015, 10, 31, 22, 22, 55, 613815, tzinfo=tzutc()), - command=u'/bin/sh -c #(nop) CMD ["sh"]', + command='/bin/sh -c #(nop) CMD ["sh"]', is_empty=True, author=None, comment=None, raw_entry={ - u"empty_layer": True, - u"created_by": u'/bin/sh -c #(nop) CMD ["sh"]', - u"created": u"2015-10-31T22:22:55.613815829Z", + "empty_layer": True, + "created_by": '/bin/sh -c #(nop) CMD ["sh"]', + "created": "2015-10-31T22:22:55.613815829Z", }, ), ] diff --git 
a/image/oci/test/test_oci_index.py b/image/oci/test/test_oci_index.py index ffabffac3e..84c8d747c3 100644 --- a/image/oci/test/test_oci_index.py +++ b/image/oci/test/test_oci_index.py @@ -40,8 +40,8 @@ def test_parse_basic_index(): assert index.digest == "sha256:b1a216e8ed6a267bd3f0234d0d096c04658b28cb08b2b16bf812cf72694d7d04" assert index.local_blob_digests == [] assert index.child_manifest_digests() == [ - u"sha256:e692418e4cbaf90ca69d05a66403747baa33ee08806650b51fab815ad7fc331f", - u"sha256:5b0bcabd1ed22e9fb1310cf6c2dec7cdef19f0ad69efa1f392e94a4333501270", + "sha256:e692418e4cbaf90ca69d05a66403747baa33ee08806650b51fab815ad7fc331f", + "sha256:5b0bcabd1ed22e9fb1310cf6c2dec7cdef19f0ad69efa1f392e94a4333501270", ] diff --git a/image/shared/schemautil.py b/image/shared/schemautil.py index c7aa7ce94d..4d951f613e 100644 --- a/image/shared/schemautil.py +++ b/image/shared/schemautil.py @@ -30,7 +30,7 @@ def for_config(cls, config_obj, digest, size, ensure_ascii=True): class _CustomEncoder(json.JSONEncoder): def encode(self, o): encoded = super(_CustomEncoder, self).encode(o) - if isinstance(o, basestring): + if isinstance(o, str): encoded = encoded.replace("<", "\\u003c") encoded = encoded.replace(">", "\\u003e") encoded = encoded.replace("&", "\\u0026") diff --git a/image/shared/test/test_schemautil.py b/image/shared/test/test_schemautil.py index 14e3ab744b..af3c3e91a9 100644 --- a/image/shared/test/test_schemautil.py +++ b/image/shared/test/test_schemautil.py @@ -24,4 +24,3 @@ def test_to_canonical_json(input, expected_output): # Ensure the result is utf-8. assert isinstance(result, str) - result.decode("utf-8") diff --git a/initdb.py b/initdb.py index f0648c3ec0..dd21461394 100644 --- a/initdb.py +++ b/initdb.py @@ -131,7 +131,7 @@ def __generate_service_key( def _populate_blob(repo, content): assert isinstance(repo, Repository) - assert isinstance(content, basestring) + assert isinstance(content, bytes) digest = str(sha256_digest(content)) location = ImageStorageLocation.get(name="local_us") blob = model.blob.store_blob_record_and_temp_link_in_repo( @@ -166,7 +166,7 @@ def __create_manifest_and_tags( leaf_id = None for layer_index in range(0, num_layers): content = "layer-%s-%s-%s" % (layer_index, current_level, get_epoch_timestamp_ms()) - _, digest = _populate_blob(repo, content) + _, digest = _populate_blob(repo, content.encode("ascii")) current_id = "abcdef%s%s%s" % (layer_index, current_level, get_epoch_timestamp_ms()) if layer_index == num_layers - 1: diff --git a/loghandler.py b/loghandler.py index a3eac4a233..288055f303 100755 --- a/loghandler.py +++ b/loghandler.py @@ -1,7 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -from __future__ import absolute_import import datetime import json @@ -103,7 +102,7 @@ def add_fields(self, log_record, record, message_dict): log_record[self.prefix_key] = {} target = log_record[self.prefix_key] - for field, value in record.__dict__.iteritems(): + for field, value in record.__dict__.items(): if field in self._fmt_parameters and field in RESERVED_ATTRS: log_record[field] = value elif field not in RESERVED_ATTRS: diff --git a/notifications/notificationmethod.py b/notifications/notificationmethod.py index fb541e0f82..38483c57dc 100644 --- a/notifications/notificationmethod.py +++ b/notifications/notificationmethod.py @@ -4,7 +4,7 @@ import requests from flask_mail import Message -from urlparse import urlparse +from urllib.parse import urlparse from app import mail, app, OVERRIDE_CONFIG_DIRECTORY from data import model @@ -177,7 +177,7 @@ def 
perform(self, notification_obj, event_handler, notification_data): mail.send(msg) except Exception as ex: logger.exception("Email was unable to be sent") - raise NotificationMethodPerformException(ex.message) + raise NotificationMethodPerformException(str(ex)) class WebhookMethod(NotificationMethod): @@ -242,7 +242,7 @@ def perform(self, notification_obj, event_handler, notification_data): cert=_ssl_cert(), timeout=METHOD_TIMEOUT, ) - if resp.status_code / 100 != 2: + if resp.status_code // 100 != 2: error_message = "%s response for webhook to url: %s" % (resp.status_code, url) logger.error(error_message) logger.error(resp.content) @@ -250,7 +250,7 @@ def perform(self, notification_obj, event_handler, notification_data): except requests.exceptions.RequestException as ex: logger.exception("Webhook was unable to be sent") - raise NotificationMethodPerformException(ex.message) + raise NotificationMethodPerformException(str(ex)) class FlowdockMethod(NotificationMethod): @@ -303,7 +303,7 @@ def perform(self, notification_obj, event_handler, notification_data): resp = requests.post( url, data=json.dumps(payload), headers=headers, timeout=METHOD_TIMEOUT ) - if resp.status_code / 100 != 2: + if resp.status_code // 100 != 2: error_message = "%s response for flowdock to url: %s" % (resp.status_code, url) logger.error(error_message) logger.error(resp.content) @@ -311,7 +311,7 @@ def perform(self, notification_obj, event_handler, notification_data): except requests.exceptions.RequestException as ex: logger.exception("Flowdock method was unable to be sent") - raise NotificationMethodPerformException(ex.message) + raise NotificationMethodPerformException(str(ex)) class HipchatMethod(NotificationMethod): @@ -372,7 +372,7 @@ def perform(self, notification_obj, event_handler, notification_data): resp = requests.post( url, data=json.dumps(payload), headers=headers, timeout=METHOD_TIMEOUT ) - if resp.status_code / 100 != 2: + if resp.status_code // 100 != 2: error_message = "%s response for hipchat to url: %s" % (resp.status_code, url) logger.error(error_message) logger.error(resp.content) @@ -380,14 +380,15 @@ def perform(self, notification_obj, event_handler, notification_data): except requests.exceptions.RequestException as ex: logger.exception("Hipchat method was unable to be sent") - raise NotificationMethodPerformException(ex.message) + raise NotificationMethodPerformException(str(ex)) -from HTMLParser import HTMLParser +from html.parser import HTMLParser class SlackAdjuster(HTMLParser): def __init__(self): + super().__init__() self.reset() self.result = [] @@ -496,12 +497,12 @@ def perform(self, notification_obj, event_handler, notification_data): resp = requests.post( url, data=json.dumps(payload), headers=headers, timeout=METHOD_TIMEOUT ) - if resp.status_code / 100 != 2: + if resp.status_code // 100 != 2: error_message = "%s response for Slack to url: %s" % (resp.status_code, url) logger.error(error_message) logger.error(resp.content) raise NotificationMethodPerformException(error_message) except requests.exceptions.RequestException as ex: - logger.exception("Slack method was unable to be sent: %s", ex.message) - raise NotificationMethodPerformException(ex.message) + logger.exception("Slack method was unable to be sent: %s", str(ex)) + raise NotificationMethodPerformException(str(ex)) diff --git a/oauth/base.py b/oauth/base.py index 49e2941d78..0b0edcd211 100644 --- a/oauth/base.py +++ b/oauth/base.py @@ -1,7 +1,7 @@ import copy import logging -import urllib -import urlparse +import urllib.request, 
urllib.parse, urllib.error +import urllib.parse from abc import ABCMeta, abstractmethod from six import add_metaclass @@ -27,9 +27,9 @@ def with_params(self, parameters): return OAuthEndpoint(self.base_url, params_copy) def to_url(self): - (scheme, netloc, path, _, fragment) = urlparse.urlsplit(self.base_url) - updated_query = urllib.urlencode(self.params) - return urlparse.urlunsplit((scheme, netloc, path, updated_query, fragment)) + (scheme, netloc, path, _, fragment) = urllib.parse.urlsplit(self.base_url) + updated_query = urllib.parse.urlencode(self.params) + return urllib.parse.urlunsplit((scheme, netloc, path, updated_query, fragment)) class OAuthExchangeCodeException(Exception): diff --git a/oauth/loginmanager.py b/oauth/loginmanager.py index 4a5888282f..1acafde7e6 100644 --- a/oauth/loginmanager.py +++ b/oauth/loginmanager.py @@ -19,7 +19,7 @@ def __init__(self, config, client=None): self.services = [] # Register the endpoints for each of the OAuth login services. - for key in config.keys(): + for key in list(config.keys()): # All keys which end in _LOGIN_CONFIG setup a login service. if key.endswith("_LOGIN_CONFIG"): if key in CUSTOM_LOGIN_SERVICES: diff --git a/oauth/oidc.py b/oauth/oidc.py index 8c81effe48..f7f6e89e15 100644 --- a/oauth/oidc.py +++ b/oauth/oidc.py @@ -1,7 +1,7 @@ import time import json import logging -import urlparse +import urllib.parse import jwt @@ -100,16 +100,16 @@ def _get_endpoint(self, endpoint_key, **kwargs): if not endpoint: return None - (scheme, netloc, path, query, fragment) = urlparse.urlsplit(endpoint) + (scheme, netloc, path, query, fragment) = urllib.parse.urlsplit(endpoint) # Add the query parameters from the kwargs and the config. custom_parameters = self.config.get("OIDC_ENDPOINT_CUSTOM_PARAMS", {}).get(endpoint_key, {}) - query_params = urlparse.parse_qs(query, keep_blank_values=True) + query_params = urllib.parse.parse_qs(query, keep_blank_values=True) query_params.update(kwargs) query_params.update(custom_parameters) return OAuthEndpoint( - urlparse.urlunsplit((scheme, netloc, path, {}, fragment)), query_params + urllib.parse.urlunsplit((scheme, netloc, path, {}, fragment)), query_params ) def validate(self): @@ -249,7 +249,7 @@ def _load_oidc_config_via_discovery(self, is_debugging): if not oidc_server.startswith("https://") and not is_debugging: raise DiscoveryFailureException("OIDC server must be accessed over SSL") - discovery_url = urlparse.urljoin(oidc_server, OIDC_WELLKNOWN) + discovery_url = urllib.parse.urljoin(oidc_server, OIDC_WELLKNOWN) discovery = self._http_client.get(discovery_url, timeout=5, verify=is_debugging is False) if discovery.status_code // 100 != 2: logger.debug( diff --git a/oauth/provider.py b/oauth/provider.py new file mode 100644 index 0000000000..6a7fd9c2a4 --- /dev/null +++ b/oauth/provider.py @@ -0,0 +1,563 @@ +# Ported to Python 3 +# Originally from https://github.com/DeprecatedCode/oauth2lib/blob/d161b010f8a596826050a09e5e94d59443cc12d9/oauth2lib/provider.py + +import json +import logging +from requests import Response +from io import StringIO + +try: + from werkzeug.exceptions import Unauthorized +except ImportError: + Unauthorized = Exception + +from oauth import utils + + +class Provider(object): + """Base provider class for different types of OAuth 2.0 providers.""" + + def _handle_exception(self, exc): + """Handle an internal exception that was caught and suppressed. + + :param exc: Exception to process. 
+ :type exc: Exception + """ + logger = logging.getLogger(__name__) + logger.exception(exc) + + def _make_response(self, body="", headers=None, status_code=200): + """Return a response object from the given parameters. + + :param body: Buffer/string containing the response body. + :type body: str + :param headers: Dict of headers to include in the requests. + :type headers: dict + :param status_code: HTTP status code. + :type status_code: int + :rtype: requests.Response + """ + res = Response() + res.status_code = status_code + if headers is not None: + res.headers.update(headers) + res.raw = StringIO(body) + return res + + def _make_redirect_error_response(self, redirect_uri, err): + """Return a HTTP 302 redirect response object containing the error. + + :param redirect_uri: Client redirect URI. + :type redirect_uri: str + :param err: OAuth error message. + :type err: str + :rtype: requests.Response + """ + params = {"error": err, "response_type": None, "client_id": None, "redirect_uri": None} + redirect = utils.build_url(redirect_uri, params) + return self._make_response(headers={"Location": redirect}, status_code=302) + + def _make_json_response(self, data, headers=None, status_code=200): + """Return a response object from the given JSON data. + + :param data: Data to JSON-encode. + :type data: mixed + :param headers: Dict of headers to include in the requests. + :type headers: dict + :param status_code: HTTP status code. + :type status_code: int + :rtype: requests.Response + """ + response_headers = {} + if headers is not None: + response_headers.update(headers) + response_headers["Content-Type"] = "application/json;charset=UTF-8" + response_headers["Cache-Control"] = "no-store" + response_headers["Pragma"] = "no-cache" + return self._make_response(json.dumps(data), response_headers, status_code) + + def _make_json_error_response(self, err): + """Return a JSON-encoded response object representing the error. + + :param err: OAuth error message. + :type err: str + :rtype: requests.Response + """ + return self._make_json_response({"error": err}, status_code=400) + + def _invalid_redirect_uri_response(self): + """What to return when the redirect_uri parameter is missing. + + :rtype: requests.Response + """ + return self._make_json_error_response("invalid_request") + + +class AuthorizationProvider(Provider): + """OAuth 2.0 authorization provider. This class manages authorization + codes and access tokens. Certain methods MUST be overridden in a + subclass, thus this class cannot be directly used as a provider. 
+ + These are the methods that must be implemented in a subclass: + + validate_client_id(self, client_id) + # Return True or False + + validate_client_secret(self, client_id, client_secret) + # Return True or False + + validate_scope(self, client_id, scope) + # Return True or False + + validate_redirect_uri(self, client_id, redirect_uri) + # Return True or False + + validate_access(self) # Use this to validate your app session user + # Return True or False + + from_authorization_code(self, client_id, code, scope) + # Return mixed data or None on invalid + + from_refresh_token(self, client_id, refresh_token, scope) + # Return mixed data or None on invalid + + persist_authorization_code(self, client_id, code, scope) + # Return value ignored + + persist_token_information(self, client_id, scope, access_token, + token_type, expires_in, refresh_token, + data) + # Return value ignored + + discard_authorization_code(self, client_id, code) + # Return value ignored + + discard_refresh_token(self, client_id, refresh_token) + # Return value ignored + + Optionally, the following may be overridden to acheive desired behavior: + + @property + token_length(self) + + @property + token_type(self) + + @property + token_expires_in(self) + + generate_authorization_code(self) + + generate_access_token(self) + + generate_refresh_token(self) + + """ + + @property + def token_length(self): + """Property method to get the length used to generate tokens. + + :rtype: int + """ + return 40 + + @property + def token_type(self): + """Property method to get the access token type. + + :rtype: str + """ + return "Bearer" + + @property + def token_expires_in(self): + """Property method to get the token expiration time in seconds. + + :rtype: int + """ + return 3600 + + def generate_authorization_code(self): + """Generate a random authorization code. + + :rtype: str + """ + return utils.random_ascii_string(self.token_length) + + def generate_access_token(self): + """Generate a random access token. + + :rtype: str + """ + return utils.random_ascii_string(self.token_length) + + def generate_refresh_token(self): + """Generate a random refresh token. + + :rtype: str + """ + return utils.random_ascii_string(self.token_length) + + def get_authorization_code(self, response_type, client_id, redirect_uri, **params): + """Generate authorization code HTTP response. + + :param response_type: Desired response type. Must be exactly "code". + :type response_type: str + :param client_id: Client ID. + :type client_id: str + :param redirect_uri: Client redirect URI. 
+ :type redirect_uri: str + :rtype: requests.Response + """ + + # Ensure proper response_type + if response_type != "code": + err = "unsupported_response_type" + return self._make_redirect_error_response(redirect_uri, err) + + # Check redirect URI + is_valid_redirect_uri = self.validate_redirect_uri(client_id, redirect_uri) + if not is_valid_redirect_uri: + return self._invalid_redirect_uri_response() + + # Check conditions + is_valid_client_id = self.validate_client_id(client_id) + is_valid_access = self.validate_access() + scope = params.get("scope", "") + is_valid_scope = self.validate_scope(client_id, scope) + + # Return proper error responses on invalid conditions + if not is_valid_client_id: + err = "unauthorized_client" + return self._make_redirect_error_response(redirect_uri, err) + + if not is_valid_access: + err = "access_denied" + return self._make_redirect_error_response(redirect_uri, err) + + if not is_valid_scope: + err = "invalid_scope" + return self._make_redirect_error_response(redirect_uri, err) + + # Generate authorization code + code = self.generate_authorization_code() + + # Save information to be used to validate later requests + self.persist_authorization_code(client_id=client_id, code=code, scope=scope) + + # Return redirection response + params.update( + {"code": code, "response_type": None, "client_id": None, "redirect_uri": None} + ) + redirect = utils.build_url(redirect_uri, params) + return self._make_response(headers={"Location": redirect}, status_code=302) + + def refresh_token(self, grant_type, client_id, client_secret, refresh_token, **params): + """Generate access token HTTP response from a refresh token. + + :param grant_type: Desired grant type. Must be "refresh_token". + :type grant_type: str + :param client_id: Client ID. + :type client_id: str + :param client_secret: Client secret. + :type client_secret: str + :param refresh_token: Refresh token. 
+ :type refresh_token: str + :rtype: requests.Response + """ + + # Ensure proper grant_type + if grant_type != "refresh_token": + return self._make_json_error_response("unsupported_grant_type") + + # Check conditions + is_valid_client_id = self.validate_client_id(client_id) + is_valid_client_secret = self.validate_client_secret(client_id, client_secret) + scope = params.get("scope", "") + is_valid_scope = self.validate_scope(client_id, scope) + data = self.from_refresh_token(client_id, refresh_token, scope) + is_valid_refresh_token = data is not None + + # Return proper error responses on invalid conditions + if not (is_valid_client_id and is_valid_client_secret): + return self._make_json_error_response("invalid_client") + + if not is_valid_scope: + return self._make_json_error_response("invalid_scope") + + if not is_valid_refresh_token: + return self._make_json_error_response("invalid_grant") + + # Discard original refresh token + self.discard_refresh_token(client_id, refresh_token) + + # Generate access tokens once all conditions have been met + access_token = self.generate_access_token() + token_type = self.token_type + expires_in = self.token_expires_in + refresh_token = self.generate_refresh_token() + + # Save information to be used to validate later requests + self.persist_token_information( + client_id=client_id, + scope=scope, + access_token=access_token, + token_type=token_type, + expires_in=expires_in, + refresh_token=refresh_token, + data=data, + ) + + # Return json response + return self._make_json_response( + { + "access_token": access_token, + "token_type": token_type, + "expires_in": expires_in, + "refresh_token": refresh_token, + } + ) + + def get_token(self, grant_type, client_id, client_secret, redirect_uri, code, **params): + """Generate access token HTTP response. + + :param grant_type: Desired grant type. Must be "authorization_code". + :type grant_type: str + :param client_id: Client ID. + :type client_id: str + :param client_secret: Client secret. + :type client_secret: str + :param redirect_uri: Client redirect URI. + :type redirect_uri: str + :param code: Authorization code. 
+ :type code: str
+ :rtype: requests.Response
+ """
+
+ # Ensure proper grant_type
+ if grant_type != "authorization_code":
+ return self._make_json_error_response("unsupported_grant_type")
+
+ # Check conditions
+ is_valid_client_id = self.validate_client_id(client_id)
+ is_valid_client_secret = self.validate_client_secret(client_id, client_secret)
+ is_valid_redirect_uri = self.validate_redirect_uri(client_id, redirect_uri)
+
+ scope = params.get("scope", "")
+ is_valid_scope = self.validate_scope(client_id, scope)
+ data = self.from_authorization_code(client_id, code, scope)
+ is_valid_grant = data is not None
+
+ # Return proper error responses on invalid conditions
+ if not (is_valid_client_id and is_valid_client_secret):
+ return self._make_json_error_response("invalid_client")
+
+ if not is_valid_grant or not is_valid_redirect_uri:
+ return self._make_json_error_response("invalid_grant")
+
+ if not is_valid_scope:
+ return self._make_json_error_response("invalid_scope")
+
+ # Discard original authorization code
+ self.discard_authorization_code(client_id, code)
+
+ # Generate access tokens once all conditions have been met
+ access_token = self.generate_access_token()
+ token_type = self.token_type
+ expires_in = self.token_expires_in
+ refresh_token = self.generate_refresh_token()
+
+ # Save information to be used to validate later requests
+ self.persist_token_information(
+ client_id=client_id,
+ scope=scope,
+ access_token=access_token,
+ token_type=token_type,
+ expires_in=expires_in,
+ refresh_token=refresh_token,
+ data=data,
+ )
+
+ # Return json response
+ return self._make_json_response(
+ {
+ "access_token": access_token,
+ "token_type": token_type,
+ "expires_in": expires_in,
+ "refresh_token": refresh_token,
+ }
+ )
+
+ def get_authorization_code_from_uri(self, uri):
+ """Get authorization code response from a URI. This method will
+ ignore the domain and path of the request, instead
+ automatically parsing the query string parameters.
+
+ :param uri: URI to parse for authorization information.
+ :type uri: str
+ :rtype: requests.Response
+ """
+ params = utils.url_query_params(uri)
+ try:
+ if "response_type" not in params:
+ raise TypeError("Missing parameter response_type in URL query")
+
+ if "client_id" not in params:
+ raise TypeError("Missing parameter client_id in URL query")
+
+ if "redirect_uri" not in params:
+ raise TypeError("Missing parameter redirect_uri in URL query")
+
+ return self.get_authorization_code(**params)
+ except TypeError as exc:
+ self._handle_exception(exc)
+
+ # Catch missing parameters in request
+ err = "invalid_request"
+ if "redirect_uri" in params:
+ u = params["redirect_uri"]
+ return self._make_redirect_error_response(u, err)
+ else:
+ return self._invalid_redirect_uri_response()
+ except Exception as exc:
+ self._handle_exception(exc)
+
+ # Catch all other server errors
+ err = "server_error"
+ u = params["redirect_uri"]
+ return self._make_redirect_error_response(u, err)
+
+ def get_token_from_post_data(self, data):
+ """Get a token response from POST data.
+
+ :param data: POST data containing authorization information.
+ :type data: dict
+ :rtype: requests.Response
+ """
+ try:
+ # Verify OAuth 2.0 Parameters
+ for x in ["grant_type", "client_id", "client_secret"]:
+ if not data.get(x):
+ raise TypeError("Missing required OAuth 2.0 POST param: {0}".format(x))
+
+ # Handle get token from refresh_token
+ if "refresh_token" in data:
+ return self.refresh_token(**data)
+
+ # Handle get token from authorization code
+ for x in ["redirect_uri", "code"]:
+ if not data.get(x):
+ raise TypeError("Missing required OAuth 2.0 POST param: {0}".format(x))
+ return self.get_token(**data)
+ except TypeError as exc:
+ self._handle_exception(exc)
+
+ # Catch missing parameters in request
+ return self._make_json_error_response("invalid_request")
+ except Exception as exc:
+ self._handle_exception(exc)
+
+ # Catch all other server errors
+ return self._make_json_error_response("server_error")
+
+ def validate_client_id(self, client_id):
+ raise NotImplementedError("Subclasses must implement " "validate_client_id.")
+
+ def validate_client_secret(self, client_id, client_secret):
+ raise NotImplementedError("Subclasses must implement " "validate_client_secret.")
+
+ def validate_redirect_uri(self, client_id, redirect_uri):
+ raise NotImplementedError("Subclasses must implement " "validate_redirect_uri.")
+
+ def validate_scope(self, client_id, scope):
+ raise NotImplementedError("Subclasses must implement " "validate_scope.")
+
+ def validate_access(self):
+ raise NotImplementedError("Subclasses must implement " "validate_access.")
+
+ def from_authorization_code(self, client_id, code, scope):
+ raise NotImplementedError("Subclasses must implement " "from_authorization_code.")
+
+ def from_refresh_token(self, client_id, refresh_token, scope):
+ raise NotImplementedError("Subclasses must implement " "from_refresh_token.")
+
+ def persist_authorization_code(self, client_id, code, scope):
+ raise NotImplementedError("Subclasses must implement " "persist_authorization_code.")
+
+ def persist_token_information(
+ self, client_id, scope, access_token, token_type, expires_in, refresh_token, data
+ ):
+ raise NotImplementedError("Subclasses must implement " "persist_token_information.")
+
+ def discard_authorization_code(self, client_id, code):
+ raise NotImplementedError("Subclasses must implement " "discard_authorization_code.")
+
+ def discard_refresh_token(self, client_id, refresh_token):
+ raise NotImplementedError("Subclasses must implement " "discard_refresh_token.")
+
+
+class OAuthError(Unauthorized):
+ """OAuth error, including the OAuth error reason."""
+
+ def __init__(self, reason, *args, **kwargs):
+ self.reason = reason
+ super(OAuthError, self).__init__(*args, **kwargs)
+
+
+class ResourceAuthorization(object):
+ """A class containing an OAuth 2.0 authorization."""
+
+ is_oauth = False
+ is_valid = None
+ token = None
+ client_id = None
+ expires_in = None
+ error = None
+
+ def raise_error_if_invalid(self):
+ if not self.is_valid:
+ raise OAuthError(self.error, "OAuth authorization error")
+
+
+class ResourceProvider(Provider):
+ """OAuth 2.0 resource provider. This class provides an interface
+ to validate an incoming request and authenticate resource access.
+ Certain methods MUST be overridden in a subclass, thus this
+ class cannot be directly used as a resource provider.
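For context on the AuthorizationProvider contract listed earlier: the class only becomes usable once the validation and persistence hooks are implemented. The following is a minimal sketch, assuming a hypothetical in-memory client/code/token store; none of these names appear in the patch, and the import path is inferred from the surrounding hunks.

# Illustrative sketch only -- not part of this patch.
from oauth.provider import AuthorizationProvider  # assumed module path

class InMemoryAuthorizationProvider(AuthorizationProvider):
    """Hypothetical provider backed by plain dicts, for illustration."""

    def __init__(self, client_id, client_secret, redirect_uri):
        self._client_id = client_id
        self._client_secret = client_secret
        self._redirect_uri = redirect_uri
        self._codes = {}    # (client_id, code) -> persisted grant data
        self._tokens = {}   # refresh_token -> persisted token data

    def validate_client_id(self, client_id):
        return client_id == self._client_id

    def validate_client_secret(self, client_id, client_secret):
        return client_id == self._client_id and client_secret == self._client_secret

    def validate_scope(self, client_id, scope):
        return scope in ("", "read", "write")

    def validate_redirect_uri(self, client_id, redirect_uri):
        return redirect_uri == self._redirect_uri

    def validate_access(self):
        # Hook the application's own session/user check in here.
        return True

    def from_authorization_code(self, client_id, code, scope):
        return self._codes.get((client_id, code))

    def from_refresh_token(self, client_id, refresh_token, scope):
        return self._tokens.get(refresh_token)

    def persist_authorization_code(self, client_id, code, scope):
        self._codes[(client_id, code)] = {"client_id": client_id, "scope": scope}

    def persist_token_information(self, client_id, scope, access_token, token_type,
                                  expires_in, refresh_token, data):
        self._tokens[refresh_token] = data

    def discard_authorization_code(self, client_id, code):
        self._codes.pop((client_id, code), None)

    def discard_refresh_token(self, client_id, refresh_token):
        self._tokens.pop(refresh_token, None)

With a subclass along these lines, get_authorization_code_from_uri() and get_token_from_post_data() shown above can be exercised end to end without any external storage.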
+ + These are the methods that must be implemented in a subclass: + + get_authorization_header(self) + # Return header string for key "Authorization" or None + + validate_access_token(self, access_token, authorization) + # Set is_valid=True, client_id, and expires_in attributes + # on authorization if authorization was successful. + # Return value is ignored + """ + + @property + def authorization_class(self): + return ResourceAuthorization + + def get_authorization(self): + """Get authorization object representing status of authentication.""" + auth = self.authorization_class() + header = self.get_authorization_header() + if not header or not header.split: + return auth + header = header.split() + if len(header) > 1 and header[0] == "Bearer": + auth.is_oauth = True + access_token = header[1] + self.validate_access_token(access_token, auth) + if not auth.is_valid: + auth.error = "access_denied" + return auth + + def get_authorization_header(self): + raise NotImplementedError("Subclasses must implement " "get_authorization_header.") + + def validate_access_token(self, access_token, authorization): + raise NotImplementedError("Subclasses must implement " "validate_token.") diff --git a/oauth/test/test_oidc.py b/oauth/test/test_oidc.py index 63b8fa12cc..91b48b2f07 100644 --- a/oauth/test/test_oidc.py +++ b/oauth/test/test_oidc.py @@ -2,7 +2,7 @@ import json import time -import urlparse +import urllib.parse import jwt import pytest @@ -166,8 +166,8 @@ def handler(_, __): def authorize_handler(discovery_content): @urlmatch(netloc=r"fakeoidc", path=r"/authorize") def handler(_, request): - parsed = urlparse.urlparse(request.url) - params = urlparse.parse_qs(parsed.query) + parsed = urllib.parse.urlparse(request.url) + params = urllib.parse.parse_qs(parsed.query) return json.dumps( {"authorized": True, "scope": params["scope"][0], "state": params["state"][0]} ) @@ -179,7 +179,7 @@ def handler(_, request): def token_handler(oidc_service, id_token, valid_code): @urlmatch(netloc=r"fakeoidc", path=r"/token") def handler(_, request): - params = urlparse.parse_qs(request.body) + params = urllib.parse.parse_qs(request.body) if params.get("redirect_uri")[0] != "http://localhost/oauth2/someoidc/callback": return {"status_code": 400, "content": "Invalid redirect URI"} @@ -197,7 +197,7 @@ def handler(_, request): content = { "access_token": "sometoken", - "id_token": id_token, + "id_token": id_token.decode("ascii"), } return {"status_code": 200, "content": json.dumps(content)} @@ -294,7 +294,7 @@ def test_public_config(oidc_service, discovery_handler): assert oidc_service.get_public_config()["CLIENT_ID"] == "foo" assert "CLIENT_SECRET" not in oidc_service.get_public_config() - assert "bar" not in oidc_service.get_public_config().values() + assert "bar" not in list(oidc_service.get_public_config().values()) def test_auth_url(oidc_service, discovery_handler, http_client, authorize_handler): diff --git a/oauth/utils.py b/oauth/utils.py new file mode 100644 index 0000000000..0a91f431a5 --- /dev/null +++ b/oauth/utils.py @@ -0,0 +1,65 @@ +# Ported to Python 3 +# Originally from https://github.com/DeprecatedCode/oauth2lib/blob/d161b010f8a596826050a09e5e94d59443cc12d9/oauth2lib/provider.py + +import string +import urllib.parse +from random import SystemRandom + +UNICODE_ASCII_CHARACTERS = string.ascii_letters + string.digits + + +def random_ascii_string(length): + random = SystemRandom() + return "".join([random.choice(UNICODE_ASCII_CHARACTERS) for _ in range(length)]) + + +def url_query_params(url): + 
"""Return query parameters as a dict from the specified URL. + + :param url: URL. + :type url: str + :rtype: dict + """ + return dict(urllib.parse.parse_qsl(urllib.parse.urlparse(url).query, True)) + + +def url_dequery(url): + """Return a URL with the query component removed. + + :param url: URL to dequery. + :type url: str + :rtype: str + """ + url = urllib.parse.urlparse(url) + return urllib.parse.urlunparse((url.scheme, url.netloc, url.path, url.params, "", url.fragment)) + + +def build_url(base, additional_params=None): + """Construct a URL based off of base containing all parameters in + the query portion of base plus any additional parameters. + + :param base: Base URL + :type base: str + ::param additional_params: Additional query parameters to include. + :type additional_params: dict + :rtype: str + """ + url = urllib.parse.urlparse(base) + query_params = {} + query_params.update(urllib.parse.parse_qsl(url.query, True)) + if additional_params is not None: + query_params.update(additional_params) + for k, v in additional_params.items(): + if v is None: + query_params.pop(k) + + return urllib.parse.urlunparse( + ( + url.scheme, + url.netloc, + url.path, + url.params, + urllib.parse.urlencode(query_params), + url.fragment, + ) + ) diff --git a/quay-entrypoint.sh b/quay-entrypoint.sh index 83d20a3478..55174c1500 100755 --- a/quay-entrypoint.sh +++ b/quay-entrypoint.sh @@ -45,7 +45,7 @@ EOF export DB_CONNECTION_POOLING_REGISTRY=${DB_CONNECTION_POOLING:-"true"} # Forcibly export the scl environment -eval "$(scl enable python27 rh-nginx112 'export -p')" +eval "$(scl enable python37 rh-nginx112 'export -p')" case "$QUAYENTRY" in "shell") diff --git a/registry.py b/registry.py index ed0e5b4f77..627982e10d 100644 --- a/registry.py +++ b/registry.py @@ -1,8 +1,3 @@ -# NOTE: Must be before we import or call anything that may be synchronous. -from gevent import monkey - -monkey.patch_all() - import endpoints.decorated # Note: We need to import this module to make sure the decorators are registered. 
import features diff --git a/requirements-dev.txt b/requirements-dev.txt index 41df8d8b47..06981ad43c 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,6 +1,4 @@ --e git+https://github.com/ant31/pytest-sugar.git#egg=pytest-sugar --e git+https://github.com/coreos/mockldap.git@59a46efbe8c7cd8146a87a7c4f2b09746b953e11#egg=mockldap -# black -- this requires py3 +black freezegun==0.3.12 httmock==1.3.0 ipdb @@ -14,6 +12,7 @@ pytest-runner pytest-timeout pytest-xdist python-coveralls +pytest-sugar tox tox-docker tqdm diff --git a/requirements-nover.txt b/requirements-nover.txt index e15f26d9ac..9871e0adb2 100644 --- a/requirements-nover.txt +++ b/requirements-nover.txt @@ -1,9 +1,8 @@ -e git+https://github.com/DevTable/aniso8601-fake.git#egg=aniso8610 --e git+https://github.com/DevTable/anunidecode.git#egg=anunidecode -e git+https://github.com/DevTable/container-cloud-config.git#egg=container-cloud-config -e git+https://github.com/DevTable/python-etcd.git@sslfix#egg=python-etcd -e git+https://github.com/NateFerrero/oauth2lib.git#egg=oauth2lib --e git+https://github.com/coreos/mockldap.git@v0.1.x#egg=mockldap +-e git+https://github.com/quay/mockldap.git@4265554a3d89fe39bf05b18e91607bec3fcf215a#egg=mockldap -e git+https://github.com/coreos/py-bitbucket.git#egg=py-bitbucket -e git+https://github.com/coreos/resumablehashlib.git#egg=resumablehashlib -e git+https://github.com/app-registry/appr-server.git@c2ef3b88afe926a92ef5f2e11e7d4a259e286a17#egg=cnr_server # naming has changed @@ -55,7 +54,7 @@ pyasn1 py-bcrypt pyOpenSSL pycryptodome -pygpgme +gpg pyjwkest pyjwt pymemcache @@ -70,7 +69,7 @@ pyyaml raven redis redlock -reportlab==2.7 +reportlab requests-aws4auth semantic-version sqlalchemy @@ -79,9 +78,9 @@ stripe supervisor supervisor-stdout supervisor-logging +text_unidecode tldextract toposort -trollius tzlocal xhtml2pdf recaptcha2 diff --git a/requirements.txt b/requirements.txt index 2b1eaf087f..2afbb899bb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,100 +1,105 @@ --e git+https://github.com/app-registry/appr-server.git@c2ef3b88afe926a92ef5f2e11e7d4a259e286a17#egg=cnr_server --e git+https://github.com/coreos/py-bitbucket.git@55a1ada645f2fb6369147996ec71edd7828d91c8#egg=py_bitbucket --e git+https://github.com/coreos/resumablehashlib.git@b1b631249589b07adf40e0ee545b323a501340b4#egg=resumablehashlib --e git+https://github.com/devtable/aniso8601-fake.git@bd7762c7dea0498706d3f57db60cd8a8af44ba90#egg=aniso8601 --e git+https://github.com/devtable/anunidecode.git@d59236a822e578ba3a0e5e5abbd3855873fa7a88#egg=anunidecode --e git+https://github.com/devtable/boto.git@a6a5c00bd199b1492e99199251b10451970b5b08#egg=boto --e git+https://github.com/devtable/container-cloud-config.git@44e06879a710661f01054b300dc78a12a268b6b5#egg=container_cloud_config --e git+https://github.com/devtable/python-etcd.git@f1168cb02a2a8c83bec1108c6fcd8615ef463b14#egg=python_etcd --e git+https://github.com/jarus/flask-testing.git@17f19d7fee0e1e176703fc7cb04917a77913ba1a#egg=flask_testing --e git+https://github.com/nateferrero/oauth2lib.git@d161b010f8a596826050a09e5e94d59443cc12d9#egg=oauth2lib +-e git+https://github.com/quay/appr.git@58c88e4952e95935c0dd72d4a24b0c44f2249f5b#egg=cnr_server +-e git+https://github.com/DevTable/aniso8601-fake.git@bd7762c7dea0498706d3f57db60cd8a8af44ba90#egg=aniso8601 +-e 
git+https://github.com/DevTable/boto.git@a6a5c00bd199b1492e99199251b10451970b5b08#egg=boto +-e git+https://github.com/DevTable/container-cloud-config.git@7d6c1545554b81ac65edd6d1bd1ab9f8c462209a#egg=container_cloud_config +-e git+https://github.com/jarus/flask-testing.git@17f19d7fee0e1e176703fc7cb04917a77913ba1a#egg=Flask_Testing +-e git+https://github.com/quay/mockldap.git@4265554a3d89fe39bf05b18e91607bec3fcf215a#egg=mockldap +-e git+https://github.com/quay/py-bitbucket.git@85301693ce3682f8e1244e90bd8a903181844bde#egg=py_bitbucket +-e git+https://github.com/DevTable/python-etcd.git@f1168cb02a2a8c83bec1108c6fcd8615ef463b14#egg=python_etcd +-e git+https://github.com/coderanger/supervisor-stdout.git@87632a7e522bf1888587c39e7fd61535b7a43cd7#egg=supervisor_stdout aiowsgi==0.7 -alembic==1.1.0 -apscheduler==3.6.1 -asn1crypto==0.24.0 -attrs==19.1.0 -autobahn==0.9.3.post3 -aws-sam-translator==1.14.0 -aws-xray-sdk==2.4.2 +alembic==1.3.3 +APScheduler==3.6.3 +asn1crypto==1.3.0 +attrs==19.3.0 +autobahn==20.1.3 +aws-sam-translator==1.20.1 +aws-xray-sdk==2.4.3 azure-common==1.1.10 azure-nspkg==2.0.0 azure-storage-blob==1.1.0 azure-storage-common==1.1.0 azure-storage-nspkg==3.0.0 -babel==2.7.0 -backoff==1.8.0 -backports.functools-lru-cache==1.5 +Babel==2.8.0 +backoff==1.10.0 +backports.functools-lru-cache==1.6.1 backports.ssl-match-hostname==3.7.0.1 backports.tempfile==1.0 backports.weakref==1.0.post1 -beautifulsoup4==4.8.0 -bencode==1.0 -bintrees==2.0.7 +bcrypt==3.1.7 +beautifulsoup4==4.8.2 +bencode.py==2.1.0 +bintrees==2.1.0 bitmath==1.3.3.1 blinker==1.4 -boto3==1.9.228 -botocore==1.12.228 -cachetools==3.1.1 -certifi==2019.6.16 -cffi==1.12.3 -cfn-lint==0.24.1 +boto3==1.11.9 +botocore==1.14.9 +cachetools==4.0.0 +certifi==2019.11.28 +cffi==1.13.2 +cfn-lint==0.27.2 chardet==3.0.4 -click==7.0 -contextlib2==0.5.5 +Click==7.0 +contextlib2==0.6.0.post1 cookies==2.2.1 -cryptography==2.7 -datetime==4.3 +cryptography==2.8 +DateTime==4.3 debtcollector==1.22.0 -decorator==4.4.0 -deprecated==1.2.6 -docker==4.0.2 -ecdsa==0.13.3 -elasticsearch-dsl==7.0.0 +decorator==4.4.1 +Deprecated==1.2.7 +docker==4.1.0 +docutils==0.15.2 +ecdsa==0.15 elasticsearch==7.0.4 +elasticsearch-dsl==7.0.0 enum34==1.1.6 -flask-cors==3.0.8 -flask-login==0.4.1 -flask-mail==0.9.1 -flask-principal==0.4.0 -flask-restful==0.3.7 -flask==1.1.1 +fakeredis==1.1.0 +Flask==1.1.1 +Flask-Cors==3.0.8 +Flask-Login==0.4.1 +Flask-Mail==0.9.1 +Flask-Principal==0.4.0 +Flask-RESTful==0.3.7 funcparserlib==0.3.6 funcsigs==1.0.2 -functools32==3.2.3.post2 -furl==2.0.0 -future==0.17.1 -futures==3.3.0 -geoip2==2.9.0 +furl==2.1.0 +future==0.18.2 +futures==3.1.1 +geoip2==3.0.0 gevent==1.4.0 gipc==1.0.1 +gpg==1.10.0 greenlet==0.4.15 -gunicorn==19.9.0 -hiredis==1.0.0 -html5lib==0.9999999 -httpretty==0.8.10 +gunicorn==20.0.4 +hiredis==1.0.1 +html5lib==1.0.1 +httmock==1.3.0 +httpretty==0.9.7 idna==2.8 -ipaddress==1.0.22 +importlib-metadata==1.4.0 +ipaddress==1.0.23 iso8601==0.1.12 itsdangerous==1.1.0 -jinja2==2.10.1 +Jinja2==2.11.0 jmespath==0.9.4 jsondiff==1.1.2 jsonpatch==1.24 jsonpath-rw==1.4.0 jsonpickle==1.2 jsonpointer==2.0 -jsonschema==3.0.2 -kafka-python==1.4.6 -keystoneauth1==3.17.1 -mako==1.1.0 +jsonschema==3.2.0 +kafka-python==1.4.7 +keystoneauth1==3.18.0 +Mako==1.1.1 marisa-trie==0.7.5 -markupsafe==1.1.1 -maxminddb==1.4.1 +MarkupSafe==1.1.1 +maxminddb==1.5.2 meld3==2.0.0 mixpanel==4.5.0 +mock==3.0.5 monotonic==1.5 -moto==1.3.13 -msgpack==0.6.1 +moto==1.3.14 +msgpack==0.6.2 namedlist==1.7 ndg-httpsclient==0.5.1 
netaddr==0.7.19 @@ -102,80 +107,85 @@ netifaces==0.10.9 oauthlib==3.1.0 orderedmultidict==1.0.1 os-service-types==1.7.0 -oslo.config==6.11.1 -oslo.i18n==3.24.0 +oslo.config==7.0.0 +oslo.i18n==3.25.1 oslo.serialization==2.29.2 -oslo.utils==3.41.1 -pathlib2==2.3.4 -pathvalidate==0.29.0 -pbr==5.4.3 -peewee==3.11.2 -pillow==6.1.0 +oslo.utils==3.42.1 +pathlib2==2.3.5 +pathspec==0.7.0 +pathvalidate==2.0.1 +pbr==5.4.4 +peewee==3.13.1 +Pillow==7.0.0 ply==3.11 prometheus-client==0.7.1 -psutil==5.6.6 -psycopg2-binary==2.8.3 -py-bcrypt==0.4 -pyasn1-modules==0.2.6 -pyasn1==0.4.7 +psutil==5.6.7 +psycopg2-binary==2.8.4 +pyasn1==0.4.8 +pyasn1-modules==0.2.8 pycparser==2.19 -pycryptodome==3.9.0 -pycryptodomex==3.9.0 -pygithub==1.45 -pygpgme==0.3 +pycryptodome==3.9.4 +pycryptodomex==3.9.4 +PyGithub==1.45 pyjwkest==1.4.2 -pyjwt==1.7.1 -pymemcache==2.2.2 -pymysql==0.9.3 -pyopenssl==19.0.0 -pyparsing==2.4.2 -pypdf2==1.26.0 -pyrsistent==0.15.4 -python-dateutil==2.8.0 +PyJWT==1.7.1 +pymemcache==3.0.0 +PyMySQL==0.9.3 +pyOpenSSL==19.1.0 +pyparsing==2.4.6 +PyPDF2==1.26.0 +pyrsistent==0.15.7 +python-dateutil==2.8.1 python-editor==1.0.4 -python-gitlab==1.11.0 -python-jose==3.0.1 -python-keystoneclient==3.21.0 +python-gitlab==2.0.0 +python-jose==3.1.0 +python-keystoneclient==3.22.0 python-ldap==3.2.0 python-magic==0.4.15 -python-swiftclient==3.8.0 -pytz==2019.2 -pyyaml==5.1.2 +python-swiftclient==3.8.1 +pytz==2019.3 +PyYAML==5.3 raven==6.10.0 recaptcha2==0.1 -redis==3.3.8 +redis==3.3.11 redlock==1.2.0 -reportlab==2.7 +rehash==1.0.0 +reportlab==3.5.34 +requests==2.22.0 requests-aws4auth==0.9 requests-file==1.4.3 -requests-oauthlib==1.2.0 -requests==2.22.0 -responses==0.10.6 +requests-oauthlib==1.3.0 +responses==0.10.9 rfc3986==1.3.2 rsa==4.0 -s3transfer==0.2.1 +s3transfer==0.3.2 scandir==1.10.0 -semantic-version==2.8.2 -six==1.12.0 -soupsieve==1.9.3 -sqlalchemy==1.3.8 +semantic-version==2.8.4 +six==1.14.0 +sortedcontainers==2.1.0 +soupsieve==1.9.5 +SQLAlchemy==1.3.13 sshpubkeys==3.1.0 stevedore==1.31.0 stringscore==0.1.0 -stripe==2.36.2 +stripe==2.42.0 +supervisor==4.1.0 supervisor-logging==0.0.9 -supervisor-stdout==0.1.1 -supervisor==4.0.4 -tldextract==2.2.1 +tabulate==0.8.6 +termcolor==1.1.0 +text-unidecode==1.3 +tldextract==2.2.2 toposort==1.5 -trollius==2.2.post1 tzlocal==2.0.0 -urllib3==1.25.3 +urllib3==1.25.8 waitress==1.4.2 -webob==1.8.5 -websocket-client==0.56.0 -werkzeug==0.15.6 +webencodings==0.5.1 +WebOb==1.8.6 +websocket-client==0.57.0 +Werkzeug==0.16.1 wrapt==1.11.2 -xhtml2pdf==0.2.3 +xhtml2pdf==0.2.4 xmltodict==0.12.0 -zope.interface==4.6.0 +yapf==0.29.0 +zipp==2.1.0 +zope.interface==4.7.1 diff --git a/secscan.py b/secscan.py index 7ffee924cf..8c8c5a0d6f 100644 --- a/secscan.py +++ b/secscan.py @@ -1,8 +1,3 @@ -# NOTE: Must be before we import or call anything that may be synchronous. 
-from gevent import monkey - -monkey.patch_all() - from app import app as application from endpoints.secscan import secscan diff --git a/storage/__init__.py b/storage/__init__.py index e2a6bfe341..932ca99fdb 100644 --- a/storage/__init__.py +++ b/storage/__init__.py @@ -66,14 +66,14 @@ def __init__( def init_app(self, app, chunk_cleanup_queue, instance_keys, config_provider, ip_resolver): storages = {} - for location, storage_params in app.config.get("DISTRIBUTED_STORAGE_CONFIG").items(): + for location, storage_params in list(app.config.get("DISTRIBUTED_STORAGE_CONFIG").items()): storages[location] = get_storage_driver( location, chunk_cleanup_queue, config_provider, ip_resolver, storage_params, ) preference = app.config.get("DISTRIBUTED_STORAGE_PREFERENCE", None) if not preference: - preference = storages.keys() + preference = list(storages.keys()) default_locations = app.config.get("DISTRIBUTED_STORAGE_DEFAULT_LOCATIONS") or [] diff --git a/storage/azurestorage.py b/storage/azurestorage.py index c6486ac968..fee5956658 100644 --- a/storage/azurestorage.py +++ b/storage/azurestorage.py @@ -154,9 +154,9 @@ def stream_write(self, path, fp, content_type=None, content_encoding=None): self._blob_service.create_blob_from_stream( self._azure_container, blob_name, fp, content_settings=content_settings ) - except AzureException: + except AzureException as ae: logger.exception("Exception when trying to stream_write path %s", path) - raise IOError("Exception when trying to stream_write path") + raise IOError("Exception when trying to stream_write path", ae) def exists(self, path): blob_name = self._blob_name_from_path(path) diff --git a/storage/basestorage.py b/storage/basestorage.py index a4ce54ea9a..551bb38c08 100644 --- a/storage/basestorage.py +++ b/storage/basestorage.py @@ -51,7 +51,7 @@ def validate(self, client): The client is an HTTP client to use for any external calls. """ # Put a temporary file to make sure the normal storage paths work. 
- self.put_content("_verify", "testing 123") + self.put_content("_verify", b"testing 123") if not self.exists("_verify"): raise Exception("Could not find verification file") diff --git a/storage/cloud.py b/storage/cloud.py index 54accf81a4..43a509da89 100644 --- a/storage/cloud.py +++ b/storage/cloud.py @@ -1,11 +1,10 @@ -import cStringIO as StringIO import os import logging import copy from collections import namedtuple from datetime import datetime, timedelta -from io import BufferedIOBase +from io import BufferedIOBase, StringIO, BytesIO from itertools import chain from uuid import uuid4 @@ -113,10 +112,10 @@ def _debug_key(self, key): orig_meth = key.bucket.connection.make_request def new_meth(*args, **kwargs): - print "#" * 16 - print args - print kwargs - print "#" * 16 + print("#" * 16) + print(args) + print(kwargs) + print("#" * 16) return orig_meth(*args, **kwargs) key.bucket.connection.make_request = new_meth @@ -249,7 +248,7 @@ def _stream_write_internal( return 0, e # We are going to reuse this but be VERY careful to only read the number of bytes written to it - buf = StringIO.StringIO() + buf = BytesIO() num_part = 1 total_bytes_written = 0 @@ -474,7 +473,7 @@ def _rechunk(chunk, max_chunk_size): if max_chunk_size is None or chunk.length <= max_chunk_size: yield chunk else: - newchunk_length = chunk.length / 2 + newchunk_length = chunk.length // 2 first_subchunk = _PartUploadMetadata(chunk.path, chunk.offset, newchunk_length) second_subchunk = _PartUploadMetadata( chunk.path, chunk.offset + newchunk_length, chunk.length - newchunk_length @@ -775,7 +774,7 @@ def __init__( storage_path, s3_bucket, *args, - **kwargs + **kwargs, ): super(CloudFrontedS3Storage, self).__init__( context, storage_path, s3_bucket, *args, **kwargs @@ -841,7 +840,7 @@ def _load_private_key(self, cloudfront_privatekey_filename): return None with self._context.config_provider.get_volume_file( - cloudfront_privatekey_filename + cloudfront_privatekey_filename, mode="rb", ) as key_file: return serialization.load_pem_private_key( key_file.read(), password=None, backend=default_backend() diff --git a/storage/downloadproxy.py b/storage/downloadproxy.py index 4238c77701..34623bcce7 100644 --- a/storage/downloadproxy.py +++ b/storage/downloadproxy.py @@ -1,8 +1,8 @@ import logging import base64 -import urllib +import urllib.request, urllib.parse, urllib.error -from urlparse import urlparse +from urllib.parse import urlparse from flask import abort, request from jsonschema import validate, ValidationError @@ -103,7 +103,7 @@ def proxy_download_url(self, download_url): proxy_url = "%s://%s/_storage_proxy/%s/%s/%s/%s" % ( url_scheme, server_hostname, - encoded_token, + encoded_token.decode("ascii"), parsed.scheme, parsed.netloc, path, @@ -134,7 +134,7 @@ def _validate_proxy_url(self): encoded_token, scheme, host, uri = parts try: - token = base64.urlsafe_b64decode(str(encoded_token)) + token = base64.urlsafe_b64decode(encoded_token) except ValueError: logger.exception("Could not decode proxy token") abort(401) diff --git a/storage/fakestorage.py b/storage/fakestorage.py index 54a48bfe91..7d19c8aaa3 100644 --- a/storage/fakestorage.py +++ b/storage/fakestorage.py @@ -1,4 +1,4 @@ -import cStringIO as StringIO +from io import BytesIO import hashlib from collections import defaultdict @@ -6,14 +6,14 @@ from storage.basestorage import BaseStorageV2 -_GLOBAL_FAKE_STORAGE_MAP = defaultdict(StringIO.StringIO) +_GLOBAL_FAKE_STORAGE_MAP = defaultdict(BytesIO) class FakeStorage(BaseStorageV2): def __init__(self, context): 
super(FakeStorage, self).__init__() self._fake_storage_map = ( - defaultdict(StringIO.StringIO) if context == "local" else _GLOBAL_FAKE_STORAGE_MAP + defaultdict(BytesIO) if context == "local" else _GLOBAL_FAKE_STORAGE_MAP ) def _init_path(self, path=None, create=False): @@ -23,7 +23,7 @@ def get_direct_download_url( self, path, request_ip=None, expires_in=60, requires_cors=False, head=False ): try: - if self.get_content("supports_direct_download") == "true": + if self.get_content("supports_direct_download") == b"true": return "http://somefakeurl?goes=here" except: pass @@ -33,7 +33,7 @@ def get_direct_download_url( def get_content(self, path): if not path in self._fake_storage_map: raise IOError( - "Fake file %s not found. Exist: %s" % (path, self._fake_storage_map.keys()) + "Fake file %s not found. Exist: %s" % (path, list(self._fake_storage_map.keys())) ) self._fake_storage_map.get(path).seek(0) @@ -53,7 +53,7 @@ def stream_read(self, path): yield buf def stream_read_file(self, path): - return StringIO.StringIO(self.get_content(path)) + return BytesIO(self.get_content(path)) def stream_write(self, path, fp, content_type=None, content_encoding=None): out_fp = self._fake_storage_map[path] diff --git a/storage/swift.py b/storage/swift.py index dfca14d87b..19dddb1bcb 100644 --- a/storage/swift.py +++ b/storage/swift.py @@ -9,14 +9,15 @@ import string import logging import json +import sys -from _pyio import BufferedReader +from _pyio import BufferedReader, BufferedIOBase from collections import namedtuple from hashlib import sha1 from random import SystemRandom from time import time -from urlparse import urlparse +from urllib.parse import urlparse from uuid import uuid4 from cachetools.func import lru_cache @@ -120,6 +121,14 @@ def _get_object(self, path, chunk_size=None): def _put_object( self, path, content, chunk=None, content_type=None, content_encoding=None, headers=None ): + # ReadableToIterable supports both file-like objects yielding str or bytes, + # and will try utf-8 encode the result if it is a string. + # The following assertion make sure that the content is either some bytes or + # a file-like stream of bytes, for consistency across all storage implementations. 
+ assert isinstance(content, bytes) or issubclass( + type(content), (BufferedIOBase, GeneratorFile, ReadableToIterable) + ) + path = self._normalize_path(path) headers = headers or {} @@ -249,7 +258,7 @@ def put_content(self, path, content): def stream_read(self, path): for data in self._get_object(path, self.buffer_size): - yield data + yield data.to_bytes(1, sys.byteorder) def stream_read_file(self, path): return GeneratorFile(self.stream_read(path)) @@ -455,7 +464,7 @@ def complete_chunked_upload(self, uuid, final_path, storage_metadata): contained_segments_prefix_path = "%s/%s" % (self._swift_container, segments_prefix_path) self._put_object( - final_path, "", headers={"X-Object-Manifest": contained_segments_prefix_path} + final_path, b"", headers={"X-Object-Manifest": contained_segments_prefix_path} ) def cancel_chunked_upload(self, uuid, storage_metadata): diff --git a/storage/test/test_azure.py b/storage/test/test_azure.py index fe4bb9e8c5..d9538ca671 100644 --- a/storage/test/test_azure.py +++ b/storage/test/test_azure.py @@ -1,10 +1,10 @@ import base64 -import md5 +from hashlib import md5 import pytest import io from contextlib import contextmanager -from urlparse import parse_qs, urlparse +from urllib.parse import parse_qs, urlparse from httmock import urlmatch, HTTMock from xml.dom import minidom @@ -65,7 +65,7 @@ def container_file(url, request): "status_code": 201, "content": "{}", "headers": { - "Content-MD5": base64.b64encode(md5.new(request.body).digest()), + "Content-MD5": base64.b64encode(md5(request.body).digest()), "ETag": "foo", "x-ms-request-server-encrypted": "false", "last-modified": "Wed, 21 Oct 2015 07:28:00 GMT", @@ -79,12 +79,12 @@ def container_file(url, request): for latest_block in latest: combined.append(files[filename][latest_block.childNodes[0].data]) - files[filename] = "".join(combined) + files[filename] = b"".join(combined) return { "status_code": 201, "content": "{}", "headers": { - "Content-MD5": base64.b64encode(md5.new(files[filename]).digest()), + "Content-MD5": base64.b64encode(md5(files[filename]).digest()), "ETag": "foo", "x-ms-request-server-encrypted": "false", "last-modified": "Wed, 21 Oct 2015 07:28:00 GMT", @@ -111,7 +111,7 @@ def container_file(url, request): "status_code": 201, "content": "{}", "headers": { - "Content-MD5": base64.b64encode(md5.new(request.body).digest()), + "Content-MD5": base64.b64encode(md5(request.body).digest()), "ETag": "foo", "x-ms-request-server-encrypted": "false", "last-modified": "Wed, 21 Oct 2015 07:28:00 GMT", @@ -135,12 +135,12 @@ def test_validate(): def test_basics(): with fake_azure_storage() as s: - s.put_content("hello", "hello world") + s.put_content("hello", b"hello world") assert s.exists("hello") - assert s.get_content("hello") == "hello world" + assert s.get_content("hello") == b"hello world" assert s.get_checksum("hello") - assert "".join(list(s.stream_read("hello"))) == "hello world" - assert s.stream_read_file("hello").read() == "hello world" + assert b"".join(list(s.stream_read("hello"))) == b"hello world" + assert s.stream_read_file("hello").read() == b"hello world" s.remove("hello") assert not s.exists("hello") @@ -165,19 +165,19 @@ def test_does_not_exist(): def test_stream_write(): fp = io.BytesIO() - fp.write("hello world!") + fp.write(b"hello world!") fp.seek(0) with fake_azure_storage() as s: s.stream_write("hello", fp) - assert s.get_content("hello") == "hello world!" + assert s.get_content("hello") == b"hello world!" 
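The test conversions in this hunk all follow the same rule introduced by the port: under Python 3 the storage engines read and write bytes, never str. A short sketch of that convention, reusing the fake_azure_storage() fixture from the hunk above (illustrative only, not part of the patch):

def test_bytes_convention_sketch():
    # Illustrative only: contents are written and read back as bytes under Python 3.
    with fake_azure_storage() as s:
        s.put_content("greeting", b"hello world")                  # bytes literal, not str
        s.put_content("greeting2", "hello world".encode("utf-8"))  # or encode a str explicitly
        assert s.get_content("greeting") == b"hello world"
        assert s.get_content("greeting").decode("utf-8") == "hello world"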
@pytest.mark.parametrize("chunk_size", [(1), (5), (10),]) def test_chunked_uploading(chunk_size): with fake_azure_storage() as s: - string_data = "hello world!" + string_data = b"hello world!" chunks = [ string_data[index : index + chunk_size] for index in range(0, len(string_data), chunk_size) @@ -206,7 +206,7 @@ def test_chunked_uploading(chunk_size): def test_get_direct_download_url(): with fake_azure_storage() as s: - s.put_content("hello", "world") + s.put_content("hello", b"world") assert "sig" in s.get_direct_download_url("hello") @@ -214,7 +214,7 @@ def test_copy_to(): files = {} with fake_azure_storage(files=files) as s: - s.put_content("hello", "hello world") + s.put_content("hello", b"hello world") with fake_azure_storage(files=files) as s2: s.copy_to(s2, "hello") assert s2.exists("hello") diff --git a/storage/test/test_cloud_storage.py b/storage/test/test_cloud_storage.py index 984600152d..163347b4e5 100644 --- a/storage/test/test_cloud_storage.py +++ b/storage/test/test_cloud_storage.py @@ -1,6 +1,6 @@ import os -from StringIO import StringIO +from io import BytesIO import pytest @@ -83,7 +83,7 @@ def test_copy_with_error(storage_engine): def test_stream_read(storage_engine): # Read the streaming content. - data = "".join(storage_engine.stream_read(_TEST_PATH)) + data = b"".join(storage_engine.stream_read(_TEST_PATH)) assert data == _TEST_CONTENT @@ -94,7 +94,7 @@ def test_stream_read_file(storage_engine): def test_stream_write(storage_engine): new_data = os.urandom(4096) - storage_engine.stream_write(_TEST_PATH, StringIO(new_data), content_type="Cool/Type") + storage_engine.stream_write(_TEST_PATH, BytesIO(new_data), content_type="Cool/Type") assert storage_engine.get_content(_TEST_PATH) == new_data @@ -105,7 +105,7 @@ def test_stream_write_error(): # Attempt to write to the uncreated bucket, which should raise an error. with pytest.raises(IOError): - engine.stream_write(_TEST_PATH, StringIO("hello world"), content_type="Cool/Type") + engine.stream_write(_TEST_PATH, BytesIO(b"hello world"), content_type="Cool/Type") assert not engine.exists(_TEST_PATH) @@ -117,13 +117,13 @@ def test_chunk_upload(storage_engine, chunk_count, force_client_side): return upload_id, metadata = storage_engine.initiate_chunked_upload() - final_data = "" + final_data = b"" for index in range(0, chunk_count): chunk_data = os.urandom(1024) final_data = final_data + chunk_data bytes_written, new_metadata, error = storage_engine.stream_upload_chunk( - upload_id, 0, len(chunk_data), StringIO(chunk_data), metadata + upload_id, 0, len(chunk_data), BytesIO(chunk_data), metadata ) metadata = new_metadata @@ -147,7 +147,7 @@ def test_cancel_chunked_upload(storage_engine, chunk_count): for _ in range(0, chunk_count): chunk_data = os.urandom(1024) _, new_metadata, _ = storage_engine.stream_upload_chunk( - upload_id, 0, len(chunk_data), StringIO(chunk_data), metadata + upload_id, 0, len(chunk_data), BytesIO(chunk_data), metadata ) metadata = new_metadata @@ -168,7 +168,7 @@ def test_large_chunks_upload(storage_engine): # Write a "super large" chunk, to ensure that it is broken into smaller chunks. 
chunk_data = os.urandom(int(storage_engine.maximum_chunk_size * 2.5)) bytes_written, new_metadata, _ = storage_engine.stream_upload_chunk( - upload_id, 0, -1, StringIO(chunk_data), metadata + upload_id, 0, -1, BytesIO(chunk_data), metadata ) assert len(chunk_data) == bytes_written @@ -187,11 +187,11 @@ def test_large_chunks_with_ragged_edge(storage_engine): upload_id, metadata = storage_engine.initiate_chunked_upload() # Write a few "super large" chunks, to ensure that it is broken into smaller chunks. - all_data = "" + all_data = b"" for _ in range(0, 2): chunk_data = os.urandom(int(storage_engine.maximum_chunk_size) + 20) bytes_written, new_metadata, _ = storage_engine.stream_upload_chunk( - upload_id, 0, -1, StringIO(chunk_data), metadata + upload_id, 0, -1, BytesIO(chunk_data), metadata ) assert len(chunk_data) == bytes_written all_data = all_data + chunk_data diff --git a/storage/test/test_storageproxy.py b/storage/test/test_storageproxy.py index f5a356a018..0217590ebc 100644 --- a/storage/test/test_storageproxy.py +++ b/storage/test/test_storageproxy.py @@ -78,7 +78,7 @@ def test_storage_proxy_auth( storage, liveserver_app, liveserver_session, is_proxying_enabled, app_reloader ): # Activate direct download on the fake storage. - storage.put_content(["test"], "supports_direct_download", "true") + storage.put_content(["test"], "supports_direct_download", b"true") # Get the unwrapped URL. direct_download_url = storage.get_direct_download_url(["test"], "somepath") diff --git a/storage/test/test_swift.py b/storage/test/test_swift.py index 090dbb2ce4..6987ac04b0 100644 --- a/storage/test/test_swift.py +++ b/storage/test/test_swift.py @@ -1,4 +1,4 @@ -import io +import _pyio as io import pytest import hashlib import copy @@ -6,9 +6,10 @@ from collections import defaultdict from mock import MagicMock, patch +from swiftclient.client import ClientException, ReadableToIterable + from storage import StorageContext from storage.swift import SwiftStorage, _EMPTY_SEGMENTS_KEY -from swiftclient.client import ClientException base_args = { "context": StorageContext("nyc", None, None, None), @@ -32,6 +33,7 @@ def _get_connection(self): class FakeSwiftStorage(SwiftStorage): def __init__(self, fail_checksum=False, connection=None, *args, **kwargs): super(FakeSwiftStorage, self).__init__(*args, **kwargs) + self._retry_count = kwargs.get("retry_count") or 5 self._connection = connection or FakeSwift( fail_checksum=fail_checksum, temp_url_key=kwargs.get("temp_url_key") ) @@ -63,7 +65,7 @@ def copy_object(self, container, path, target): def get_container(self, container, prefix=None, full_listing=None): container_entries = self.containers[container] objs = [] - for path, data in list(container_entries.iteritems()): + for path, data in list(container_entries.items()): if not prefix or path.startswith(prefix): objs.append( {"name": path, "bytes": len(data["content"]),} @@ -73,11 +75,16 @@ def get_container(self, container, prefix=None, full_listing=None): def put_object( self, container, path, content, chunk_size=None, content_type=None, headers=None ): - if not isinstance(content, str): - if hasattr(content, "read"): - content = content.read() + digest = None + if not isinstance(content, bytes): + if isinstance(content, ReadableToIterable): + digest = content.get_md5sum() + if isinstance(content.content, bytes): + content = content.content + else: + content = content.content.read() else: - content = "".join(content) + raise ValueError("Only bytes or file-like objects yielding bytes are valid") 
self.containers[container][path] = { "content": content, @@ -86,23 +93,21 @@ def put_object( "headers": headers or {"is": True}, } - digest = hashlib.md5() - digest.update(content) - return digest.hexdigest() if not self.fail_checksum else "invalid" + return digest if not self.fail_checksum else "invalid" def get_object(self, container, path, resp_chunk_size=None): data = self.containers[container].get(path, {}) if "X-Object-Manifest" in data["headers"]: new_contents = [] prefix = data["headers"]["X-Object-Manifest"] - for key, value in self.containers[container].iteritems(): + for key, value in self.containers[container].items(): if ("container-name/" + key).startswith(prefix): new_contents.append((key, value["content"])) new_contents.sort(key=lambda value: value[0]) data = dict(data) - data["content"] = "".join([nc[1] for nc in new_contents]) + data["content"] = b"".join([nc[1] for nc in new_contents]) return bool(data), data.get("content") return bool(data), data.get("content") @@ -151,19 +156,19 @@ def test_simple_put_get(): swift = FakeSwiftStorage(**base_args) assert not swift.exists("somepath") - swift.put_content("somepath", "hello world!") + swift.put_content("somepath", b"hello world!") assert swift.exists("somepath") - assert swift.get_content("somepath") == "hello world!" + assert swift.get_content("somepath") == b"hello world!" def test_stream_read_write(): swift = FakeSwiftStorage(**base_args) assert not swift.exists("somepath") - swift.stream_write("somepath", io.BytesIO("some content here")) + swift.stream_write("somepath", io.BytesIO(b"some content here")) assert swift.exists("somepath") - assert swift.get_content("somepath") == "some content here" - assert "".join(list(swift.stream_read("somepath"))) == "some content here" + assert swift.get_content("somepath") == b"some content here" + assert b"".join([c for c in swift.stream_read("somepath")]) == b"some content here" def test_stream_read_write_invalid_checksum(): @@ -171,14 +176,14 @@ def test_stream_read_write_invalid_checksum(): assert not swift.exists("somepath") with pytest.raises(IOError): - swift.stream_write("somepath", io.BytesIO("some content here")) + swift.stream_write("somepath", io.BytesIO(b"some content here")) def test_remove(): swift = FakeSwiftStorage(**base_args) assert not swift.exists("somepath") - swift.put_content("somepath", "hello world!") + swift.put_content("somepath", b"hello world!") assert swift.exists("somepath") swift.remove("somepath") @@ -193,14 +198,14 @@ def test_copy_to(): another_swift = FakeSwiftStorage(connection=swift._connection, **modified_args) - swift.put_content("somepath", "some content here") + swift.put_content("somepath", b"some content here") swift.copy_to(another_swift, "somepath") assert swift.exists("somepath") assert another_swift.exists("somepath") - assert swift.get_content("somepath") == "some content here" - assert another_swift.get_content("somepath") == "some content here" + assert swift.get_content("somepath") == b"some content here" + assert another_swift.get_content("somepath") == b"some content here" def test_copy_to_different(): @@ -212,19 +217,19 @@ def test_copy_to_different(): another_swift = FakeSwiftStorage(**modified_args) - swift.put_content("somepath", "some content here") + swift.put_content("somepath", b"some content here") swift.copy_to(another_swift, "somepath") assert swift.exists("somepath") assert another_swift.exists("somepath") - assert swift.get_content("somepath") == "some content here" - assert another_swift.get_content("somepath") == 
"some content here" + assert swift.get_content("somepath") == b"some content here" + assert another_swift.get_content("somepath") == b"some content here" def test_checksum(): swift = FakeSwiftStorage(**base_args) - swift.put_content("somepath", "hello world!") + swift.put_content("somepath", b"hello world!") assert swift.get_checksum("somepath") is not None @@ -233,9 +238,9 @@ def test_checksum(): @pytest.mark.parametrize( "chunks", [ - (["this", "is", "some", "chunked", "data", ""]), - (["this is a very large chunk of data", ""]), - (["h", "e", "l", "l", "o", ""]), + ([b"this", b"is", b"some", b"chunked", b"data", b""]), + ([b"this is a very large chunk of data", b""]), + ([b"h", b"e", b"l", b"l", b"o", b""]), ], ) def test_chunked_upload(chunks, max_chunk_size, read_until_end): @@ -255,7 +260,7 @@ def test_chunked_upload(chunks, max_chunk_size, read_until_end): offset += len(chunk) swift.complete_chunked_upload(uuid, "somepath", metadata) - assert swift.get_content("somepath") == "".join(chunks) + assert swift.get_content("somepath") == b"".join(chunks) # Ensure each of the segments exist. for segment in metadata["segments"]: @@ -278,7 +283,7 @@ def test_cancel_chunked_upload(): swift = FakeSwiftStorage(**args) uuid, metadata = swift.initiate_chunked_upload() - chunks = ["this", "is", "some", "chunked", "data", ""] + chunks = [b"this", b"is", b"some", b"chunked", b"data", b""] offset = 0 for chunk in chunks: bytes_written, metadata, error = swift.stream_upload_chunk( @@ -302,7 +307,7 @@ def test_empty_chunks_queued_for_deletion(): swift = FakeSwiftStorage(**args) uuid, metadata = swift.initiate_chunked_upload() - chunks = ["this", "", "is", "some", "", "chunked", "data", ""] + chunks = [b"this", b"", b"is", b"some", b"", b"chunked", b"data", b""] offset = 0 for chunk in chunks: length = len(chunk) @@ -317,7 +322,7 @@ def test_empty_chunks_queued_for_deletion(): offset += len(chunk) swift.complete_chunked_upload(uuid, "somepath", metadata) - assert "".join(chunks) == swift.get_content("somepath") + assert b"".join(chunks) == swift.get_content("somepath") # Check the chunk deletion queue and ensure we have the last chunk queued. found = chunk_cleanup_queue.get() @@ -332,5 +337,5 @@ def test_empty_chunks_queued_for_deletion(): ) def test_get_direct_download_url(temp_url_key, expects_url): swift = FakeSwiftStorage(temp_url_key=temp_url_key, **base_args) - swift.put_content("somepath", "hello world!") + swift.put_content("somepath", b"hello world!") assert (swift.get_direct_download_url("somepath") is not None) == expects_url diff --git a/test/clients/clients_test.py b/test/clients/clients_test.py index fa41329aff..c1bb711661 100644 --- a/test/clients/clients_test.py +++ b/test/clients/clients_test.py @@ -9,11 +9,11 @@ from termcolor import colored -from client import DockerClient, PodmanClient, Command, FileCopy +from .client import DockerClient, PodmanClient, Command, FileCopy def remove_control_characters(s): - return "".join(ch for ch in unicode(s, "utf-8") if unicodedata.category(ch)[0] != "C") + return "".join(ch for ch in str(s, "utf-8") if unicodedata.category(ch)[0] != "C") # These tuples are the box&version and the client to use. 
@@ -157,8 +157,8 @@ def _run_and_wait(command, error_allowed=False): ) if failed: - print colored(">>> Command `%s` Failed:" % command, "red") - print output + print(colored(">>> Command `%s` Failed:" % command, "red")) + print(output) raise CommandFailedException() return output @@ -176,7 +176,7 @@ def scp_to_vagrant(source, destination): config = _run_and_wait(["vagrant", "ssh-config"]) config_lines = config.split("\n") params = ["scp"] - for i in xrange(len(config_lines)): + for i in range(len(config_lines)): if "Host default" in config_lines[i]: config_i = i + 1 while config_i < len(config_lines): @@ -202,7 +202,7 @@ def _run_commands(commands): try: last_result = _run_and_wait(["vagrant", "scp", command.source, command.destination]) except CommandFailedException as e: - print colored(">>> Retry FileCopy command without vagrant scp...", "red") + print(colored(">>> Retry FileCopy command without vagrant scp...", "red")) # sometimes the vagrant scp fails because of invalid ssh configuration. last_result = scp_to_vagrant(command.source, command.destination) @@ -212,11 +212,11 @@ def _run_commands(commands): def _run_box(box, client, registry, ca_cert): vagrant, vagrant_scp = _check_vagrant() if not vagrant: - print ("vagrant command not found") + print("vagrant command not found") return if not vagrant_scp: - print ("vagrant-scp plugin not installed") + print("vagrant-scp plugin not installed") return namespace = "devtable" @@ -224,8 +224,8 @@ def _run_box(box, client, registry, ca_cert): username = "devtable" password = "password" - print colored(">>> Box: %s" % box, attrs=["bold"]) - print colored(">>> Starting box", "yellow") + print(colored(">>> Box: %s" % box, attrs=["bold"])) + print(colored(">>> Starting box", "yellow")) _run_and_wait(["vagrant", "destroy", "-f"], error_allowed=True) _run_and_wait(["rm", "Vagrantfile"], error_allowed=True) _run_and_wait(["vagrant", "init"] + box.split(" ")) @@ -234,44 +234,44 @@ def _run_box(box, client, registry, ca_cert): _run_commands(_init_system(box)) if ca_cert: - print colored(">>> Setting up runtime with cert " + ca_cert, "yellow") + print(colored(">>> Setting up runtime with cert " + ca_cert, "yellow")) _run_commands(_load_ca(box, ca_cert)) _run_commands(client.setup_client(registry, verify_tls=True)) else: - print colored(">>> Setting up runtime with insecure HTTP(S)", "yellow") + print(colored(">>> Setting up runtime with insecure HTTP(S)", "yellow")) _run_commands(client.setup_client(registry, verify_tls=False)) - print colored(">>> Client version", "cyan") + print(colored(">>> Client version", "cyan")) runtime_version = _run_commands(client.print_version()) - print _indent(runtime_version, 4) + print(_indent(runtime_version, 4)) - print colored(">>> Populating test image", "yellow") + print(colored(">>> Populating test image", "yellow")) _run_commands(client.populate_test_image(registry, namespace, repo_name)) - print colored(">>> Testing login", "cyan") + print(colored(">>> Testing login", "cyan")) _run_commands(client.login(registry, username, password)) - print colored(">>> Testing push", "cyan") + print(colored(">>> Testing push", "cyan")) _run_commands(client.push(registry, namespace, repo_name)) - print colored(">>> Removing all images", "yellow") + print(colored(">>> Removing all images", "yellow")) _run_commands(client.pre_pull_cleanup(registry, namespace, repo_name)) - print colored(">>> Testing pull", "cyan") + print(colored(">>> Testing pull", "cyan")) _run_commands(client.pull(registry, namespace, repo_name)) - print 
colored(">>> Verifying", "cyan") + print(colored(">>> Verifying", "cyan")) _run_commands(client.verify(registry, namespace, repo_name)) - print colored(">>> Tearing down box", "magenta") + print(colored(">>> Tearing down box", "magenta")) _run_and_wait(["vagrant", "destroy", "-f"], error_allowed=True) - print colored(">>> Successfully tested box %s" % box, "green") - print "" + print(colored(">>> Successfully tested box %s" % box, "green")) + print("") def test_clients(registry="10.0.2.2:5000", ca_cert=""): - print colored(">>> Running against registry ", attrs=["bold"]) + colored(registry, "cyan") + print(colored(">>> Running against registry ", attrs=["bold"]) + colored(registry, "cyan")) for box, client in BOXES: try: _run_box(box, client, registry, ca_cert) diff --git a/test/conftest.py b/test/conftest.py index 1680053e5e..649f2f88ef 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -1,5 +1,3 @@ -from __future__ import print_function - import pytest diff --git a/test/fixtures.py b/test/fixtures.py index d6b5c12c13..ff235f6d0f 100644 --- a/test/fixtures.py +++ b/test/fixtures.py @@ -20,8 +20,6 @@ from data import model from data.database import close_db_filter, db, configure from data.model.user import LoginWrappedDBUser, create_robot, lookup_robot, create_user_noverify -from data.model.repository import create_repository -from data.model.repo_mirror import enable_mirroring_for_repository from data.userfiles import Userfiles from endpoints.api import api_bp from endpoints.appr import appr_bp diff --git a/test/helpers.py b/test/helpers.py index 644c93fdf7..1578561e0b 100644 --- a/test/helpers.py +++ b/test/helpers.py @@ -6,7 +6,7 @@ from contextlib import contextmanager -from playhouse.test_utils import assert_query_count, _QueryLogHandler +from playhouse.test_utils import _QueryLogHandler from data.database import LogEntryKind, LogEntry3 diff --git a/test/registry/fixtures.py b/test/registry/fixtures.py index 55db1f8c86..2132c251b6 100644 --- a/test/registry/fixtures.py +++ b/test/registry/fixtures.py @@ -42,7 +42,7 @@ def generate_csrf(): def set_supports_direct_download(enabled): storage.put_content( - ["local_us"], "supports_direct_download", "true" if enabled else "false" + ["local_us"], "supports_direct_download", b"true" if enabled else b"false" ) return "OK" diff --git a/test/registry/liveserverfixture.py b/test/registry/liveserverfixture.py index f0fe57e280..c12c28e9d9 100644 --- a/test/registry/liveserverfixture.py +++ b/test/registry/liveserverfixture.py @@ -6,7 +6,7 @@ import time from contextlib import contextmanager -from urlparse import urlparse, urljoin +from urllib.parse import urlparse, urljoin import pytest import requests @@ -267,7 +267,7 @@ def _(**kwargs): arg_values = request.get_json()["args"] return fn(*arg_values) - for fn_name, fn in self.funcs.iteritems(): + for fn_name, fn in self.funcs.items(): build_invoker(fn_name, fn) app.register_blueprint(testbp, url_prefix="/__test") diff --git a/test/registry/protocol_fixtures.py b/test/registry/protocol_fixtures.py index b929e9c63d..cdc77fb56f 100644 --- a/test/registry/protocol_fixtures.py +++ b/test/registry/protocol_fixtures.py @@ -20,8 +20,8 @@ def basic_images(): Returns basic images for push and pull testing. """ # Note: order is from base layer down to leaf. 
- parent_bytes = layer_bytes_for_contents("parent contents") - image_bytes = layer_bytes_for_contents("some contents") + parent_bytes = layer_bytes_for_contents(b"parent contents") + image_bytes = layer_bytes_for_contents(b"some contents") return [ Image(id="parentid", bytes=parent_bytes, parent_id=None), Image(id="someid", bytes=image_bytes, parent_id="parentid"), @@ -34,15 +34,15 @@ def unicode_images(): Returns basic images for push and pull testing that contain unicode in the image metadata. """ # Note: order is from base layer down to leaf. - parent_bytes = layer_bytes_for_contents("parent contents") - image_bytes = layer_bytes_for_contents("some contents") + parent_bytes = layer_bytes_for_contents(b"parent contents") + image_bytes = layer_bytes_for_contents(b"some contents") return [ Image(id="parentid", bytes=parent_bytes, parent_id=None), Image( id="someid", bytes=image_bytes, parent_id="parentid", - config={"comment": u"the Pawe\xc5\x82 Kami\xc5\x84ski image", "author": u"Sômé guy"}, + config={"comment": "the Pawe\xc5\x82 Kami\xc5\x84ski image", "author": "Sômé guy"}, ), ] @@ -53,8 +53,8 @@ def different_images(): Returns different basic images for push and pull testing. """ # Note: order is from base layer down to leaf. - parent_bytes = layer_bytes_for_contents("different parent contents") - image_bytes = layer_bytes_for_contents("some different contents") + parent_bytes = layer_bytes_for_contents(b"different parent contents") + image_bytes = layer_bytes_for_contents(b"some different contents") return [ Image(id="anotherparentid", bytes=parent_bytes, parent_id=None), Image(id="anothersomeid", bytes=image_bytes, parent_id="anotherparentid"), @@ -67,8 +67,8 @@ def sized_images(): Returns basic images (with sizes) for push and pull testing. """ # Note: order is from base layer down to leaf. - parent_bytes = layer_bytes_for_contents("parent contents", mode="") - image_bytes = layer_bytes_for_contents("some contents", mode="") + parent_bytes = layer_bytes_for_contents(b"parent contents", mode="") + image_bytes = layer_bytes_for_contents(b"some contents", mode="") return [ Image( id="parentid", @@ -95,23 +95,25 @@ def multi_layer_images(): """ # Note: order is from base layer down to leaf. layer1_bytes = layer_bytes_for_contents( - "layer 1 contents", mode="", other_files={"file1": "from-layer-1",} + b"layer 1 contents", mode="", other_files={"file1": b"from-layer-1",} ) layer2_bytes = layer_bytes_for_contents( - "layer 2 contents", mode="", other_files={"file2": "from-layer-2",} + b"layer 2 contents", mode="", other_files={"file2": b"from-layer-2",} ) layer3_bytes = layer_bytes_for_contents( - "layer 3 contents", mode="", other_files={"file1": "from-layer-3", "file3": "from-layer-3",} + b"layer 3 contents", + mode="", + other_files={"file1": b"from-layer-3", "file3": b"from-layer-3",}, ) layer4_bytes = layer_bytes_for_contents( - "layer 4 contents", mode="", other_files={"file3": "from-layer-4",} + b"layer 4 contents", mode="", other_files={"file3": b"from-layer-4",} ) layer5_bytes = layer_bytes_for_contents( - "layer 5 contents", mode="", other_files={"file4": "from-layer-5",} + b"layer 5 contents", mode="", other_files={"file4": b"from-layer-5",} ) return [ @@ -159,9 +161,9 @@ def remote_images(): Returns images with at least one remote layer for push and pull testing. """ # Note: order is from base layer down to leaf. 
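Two related string changes recur in these fixtures: the redundant u"" prefix is dropped because every Python 3 str is already unicode, and values headed for byte-oriented helpers gain an explicit .encode(...) call (as random_layer_data does further down). A short illustration with made-up values:

# In Python 3 every str literal is already unicode, so the u"" prefix adds nothing.
author = "Sômé guy"
assert author == u"Sômé guy"

# Where an API expects bytes (for example when building a tar layer), encode explicitly.
contents = "ABC123"
layer_payload = contents.encode("ascii")
assert isinstance(layer_payload, bytes)
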
- remote_bytes = layer_bytes_for_contents("remote contents") - parent_bytes = layer_bytes_for_contents("parent contents") - image_bytes = layer_bytes_for_contents("some contents") + remote_bytes = layer_bytes_for_contents(b"remote contents") + parent_bytes = layer_bytes_for_contents(b"parent contents") + image_bytes = layer_bytes_for_contents(b"some contents") return [ Image(id="remoteid", bytes=remote_bytes, parent_id=None, urls=["http://some/url"]), Image(id="parentid", bytes=parent_bytes, parent_id="remoteid"), @@ -175,10 +177,10 @@ def images_with_empty_layer(): Returns images for push and pull testing that contain an empty layer. """ # Note: order is from base layer down to leaf. - parent_bytes = layer_bytes_for_contents("parent contents") - empty_bytes = layer_bytes_for_contents("", empty=True) - image_bytes = layer_bytes_for_contents("some contents") - middle_bytes = layer_bytes_for_contents("middle") + parent_bytes = layer_bytes_for_contents(b"parent contents") + empty_bytes = layer_bytes_for_contents(b"", empty=True) + image_bytes = layer_bytes_for_contents(b"some contents") + middle_bytes = layer_bytes_for_contents(b"middle") return [ Image(id="parentid", bytes=parent_bytes, parent_id=None), @@ -195,15 +197,15 @@ def unicode_emoji_images(): Returns basic images for push and pull testing that contain unicode in the image metadata. """ # Note: order is from base layer down to leaf. - parent_bytes = layer_bytes_for_contents("parent contents") - image_bytes = layer_bytes_for_contents("some contents") + parent_bytes = layer_bytes_for_contents(b"parent contents") + image_bytes = layer_bytes_for_contents(b"some contents") return [ Image(id="parentid", bytes=parent_bytes, parent_id=None), Image( id="someid", bytes=image_bytes, parent_id="parentid", - config={"comment": u"😱", "author": u"Sômé guy"}, + config={"comment": "😱", "author": "Sômé guy"}, ), ] @@ -291,4 +293,4 @@ def loginer(request, jwk): def random_layer_data(): size = 4096 contents = "".join(random.choice(string.ascii_uppercase + string.digits) for _ in range(size)) - return layer_bytes_for_contents(contents) + return layer_bytes_for_contents(contents.encode("ascii")) diff --git a/test/registry/protocol_v1.py b/test/registry/protocol_v1.py index 767d9ac1e8..5f8d1ca6b0 100644 --- a/test/registry/protocol_v1.py +++ b/test/registry/protocol_v1.py @@ -1,6 +1,6 @@ import json -from cStringIO import StringIO +from io import BytesIO from enum import Enum, unique from digest.checksums import compute_simple, compute_tarsum @@ -238,6 +238,7 @@ def push( image_json_data["created"] = image.created image_json = json.dumps(image_json_data) + response = self.conduct( session, "PUT", @@ -250,7 +251,7 @@ def push( return # PUT /v1/images/{imageID}/checksum (old style) - old_checksum = compute_tarsum(StringIO(image.bytes), image_json) + old_checksum = compute_tarsum(BytesIO(image.bytes), image_json) checksum_headers = {"X-Docker-Checksum": old_checksum} checksum_headers.update(headers) @@ -263,12 +264,12 @@ def push( session, "PUT", "/v1/images/%s/layer" % image.id, - data=StringIO(image.bytes), + data=BytesIO(image.bytes), headers=headers, ) # PUT /v1/images/{imageID}/checksum (new style) - checksum = compute_simple(StringIO(image.bytes), image_json) + checksum = compute_simple(BytesIO(image.bytes), image_json) checksum_headers = {"X-Docker-Checksum-Payload": checksum} checksum_headers.update(headers) diff --git a/test/registry/protocol_v2.py b/test/registry/protocol_v2.py index 9b1ca2c93b..ee2b1873c9 100644 --- a/test/registry/protocol_v2.py 
+++ b/test/registry/protocol_v2.py @@ -128,9 +128,9 @@ def login(self, session, username, password, scopes, expect_success): response = session.get("/v2/auth", params=params, auth=auth) if expect_success: - assert response.status_code / 100 == 2 + assert response.status_code // 100 == 2 else: - assert response.status_code / 100 == 4 + assert response.status_code // 100 == 4 return response @@ -340,7 +340,7 @@ def build_oci(self, images, blobs, options): # If invalid blob references were requested, just make it up. if options.manifest_invalid_blob_references: - checksum = "sha256:" + hashlib.sha256("notarealthing").hexdigest() + checksum = "sha256:" + hashlib.sha256(b"notarealthing").hexdigest() if not image.is_empty: builder.add_layer(checksum, len(image.bytes), urls=image.urls) @@ -387,7 +387,7 @@ def build_schema2(self, images, blobs, options): # If invalid blob references were requested, just make it up. if options.manifest_invalid_blob_references: - checksum = "sha256:" + hashlib.sha256("notarealthing").hexdigest() + checksum = "sha256:" + hashlib.sha256(b"notarealthing").hexdigest() if not image.is_empty: builder.add_layer(checksum, len(image.bytes), urls=image.urls) @@ -444,7 +444,7 @@ def build_schema1(self, namespace, repo_name, tag_name, images, blobs, options, # If invalid blob references were requested, just make it up. if options.manifest_invalid_blob_references: - checksum = "sha256:" + hashlib.sha256("notarealthing").hexdigest() + checksum = "sha256:" + hashlib.sha256(b"notarealthing").hexdigest() layer_dict = {"id": image.id, "parent": image.parent_id} if image.config is not None: @@ -552,7 +552,7 @@ def push( return PushResult(manifests=manifests, headers=headers) def _push_blobs(self, blobs, session, namespace, repo_name, headers, options, expected_failure): - for blob_digest, blob_bytes in blobs.iteritems(): + for blob_digest, blob_bytes in blobs.items(): if not options.skip_head_checks: # Blob data should not yet exist. 
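Two semantic (not merely cosmetic) fixes appear in protocol_v2.py above: the status-class check moves from / to //, since Python 3's / is true division and would never equal the integer 2 or 4, and hashlib.sha256 is fed a bytes literal because it rejects str. A self-contained illustration with an arbitrary status code:

import hashlib

status_code = 201

assert status_code / 100 != 2     # true division yields 2.01, so the old check breaks
assert status_code // 100 == 2    # floor division restores the status-class test

# hashlib only accepts bytes-like input on Python 3; passing a str raises TypeError.
digest = "sha256:" + hashlib.sha256(b"notarealthing").hexdigest()
assert digest.startswith("sha256:")
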
self.conduct( diff --git a/test/registry/protocols.py b/test/registry/protocols.py index 8200f4720c..aaea885444 100644 --- a/test/registry/protocols.py +++ b/test/registry/protocols.py @@ -3,7 +3,7 @@ from abc import ABCMeta, abstractmethod from collections import namedtuple -from cStringIO import StringIO +from io import BytesIO from enum import Enum, unique from six import add_metaclass @@ -22,7 +22,7 @@ def layer_bytes_for_contents(contents, mode="|gz", other_files=None, empty=False if empty: return EMPTY_LAYER_BYTES - layer_data = StringIO() + layer_data = BytesIO() tar_file = tarfile.open(fileobj=layer_data, mode="w" + mode) def add_file(name, contents): @@ -31,12 +31,12 @@ def add_file(name, contents): tar_file_info.size = len(contents) tar_file_info.mtime = 1 - tar_file.addfile(tar_file_info, StringIO(contents)) + tar_file.addfile(tar_file_info, BytesIO(contents)) add_file("contents", contents) if other_files is not None: - for file_name, file_contents in other_files.iteritems(): + for file_name, file_contents in other_files.items(): add_file(file_name, file_contents) tar_file.close() diff --git a/test/registry/registry_tests.py b/test/registry/registry_tests.py index 09451ae724..681c807d7f 100644 --- a/test/registry/registry_tests.py +++ b/test/registry/registry_tests.py @@ -2,11 +2,11 @@ import hashlib import tarfile -from cStringIO import StringIO +from io import BytesIO import binascii import bencode -import resumablehashlib +import rehash from werkzeug.datastructures import Accept @@ -189,7 +189,7 @@ def test_basic_push_pull_by_manifest( options = ProtocolOptions() options.require_matching_manifest_type = True - digests = [str(manifest.digest) for manifest in result.manifests.values()] + digests = [str(manifest.digest) for manifest in list(result.manifests.values())] manifest_protocol.pull( liveserver_session, "devtable", @@ -225,7 +225,7 @@ def test_basic_push_by_manifest_digest( expected_failure = None if manifest_protocol.schema == "schema1" else Failures.UNKNOWN_TAG # Pull the repository by digests to verify. 
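The iteritems() to items() change in protocols.py, and the list(...) wrappers around manifests.values() in the tests below, both follow from Python 3 returning lazy dict views. Iteration works on the view directly; the list() copy matters only where the code indexes the result or needs a stable snapshot. A small sketch with dummy blob data:

blobs = {"sha256:1111": b"layer one", "sha256:2222": b"layer two"}

# iteritems()/itervalues() are gone; items() and values() return lazy views.
for digest, payload in blobs.items():
    assert isinstance(payload, bytes)

# Views are not indexable, so copy into a list when the code indexes the result.
payloads = list(blobs.values())
assert len(payloads) == 2 and payloads[0] == b"layer one"
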
- digests = [str(manifest.digest) for manifest in result.manifests.values()] + digests = [str(manifest.digest) for manifest in list(result.manifests.values())] manifest_protocol.pull( liveserver_session, "devtable", @@ -367,18 +367,18 @@ def test_middle_layer_different_sha(v2_protocol, v1_protocol, liveserver_session """ credentials = ("devtable", "password") first_images = [ - Image(id="baseimage", parent_id=None, size=None, bytes=layer_bytes_for_contents("base")), + Image(id="baseimage", parent_id=None, size=None, bytes=layer_bytes_for_contents(b"base")), Image( id="middleimage", parent_id="baseimage", size=None, - bytes=layer_bytes_for_contents("middle"), + bytes=layer_bytes_for_contents(b"middle"), ), Image( id="leafimage", parent_id="middleimage", size=None, - bytes=layer_bytes_for_contents("leaf"), + bytes=layer_bytes_for_contents(b"leaf"), ), ] @@ -399,7 +399,7 @@ def test_middle_layer_different_sha(v2_protocol, v1_protocol, liveserver_session id="middleimage", parent_id="baseimage", size=None, - bytes=layer_bytes_for_contents("different middle bytes"), + bytes=layer_bytes_for_contents(b"different middle bytes"), ) # Push and pull the image, ensuring that the produced ID for the middle and leaf layers @@ -481,7 +481,7 @@ def test_push_pull_logging( id="startimage", parent_id=None, size=None, - bytes=layer_bytes_for_contents("start image"), + bytes=layer_bytes_for_contents(b"start image"), ) ] pusher.push( @@ -836,7 +836,7 @@ def test_image_replication( ) # Ensure that entries were created for each image. - for image_id in result.image_ids.values(): + for image_id in list(result.image_ids.values()): r = registry_server_executor.on(liveserver).get_storage_replication_entry(image_id) assert r.text == "OK" @@ -873,7 +873,7 @@ def test_image_replication_empty_layers( ) # Ensure that entries were created for each image. 
- for image_id in result.image_ids.values(): + for image_id in list(result.image_ids.values()): r = registry_server_executor.on(liveserver).get_storage_replication_entry(image_id) assert r.text == "OK" @@ -951,7 +951,7 @@ def test_invalid_parent(legacy_pusher, liveserver_session, app_reloader): id="childimage", parent_id="parentimage", size=None, - bytes=layer_bytes_for_contents("child"), + bytes=layer_bytes_for_contents(b"child"), ), ] @@ -975,10 +975,10 @@ def test_wrong_image_order(legacy_pusher, liveserver_session, app_reloader): id="childimage", parent_id="parentimage", size=None, - bytes=layer_bytes_for_contents("child"), + bytes=layer_bytes_for_contents(b"child"), ), Image( - id="parentimage", parent_id=None, size=None, bytes=layer_bytes_for_contents("parent") + id="parentimage", parent_id=None, size=None, bytes=layer_bytes_for_contents(b"parent") ), ] @@ -1018,7 +1018,7 @@ def test_labels(labels, manifest_protocol, liveserver_session, api_caller, app_r Image( id="theimage", parent_id=None, - bytes=layer_bytes_for_contents("image"), + bytes=layer_bytes_for_contents(b"image"), config={"Labels": {key: value for (key, value, _) in labels}}, ), ] @@ -1064,7 +1064,7 @@ def test_expiration_label( Image( id="theimage", parent_id=None, - bytes=layer_bytes_for_contents("image"), + bytes=layer_bytes_for_contents(b"image"), config={"Labels": {"quay.expires-after": label_value}}, ), ] @@ -1685,7 +1685,7 @@ def test_multilayer_squashed_images( response = liveserver_session.get("/c1/squash/devtable/newrepo/latest", auth=credentials) assert response.status_code == 200 - tar = tarfile.open(fileobj=StringIO(response.content)) + tar = tarfile.open(fileobj=BytesIO(response.content)) # Verify the squashed image. expected_image_id = next( @@ -1714,11 +1714,11 @@ def test_multilayer_squashed_images( assert set(tar.getnames()) == {"contents", "file1", "file2", "file3", "file4"} # Check the contents of various files. - assert tar.extractfile("contents").read() == "layer 5 contents" - assert tar.extractfile("file1").read() == "from-layer-3" - assert tar.extractfile("file2").read() == "from-layer-2" - assert tar.extractfile("file3").read() == "from-layer-4" - assert tar.extractfile("file4").read() == "from-layer-5" + assert tar.extractfile("contents").read() == b"layer 5 contents" + assert tar.extractfile("file1").read() == b"from-layer-3" + assert tar.extractfile("file2").read() == b"from-layer-2" + assert tar.extractfile("file3").read() == b"from-layer-4" + assert tar.extractfile("file4").read() == b"from-layer-5" @pytest.mark.parametrize("use_estimates", [False, True,]) @@ -1756,7 +1756,7 @@ def test_squashed_images( response = liveserver_session.get("/c1/squash/devtable/newrepo/latest", auth=credentials) assert response.status_code == 200 - tar = tarfile.open(fileobj=StringIO(response.content)) + tar = tarfile.open(fileobj=BytesIO(response.content)) # Verify the squashed image. expected_image_id = next( @@ -1787,7 +1787,7 @@ def test_squashed_images( assert tar.getnames() == ["contents"] # Check the contents. 
- assert tar.extractfile("contents").read() == "some contents" + assert tar.extractfile("contents").read() == b"some contents" EXPECTED_ACI_MANIFEST = { @@ -1798,7 +1798,7 @@ def test_squashed_images( "group": "root", "user": "root", "workingDirectory": "/", - "exec": [u"/bin/sh", u"-c", u'""hello""'], + "exec": ["/bin/sh", "-c", '""hello""'], "isolators": [], "eventHandlers": [], "ports": [], @@ -1847,10 +1847,10 @@ def test_aci_conversion( "/c1/aci/server_name/devtable/newrepo/latest/aci/linux/amd64", auth=credentials ) assert response.status_code == 200 - tar = tarfile.open(fileobj=StringIO(response.content)) + tar = tarfile.open(fileobj=BytesIO(response.content)) assert set(tar.getnames()) == {"manifest", "rootfs", "rootfs/contents"} - assert tar.extractfile("rootfs/contents").read() == "some contents" + assert tar.extractfile("rootfs/contents").read() == b"some contents" loaded = json.loads(tar.extractfile("manifest").read()) for annotation in loaded["app"]["annotations"]: if annotation["name"] == "quay.io/derived-image": @@ -1920,10 +1920,10 @@ def test_aci_conversion_manifest_list( "/c1/aci/server_name/devtable/newrepo/latest/aci/linux/amd64", auth=credentials ) assert response.status_code == 200 - tar = tarfile.open(fileobj=StringIO(response.content)) + tar = tarfile.open(fileobj=BytesIO(response.content)) assert set(tar.getnames()) == {"manifest", "rootfs", "rootfs/contents"} - assert tar.extractfile("rootfs/contents").read() == "some contents" + assert tar.extractfile("rootfs/contents").read() == b"some contents" loaded = json.loads(tar.extractfile("manifest").read()) for annotation in loaded["app"]["annotations"]: @@ -2291,7 +2291,7 @@ def test_push_pull_same_blobs(pusher, puller, liveserver_session, app_reloader): """ Test: Push and pull of an image to a new repository where a blob is shared between layers. """ credentials = ("devtable", "password") - layer_bytes = layer_bytes_for_contents("some contents") + layer_bytes = layer_bytes_for_contents(b"some contents") images = [ Image(id="parentid", bytes=layer_bytes, parent_id=None), Image(id="someid", bytes=layer_bytes, parent_id="parentid"), @@ -2680,7 +2680,7 @@ def test_squashed_images_empty_layer( response = liveserver_session.get("/c1/squash/devtable/newrepo/latest", auth=credentials) assert response.status_code == 200 - tar = tarfile.open(fileobj=StringIO(response.content)) + tar = tarfile.open(fileobj=BytesIO(response.content)) # Verify the squashed image. expected_image_id = next( @@ -2765,7 +2765,7 @@ def test_squashed_image_manifest_list( assert response.status_code == 200 # Verify the squashed image. - tar = tarfile.open(fileobj=StringIO(response.content)) + tar = tarfile.open(fileobj=BytesIO(response.content)) expected_image_id = next( (name for name in tar.getnames() if not "/" in name and name != "repositories") ) @@ -3108,7 +3108,7 @@ def test_attempt_pull_by_manifest_digest_for_deleted_tag( result = manifest_protocol.push( liveserver_session, "devtable", "newrepo", "latest", basic_images, credentials=credentials ) - digests = [str(manifest.digest) for manifest in result.manifests.values()] + digests = [str(manifest.digest) for manifest in list(result.manifests.values())] assert len(digests) == 1 # Ensure we can pull by tag. 
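The squashed-image and ACI assertions above wrap response.content in BytesIO and compare the extracted files against bytes literals, because both requests' .content and TarFile.extractfile(...).read() yield bytes on Python 3. A minimal sketch of that round trip, using a tiny in-memory archive in place of the HTTP response (the file name and payload are illustrative):

import tarfile
from io import BytesIO

# Build a tiny archive in memory, standing in for response.content.
buf = BytesIO()
with tarfile.open(fileobj=buf, mode="w") as tar:
    info = tarfile.TarInfo(name="contents")
    payload = b"some contents"
    info.size = len(payload)
    tar.addfile(info, BytesIO(payload))

# Reading it back: extractfile() yields a binary stream, so compare against bytes.
tar = tarfile.open(fileobj=BytesIO(buf.getvalue()))
assert tar.extractfile("contents").read() == b"some contents"
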
diff --git a/test/specs.py b/test/specs.py index 067eb9108e..626322d69c 100644 --- a/test/specs.py +++ b/test/specs.py @@ -26,7 +26,7 @@ ORG_READERS = "readers" FAKE_MANIFEST = "unknown_tag" -FAKE_DIGEST = "sha256:" + hashlib.sha256("fake").hexdigest() +FAKE_DIGEST = "sha256:" + hashlib.sha256(b"fake").hexdigest() FAKE_IMAGE_ID = "fake-image" FAKE_UPLOAD_ID = "fake-upload" FAKE_TAG_NAME = "fake-tag" @@ -108,8 +108,8 @@ def __init__( self.admin_code = admin_code def gen_basic_auth(self, username, password): - encoded = b64encode("%s:%s" % (username, password)) - return "basic %s" % encoded + encoded = b64encode(b"%s:%s" % (username.encode("ascii"), password.encode("ascii"))) + return "basic %s" % encoded.decode("ascii") def set_data_from_obj(self, json_serializable): self._data = json.dumps(json_serializable) @@ -597,8 +597,8 @@ def get_url(self): return url_for(self.index_name, repository=self.repo_name, **self.kwargs) def gen_basic_auth(self, username, password): - encoded = b64encode("%s:%s" % (username, password)) - return "basic %s" % encoded + encoded = b64encode(b"%s:%s" % (username.encode("ascii"), password.encode("ascii"))) + return "basic %s" % encoded.decode("ascii") def get_scope_string(self): return "repository:%s:%s" % (self.repo_name, self.scope) diff --git a/test/test_api_usage.py b/test/test_api_usage.py index b18147aca5..c18b13c5a2 100644 --- a/test/test_api_usage.py +++ b/test/test_api_usage.py @@ -11,8 +11,8 @@ from calendar import timegm from contextlib import contextmanager from httmock import urlmatch, HTTMock, all_requests -from urllib import urlencode -from urlparse import urlparse, urlunparse, parse_qs +from urllib.parse import urlencode +from urllib.parse import urlparse, urlunparse, parse_qs from playhouse.test_utils import assert_query_count, _QueryLogHandler from cryptography.hazmat.primitives import serialization @@ -37,7 +37,7 @@ from data.database import RepositoryActionCount, Repository as RepositoryTable from data.logs_model import logs_model from data.registry_model import registry_model -from test.helpers import assert_action_logged, log_queries, check_transitive_modifications +from test.helpers import assert_action_logged, check_transitive_modifications from util.secscan.fake import fake_security_scanner from endpoints.api.team import ( @@ -197,7 +197,7 @@ def __init__(self, changes=None): self._to_rm = [] def __enter__(self): - for key in self._changes.keys(): + for key in list(self._changes.keys()): try: self._originals[key] = app.config[key] except KeyError: @@ -205,7 +205,7 @@ def __enter__(self): app.config[key] = self._changes[key] def __exit__(self, type, value, traceback): - for key in self._originals.keys(): + for key in list(self._originals.keys()): app.config[key] = self._originals[key] for key in self._to_rm: @@ -261,7 +261,7 @@ def toggleFeature(self, name, enabled): def getJsonResponse(self, resource_name, params={}, expected_code=200): rv = self.app.get(api.url_for(resource_name, **params)) - self.assertEquals(expected_code, rv.status_code) + self.assertEqual(expected_code, rv.status_code) data = rv.data parsed = py_json.loads(data) return parsed @@ -279,12 +279,12 @@ def postResponse( headers = None rv = self.app.post(self.url_for(resource_name, params), data=data, headers=headers) - self.assertEquals(rv.status_code, expected_code) + self.assertEqual(rv.status_code, expected_code) return rv.data def getResponse(self, resource_name, params={}, expected_code=200): rv = self.app.get(api.url_for(resource_name, **params)) - 
self.assertEquals(rv.status_code, expected_code) + self.assertEqual(rv.status_code, expected_code) return rv.data def putResponse(self, resource_name, params={}, data={}, expected_code=200): @@ -293,22 +293,22 @@ def putResponse(self, resource_name, params={}, data={}, expected_code=200): data=py_json.dumps(data), headers={"Content-Type": "application/json"}, ) - self.assertEquals(rv.status_code, expected_code) + self.assertEqual(rv.status_code, expected_code) return rv.data def deleteResponse(self, resource_name, params={}, expected_code=204): rv = self.app.delete(self.url_for(resource_name, params)) if rv.status_code != expected_code: - print "Mismatch data for resource DELETE %s: %s" % (resource_name, rv.data) + print("Mismatch data for resource DELETE %s: %s" % (resource_name, rv.data)) - self.assertEquals(rv.status_code, expected_code) + self.assertEqual(rv.status_code, expected_code) return rv.data def deleteEmptyResponse(self, resource_name, params={}, expected_code=204): rv = self.app.delete(self.url_for(resource_name, params)) - self.assertEquals(rv.status_code, expected_code) - self.assertEquals(rv.data, "") # ensure response body empty + self.assertEqual(rv.status_code, expected_code) + self.assertEqual(rv.data, b"") # ensure response body empty return def postJsonResponse(self, resource_name, params={}, data={}, expected_code=200): @@ -319,11 +319,11 @@ def postJsonResponse(self, resource_name, params={}, data={}, expected_code=200) ) if rv.status_code != expected_code: - print "Mismatch data for resource POST %s: %s" % (resource_name, rv.data) + print("Mismatch data for resource POST %s: %s" % (resource_name, rv.data)) - self.assertEquals(rv.status_code, expected_code) + self.assertEqual(rv.status_code, expected_code) data = rv.data - parsed = py_json.loads(data) + parsed = py_json.loads(data.decode("utf-8")) return parsed def putJsonResponse( @@ -342,11 +342,11 @@ def putJsonResponse( ) if rv.status_code != expected_code: - print "Mismatch data for resource PUT %s: %s" % (resource_name, rv.data) + print("Mismatch data for resource PUT %s: %s" % (resource_name, rv.data)) - self.assertEquals(rv.status_code, expected_code) + self.assertEqual(rv.status_code, expected_code) data = rv.data - parsed = py_json.loads(data) + parsed = py_json.loads(data.decode("utf-8")) return parsed def assertNotInTeam(self, data, membername): @@ -497,7 +497,7 @@ def test_get(self): # Make sure each notification can be retrieved. for notification in json["notifications"]: njson = self.getJsonResponse(UserNotification, params=dict(uuid=notification["id"])) - self.assertEquals(notification["id"], njson["id"]) + self.assertEqual(notification["id"], njson["id"]) # Update a notification. assert json["notifications"] @@ -508,7 +508,7 @@ def test_get(self): UserNotification, params=dict(uuid=notification["id"]), data=dict(dismissed=True) ) - self.assertEquals(True, pjson["dismissed"]) + self.assertEqual(True, pjson["dismissed"]) def test_org_notifications(self): # Create a notification on the organization. @@ -520,13 +520,13 @@ def test_org_notifications(self): json = self.getJsonResponse(UserNotificationList) notification = json["notifications"][0] - self.assertEquals(notification["kind"], "test_notification") - self.assertEquals(notification["metadata"], {"org": "notification"}) + self.assertEqual(notification["kind"], "test_notification") + self.assertEqual(notification["metadata"], {"org": "notification"}) # Ensure it is not visible to an org member. 
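The gen_basic_auth helpers changed in test/specs.py above now encode the credentials before calling b64encode and decode the result, since base64 is bytes-in/bytes-out on Python 3 while the Authorization header value must end up a str. A standalone sketch of the same round trip:

from base64 import b64encode

def gen_basic_auth(username: str, password: str) -> str:
    # b64encode accepts and returns bytes, so encode the credentials first
    # and decode the result back into a str suitable for an HTTP header.
    encoded = b64encode(b"%s:%s" % (username.encode("ascii"), password.encode("ascii")))
    return "basic %s" % encoded.decode("ascii")

assert gen_basic_auth("devtable", "password") == "basic ZGV2dGFibGU6cGFzc3dvcmQ="
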
self.login(READ_ACCESS_USER) json = self.getJsonResponse(UserNotificationList) - self.assertEquals(0, len(json["notifications"])) + self.assertEqual(0, len(json["notifications"])) class TestGetUserPrivateAllowed(ApiTestCase): @@ -620,8 +620,8 @@ def test_convert(self): self.login(ADMIN_ACCESS_USER) json = self.getJsonResponse(Organization, params=dict(orgname=READ_ACCESS_USER)) - self.assertEquals(READ_ACCESS_USER, json["name"]) - self.assertEquals(True, json["is_admin"]) + self.assertEqual(READ_ACCESS_USER, json["name"]) + self.assertEqual(True, json["is_admin"]) # Verify the now-org has no permissions. count = ( @@ -629,7 +629,7 @@ def test_convert(self): .where(database.RepositoryPermission.user == organization) .count() ) - self.assertEquals(0, count) + self.assertEqual(0, count) def test_convert_via_email(self): self.login(READ_ACCESS_USER) @@ -648,8 +648,8 @@ def test_convert_via_email(self): self.login(ADMIN_ACCESS_USER) json = self.getJsonResponse(Organization, params=dict(orgname=READ_ACCESS_USER)) - self.assertEquals(READ_ACCESS_USER, json["name"]) - self.assertEquals(True, json["is_admin"]) + self.assertEqual(READ_ACCESS_USER, json["name"]) + self.assertEqual(True, json["is_admin"]) class TestChangeUserDetails(ApiTestCase): @@ -660,8 +660,8 @@ def test_changepassword(self): def test_changepassword_unicode(self): self.login(READ_ACCESS_USER) - self.putJsonResponse(User, data=dict(password=u"someunicode北京市pass")) - self.login(READ_ACCESS_USER, password=u"someunicode北京市pass") + self.putJsonResponse(User, data=dict(password="someunicode北京市pass")) + self.login(READ_ACCESS_USER, password="someunicode北京市pass") def test_changeeemail(self): self.login(READ_ACCESS_USER) @@ -672,10 +672,10 @@ def test_changeinvoiceemail(self): self.login(READ_ACCESS_USER) json = self.putJsonResponse(User, data=dict(invoice_email=True)) - self.assertEquals(True, json["invoice_email"]) + self.assertEqual(True, json["invoice_email"]) json = self.putJsonResponse(User, data=dict(invoice_email=False)) - self.assertEquals(False, json["invoice_email"]) + self.assertEqual(False, json["invoice_email"]) def test_changeusername_temp(self): self.login(READ_ACCESS_USER) @@ -690,7 +690,7 @@ def test_changeusername_temp(self): json = self.putJsonResponse(User, data=dict(username="someotherusername")) # Ensure the username was changed. - self.assertEquals("someotherusername", json["username"]) + self.assertEqual("someotherusername", json["username"]) self.assertFalse(model.user.has_user_prompt(user, "confirm_username")) # Ensure the robot was changed. @@ -706,7 +706,7 @@ def test_changeusername_temp_samename(self): json = self.putJsonResponse(User, data=dict(username=READ_ACCESS_USER)) # Ensure the username was not changed but they are no longer temporarily named. 
- self.assertEquals(READ_ACCESS_USER, json["username"]) + self.assertEqual(READ_ACCESS_USER, json["username"]) self.assertFalse(model.user.has_user_prompt(user, "confirm_username")) def test_changeusername_notallowed(self): @@ -716,7 +716,7 @@ def test_changeusername_notallowed(self): self.assertFalse(model.user.has_user_prompt(user, "confirm_username")) json = self.putJsonResponse(User, data=dict(username="someotherusername")) - self.assertEquals(ADMIN_ACCESS_USER, json["username"]) + self.assertEqual(ADMIN_ACCESS_USER, json["username"]) self.assertTrue("prompts" in json) self.assertIsNone(model.user.get_user("someotherusername")) @@ -729,7 +729,7 @@ def test_changeusername_allowed(self): self.assertFalse(model.user.has_user_prompt(user, "confirm_username")) json = self.putJsonResponse(User, data=dict(username="someotherusername")) - self.assertEquals("someotherusername", json["username"]) + self.assertEqual("someotherusername", json["username"]) self.assertTrue("prompts" in json) self.assertIsNotNone(model.user.get_user("someotherusername")) @@ -756,7 +756,7 @@ def test_existingusername(self): expected_code=400, ) - self.assertEquals("The username already exists", json["detail"]) + self.assertEqual("The username already exists", json["detail"]) def test_trycreatetooshort(self): json = self.postJsonResponse( @@ -765,7 +765,7 @@ def test_trycreatetooshort(self): expected_code=400, ) - self.assertEquals( + self.assertEqual( "Invalid namespace a: Namespace must be between 2 and 255 characters in length", json["detail"], ) @@ -777,14 +777,14 @@ def test_trycreateregexmismatch(self): expected_code=400, ) - self.assertEquals( + self.assertEqual( "Invalid namespace auserName: Namespace must match expression ^([a-z0-9]+(?:[._-][a-z0-9]+)*)$", json["detail"], ) def test_createuser(self): data = self.postJsonResponse(User, data=NEW_USER_DETAILS, expected_code=200) - self.assertEquals(True, data["awaiting_verification"]) + self.assertEqual(True, data["awaiting_verification"]) def test_createuser_captcha(self): @urlmatch(netloc=r"(.*\.)?google.com", path="/recaptcha/api/siteverify") @@ -996,7 +996,7 @@ class TestSignin(ApiTestCase): def test_signin_unicode(self): self.postResponse( Signin, - data=dict(username=u"\xe5\x8c\x97\xe4\xba\xac\xe5\xb8\x82", password="password"), + data=dict(username="\xe5\x8c\x97\xe4\xba\xac\xe5\xb8\x82", password="password"), expected_code=403, ) @@ -1043,7 +1043,7 @@ def test_signout(self): # Make sure the user's UUID has rotated, to ensure sessions are no longer valid. 
read_user_again = model.user.get_user(READ_ACCESS_USER) - self.assertNotEquals(read_user.uuid, read_user_again.uuid) + self.assertNotEqual(read_user.uuid, read_user_again.uuid) class TestConductSearch(ApiTestCase): @@ -1052,80 +1052,80 @@ def test_noaccess(self): json = self.getJsonResponse(ConductSearch, params=dict(query="read")) - self.assertEquals(0, len(json["results"])) + self.assertEqual(0, len(json["results"])) json = self.getJsonResponse(ConductSearch, params=dict(query="owners")) - self.assertEquals(0, len(json["results"])) + self.assertEqual(0, len(json["results"])) def test_nouser(self): json = self.getJsonResponse(ConductSearch, params=dict(query="read")) - self.assertEquals(0, len(json["results"])) + self.assertEqual(0, len(json["results"])) json = self.getJsonResponse(ConductSearch, params=dict(query="public")) - self.assertEquals(2, len(json["results"])) - self.assertEquals(json["results"][0]["kind"], "repository") - self.assertEquals(json["results"][0]["name"], "publicrepo") + self.assertEqual(2, len(json["results"])) + self.assertEqual(json["results"][0]["kind"], "repository") + self.assertEqual(json["results"][0]["name"], "publicrepo") - self.assertEquals(json["results"][1]["kind"], "user") - self.assertEquals(json["results"][1]["name"], "public") + self.assertEqual(json["results"][1]["kind"], "user") + self.assertEqual(json["results"][1]["name"], "public") json = self.getJsonResponse(ConductSearch, params=dict(query="owners")) - self.assertEquals(0, len(json["results"])) + self.assertEqual(0, len(json["results"])) def test_orgmember(self): self.login(READ_ACCESS_USER) json = self.getJsonResponse(ConductSearch, params=dict(query="owners")) - self.assertEquals(0, len(json["results"])) + self.assertEqual(0, len(json["results"])) json = self.getJsonResponse(ConductSearch, params=dict(query="readers")) - self.assertEquals(1, len(json["results"])) - self.assertEquals(json["results"][0]["kind"], "team") - self.assertEquals(json["results"][0]["name"], "readers") + self.assertEqual(1, len(json["results"])) + self.assertEqual(json["results"][0]["kind"], "team") + self.assertEqual(json["results"][0]["name"], "readers") def test_orgadmin(self): self.login(ADMIN_ACCESS_USER) json = self.getJsonResponse(ConductSearch, params=dict(query="owners")) - self.assertEquals(4, len(json["results"])) - self.assertEquals(json["results"][0]["kind"], "team") - self.assertEquals(json["results"][0]["name"], "owners") + self.assertEqual(4, len(json["results"])) + self.assertEqual(json["results"][0]["kind"], "team") + self.assertEqual(json["results"][0]["name"], "owners") json = self.getJsonResponse(ConductSearch, params=dict(query="readers")) - self.assertEquals(1, len(json["results"])) - self.assertEquals(json["results"][0]["kind"], "team") - self.assertEquals(json["results"][0]["name"], "readers") + self.assertEqual(1, len(json["results"])) + self.assertEqual(json["results"][0]["kind"], "team") + self.assertEqual(json["results"][0]["name"], "readers") def test_explicit_permission(self): self.login("reader") json = self.getJsonResponse(ConductSearch, params=dict(query="shared")) - self.assertEquals(1, len(json["results"])) - self.assertEquals(json["results"][0]["kind"], "repository") - self.assertEquals(json["results"][0]["name"], "shared") + self.assertEqual(1, len(json["results"])) + self.assertEqual(json["results"][0]["kind"], "repository") + self.assertEqual(json["results"][0]["name"], "shared") def test_full_text(self): self.login(ADMIN_ACCESS_USER) # Make sure the repository is found via 
`full` and `text search`. json = self.getJsonResponse(ConductSearch, params=dict(query="full")) - self.assertEquals(1, len(json["results"])) - self.assertEquals(json["results"][0]["kind"], "repository") - self.assertEquals(json["results"][0]["name"], "text-full-repo") + self.assertEqual(1, len(json["results"])) + self.assertEqual(json["results"][0]["kind"], "repository") + self.assertEqual(json["results"][0]["name"], "text-full-repo") json = self.getJsonResponse(ConductSearch, params=dict(query="text search")) - self.assertEquals(1, len(json["results"])) - self.assertEquals(json["results"][0]["kind"], "repository") - self.assertEquals(json["results"][0]["name"], "text-full-repo") + self.assertEqual(1, len(json["results"])) + self.assertEqual(json["results"][0]["kind"], "repository") + self.assertEqual(json["results"][0]["name"], "text-full-repo") class TestGetMatchingEntities(ApiTestCase): @@ -1136,13 +1136,13 @@ def test_simple_lookup(self): EntitySearch, params=dict(prefix=ADMIN_ACCESS_USER, namespace=ORGANIZATION, includeTeams="true"), ) - self.assertEquals(1, len(json["results"])) + self.assertEqual(1, len(json["results"])) def test_simple_lookup_noorg(self): self.login(ADMIN_ACCESS_USER) json = self.getJsonResponse(EntitySearch, params=dict(prefix=ADMIN_ACCESS_USER)) - self.assertEquals(1, len(json["results"])) + self.assertEqual(1, len(json["results"])) def test_unicode_search(self): self.login(ADMIN_ACCESS_USER) @@ -1150,7 +1150,7 @@ def test_unicode_search(self): json = self.getJsonResponse( EntitySearch, params=dict(prefix="北京市", namespace=ORGANIZATION, includeTeams="true") ) - self.assertEquals(0, len(json["results"])) + self.assertEqual(0, len(json["results"])) def test_notinorg(self): self.login(NO_ACCESS_USER) @@ -1216,7 +1216,7 @@ def test_existinguser(self): expected_code=400, ) - self.assertEquals("A user or organization with this name already exists", json["detail"]) + self.assertEqual("A user or organization with this name already exists", json["detail"]) def test_existingorg(self): self.login(ADMIN_ACCESS_USER) @@ -1227,7 +1227,7 @@ def test_existingorg(self): expected_code=400, ) - self.assertEquals("A user or organization with this name already exists", json["detail"]) + self.assertEqual("A user or organization with this name already exists", json["detail"]) def test_createorg(self): self.login(ADMIN_ACCESS_USER) @@ -1238,7 +1238,7 @@ def test_createorg(self): expected_code=201, ) - self.assertEquals('"Created"', data.strip()) + self.assertEqual(b'"Created"', data.strip()) # Ensure the org was created. organization = model.organization.get_organization("neworg") @@ -1246,8 +1246,8 @@ def test_createorg(self): # Verify the admin user is the org's admin. json = self.getJsonResponse(Organization, params=dict(orgname="neworg")) - self.assertEquals("neworg", json["name"]) - self.assertEquals(True, json["is_admin"]) + self.assertEqual("neworg", json["name"]) + self.assertEqual(True, json["is_admin"]) def test_createorg_viaoauth(self): # Attempt with no auth. 
@@ -1278,7 +1278,7 @@ def test_createorg_viaoauth(self): expected_code=201, ) - self.assertEquals('"Created"', data.strip()) + self.assertEqual(b'"Created"', data.strip()) class TestGetOrganization(ApiTestCase): @@ -1294,15 +1294,15 @@ def test_getorganization(self): self.login(READ_ACCESS_USER) json = self.getJsonResponse(Organization, params=dict(orgname=ORGANIZATION)) - self.assertEquals(ORGANIZATION, json["name"]) - self.assertEquals(False, json["is_admin"]) + self.assertEqual(ORGANIZATION, json["name"]) + self.assertEqual(False, json["is_admin"]) def test_getorganization_asadmin(self): self.login(ADMIN_ACCESS_USER) json = self.getJsonResponse(Organization, params=dict(orgname=ORGANIZATION)) - self.assertEquals(ORGANIZATION, json["name"]) - self.assertEquals(True, json["is_admin"]) + self.assertEqual(ORGANIZATION, json["name"]) + self.assertEqual(True, json["is_admin"]) class TestChangeOrganizationDetails(ApiTestCase): @@ -1313,12 +1313,12 @@ def test_changeinvoiceemail(self): Organization, params=dict(orgname=ORGANIZATION), data=dict(invoice_email=True) ) - self.assertEquals(True, json["invoice_email"]) + self.assertEqual(True, json["invoice_email"]) json = self.putJsonResponse( Organization, params=dict(orgname=ORGANIZATION), data=dict(invoice_email=False) ) - self.assertEquals(False, json["invoice_email"]) + self.assertEqual(False, json["invoice_email"]) def test_changemail(self): self.login(ADMIN_ACCESS_USER) @@ -1327,7 +1327,7 @@ def test_changemail(self): Organization, params=dict(orgname=ORGANIZATION), data=dict(email="newemail@example.com") ) - self.assertEquals("newemail@example.com", json["email"]) + self.assertEqual("newemail@example.com", json["email"]) class TestGetOrganizationPrototypes(ApiTestCase): @@ -1353,7 +1353,7 @@ def test_invaliduser(self): expected_code=400, ) - self.assertEquals("Unknown activating user", json["detail"]) + self.assertEqual("Unknown activating user", json["detail"]) def test_missingdelegate(self): self.login(ADMIN_ACCESS_USER) @@ -1374,7 +1374,7 @@ def test_createprototype(self): data=dict(role="read", delegate={"kind": "team", "name": "readers"}), ) - self.assertEquals("read", json["role"]) + self.assertEqual("read", json["role"]) pid = json["id"] # Verify the prototype exists. 
@@ -1423,7 +1423,7 @@ def test_updateprototype(self): data=dict(role="admin"), ) - self.assertEquals("admin", json["role"]) + self.assertEqual("admin", json["role"]) class TestGetOrganizationMembers(ApiTestCase): @@ -1443,7 +1443,7 @@ def test_getmembers(self): OrganizationMember, params=dict(orgname=ORGANIZATION, membername=membername) ) - self.assertEquals(member, response) + self.assertEqual(member, response) class TestRemoveOrganizationMember(ApiTestCase): @@ -1518,7 +1518,7 @@ def test_existingorg(self): json = self.getJsonResponse(OrgPrivateRepositories, params=dict(orgname=ORGANIZATION)) - self.assertEquals(True, json["privateAllowed"]) + self.assertEqual(True, json["privateAllowed"]) assert not "reposAllowed" in json def test_neworg(self): @@ -1530,7 +1530,7 @@ def test_neworg(self): json = self.getJsonResponse(OrgPrivateRepositories, params=dict(orgname="neworg")) - self.assertEquals(False, json["privateAllowed"]) + self.assertEqual(False, json["privateAllowed"]) class TestUpdateOrganizationTeam(ApiTestCase): @@ -1543,8 +1543,8 @@ def test_updateexisting(self): data=dict(description="My cool team", role="creator"), ) - self.assertEquals("My cool team", data["description"]) - self.assertEquals("creator", data["role"]) + self.assertEqual("My cool team", data["description"]) + self.assertEqual("creator", data["role"]) def test_attemptchangeroleonowners(self): self.login(ADMIN_ACCESS_USER) @@ -1565,8 +1565,8 @@ def test_createnewteam(self): data=dict(description="My cool team", role="member"), ) - self.assertEquals("My cool team", data["description"]) - self.assertEquals("member", data["role"]) + self.assertEqual("My cool team", data["description"]) + self.assertEqual("member", data["role"]) # Verify the team was created. json = self.getJsonResponse(Organization, params=dict(orgname=ORGANIZATION)) @@ -1598,7 +1598,7 @@ def test_attemptdeleteowners(self): "Deleting team 'owners' would remove admin ability for user " + "'devtable' in organization 'buynlarge'" ) - self.assertEquals(msg, data["message"]) + self.assertEqual(msg, data["message"]) class TestTeamPermissions(ApiTestCase): @@ -1609,7 +1609,7 @@ def test_team_permissions(self): TeamPermissions, params=dict(orgname=ORGANIZATION, teamname="readers") ) - self.assertEquals(1, len(resp["permissions"])) + self.assertEqual(1, len(resp["permissions"])) class TestGetOrganizationTeamMembers(ApiTestCase): @@ -1691,7 +1691,7 @@ def test_addmember_nonorgmember(self): TeamMember, params=dict(orgname=ORGANIZATION, teamname="owners", membername=membername) ) - self.assertEquals(True, response["invited"]) + self.assertEqual(True, response["invited"]) # Make sure the user is not (yet) part of the team. json = self.getJsonResponse( @@ -1775,7 +1775,7 @@ def test_accept(self): TeamMember, params=dict(orgname=ORGANIZATION, teamname="owners", membername=membername) ) - self.assertEquals(True, response["invited"]) + self.assertEqual(True, response["invited"]) # Login as the user. self.login(membername) @@ -1783,7 +1783,7 @@ def test_accept(self): # Accept the invite. user = model.user.get_user(membername) invites = list(model.team.lookup_team_invites(user)) - self.assertEquals(1, len(invites)) + self.assertEqual(1, len(invites)) self.putJsonResponse(TeamMemberInvite, params=dict(code=invites[0].invite_token)) @@ -1809,14 +1809,14 @@ def test_accept_via_email(self): params=dict(orgname=ORGANIZATION, teamname="owners", email=member.email), ) - self.assertEquals(True, response["invited"]) + self.assertEqual(True, response["invited"]) # Login as the user. 
self.login(member.username) # Accept the invite. invites = list(model.team.lookup_team_invites_by_email(member.email)) - self.assertEquals(1, len(invites)) + self.assertEqual(1, len(invites)) self.putJsonResponse(TeamMemberInvite, params=dict(code=invites[0].invite_token)) @@ -1841,7 +1841,7 @@ def test_accept_invite_different_user(self): params=dict(orgname=ORGANIZATION, teamname="owners", membername=NO_ACCESS_USER), ) - self.assertEquals(True, response["invited"]) + self.assertEqual(True, response["invited"]) # Login as a different user. self.login(PUBLIC_USER) @@ -1849,7 +1849,7 @@ def test_accept_invite_different_user(self): # Try to accept the invite. user = model.user.get_user(NO_ACCESS_USER) invites = list(model.team.lookup_team_invites(user)) - self.assertEquals(1, len(invites)) + self.assertEqual(1, len(invites)) self.putResponse( TeamMemberInvite, params=dict(code=invites[0].invite_token), expected_code=400 @@ -1858,7 +1858,7 @@ def test_accept_invite_different_user(self): # Ensure the invite is still valid. user = model.user.get_user(NO_ACCESS_USER) invites = list(model.team.lookup_team_invites(user)) - self.assertEquals(1, len(invites)) + self.assertEqual(1, len(invites)) # Ensure the user is *not* a member of the team. self.login(ADMIN_ACCESS_USER) @@ -1876,14 +1876,14 @@ def test_accept_invite_different_email(self): params=dict(orgname=ORGANIZATION, teamname="owners", email="someemail@example.com"), ) - self.assertEquals(True, response["invited"]) + self.assertEqual(True, response["invited"]) # Login as a different user. self.login(PUBLIC_USER) # Try to accept the invite. invites = list(model.team.lookup_team_invites_by_email("someemail@example.com")) - self.assertEquals(1, len(invites)) + self.assertEqual(1, len(invites)) self.putResponse( TeamMemberInvite, params=dict(code=invites[0].invite_token), expected_code=400 @@ -1891,7 +1891,7 @@ def test_accept_invite_different_email(self): # Ensure the invite is still valid. invites = list(model.team.lookup_team_invites_by_email("someemail@example.com")) - self.assertEquals(1, len(invites)) + self.assertEqual(1, len(invites)) # Ensure the user is *not* a member of the team. self.login(ADMIN_ACCESS_USER) @@ -1911,12 +1911,12 @@ def test_decline_wronguser(self): TeamMember, params=dict(orgname=ORGANIZATION, teamname="owners", membername=membername) ) - self.assertEquals(True, response["invited"]) + self.assertEqual(True, response["invited"]) # Try to decline the invite. user = model.user.get_user(membername) invites = list(model.team.lookup_team_invites(user)) - self.assertEquals(1, len(invites)) + self.assertEqual(1, len(invites)) self.deleteResponse( TeamMemberInvite, params=dict(code=invites[0].invite_token), expected_code=400 @@ -1931,7 +1931,7 @@ def test_decline(self): TeamMember, params=dict(orgname=ORGANIZATION, teamname="owners", membername=membername) ) - self.assertEquals(True, response["invited"]) + self.assertEqual(True, response["invited"]) # Login as the user. self.login(membername) @@ -1939,7 +1939,7 @@ def test_decline(self): # Decline the invite. 
user = model.user.get_user(membername) invites = list(model.team.lookup_team_invites(user)) - self.assertEquals(1, len(invites)) + self.assertEqual(1, len(invites)) self.deleteEmptyResponse(TeamMemberInvite, params=dict(code=invites[0].invite_token)) @@ -1959,14 +1959,14 @@ def test_deletememberinvite(self): params=dict(orgname=ORGANIZATION, teamname="readers", includePending=True), ) - self.assertEquals(len(json["members"]), 3) + self.assertEqual(len(json["members"]), 3) membername = NO_ACCESS_USER response = self.putJsonResponse( TeamMember, params=dict(orgname=ORGANIZATION, teamname="readers", membername=membername) ) - self.assertEquals(True, response["invited"]) + self.assertEqual(True, response["invited"]) # Verify the invite was added. json = self.getJsonResponse( @@ -1974,7 +1974,7 @@ def test_deletememberinvite(self): params=dict(orgname=ORGANIZATION, teamname="readers", includePending=True), ) - self.assertEquals(len(json["members"]), 4) + self.assertEqual(len(json["members"]), 4) # Delete the invite. self.deleteEmptyResponse( @@ -1987,7 +1987,7 @@ def test_deletememberinvite(self): params=dict(orgname=ORGANIZATION, teamname="readers", includePending=True), ) - self.assertEquals(len(json["members"]), 3) + self.assertEqual(len(json["members"]), 3) def test_deletemember(self): self.login(ADMIN_ACCESS_USER) @@ -2002,7 +2002,7 @@ def test_deletemember(self): TeamMemberList, params=dict(orgname=ORGANIZATION, teamname="readers") ) - self.assertEquals(len(json["members"]), 1) + self.assertEqual(len(json["members"]), 1) class TestCreateRepo(ApiTestCase): @@ -2015,7 +2015,7 @@ def test_invalidreponame(self): expected_code=400, ) - self.assertEquals("Invalid repository name", json["detail"]) + self.assertEqual("Invalid repository name", json["detail"]) def test_duplicaterepo(self): self.login(ADMIN_ACCESS_USER) @@ -2026,7 +2026,7 @@ def test_duplicaterepo(self): expected_code=400, ) - self.assertEquals("Repository already exists", json["detail"]) + self.assertEqual("Repository already exists", json["detail"]) def test_createrepo(self): self.login(ADMIN_ACCESS_USER) @@ -2037,8 +2037,8 @@ def test_createrepo(self): expected_code=201, ) - self.assertEquals(ADMIN_ACCESS_USER, json["namespace"]) - self.assertEquals("newrepo", json["name"]) + self.assertEqual(ADMIN_ACCESS_USER, json["namespace"]) + self.assertEqual("newrepo", json["name"]) def test_create_app_repo(self): self.login(ADMIN_ACCESS_USER) @@ -2051,9 +2051,9 @@ def test_create_app_repo(self): expected_code=201, ) - self.assertEquals(ADMIN_ACCESS_USER, json["namespace"]) - self.assertEquals("newrepo", json["name"]) - self.assertEquals("application", json["kind"]) + self.assertEqual(ADMIN_ACCESS_USER, json["namespace"]) + self.assertEqual("newrepo", json["name"]) + self.assertEqual("application", json["kind"]) def test_createrepo_underorg(self): self.login(ADMIN_ACCESS_USER) @@ -2066,8 +2066,8 @@ def test_createrepo_underorg(self): expected_code=201, ) - self.assertEquals(ORGANIZATION, json["namespace"]) - self.assertEquals("newrepo", json["name"]) + self.assertEqual(ORGANIZATION, json["namespace"]) + self.assertEqual("newrepo", json["name"]) class TestListRepos(ApiTestCase): @@ -2088,14 +2088,14 @@ def test_list_app_repos(self): params=dict(namespace=ADMIN_ACCESS_USER, public=False, repo_kind="application"), ) - self.assertEquals(1, len(json["repositories"])) - self.assertEquals("application", json["repositories"][0]["kind"]) + self.assertEqual(1, len(json["repositories"])) + self.assertEqual("application", 
json["repositories"][0]["kind"]) def test_listrepos_asguest(self): # Queries: Base + the list query with assert_query_count(BASE_QUERY_COUNT + 1): json = self.getJsonResponse(RepositoryList, params=dict(public=True)) - self.assertEquals(len(json["repositories"]), 1) + self.assertEqual(len(json["repositories"]), 1) def assertPublicRepos(self, has_extras=False): public_user = model.user.get_user("public") @@ -2127,7 +2127,7 @@ def assertPublicRepos(self, has_extras=False): self.assertTrue(has_extras, "Could not find name %s in repos created" % name) if "next_page" in json: - self.assertEquals(len(json["repositories"]), REPOS_PER_PAGE) + self.assertEqual(len(json["repositories"]), REPOS_PER_PAGE) else: break @@ -2149,7 +2149,7 @@ def test_listrepos_filter(self): self.assertGreater(len(json["repositories"]), 0) for repo in json["repositories"]: - self.assertEquals(ORGANIZATION, repo["namespace"]) + self.assertEqual(ORGANIZATION, repo["namespace"]) def test_listrepos_allparams(self): # Add a repository action count entry for one of the org repos. @@ -2170,7 +2170,7 @@ def test_listrepos_allparams(self): self.assertGreater(len(json["repositories"]), 0) for repository in json["repositories"]: - self.assertEquals(ORGANIZATION, repository["namespace"]) + self.assertEqual(ORGANIZATION, repository["namespace"]) if repository["name"] == ORG_REPO: self.assertGreater(repository["popularity"], 0) @@ -2199,21 +2199,21 @@ def test_listrepos_asguest_allparams(self): ) for repo in json["repositories"]: - self.assertEquals(ORGANIZATION, repo["namespace"]) + self.assertEqual(ORGANIZATION, repo["namespace"]) def assertRepositoryVisible(self, namespace, name): json = self.getJsonResponse(RepositoryList, params=dict(namespace=namespace, public=False)) - self.assertEquals(1, len(json["repositories"])) - self.assertEquals(name, json["repositories"][0]["name"]) + self.assertEqual(1, len(json["repositories"])) + self.assertEqual(name, json["repositories"][0]["name"]) def assertRepositoryNotVisible(self, namespace, name): json = self.getJsonResponse(RepositoryList, params=dict(namespace=namespace, public=False)) for repo in json["repositories"]: - self.assertNotEquals(name, repo["name"]) + self.assertNotEqual(name, repo["name"]) json = self.getJsonResponse(RepositoryList, params=dict(starred=True)) for repo in json["repositories"]: - self.assertNotEquals(name, repo["name"]) + self.assertNotEqual(name, repo["name"]) def test_listrepos_starred_filtered(self): admin_user = model.user.get_user(ADMIN_ACCESS_USER) @@ -2322,7 +2322,7 @@ def test_updatedescription(self): # Verify the repo description was updated. json = self.getJsonResponse(Repository, params=dict(repository=self.SIMPLE_REPO)) - self.assertEquals("Some cool repo", json["description"]) + self.assertEqual("Some cool repo", json["description"]) class TestChangeRepoVisibility(ApiTestCase): @@ -2341,7 +2341,7 @@ def test_trychangevisibility(self): # Verify the visibility. json = self.getJsonResponse(Repository, params=dict(repository=self.SIMPLE_REPO)) - self.assertEquals(True, json["is_public"]) + self.assertEqual(True, json["is_public"]) # Change the subscription of the namespace. self.putJsonResponse(UserPlan, data=dict(plan="personal-2018")) @@ -2357,7 +2357,7 @@ def test_trychangevisibility(self): # Verify the visibility. 
json = self.getJsonResponse(Repository, params=dict(repository=self.SIMPLE_REPO)) - self.assertEquals(True, json["is_public"]) + self.assertEqual(True, json["is_public"]) def test_changevisibility(self): self.login(ADMIN_ACCESS_USER) @@ -2372,7 +2372,7 @@ def test_changevisibility(self): # Verify the visibility. json = self.getJsonResponse(Repository, params=dict(repository=self.SIMPLE_REPO)) - self.assertEquals(True, json["is_public"]) + self.assertEqual(True, json["is_public"]) # Make private. self.postJsonResponse( @@ -2384,7 +2384,7 @@ def test_changevisibility(self): # Verify the visibility. json = self.getJsonResponse(Repository, params=dict(repository=self.SIMPLE_REPO)) - self.assertEquals(False, json["is_public"]) + self.assertEqual(False, json["is_public"]) class TestDeleteRepository(ApiTestCase): @@ -2534,10 +2534,10 @@ def test_get_largerepo(self): Repository, params=dict(repository=ADMIN_ACCESS_USER + "/gargantuan") ) - self.assertEquals(ADMIN_ACCESS_USER, json["namespace"]) - self.assertEquals("gargantuan", json["name"]) + self.assertEqual(ADMIN_ACCESS_USER, json["namespace"]) + self.assertEqual("gargantuan", json["name"]) - self.assertEquals(False, json["is_public"]) + self.assertEqual(False, json["is_public"]) def test_getrepo_badnames(self): self.login(ADMIN_ACCESS_USER) @@ -2557,21 +2557,21 @@ def test_getrepo_badnames(self): Repository, params=dict(repository=ADMIN_ACCESS_USER + "/" + bad_name) ) - self.assertEquals(ADMIN_ACCESS_USER, json["namespace"]) - self.assertEquals(bad_name, json["name"]) - self.assertEquals(True, json["is_public"]) + self.assertEqual(ADMIN_ACCESS_USER, json["namespace"]) + self.assertEqual(bad_name, json["name"]) + self.assertEqual(True, json["is_public"]) def test_getrepo_public_asguest(self): json = self.getJsonResponse(Repository, params=dict(repository=self.PUBLIC_REPO)) - self.assertEquals(PUBLIC_USER, json["namespace"]) - self.assertEquals("publicrepo", json["name"]) + self.assertEqual(PUBLIC_USER, json["namespace"]) + self.assertEqual("publicrepo", json["name"]) - self.assertEquals(True, json["is_public"]) - self.assertEquals(False, json["is_organization"]) + self.assertEqual(True, json["is_public"]) + self.assertEqual(False, json["is_organization"]) - self.assertEquals(False, json["can_write"]) - self.assertEquals(False, json["can_admin"]) + self.assertEqual(False, json["can_write"]) + self.assertEqual(False, json["can_admin"]) assert "latest" in json["tags"] @@ -2580,9 +2580,9 @@ def test_getrepo_public_asowner(self): json = self.getJsonResponse(Repository, params=dict(repository=self.PUBLIC_REPO)) - self.assertEquals(False, json["is_organization"]) - self.assertEquals(True, json["can_write"]) - self.assertEquals(True, json["can_admin"]) + self.assertEqual(False, json["is_organization"]) + self.assertEqual(True, json["can_write"]) + self.assertEqual(True, json["can_admin"]) def test_getrepo_building(self): self.login(ADMIN_ACCESS_USER) @@ -2591,9 +2591,9 @@ def test_getrepo_building(self): Repository, params=dict(repository=ADMIN_ACCESS_USER + "/building") ) - self.assertEquals(True, json["can_write"]) - self.assertEquals(True, json["can_admin"]) - self.assertEquals(False, json["is_organization"]) + self.assertEqual(True, json["can_write"]) + self.assertEqual(True, json["can_admin"]) + self.assertEqual(False, json["is_organization"]) def test_getrepo_org_asnonmember(self): self.getResponse( @@ -2607,13 +2607,13 @@ def test_getrepo_org_asreader(self): Repository, params=dict(repository=ORGANIZATION + "/" + ORG_REPO) ) - 
self.assertEquals(ORGANIZATION, json["namespace"]) - self.assertEquals(ORG_REPO, json["name"]) + self.assertEqual(ORGANIZATION, json["namespace"]) + self.assertEqual(ORG_REPO, json["name"]) - self.assertEquals(False, json["can_write"]) - self.assertEquals(False, json["can_admin"]) + self.assertEqual(False, json["can_write"]) + self.assertEqual(False, json["can_admin"]) - self.assertEquals(True, json["is_organization"]) + self.assertEqual(True, json["is_organization"]) def test_getrepo_org_asadmin(self): self.login(ADMIN_ACCESS_USER) @@ -2622,10 +2622,10 @@ def test_getrepo_org_asadmin(self): Repository, params=dict(repository=ORGANIZATION + "/" + ORG_REPO) ) - self.assertEquals(True, json["can_write"]) - self.assertEquals(True, json["can_admin"]) + self.assertEqual(True, json["can_write"]) + self.assertEqual(True, json["can_admin"]) - self.assertEquals(True, json["is_organization"]) + self.assertEqual(True, json["is_organization"]) class TestRepositoryBuildResource(ApiTestCase): @@ -2666,8 +2666,8 @@ def test_cancel_waitingbuild(self): RepositoryBuildList, params=dict(repository=ADMIN_ACCESS_USER + "/simple") ) - self.assertEquals(1, len(json["builds"])) - self.assertEquals(uuid, json["builds"][0]["id"]) + self.assertEqual(1, len(json["builds"])) + self.assertEqual(uuid, json["builds"][0]["id"]) # Find the build's queue item. build_ref = database.RepositoryBuild.get(uuid=uuid) @@ -2688,8 +2688,8 @@ def test_cancel_waitingbuild(self): RepositoryBuildList, params=dict(repository=ADMIN_ACCESS_USER + "/simple") ) - self.assertEquals(1, len(json["builds"])) - self.assertEquals("cancelled", json["builds"][0]["phase"]) + self.assertEqual(1, len(json["builds"])) + self.assertEqual("cancelled", json["builds"][0]["phase"]) # Check for the build's queue item. try: @@ -2716,8 +2716,8 @@ def test_attemptcancel_scheduledbuild(self): RepositoryBuildList, params=dict(repository=ADMIN_ACCESS_USER + "/simple") ) - self.assertEquals(1, len(json["builds"])) - self.assertEquals(uuid, json["builds"][0]["id"]) + self.assertEqual(1, len(json["builds"])) + self.assertEqual(uuid, json["builds"][0]["id"]) # Set queue item to be picked up. build_ref = database.RepositoryBuild.get(uuid=uuid) @@ -2750,8 +2750,8 @@ def test_attemptcancel_workingbuild(self): RepositoryBuildList, params=dict(repository=ADMIN_ACCESS_USER + "/simple") ) - self.assertEquals(1, len(json["builds"])) - self.assertEquals(uuid, json["builds"][0]["id"]) + self.assertEqual(1, len(json["builds"])) + self.assertEqual(uuid, json["builds"][0]["id"]) # Set the build to a different phase. 
rb = database.RepositoryBuild.get(uuid=uuid) @@ -2799,9 +2799,9 @@ def test_getrepobuilds(self): params=dict(repository=ADMIN_ACCESS_USER + "/building", build_uuid=build["id"]), ) - self.assertEquals(status_json["id"], build["id"]) - self.assertEquals(status_json["resource_key"], build["resource_key"]) - self.assertEquals(status_json["trigger"], build["trigger"]) + self.assertEqual(status_json["id"], build["id"]) + self.assertEqual(status_json["resource_key"], build["resource_key"]) + self.assertEqual(status_json["trigger"], build["trigger"]) class TestRequestRepoBuild(ApiTestCase): @@ -2858,7 +2858,7 @@ def test_requestrepobuild_withurl(self): ) assert len(json["builds"]) > 0 - self.assertEquals("http://quay.io/robots.txt", json["builds"][0]["archive_url"]) + self.assertEqual("http://quay.io/robots.txt", json["builds"][0]["archive_url"]) def test_requestrepobuild_withfile(self): self.login(ADMIN_ACCESS_USER) @@ -2958,9 +2958,9 @@ def test_emailnotauthorized_butsent(self): ), ) - self.assertEquals(False, json["confirmed"]) - self.assertEquals(ADMIN_ACCESS_USER, json["namespace"]) - self.assertEquals("simple", json["repository"]) + self.assertEqual(False, json["confirmed"]) + self.assertEqual(ADMIN_ACCESS_USER, json["namespace"]) + self.assertEqual("simple", json["repository"]) def test_emailauthorized(self): self.login(ADMIN_ACCESS_USER) @@ -2971,9 +2971,9 @@ def test_emailauthorized(self): params=dict(repository=ADMIN_ACCESS_USER + "/simple", email="jschorr@devtable.com"), ) - self.assertEquals(True, json["confirmed"]) - self.assertEquals(ADMIN_ACCESS_USER, json["namespace"]) - self.assertEquals("simple", json["repository"]) + self.assertEqual(True, json["confirmed"]) + self.assertEqual(ADMIN_ACCESS_USER, json["namespace"]) + self.assertEqual("simple", json["repository"]) def test_send_email_authorization(self): self.login(ADMIN_ACCESS_USER) @@ -2984,9 +2984,9 @@ def test_send_email_authorization(self): params=dict(repository=ADMIN_ACCESS_USER + "/simple", email="jschorr+foo@devtable.com"), ) - self.assertEquals(False, json["confirmed"]) - self.assertEquals(ADMIN_ACCESS_USER, json["namespace"]) - self.assertEquals("simple", json["repository"]) + self.assertEqual(False, json["confirmed"]) + self.assertEqual(ADMIN_ACCESS_USER, json["namespace"]) + self.assertEqual("simple", json["repository"]) class TestRepositoryNotifications(ApiTestCase): @@ -3037,9 +3037,9 @@ def test_webhooks(self): expected_code=201, ) - self.assertEquals("repo_push", json["event"]) - self.assertEquals("webhook", json["method"]) - self.assertEquals("http://example.com", json["config"]["url"]) + self.assertEqual("repo_push", json["event"]) + self.assertEqual("webhook", json["method"]) + self.assertEqual("http://example.com", json["config"]["url"]) self.assertIsNone(json["title"]) wid = json["uuid"] @@ -3049,9 +3049,9 @@ def test_webhooks(self): RepositoryNotification, params=dict(repository=ADMIN_ACCESS_USER + "/simple", uuid=wid) ) - self.assertEquals(wid, json["uuid"]) - self.assertEquals("repo_push", json["event"]) - self.assertEquals("webhook", json["method"]) + self.assertEqual(wid, json["uuid"]) + self.assertEqual("repo_push", json["event"]) + self.assertEqual("webhook", json["method"]) self.assertIsNone(json["title"]) # Verify the notification is listed. 
@@ -3090,10 +3090,10 @@ def test_webhooks(self): expected_code=201, ) - self.assertEquals("repo_push", json["event"]) - self.assertEquals("webhook", json["method"]) - self.assertEquals("http://example.com", json["config"]["url"]) - self.assertEquals("Some Notification", json["title"]) + self.assertEqual("repo_push", json["event"]) + self.assertEqual("webhook", json["method"]) + self.assertEqual("http://example.com", json["config"]["url"]) + self.assertEqual("Some Notification", json["title"]) wid = json["uuid"] @@ -3102,10 +3102,10 @@ def test_webhooks(self): RepositoryNotification, params=dict(repository=ADMIN_ACCESS_USER + "/simple", uuid=wid) ) - self.assertEquals(wid, json["uuid"]) - self.assertEquals("repo_push", json["event"]) - self.assertEquals("webhook", json["method"]) - self.assertEquals("Some Notification", json["title"]) + self.assertEqual(wid, json["uuid"]) + self.assertEqual("repo_push", json["event"]) + self.assertEqual("webhook", json["method"]) + self.assertEqual("Some Notification", json["title"]) class TestListAndGetImage(ApiTestCase): @@ -3132,7 +3132,7 @@ def test_listandgetimages(self): params=dict(repository=ADMIN_ACCESS_USER + "/simple", image_id=image["id"]), ) - self.assertEquals(image["id"], ijson["id"]) + self.assertEqual(image["id"], ijson["id"]) class TestGetImageChanges(ApiTestCase): @@ -3191,7 +3191,7 @@ def test_restoretag(self): ListRepositoryTags, params=dict(repository=ADMIN_ACCESS_USER + "/history", tag="latest") ) - self.assertEquals(2, len(json["tags"])) + self.assertEqual(2, len(json["tags"])) self.assertFalse("end_ts" in json["tags"][0]) previous_image_id = json["tags"][1]["docker_image_id"] @@ -3205,9 +3205,9 @@ def test_restoretag(self): json = self.getJsonResponse( ListRepositoryTags, params=dict(repository=ADMIN_ACCESS_USER + "/history", tag="latest") ) - self.assertEquals(3, len(json["tags"])) + self.assertEqual(3, len(json["tags"])) self.assertFalse("end_ts" in json["tags"][0]) - self.assertEquals(previous_image_id, json["tags"][0]["docker_image_id"]) + self.assertEqual(previous_image_id, json["tags"][0]["docker_image_id"]) def test_restoretag_to_digest(self): self.login(ADMIN_ACCESS_USER) @@ -3216,7 +3216,7 @@ def test_restoretag_to_digest(self): ListRepositoryTags, params=dict(repository=ADMIN_ACCESS_USER + "/history", tag="latest") ) - self.assertEquals(2, len(json["tags"])) + self.assertEqual(2, len(json["tags"])) self.assertFalse("end_ts" in json["tags"][0]) previous_manifest = json["tags"][1]["manifest_digest"] @@ -3230,9 +3230,9 @@ def test_restoretag_to_digest(self): json = self.getJsonResponse( ListRepositoryTags, params=dict(repository=ADMIN_ACCESS_USER + "/history", tag="latest") ) - self.assertEquals(3, len(json["tags"])) + self.assertEqual(3, len(json["tags"])) self.assertFalse("end_ts" in json["tags"][0]) - self.assertEquals(previous_manifest, json["tags"][0]["manifest_digest"]) + self.assertEqual(previous_manifest, json["tags"][0]["manifest_digest"]) class TestListAndDeleteTag(ApiTestCase): @@ -3302,7 +3302,7 @@ def test_listdeletecreateandmovetag(self): params=dict(repository=ADMIN_ACCESS_USER + "/complex", tag="staging"), ) - self.assertEquals(staging_images, json["images"]) + self.assertEqual(staging_images, json["images"]) # Require a valid tag name. self.putResponse( @@ -3327,7 +3327,7 @@ def test_listdeletecreateandmovetag(self): ) sometag_images = json["images"] - self.assertEquals(sometag_images, staging_images) + self.assertEqual(sometag_images, staging_images) # Move the tag. 
self.putResponse( @@ -3344,8 +3344,8 @@ def test_listdeletecreateandmovetag(self): ) sometag_new_images = json["images"] - self.assertEquals(1, len(sometag_new_images)) - self.assertEquals(staging_images[-1], sometag_new_images[0]) + self.assertEqual(1, len(sometag_new_images)) + self.assertEqual(staging_images[-1], sometag_new_images[0]) def test_deletesubtag(self): self.login(ADMIN_ACCESS_USER) @@ -3370,7 +3370,7 @@ def test_deletesubtag(self): RepositoryTagImages, params=dict(repository=ADMIN_ACCESS_USER + "/complex", tag="prod") ) - self.assertEquals(prod_images, json["images"]) + self.assertEqual(prod_images, json["images"]) def test_listtag_digest(self): self.login(ADMIN_ACCESS_USER) @@ -3388,7 +3388,7 @@ def test_listtagpagination(self): # Create 8 tags in the simple repo. remaining_tags = {"latest", "prod"} - for i in xrange(1, 9): + for i in range(1, 9): tag_name = "tag" + str(i) remaining_tags.add(tag_name) assert registry_model.retarget_tag( @@ -3400,34 +3400,34 @@ def test_listtagpagination(self): ListRepositoryTags, params=dict(repository=ADMIN_ACCESS_USER + "/simple", page=1, limit=5), ) - self.assertEquals(1, json["page"]) - self.assertEquals(5, len(json["tags"])) + self.assertEqual(1, json["page"]) + self.assertEqual(5, len(json["tags"])) self.assertTrue(json["has_additional"]) names = {tag["name"] for tag in json["tags"]} remaining_tags = remaining_tags - names - self.assertEquals(5, len(remaining_tags)) + self.assertEqual(5, len(remaining_tags)) json = self.getJsonResponse( ListRepositoryTags, params=dict(repository=ADMIN_ACCESS_USER + "/simple", page=2, limit=5), ) - self.assertEquals(2, json["page"]) - self.assertEquals(5, len(json["tags"])) + self.assertEqual(2, json["page"]) + self.assertEqual(5, len(json["tags"])) self.assertFalse(json["has_additional"]) names = {tag["name"] for tag in json["tags"]} remaining_tags = remaining_tags - names - self.assertEquals(0, len(remaining_tags)) + self.assertEqual(0, len(remaining_tags)) json = self.getJsonResponse( ListRepositoryTags, params=dict(repository=ADMIN_ACCESS_USER + "/simple", page=3, limit=5), ) - self.assertEquals(3, json["page"]) - self.assertEquals(0, len(json["tags"])) + self.assertEqual(3, json["page"]) + self.assertEqual(0, len(json["tags"])) self.assertFalse(json["has_additional"]) @@ -3448,10 +3448,10 @@ def test_userpermissions_underorg(self): permissions = self.listUserPermissions(namespace=ORGANIZATION, repo=ORG_REPO) - self.assertEquals(1, len(permissions)) + self.assertEqual(1, len(permissions)) assert "outsideorg" in permissions - self.assertEquals("read", permissions["outsideorg"]["role"]) - self.assertEquals(False, permissions["outsideorg"]["is_org_member"]) + self.assertEqual("read", permissions["outsideorg"]["role"]) + self.assertEqual(False, permissions["outsideorg"]["is_org_member"]) # Add another user. self.putJsonResponse( @@ -3463,10 +3463,10 @@ def test_userpermissions_underorg(self): # Verify the user is present. 
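The xrange(1, 9) fix in test_listtagpagination is the other classic rename: xrange does not exist on Python 3, and range now returns a lazy sequence object that behaves the same way inside a for loop. A short sketch of what that loop relies on (the tag-name prefix is illustrative):

    # range(1, 9) yields 1..8, i.e. the eight tag names the test pages through
    tag_names = ["tag" + str(i) for i in range(1, 9)]
    assert len(tag_names) == 8
    # range() is lazy on Python 3; materialize it when an actual list is needed
    assert list(range(1, 9)) == [1, 2, 3, 4, 5, 6, 7, 8]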
permissions = self.listUserPermissions(namespace=ORGANIZATION, repo=ORG_REPO) - self.assertEquals(2, len(permissions)) + self.assertEqual(2, len(permissions)) assert ADMIN_ACCESS_USER in permissions - self.assertEquals("admin", permissions[ADMIN_ACCESS_USER]["role"]) - self.assertEquals(True, permissions[ADMIN_ACCESS_USER]["is_org_member"]) + self.assertEqual("admin", permissions[ADMIN_ACCESS_USER]["role"]) + self.assertEqual(True, permissions[ADMIN_ACCESS_USER]["is_org_member"]) def test_userpermissions(self): self.login(ADMIN_ACCESS_USER) @@ -3474,9 +3474,9 @@ def test_userpermissions(self): # The repo should start with just the admin as a user perm. permissions = self.listUserPermissions() - self.assertEquals(1, len(permissions)) + self.assertEqual(1, len(permissions)) assert ADMIN_ACCESS_USER in permissions - self.assertEquals("admin", permissions[ADMIN_ACCESS_USER]["role"]) + self.assertEqual("admin", permissions[ADMIN_ACCESS_USER]["role"]) self.assertFalse("is_org_member" in permissions[ADMIN_ACCESS_USER]) # Add another user. @@ -3489,16 +3489,16 @@ def test_userpermissions(self): # Verify the user is present. permissions = self.listUserPermissions() - self.assertEquals(2, len(permissions)) + self.assertEqual(2, len(permissions)) assert NO_ACCESS_USER in permissions - self.assertEquals("read", permissions[NO_ACCESS_USER]["role"]) + self.assertEqual("read", permissions[NO_ACCESS_USER]["role"]) self.assertFalse("is_org_member" in permissions[NO_ACCESS_USER]) json = self.getJsonResponse( RepositoryUserPermission, params=dict(repository=ADMIN_ACCESS_USER + "/simple", username=NO_ACCESS_USER), ) - self.assertEquals("read", json["role"]) + self.assertEqual("read", json["role"]) # Change the user's permissions. self.putJsonResponse( @@ -3510,9 +3510,9 @@ def test_userpermissions(self): # Verify. permissions = self.listUserPermissions() - self.assertEquals(2, len(permissions)) + self.assertEqual(2, len(permissions)) assert NO_ACCESS_USER in permissions - self.assertEquals("admin", permissions[NO_ACCESS_USER]["role"]) + self.assertEqual("admin", permissions[NO_ACCESS_USER]["role"]) # Delete the user's permission. self.deleteEmptyResponse( @@ -3523,7 +3523,7 @@ def test_userpermissions(self): # Verify. permissions = self.listUserPermissions() - self.assertEquals(1, len(permissions)) + self.assertEqual(1, len(permissions)) assert not NO_ACCESS_USER in permissions def test_teampermissions(self): @@ -3532,9 +3532,9 @@ def test_teampermissions(self): # The repo should start with just the readers as a team perm. permissions = self.listTeamPermissions() - self.assertEquals(1, len(permissions)) + self.assertEqual(1, len(permissions)) assert "readers" in permissions - self.assertEquals("read", permissions["readers"]["role"]) + self.assertEqual("read", permissions["readers"]["role"]) # Add another team. self.putJsonResponse( @@ -3546,15 +3546,15 @@ def test_teampermissions(self): # Verify the team is present. permissions = self.listTeamPermissions() - self.assertEquals(2, len(permissions)) + self.assertEqual(2, len(permissions)) assert "owners" in permissions - self.assertEquals("write", permissions["owners"]["role"]) + self.assertEqual("write", permissions["owners"]["role"]) json = self.getJsonResponse( RepositoryTeamPermission, params=dict(repository=ORGANIZATION + "/" + ORG_REPO, teamname="owners"), ) - self.assertEquals("write", json["role"]) + self.assertEqual("write", json["role"]) # Change the team's permissions. 
self.putJsonResponse( @@ -3566,9 +3566,9 @@ def test_teampermissions(self): # Verify. permissions = self.listTeamPermissions() - self.assertEquals(2, len(permissions)) + self.assertEqual(2, len(permissions)) assert "owners" in permissions - self.assertEquals("admin", permissions["owners"]["role"]) + self.assertEqual("admin", permissions["owners"]["role"]) # Delete the team's permission. self.deleteEmptyResponse( @@ -3579,7 +3579,7 @@ def test_teampermissions(self): # Verify. permissions = self.listTeamPermissions() - self.assertEquals(1, len(permissions)) + self.assertEqual(1, len(permissions)) assert not "owners" in permissions @@ -3595,8 +3595,8 @@ def test_getusercard(self): self.login(ADMIN_ACCESS_USER) json = self.getJsonResponse(UserCard) - self.assertEquals("4242", json["card"]["last4"]) - self.assertEquals("Visa", json["card"]["type"]) + self.assertEqual("4242", json["card"]["last4"]) + self.assertEqual("Visa", json["card"]["type"]) def test_setusercard_error(self): self.login(ADMIN_ACCESS_USER) @@ -3609,8 +3609,8 @@ def test_getorgcard(self): self.login(ADMIN_ACCESS_USER) json = self.getJsonResponse(OrganizationCard, params=dict(orgname=ORGANIZATION)) - self.assertEquals("4242", json["card"]["last4"]) - self.assertEquals("Visa", json["card"]["type"]) + self.assertEqual("4242", json["card"]["last4"]) + self.assertEqual("Visa", json["card"]["type"]) class TestUserSubscription(ApiTestCase): @@ -3625,14 +3625,14 @@ def test_updateplan(self): # Verify sub = self.getSubscription() - self.assertEquals("free", sub["plan"]) + self.assertEqual("free", sub["plan"]) # Change the plan. self.putJsonResponse(UserPlan, data=dict(plan="bus-large-2018")) # Verify sub = self.getSubscription() - self.assertEquals("bus-large-2018", sub["plan"]) + self.assertEqual("bus-large-2018", sub["plan"]) class TestOrgSubscription(ApiTestCase): @@ -3649,7 +3649,7 @@ def test_updateplan(self): # Verify sub = self.getSubscription() - self.assertEquals("free", sub["plan"]) + self.assertEqual("free", sub["plan"]) # Change the plan. self.putJsonResponse( @@ -3658,7 +3658,7 @@ def test_updateplan(self): # Verify sub = self.getSubscription() - self.assertEquals("bus-large-2018", sub["plan"]) + self.assertEqual("bus-large-2018", sub["plan"]) class TestUserRobots(ApiTestCase): @@ -3691,7 +3691,7 @@ def test_robots(self): UserRobot, params=dict(robot_shortname="bender"), expected_code=201 ) - self.assertEquals(NO_ACCESS_USER + "+bender", json["name"]) + self.assertEqual(NO_ACCESS_USER + "+bender", json["name"]) # Verify. robots = self.getRobotNames() @@ -3720,13 +3720,13 @@ def test_regenerate(self): ) # Verify the token changed. - self.assertNotEquals(token, json["token"]) + self.assertNotEqual(token, json["token"]) json2 = self.getJsonResponse( UserRobot, params=dict(robot_shortname="bender"), expected_code=200 ) - self.assertEquals(json["token"], json2["token"]) + self.assertEqual(json["token"], json2["token"]) class TestOrgRobots(ApiTestCase): @@ -3851,7 +3851,7 @@ def test_robots(self): OrgRobot, params=dict(orgname=ORGANIZATION, robot_shortname="bender"), expected_code=201 ) - self.assertEquals(ORGANIZATION + "+bender", json["name"]) + self.assertEqual(ORGANIZATION + "+bender", json["name"]) # Verify. robots = self.getRobotNames() @@ -3884,13 +3884,13 @@ def test_regenerate(self): ) # Verify the token changed. 
- self.assertNotEquals(token, json["token"]) + self.assertNotEqual(token, json["token"]) json2 = self.getJsonResponse( OrgRobot, params=dict(orgname=ORGANIZATION, robot_shortname="bender"), expected_code=200 ) - self.assertEquals(json["token"], json2["token"]) + self.assertEqual(json["token"], json2["token"]) class TestLogs(ApiTestCase): @@ -3909,8 +3909,8 @@ def test_repo_logs_crossyear(self): RepositoryLogs, params=dict(repository="devtable/simple", starttime="12/01/2016", endtime="1/09/2017"), ) - self.assertEquals("Thu, 01 Dec 2016 00:00:00 -0000", json["start_time"]) - self.assertEquals("Tue, 10 Jan 2017 00:00:00 -0000", json["end_time"]) + self.assertEqual("Thu, 01 Dec 2016 00:00:00 -0000", json["start_time"]) + self.assertEqual("Tue, 10 Jan 2017 00:00:00 -0000", json["end_time"]) def test_repo_aggregate_logs(self): self.login(ADMIN_ACCESS_USER) @@ -3961,7 +3961,7 @@ def test_performer(self): assert len(json["logs"]) < len(all_logs) for log in json["logs"]: - self.assertEquals(READ_ACCESS_USER, log["performer"]["name"]) + self.assertEqual(READ_ACCESS_USER, log["performer"]["name"]) class TestApplicationInformation(ApiTestCase): @@ -3985,7 +3985,7 @@ def test_list_create_applications(self): json = self.getJsonResponse(OrganizationApplications, params=dict(orgname=ORGANIZATION)) - self.assertEquals(2, len(json["applications"])) + self.assertEqual(2, len(json["applications"])) found = False for application in json["applications"]: @@ -4002,14 +4002,14 @@ def test_list_create_applications(self): data=dict(name="Some cool app", description="foo"), ) - self.assertEquals("Some cool app", json["name"]) - self.assertEquals("foo", json["description"]) + self.assertEqual("Some cool app", json["name"]) + self.assertEqual("foo", json["description"]) # Retrieve the apps list again list_json = self.getJsonResponse( OrganizationApplications, params=dict(orgname=ORGANIZATION) ) - self.assertEquals(3, len(list_json["applications"])) + self.assertEqual(3, len(list_json["applications"])) class TestOrganizationApplicationResource(ApiTestCase): @@ -4022,7 +4022,7 @@ def test_get_edit_delete_application(self): params=dict(orgname=ORGANIZATION, client_id=FAKE_APPLICATION_CLIENT_ID), ) - self.assertEquals(FAKE_APPLICATION_CLIENT_ID, json["client_id"]) + self.assertEqual(FAKE_APPLICATION_CLIENT_ID, json["client_id"]) # Edit the application. edit_json = self.putJsonResponse( @@ -4037,12 +4037,12 @@ def test_get_edit_delete_application(self): ), ) - self.assertEquals(FAKE_APPLICATION_CLIENT_ID, edit_json["client_id"]) - self.assertEquals("Some App", edit_json["name"]) - self.assertEquals("foo", edit_json["description"]) - self.assertEquals("bar", edit_json["application_uri"]) - self.assertEquals("baz", edit_json["redirect_uri"]) - self.assertEquals("meh", edit_json["avatar_email"]) + self.assertEqual(FAKE_APPLICATION_CLIENT_ID, edit_json["client_id"]) + self.assertEqual("Some App", edit_json["name"]) + self.assertEqual("foo", edit_json["description"]) + self.assertEqual("bar", edit_json["application_uri"]) + self.assertEqual("baz", edit_json["redirect_uri"]) + self.assertEqual("meh", edit_json["avatar_email"]) # Retrieve the application again. json = self.getJsonResponse( @@ -4050,7 +4050,7 @@ def test_get_edit_delete_application(self): params=dict(orgname=ORGANIZATION, client_id=FAKE_APPLICATION_CLIENT_ID), ) - self.assertEquals(json, edit_json) + self.assertEqual(json, edit_json) # Delete the application. 
self.deleteEmptyResponse( @@ -4086,7 +4086,7 @@ def test_reset_client_secret(self): params=dict(orgname=ORGANIZATION, client_id=FAKE_APPLICATION_CLIENT_ID), ) - self.assertEquals(FAKE_APPLICATION_CLIENT_ID, json["client_id"]) + self.assertEqual(FAKE_APPLICATION_CLIENT_ID, json["client_id"]) # Reset the client secret. reset_json = self.postJsonResponse( @@ -4094,15 +4094,15 @@ def test_reset_client_secret(self): params=dict(orgname=ORGANIZATION, client_id=FAKE_APPLICATION_CLIENT_ID), ) - self.assertEquals(FAKE_APPLICATION_CLIENT_ID, reset_json["client_id"]) - self.assertNotEquals(reset_json["client_secret"], json["client_secret"]) + self.assertEqual(FAKE_APPLICATION_CLIENT_ID, reset_json["client_id"]) + self.assertNotEqual(reset_json["client_secret"], json["client_secret"]) # Verify it was changed in the DB. json = self.getJsonResponse( OrganizationApplicationResource, params=dict(orgname=ORGANIZATION, client_id=FAKE_APPLICATION_CLIENT_ID), ) - self.assertEquals(reset_json["client_secret"], json["client_secret"]) + self.assertEqual(reset_json["client_secret"], json["client_secret"]) class FakeBuildTrigger(BuildTriggerHandler): @@ -4183,13 +4183,13 @@ def test_list_build_triggers(self): json = self.getJsonResponse( BuildTriggerList, params=dict(repository=ADMIN_ACCESS_USER + "/simple") ) - self.assertEquals(0, len(json["triggers"])) + self.assertEqual(0, len(json["triggers"])) # Check a repo with one known trigger. json = self.getJsonResponse( BuildTriggerList, params=dict(repository=ADMIN_ACCESS_USER + "/building") ) - self.assertEquals(1, len(json["triggers"])) + self.assertEqual(1, len(json["triggers"])) trigger = json["triggers"][0] @@ -4204,7 +4204,7 @@ def test_list_build_triggers(self): params=dict(repository=ADMIN_ACCESS_USER + "/building", trigger_uuid=trigger["id"]), ) - self.assertEquals(trigger, trigger_json) + self.assertEqual(trigger, trigger_json) # Check the recent builds for the trigger. builds_json = self.getJsonResponse( @@ -4220,7 +4220,7 @@ def test_delete_build_trigger(self): json = self.getJsonResponse( BuildTriggerList, params=dict(repository=ADMIN_ACCESS_USER + "/building") ) - self.assertEquals(1, len(json["triggers"])) + self.assertEqual(1, len(json["triggers"])) trigger = json["triggers"][0] # Delete the trigger. @@ -4233,7 +4233,7 @@ def test_delete_build_trigger(self): json = self.getJsonResponse( BuildTriggerList, params=dict(repository=ADMIN_ACCESS_USER + "/building") ) - self.assertEquals(0, len(json["triggers"])) + self.assertEqual(0, len(json["triggers"])) def test_analyze_fake_trigger(self): self.login(ADMIN_ACCESS_USER) @@ -4253,8 +4253,8 @@ def test_analyze_fake_trigger(self): data={"config": trigger_config}, ) - self.assertEquals("warning", analyze_json["status"]) - self.assertEquals( + self.assertEqual("warning", analyze_json["status"]) + self.assertEqual( "Specified Dockerfile path for the trigger was not " + "found on the main branch. This trigger may fail.", analyze_json["message"], @@ -4268,8 +4268,8 @@ def test_analyze_fake_trigger(self): data={"config": trigger_config}, ) - self.assertEquals("warning", analyze_json["status"]) - self.assertEquals("No FROM line found in the Dockerfile", analyze_json["message"]) + self.assertEqual("warning", analyze_json["status"]) + self.assertEqual("No FROM line found in the Dockerfile", analyze_json["message"]) # Analyze the trigger's dockerfile: Third, dockerfile with public repo. 
trigger_config = {"dockerfile": "FROM somerepo"} @@ -4279,7 +4279,7 @@ def test_analyze_fake_trigger(self): data={"config": trigger_config}, ) - self.assertEquals("publicbase", analyze_json["status"]) + self.assertEqual("publicbase", analyze_json["status"]) # Analyze the trigger's dockerfile: Fourth, dockerfile with private repo with an invalid path. trigger_config = {"dockerfile": "FROM localhost:5000/somepath"} @@ -4289,8 +4289,8 @@ def test_analyze_fake_trigger(self): data={"config": trigger_config}, ) - self.assertEquals("warning", analyze_json["status"]) - self.assertEquals( + self.assertEqual("warning", analyze_json["status"]) + self.assertEqual( '"localhost:5000/somepath" is not a valid Quay repository path', analyze_json["message"] ) @@ -4302,11 +4302,11 @@ def test_analyze_fake_trigger(self): data={"config": trigger_config}, ) - self.assertEquals("error", analyze_json["status"]) + self.assertEqual("error", analyze_json["status"]) nofound = ( 'Repository "localhost:5000/%s/randomrepo" referenced by the Dockerfile was not found' ) - self.assertEquals(nofound % "nothere", analyze_json["message"]) + self.assertEqual(nofound % "nothere", analyze_json["message"]) # Analyze the trigger's dockerfile: Sixth, dockerfile with private repo that the user cannot see trigger_config = {"dockerfile": "FROM localhost:5000/randomuser/randomrepo"} @@ -4316,8 +4316,8 @@ def test_analyze_fake_trigger(self): data={"config": trigger_config}, ) - self.assertEquals("error", analyze_json["status"]) - self.assertEquals(nofound % "randomuser", analyze_json["message"]) + self.assertEqual("error", analyze_json["status"]) + self.assertEqual(nofound % "randomuser", analyze_json["message"]) # Analyze the trigger's dockerfile: Seventh, dockerfile with private repo that the user see. trigger_config = {"dockerfile": "FROM localhost:5000/devtable/complex"} @@ -4327,10 +4327,10 @@ def test_analyze_fake_trigger(self): data={"config": trigger_config}, ) - self.assertEquals("requiresrobot", analyze_json["status"]) - self.assertEquals("devtable", analyze_json["namespace"]) - self.assertEquals("complex", analyze_json["name"]) - self.assertEquals(ADMIN_ACCESS_USER + "+dtrobot", analyze_json["robots"][0]["name"]) + self.assertEqual("requiresrobot", analyze_json["status"]) + self.assertEqual("devtable", analyze_json["namespace"]) + self.assertEqual("complex", analyze_json["name"]) + self.assertEqual(ADMIN_ACCESS_USER + "+dtrobot", analyze_json["robots"][0]["name"]) def test_fake_trigger(self): self.login(ADMIN_ACCESS_USER) @@ -4346,17 +4346,17 @@ def test_fake_trigger(self): json = self.getJsonResponse( BuildTriggerList, params=dict(repository=ADMIN_ACCESS_USER + "/simple") ) - self.assertEquals(1, len(json["triggers"])) - self.assertEquals(trigger.uuid, json["triggers"][0]["id"]) - self.assertEquals(trigger.service.name, json["triggers"][0]["service"]) - self.assertEquals(False, json["triggers"][0]["is_active"]) + self.assertEqual(1, len(json["triggers"])) + self.assertEqual(trigger.uuid, json["triggers"][0]["id"]) + self.assertEqual(trigger.service.name, json["triggers"][0]["service"]) + self.assertEqual(False, json["triggers"][0]["is_active"]) # List the trigger's source namespaces. 
namespace_json = self.getJsonResponse( BuildTriggerSourceNamespaces, params=dict(repository=ADMIN_ACCESS_USER + "/simple", trigger_uuid=trigger.uuid), ) - self.assertEquals( + self.assertEqual( [{"id": "first", "name": "first"}, {"id": "second", "name": "second"}], namespace_json["namespaces"], ) @@ -4366,7 +4366,7 @@ def test_fake_trigger(self): params=dict(repository=ADMIN_ACCESS_USER + "/simple", trigger_uuid=trigger.uuid), data=dict(namespace="first"), ) - self.assertEquals([{"name": "source"}], source_json["sources"]) + self.assertEqual([{"name": "source"}], source_json["sources"]) # List the trigger's subdirs. subdir_json = self.postJsonResponse( @@ -4375,7 +4375,7 @@ def test_fake_trigger(self): data={"somevalue": "meh"}, ) - self.assertEquals( + self.assertEqual( { "status": "success", "dockerfile_paths": ["/sometoken", "/foo", "/bar", "/meh"], @@ -4392,12 +4392,12 @@ def test_fake_trigger(self): data={"config": trigger_config}, ) - self.assertEquals(True, activate_json["is_active"]) + self.assertEqual(True, activate_json["is_active"]) # Make sure the trigger has a write token. trigger = model.build.get_build_trigger(trigger.uuid) - self.assertNotEquals(None, trigger.write_token) - self.assertEquals(True, py_json.loads(trigger.config)["active"]) + self.assertNotEqual(None, trigger.write_token) + self.assertEqual(True, py_json.loads(trigger.config)["active"]) # Make sure we cannot activate again. self.postResponse( @@ -4417,7 +4417,7 @@ def test_fake_trigger(self): ), ) - self.assertEquals(result["values"], [1, 2, 3]) + self.assertEqual(result["values"], [1, 2, 3]) self.postResponse( BuildTriggerFieldValues, @@ -4438,14 +4438,14 @@ def test_fake_trigger(self): ) assert "id" in start_json - self.assertEquals("build-name", start_json["display_name"]) - self.assertEquals(["bar"], start_json["tags"]) - self.assertEquals("subdir", start_json["subdirectory"]) - self.assertEquals("somesource", start_json["trigger"]["build_source"]) + self.assertEqual("build-name", start_json["display_name"]) + self.assertEqual(["bar"], start_json["tags"]) + self.assertEqual("subdir", start_json["subdirectory"]) + self.assertEqual("somesource", start_json["trigger"]["build_source"]) # Verify the metadata was added. build_obj = database.RepositoryBuild.get(database.RepositoryBuild.uuid == start_json["id"]) - self.assertEquals("bar", py_json.loads(build_obj.job_config)["trigger_metadata"]["foo"]) + self.assertEqual("bar", py_json.loads(build_obj.job_config)["trigger_metadata"]["foo"]) # Start another manual build, with a ref. self.postJsonResponse( @@ -4520,8 +4520,8 @@ def test_robot_account(self): ) # Verify that the robot was saved. - self.assertEquals(True, activate_json["is_active"]) - self.assertEquals(ADMIN_ACCESS_USER + "+dtrobot", activate_json["pull_robot"]["name"]) + self.assertEqual(True, activate_json["is_active"]) + self.assertEqual(ADMIN_ACCESS_USER + "+dtrobot", activate_json["pull_robot"]["name"]) # Start a manual build. 
start_json = self.postJsonResponse( @@ -4532,8 +4532,8 @@ def test_robot_account(self): ) assert "id" in start_json - self.assertEquals("build-name", start_json["display_name"]) - self.assertEquals(["bar"], start_json["tags"]) + self.assertEqual("build-name", start_json["display_name"]) + self.assertEqual(["bar"], start_json["tags"]) class TestUserAuthorizations(ApiTestCase): @@ -4542,7 +4542,7 @@ def test_list_get_delete_user_authorizations(self): json = self.getJsonResponse(UserAuthorizationList) - self.assertEquals(1, len(json["authorizations"])) + self.assertEqual(1, len(json["authorizations"])) authorization = json["authorizations"][0] @@ -4554,7 +4554,7 @@ def test_list_get_delete_user_authorizations(self): get_json = self.getJsonResponse( UserAuthorization, params=dict(access_token_uuid=authorization["uuid"]) ) - self.assertEquals(authorization, get_json) + self.assertEqual(authorization, get_json) # Delete the authorization. self.deleteEmptyResponse( @@ -4658,7 +4658,7 @@ def test_get_update_keys(self): # Ensure the key's name has been changed. json = self.getJsonResponse(SuperUserServiceKey, params=dict(kid=key["kid"])) - self.assertEquals("somenewname", json["name"]) + self.assertEqual("somenewname", json["name"]) with assert_action_logged("service_key_modify"): # Update the key's metadata. @@ -4670,7 +4670,7 @@ def test_get_update_keys(self): # Ensure the key's metadata has been changed. json = self.getJsonResponse(SuperUserServiceKey, params=dict(kid=key["kid"])) - self.assertEquals("bar", json["metadata"]["foo"]) + self.assertEqual("bar", json["metadata"]["foo"]) with assert_action_logged("service_key_extend"): # Change the key's expiration. @@ -4690,14 +4690,14 @@ def test_get_update_keys(self): self.getResponse(SuperUserServiceKey, params=dict(kid=key["kid"]), expected_code=404) json = self.getJsonResponse(SuperUserServiceKeyManagement) - self.assertEquals(key_count - 1, len(json["keys"])) + self.assertEqual(key_count - 1, len(json["keys"])) def test_approve_key(self): self.login(ADMIN_ACCESS_USER) # Ensure the key is not yet approved. json = self.getJsonResponse(SuperUserServiceKey, params=dict(kid="kid3")) - self.assertEquals("unapprovedkey", json["name"]) + self.assertEqual("unapprovedkey", json["name"]) self.assertIsNone(json["approval"]) # Approve the key. @@ -4711,11 +4711,11 @@ def test_approve_key(self): # Ensure the key is approved. json = self.getJsonResponse(SuperUserServiceKey, params=dict(kid="kid3")) - self.assertEquals("unapprovedkey", json["name"]) + self.assertEqual("unapprovedkey", json["name"]) self.assertIsNotNone(json["approval"]) - self.assertEquals("ServiceKeyApprovalType.SUPERUSER", json["approval"]["approval_type"]) - self.assertEquals(ADMIN_ACCESS_USER, json["approval"]["approver"]["username"]) - self.assertEquals("testapprove", json["approval"]["notes"]) + self.assertEqual("ServiceKeyApprovalType.SUPERUSER", json["approval"]["approval_type"]) + self.assertEqual(ADMIN_ACCESS_USER, json["approval"]["approver"]["username"]) + self.assertEqual("testapprove", json["approval"]["notes"]) def test_approve_preapproved(self): self.login(ADMIN_ACCESS_USER) @@ -4754,7 +4754,7 @@ def test_create_key(self): with assert_action_logged("service_key_create"): # Create the key. 
json = self.postJsonResponse(SuperUserServiceKeyManagement, data=new_key) - self.assertEquals("mynewkey", json["name"]) + self.assertEqual("mynewkey", json["name"]) self.assertTrue("kid" in json) self.assertTrue("public_key" in json) self.assertTrue("private_key" in json) @@ -4768,15 +4768,15 @@ def test_create_key(self): kid = json["kid"] json = self.getJsonResponse(SuperUserServiceKey, params=dict(kid=kid)) - self.assertEquals("mynewkey", json["name"]) - self.assertEquals("coolservice", json["service"]) - self.assertEquals("baz", json["metadata"]["foo"]) - self.assertEquals(kid, json["kid"]) + self.assertEqual("mynewkey", json["name"]) + self.assertEqual("coolservice", json["service"]) + self.assertEqual("baz", json["metadata"]["foo"]) + self.assertEqual(kid, json["kid"]) self.assertIsNotNone(json["approval"]) - self.assertEquals("ServiceKeyApprovalType.SUPERUSER", json["approval"]["approval_type"]) - self.assertEquals(ADMIN_ACCESS_USER, json["approval"]["approver"]["username"]) - self.assertEquals("whazzup!?", json["approval"]["notes"]) + self.assertEqual("ServiceKeyApprovalType.SUPERUSER", json["approval"]["approval_type"]) + self.assertEqual(ADMIN_ACCESS_USER, json["approval"]["approver"]["username"]) + self.assertEqual("whazzup!?", json["approval"]["notes"]) class TestRepositoryManifestLabels(ApiTestCase): @@ -4793,7 +4793,7 @@ def test_basic_labels(self): params=dict(repository=repository, manifestref=tag.manifest_digest), ) - self.assertEquals(0, len(json["labels"])) + self.assertEqual(0, len(json["labels"])) self.postJsonResponse( RepositoryManifestLabels, @@ -4840,15 +4840,15 @@ def test_basic_labels(self): params=dict(repository=repository, manifestref=tag.manifest_digest), ) - self.assertEquals(3, len(json["labels"])) + self.assertEqual(3, len(json["labels"])) - self.assertNotEquals(label2["label"]["id"], label1["label"]["id"]) - self.assertNotEquals(label3["label"]["id"], label1["label"]["id"]) - self.assertNotEquals(label2["label"]["id"], label3["label"]["id"]) + self.assertNotEqual(label2["label"]["id"], label1["label"]["id"]) + self.assertNotEqual(label3["label"]["id"], label1["label"]["id"]) + self.assertNotEqual(label2["label"]["id"], label3["label"]["id"]) - self.assertEquals("text/plain", label1["label"]["media_type"]) - self.assertEquals("text/plain", label2["label"]["media_type"]) - self.assertEquals("application/json", label3["label"]["media_type"]) + self.assertEqual("text/plain", label1["label"]["media_type"]) + self.assertEqual("text/plain", label2["label"]["media_type"]) + self.assertEqual("application/json", label3["label"]["media_type"]) # Ensure we can retrieve each of the labels. for label in json["labels"]: @@ -4858,7 +4858,7 @@ def test_basic_labels(self): repository=repository, manifestref=tag.manifest_digest, labelid=label["id"] ), ) - self.assertEquals(label["id"], label_json["id"]) + self.assertEqual(label["id"], label_json["id"]) # Delete a label. with assert_action_logged("manifest_label_delete"): @@ -4877,7 +4877,7 @@ def test_basic_labels(self): params=dict(repository=repository, manifestref=tag.manifest_digest), ) - self.assertEquals(2, len(json["labels"])) + self.assertEqual(2, len(json["labels"])) # Check filtering. 
json = self.getJsonResponse( @@ -4885,7 +4885,7 @@ def test_basic_labels(self): params=dict(repository=repository, manifestref=tag.manifest_digest, filter="hello"), ) - self.assertEquals(1, len(json["labels"])) + self.assertEqual(1, len(json["labels"])) def test_prefixed_labels(self): self.login(ADMIN_ACCESS_USER) @@ -4959,16 +4959,16 @@ def test_get_user(self): self.login(ADMIN_ACCESS_USER) json = self.getJsonResponse(SuperUserManagement, params=dict(username="freshuser")) - self.assertEquals("freshuser", json["username"]) - self.assertEquals("jschorr+test@devtable.com", json["email"]) - self.assertEquals(False, json["super_user"]) + self.assertEqual("freshuser", json["username"]) + self.assertEqual("jschorr+test@devtable.com", json["email"]) + self.assertEqual(False, json["super_user"]) def test_delete_user(self): self.login(ADMIN_ACCESS_USER) # Verify the user exists. json = self.getJsonResponse(SuperUserManagement, params=dict(username="freshuser")) - self.assertEquals("freshuser", json["username"]) + self.assertEqual("freshuser", json["username"]) # Delete the user. self.deleteEmptyResponse( @@ -4983,8 +4983,8 @@ def test_change_user_password(self): # Verify the user exists. json = self.getJsonResponse(SuperUserManagement, params=dict(username="freshuser")) - self.assertEquals("freshuser", json["username"]) - self.assertEquals("jschorr+test@devtable.com", json["email"]) + self.assertEqual("freshuser", json["username"]) + self.assertEqual("jschorr+test@devtable.com", json["email"]) # Update the user. json = self.putJsonResponse( @@ -4999,8 +4999,8 @@ def test_update_user(self): # Verify the user exists. json = self.getJsonResponse(SuperUserManagement, params=dict(username="freshuser")) - self.assertEquals("freshuser", json["username"]) - self.assertEquals("jschorr+test@devtable.com", json["email"]) + self.assertEqual("freshuser", json["username"]) + self.assertEqual("jschorr+test@devtable.com", json["email"]) # Update the user. json = self.putJsonResponse( @@ -5010,8 +5010,8 @@ def test_update_user(self): # Verify the user was updated. 
json = self.getJsonResponse(SuperUserManagement, params=dict(username="freshuser")) - self.assertEquals("freshuser", json["username"]) - self.assertEquals("foo@bar.com", json["email"]) + self.assertEqual("freshuser", json["username"]) + self.assertEqual("foo@bar.com", json["email"]) def test_set_message(self): self.login(ADMIN_ACCESS_USER) @@ -5021,7 +5021,7 @@ def test_set_message(self): self.postResponse(GlobalUserMessages, data=dict(message=message), expected_code=201) json = self.getJsonResponse(GlobalUserMessages) - self.assertEquals(len(json["messages"]), 3) + self.assertEqual(len(json["messages"]), 3) has_matching_message = False for message in json["messages"]: @@ -5045,7 +5045,7 @@ def test_delete_message(self): json = self.getJsonResponse(GlobalUserMessages) - self.assertEquals(len(json["messages"]), 1) + self.assertEqual(len(json["messages"]), 1) if __name__ == "__main__": diff --git a/test/test_endpoints.py b/test/test_endpoints.py index db8a9d0cdf..fd209c5d1b 100644 --- a/test/test_endpoints.py +++ b/test/test_endpoints.py @@ -8,8 +8,8 @@ from mock import patch from io import BytesIO -from urllib import urlencode -from urlparse import urlparse, urlunparse, parse_qs +from urllib.parse import urlencode +from urllib.parse import urlparse, urlunparse, parse_qs from datetime import datetime, timedelta import jwt @@ -28,6 +28,7 @@ from endpoints.csrf import OAUTH_CSRF_TOKEN_NAME from endpoints.web import web as web_bp from endpoints.webhooks import webhooks as webhooks_bp +from endpoints.test.shared import gen_basic_auth from initdb import setup_database_for_testing, finished_database_for_testing from test.helpers import assert_action_logged from util.security.token import encode_public_private_token @@ -90,20 +91,20 @@ def tearDown(self): def getResponse(self, resource_name, expected_code=200, **kwargs): rv = self.app.get(url_for(resource_name, **kwargs)) - self.assertEquals(rv.status_code, expected_code) + self.assertEqual(rv.status_code, expected_code) return rv.data def deleteResponse(self, resource_name, headers=None, expected_code=200, **kwargs): headers = headers or {} rv = self.app.delete(url_for(resource_name, **kwargs), headers=headers) - self.assertEquals(rv.status_code, expected_code) + self.assertEqual(rv.status_code, expected_code) return rv.data def deleteEmptyResponse(self, resource_name, headers=None, expected_code=204, **kwargs): headers = headers or {} rv = self.app.delete(url_for(resource_name, **kwargs), headers=headers) - self.assertEquals(rv.status_code, expected_code) - self.assertEquals(rv.data, "") # ensure response body empty + self.assertEqual(rv.status_code, expected_code) + self.assertEqual(rv.data, b"") # ensure response body empty return def putResponse(self, resource_name, headers=None, data=None, expected_code=200, **kwargs): @@ -112,7 +113,7 @@ def putResponse(self, resource_name, headers=None, data=None, expected_code=200, rv = self.app.put( url_for(resource_name, **kwargs), headers=headers, data=py_json.dumps(data) ) - self.assertEquals(rv.status_code, expected_code) + self.assertEqual(rv.status_code, expected_code) return rv.data def postResponse( @@ -123,7 +124,7 @@ def postResponse( form=None, with_csrf=True, expected_code=200, - **kwargs + **kwargs, ): headers = headers or {} form = form or {} @@ -139,7 +140,7 @@ def postResponse( rv = self.app.post(url, headers=headers, data=post_data) if expected_code is not None: - self.assertEquals(rv.status_code, expected_code) + self.assertEqual(rv.status_code, expected_code) return rv @@ -149,7 +150,7 
@@ def login(self, username, password): data=py_json.dumps(dict(username=username, password=password)), headers={"Content-Type": "application/json"}, ) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) class BuildLogsTestCase(EndpointTestCase): @@ -176,7 +177,7 @@ def test_buildlogs_successful(self): logs = ["log1", "log2"] with patch("endpoints.web.build_logs.get_log_entries", return_value=(None, logs)): resp = self.getResponse("web.buildlogs", build_uuid=self.build_uuid, expected_code=200) - self.assertEquals({"logs": logs}, py_json.loads(resp)) + self.assertEqual({"logs": logs}, py_json.loads(resp)) class ArchivedLogsTestCase(EndpointTestCase): @@ -227,7 +228,7 @@ def test_valid_build_trigger_webhook_cookie_auth(self): ) def test_valid_build_trigger_webhook_missing_payload(self): - auth_header = "Basic %s" % (base64.b64encode("devtable:password")) + auth_header = gen_basic_auth("devtable", "password") trigger = list(model.build.list_build_triggers("devtable", "building"))[0] self.postResponse( "webhooks.build_trigger_webhook", @@ -237,7 +238,7 @@ def test_valid_build_trigger_webhook_missing_payload(self): ) def test_valid_build_trigger_webhook_invalid_payload(self): - auth_header = "Basic %s" % (base64.b64encode("devtable:password")) + auth_header = gen_basic_auth("devtable", "password") trigger = list(model.build.list_build_triggers("devtable", "building"))[0] self.postResponse( "webhooks.build_trigger_webhook", @@ -278,13 +279,13 @@ def test_confirm_repo_email(self): def test_confirm_email(self): user = model.user.get_user("devtable") - self.assertNotEquals(user.email, "foo@bar.com") + self.assertNotEqual(user.email, "foo@bar.com") confirmation_code = model.user.create_confirm_email_code(user, "foo@bar.com") self.getResponse("web.confirm_email", code=confirmation_code, expected_code=302) user = model.user.get_user("devtable") - self.assertEquals(user.email, "foo@bar.com") + self.assertEqual(user.email, "foo@bar.com") def test_confirm_recovery(self): # Try for an invalid code. @@ -415,7 +416,7 @@ def test_authorize_invalidclient(self): resp = self.postResponse( "web.authorize_application", form=form, with_csrf=True, expected_code=302 ) - self.assertEquals( + self.assertEqual( "http://localhost:5000/foobar?error=unauthorized_client", resp.headers["Location"] ) @@ -431,7 +432,7 @@ def test_authorize_invalidscope(self): resp = self.postResponse( "web.authorize_application", form=form, with_csrf=True, expected_code=302 ) - self.assertEquals( + self.assertEqual( "http://localhost:8000/o2c.html?error=invalid_scope", resp.headers["Location"] ) @@ -501,7 +502,7 @@ def test_authorize_nocsrf_withbadheader(self): "scope": "user:admin", } - headers = dict(authorization="Basic " + base64.b64encode("devtable:invalidpassword")) + headers = dict(authorization=gen_basic_auth("devtable", "invalidpassword")) self.postResponse( "web.authorize_application", headers=headers, @@ -519,7 +520,7 @@ def test_authorize_nocsrf_correctheader(self): } # Try without the client id being in the whitelist. - headers = dict(authorization="Basic " + base64.b64encode("devtable:password")) + headers = dict(authorization=gen_basic_auth("devtable", "password")) self.postResponse( "web.authorize_application", headers=headers, @@ -531,7 +532,7 @@ def test_authorize_nocsrf_correctheader(self): # Add the client ID to the whitelist and try again. 
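The webhook and OAuth tests above stop building Authorization headers by hand because base64.b64encode only accepts (and returns) bytes on Python 3, so "Basic %s" % base64.b64encode("devtable:password") no longer works; they call the shared gen_basic_auth helper instead. A plausible sketch of such a helper, under the assumption that it returns the complete header value (this is not the actual code from endpoints/test/shared.py):

    import base64

    def gen_basic_auth(username, password):
        # b64encode needs bytes and returns bytes on Python 3, so encode the
        # credentials first and decode the result back to str for the header.
        creds = ("%s:%s" % (username, password)).encode("utf-8")
        return "Basic %s" % base64.b64encode(creds).decode("ascii")

    # gen_basic_auth("devtable", "password") -> 'Basic ZGV2dGFibGU6cGFzc3dvcmQ='

The same str-versus-bytes split explains the earlier assertEqual(rv.data, b"") change: Flask/Werkzeug response bodies are bytes, so the empty-body check has to compare against b"" rather than "".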
app.config["DIRECT_OAUTH_CLIENTID_WHITELIST"] = ["deadbeef"] - headers = dict(authorization="Basic " + base64.b64encode("devtable:password")) + headers = dict(authorization=gen_basic_auth("devtable", "password")) resp = self.postResponse( "web.authorize_application", headers=headers, @@ -550,7 +551,7 @@ def test_authorize_nocsrf_ratelimiting(self): } # Try without the client id being in the whitelist a few times, making sure we eventually get rate limited. - headers = dict(authorization="Basic " + base64.b64encode("devtable:invalidpassword")) + headers = dict(authorization=gen_basic_auth("devtable", "invalidpassword")) self.postResponse( "web.authorize_application", headers=headers, @@ -568,7 +569,7 @@ def test_authorize_nocsrf_ratelimiting(self): with_csrf=False, expected_code=None, ) - self.assertNotEquals(200, r.status_code) + self.assertNotEqual(200, r.status_code) counter = counter + 1 if counter > 5: self.fail("Exponential backoff did not fire") @@ -606,7 +607,7 @@ def test_list_service_keys(self): # Make sure the hidden keys are not returned and the visible ones are returned. self.assertTrue(len(visible_jwks) > 0) self.assertTrue(len(invisible_jwks) > 0) - self.assertEquals(len(visible_jwks), len(jwkset["keys"])) + self.assertEqual(len(visible_jwks), len(jwkset["keys"])) for jwk in jwkset["keys"]: self.assertIn(jwk, visible_jwks) @@ -653,7 +654,10 @@ def test_put_service_key(self): "key_server.put_service_key", service="sample service", kid="kid420", - headers={"Authorization": "Bearer %s" % token, "Content-Type": "application/json",}, + headers={ + "Authorization": "Bearer %s" % token.decode("ascii"), + "Content-Type": "application/json", + }, data=jwk, expected_code=400, ) @@ -664,7 +668,10 @@ def test_put_service_key(self): "key_server.put_service_key", service="sample_service", kid="kid420", - headers={"Authorization": "Bearer %s" % token, "Content-Type": "application/json",}, + headers={ + "Authorization": "Bearer %s" % token.decode("ascii"), + "Content-Type": "application/json", + }, data=jwk, expected_code=202, ) @@ -682,7 +689,10 @@ def test_put_service_key(self): "key_server.put_service_key", service="sample_service", kid="kid6969", - headers={"Authorization": "Bearer %s" % token, "Content-Type": "application/json",}, + headers={ + "Authorization": "Bearer %s" % token.decode("ascii"), + "Content-Type": "application/json", + }, data=jwk, expected_code=403, ) @@ -701,7 +711,10 @@ def test_put_service_key(self): "key_server.put_service_key", service="sample_service", kid="kid6969", - headers={"Authorization": "Bearer %s" % token, "Content-Type": "application/json",}, + headers={ + "Authorization": "Bearer %s" % token.decode("ascii"), + "Content-Type": "application/json", + }, data=jwk, expected_code=200, ) @@ -716,7 +729,10 @@ def test_put_service_key(self): "key_server.put_service_key", service="sample_service", kid="kid6969", - headers={"Authorization": "Bearer %s" % token, "Content-Type": "application/json",}, + headers={ + "Authorization": "Bearer %s" % token.decode("ascii"), + "Content-Type": "application/json", + }, data=jwk, expected_code=403, ) @@ -735,7 +751,7 @@ def test_attempt_delete_service_key_with_no_kid_signer(self): # Using the credentials of our key, attempt to delete our unapproved key self.deleteResponse( "key_server.delete_service_key", - headers={"Authorization": "Bearer %s" % token}, + headers={"Authorization": "Bearer %s" % token.decode("ascii")}, expected_code=400, service="sample_service", kid="first", @@ -765,7 +781,7 @@ def 
test_attempt_delete_service_key_with_expired_key(self): # Using the credentials of our second key, attempt to delete our unapproved key self.deleteResponse( "key_server.delete_service_key", - headers={"Authorization": "Bearer %s" % token}, + headers={"Authorization": "Bearer %s" % token.decode("ascii")}, expected_code=403, service="sample_service", kid="second", @@ -777,7 +793,7 @@ def test_attempt_delete_service_key_with_expired_key(self): with assert_action_logged("service_key_delete"): self.deleteEmptyResponse( "key_server.delete_service_key", - headers={"Authorization": "Bearer %s" % token}, + headers={"Authorization": "Bearer %s" % token.decode("ascii")}, expected_code=204, service="sample_service", kid="second", @@ -806,7 +822,7 @@ def test_delete_unapproved_service_key(self): with assert_action_logged("service_key_delete"): self.deleteEmptyResponse( "key_server.delete_service_key", - headers={"Authorization": "Bearer %s" % token}, + headers={"Authorization": "Bearer %s" % token.decode("ascii")}, expected_code=204, service="sample_service", kid="unapprovedkeyhere", @@ -835,7 +851,7 @@ def test_delete_chained_service_key(self): # Using the credentials of our second key, attempt tp delete our unapproved key self.deleteResponse( "key_server.delete_service_key", - headers={"Authorization": "Bearer %s" % token}, + headers={"Authorization": "Bearer %s" % token.decode("ascii")}, expected_code=403, service="sample_service", kid="kid321", @@ -850,7 +866,7 @@ def test_delete_chained_service_key(self): with assert_action_logged("service_key_delete"): self.deleteEmptyResponse( "key_server.delete_service_key", - headers={"Authorization": "Bearer %s" % token}, + headers={"Authorization": "Bearer %s" % token.decode("ascii")}, expected_code=204, service="sample_service", kid="kid321", @@ -865,7 +881,7 @@ def test_delete_chained_service_key(self): ) self.deleteResponse( "key_server.delete_service_key", - headers={"Authorization": "Bearer %s" % bad_token}, + headers={"Authorization": "Bearer %s" % bad_token.decode("ascii")}, expected_code=403, service="sample_service", kid="kid123", @@ -875,7 +891,7 @@ def test_delete_chained_service_key(self): with assert_action_logged("service_key_delete"): self.deleteEmptyResponse( "key_server.delete_service_key", - headers={"Authorization": "Bearer %s" % token}, + headers={"Authorization": "Bearer %s" % token.decode("ascii")}, expected_code=204, service="sample_service", kid="kid123", diff --git a/test/test_external_jwt_authn.py b/test/test_external_jwt_authn.py index 1c9389f4e1..1aa3ea67c4 100644 --- a/test/test_external_jwt_authn.py +++ b/test/test_external_jwt_authn.py @@ -86,7 +86,7 @@ def _create_app(emails=True): jwt_app.config["SERVER_HOSTNAME"] = "localhost:%s" % _PORT_NUMBER def _get_basic_auth(): - data = base64.b64decode(request.headers["Authorization"][len("Basic ") :]) + data = base64.b64decode(request.headers["Authorization"][len("Basic ") :]).decode("utf-8") return data.split(":", 1) @jwt_app.route("/user/query", methods=["GET"]) @@ -115,7 +115,7 @@ def query_users(): } encoded = jwt.encode(token_data, private_key_data, "RS256") - return jsonify({"token": encoded}) + return jsonify({"token": encoded.decode("ascii")}) @jwt_app.route("/user/get", methods=["GET"]) def get_user(): @@ -137,7 +137,7 @@ def get_user(): } encoded = jwt.encode(token_data, private_key_data, "RS256") - return jsonify({"token": encoded}) + return jsonify({"token": encoded.decode("ascii")}) return make_response("Invalid username or password", 404) @@ -164,7 +164,7 @@ def 
verify_user(): } encoded = jwt.encode(token_data, private_key_data, "RS256") - return jsonify({"token": encoded}) + return jsonify({"token": encoded.decode("ascii")}) return make_response("Invalid username or password", 404) @@ -198,7 +198,7 @@ def tearDown(self): def test_verify_and_link_user(self): with fake_jwt(self.emails) as jwt_auth: result, error_message = jwt_auth.verify_and_link_user("invaliduser", "foobar") - self.assertEquals("Invalid username or password", error_message) + self.assertEqual("Invalid username or password", error_message) self.assertIsNone(result) result, _ = jwt_auth.verify_and_link_user("cool.user", "invalidpassword") @@ -206,11 +206,11 @@ def test_verify_and_link_user(self): result, _ = jwt_auth.verify_and_link_user("cool.user", "password") self.assertIsNotNone(result) - self.assertEquals("cool_user", result.username) + self.assertEqual("cool_user", result.username) result, _ = jwt_auth.verify_and_link_user("some.neat.user", "foobar") self.assertIsNotNone(result) - self.assertEquals("some_neat_user", result.username) + self.assertEqual("some_neat_user", result.username) def test_confirm_existing_user(self): with fake_jwt(self.emails) as jwt_auth: @@ -227,7 +227,7 @@ def test_confirm_existing_user(self): result, _ = jwt_auth.confirm_existing_user("cool_user", "password") self.assertIsNotNone(result) - self.assertEquals("cool_user", result.username) + self.assertEqual("cool_user", result.username) # Fail to confirm the *external* username, which should return nothing. result, _ = jwt_auth.confirm_existing_user("some.neat.user", "password") @@ -236,39 +236,39 @@ def test_confirm_existing_user(self): # Now confirm the internal username. result, _ = jwt_auth.confirm_existing_user("some_neat_user", "foobar") self.assertIsNotNone(result) - self.assertEquals("some_neat_user", result.username) + self.assertEqual("some_neat_user", result.username) def test_disabled_user_custom_error(self): with fake_jwt(self.emails) as jwt_auth: result, error_message = jwt_auth.verify_and_link_user("disabled", "password") self.assertIsNone(result) - self.assertEquals("User is currently disabled", error_message) + self.assertEqual("User is currently disabled", error_message) def test_query(self): with fake_jwt(self.emails) as jwt_auth: # Lookup `cool`. results, identifier, error_message = jwt_auth.query_users("cool") self.assertIsNone(error_message) - self.assertEquals("jwtauthn", identifier) - self.assertEquals(1, len(results)) + self.assertEqual("jwtauthn", identifier) + self.assertEqual(1, len(results)) - self.assertEquals("cool.user", results[0].username) - self.assertEquals("user@domain.com" if self.emails else None, results[0].email) + self.assertEqual("cool.user", results[0].username) + self.assertEqual("user@domain.com" if self.emails else None, results[0].email) # Lookup `some`. results, identifier, error_message = jwt_auth.query_users("some") self.assertIsNone(error_message) - self.assertEquals("jwtauthn", identifier) - self.assertEquals(1, len(results)) + self.assertEqual("jwtauthn", identifier) + self.assertEqual(1, len(results)) - self.assertEquals("some.neat.user", results[0].username) - self.assertEquals("neat@domain.com" if self.emails else None, results[0].email) + self.assertEqual("some.neat.user", results[0].username) + self.assertEqual("neat@domain.com" if self.emails else None, results[0].email) # Lookup `unknown`. 
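The .decode("ascii") calls sprinkled through the key-server and fake-JWT tests cover the other recurring bytes boundary: under the PyJWT 1.x series used here, jwt.encode() returns bytes on Python 3, which would render as a mangled b'...' literal if %-formatted into a str header and is rejected by jsonify, and base64.b64decode() likewise hands back bytes that must be decoded before splitting against str. A small sketch of both patterns, using HS256 with a throwaway secret instead of the RS256 keys the real tests load:

    import base64
    import jwt  # PyJWT; the 1.x series returns bytes from encode() on Python 3

    token = jwt.encode({"sub": "cool.user"}, "secret", algorithm="HS256")
    if isinstance(token, bytes):  # PyJWT 2.x already returns str
        token = token.decode("ascii")
    headers = {"Authorization": "Bearer %s" % token}

    # b64decode returns bytes; decode before splitting the credentials
    raw = base64.b64decode("ZGV2dGFibGU6cGFzc3dvcmQ=").decode("utf-8")
    username, password = raw.split(":", 1)
    assert (username, password) == ("devtable", "password")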
results, identifier, error_message = jwt_auth.query_users("unknown") self.assertIsNone(error_message) - self.assertEquals("jwtauthn", identifier) - self.assertEquals(0, len(results)) + self.assertEqual("jwtauthn", identifier) + self.assertEqual(0, len(results)) def test_get_user(self): with fake_jwt(self.emails) as jwt_auth: @@ -277,16 +277,16 @@ def test_get_user(self): self.assertIsNone(error_message) self.assertIsNotNone(result) - self.assertEquals("cool.user", result.username) - self.assertEquals("user@domain.com", result.email) + self.assertEqual("cool.user", result.username) + self.assertEqual("user@domain.com", result.email) # Lookup some.neat.user. result, error_message = jwt_auth.get_user("some.neat.user") self.assertIsNone(error_message) self.assertIsNotNone(result) - self.assertEquals("some.neat.user", result.username) - self.assertEquals("neat@domain.com", result.email) + self.assertEqual("some.neat.user", result.username) + self.assertEqual("neat@domain.com", result.email) # Lookup unknown user. result, error_message = jwt_auth.get_user("unknownuser") @@ -298,16 +298,16 @@ def test_link_user(self): user, error_message = jwt_auth.link_user("cool.user") self.assertIsNone(error_message) self.assertIsNotNone(user) - self.assertEquals("cool_user", user.username) + self.assertEqual("cool_user", user.username) # Link again. Should return the same user record. user_again, _ = jwt_auth.link_user("cool.user") - self.assertEquals(user_again.id, user.id) + self.assertEqual(user_again.id, user.id) # Confirm cool.user. result, _ = jwt_auth.confirm_existing_user("cool_user", "password") self.assertIsNotNone(result) - self.assertEquals("cool_user", result.username) + self.assertEqual("cool_user", result.username) def test_link_invalid_user(self): with fake_jwt(self.emails) as jwt_auth: diff --git a/test/test_keystone_auth.py b/test/test_keystone_auth.py index c344b47f7f..a6e168ef67 100644 --- a/test/test_keystone_auth.py +++ b/test/test_keystone_auth.py @@ -243,7 +243,7 @@ def v3tokens(): @ks_app.route("/v2.0/auth/tokens", methods=["POST"]) def tokens(): - creds = request.json["auth"][u"passwordCredentials"] + creds = request.json["auth"]["passwordCredentials"] for user in users: if creds["username"] == user["username"] and creds["password"] == user["password"]: return json.dumps( @@ -310,14 +310,14 @@ def test_invalid_password(self): def test_cooluser(self): with self.fake_keystone() as keystone: (user, _) = keystone.verify_credentials("cool.user", "password") - self.assertEquals(user.username, "cool.user") - self.assertEquals(user.email, "cool.user@example.com" if self.emails else None) + self.assertEqual(user.username, "cool.user") + self.assertEqual(user.email, "cool.user@example.com" if self.emails else None) def test_neatuser(self): with self.fake_keystone() as keystone: (user, _) = keystone.verify_credentials("some.neat.user", "foobar") - self.assertEquals(user.username, "some.neat.user") - self.assertEquals(user.email, "some.neat.user@example.com" if self.emails else None) + self.assertEqual(user.username, "some.neat.user") + self.assertEqual(user.email, "some.neat.user@example.com" if self.emails else None) class KeystoneV2AuthNoEmailTests(KeystoneAuthTestsMixin, unittest.TestCase): @@ -359,17 +359,17 @@ def test_query(self): # Lookup cool. 
(response, federated_id, error_message) = keystone.query_users("cool") self.assertIsNone(error_message) - self.assertEquals(1, len(response)) - self.assertEquals("keystone", federated_id) + self.assertEqual(1, len(response)) + self.assertEqual("keystone", federated_id) user_info = response[0] - self.assertEquals("cool.user", user_info.username) + self.assertEqual("cool.user", user_info.username) # Lookup unknown. (response, federated_id, error_message) = keystone.query_users("unknown") self.assertIsNone(error_message) - self.assertEquals(0, len(response)) - self.assertEquals("keystone", federated_id) + self.assertEqual(0, len(response)) + self.assertEqual("keystone", federated_id) def test_link_user(self): with self.fake_keystone() as keystone: @@ -377,27 +377,27 @@ def test_link_user(self): user, error_message = keystone.link_user("cool.user") self.assertIsNone(error_message) self.assertIsNotNone(user) - self.assertEquals("cool_user", user.username) - self.assertEquals("cool.user@example.com", user.email) + self.assertEqual("cool_user", user.username) + self.assertEqual("cool.user@example.com", user.email) # Link again. Should return the same user record. user_again, _ = keystone.link_user("cool.user") - self.assertEquals(user_again.id, user.id) + self.assertEqual(user_again.id, user.id) # Confirm someuser. result, _ = keystone.confirm_existing_user("cool_user", "password") self.assertIsNotNone(result) - self.assertEquals("cool_user", result.username) + self.assertEqual("cool_user", result.username) def test_check_group_lookup_args(self): with self.fake_keystone() as keystone: (status, err) = keystone.check_group_lookup_args({}) self.assertFalse(status) - self.assertEquals("Missing group_id", err) + self.assertEqual("Missing group_id", err) (status, err) = keystone.check_group_lookup_args({"group_id": "unknownid"}) self.assertFalse(status) - self.assertEquals("Group not found", err) + self.assertEqual("Group not found", err) (status, err) = keystone.check_group_lookup_args({"group_id": "somegroupid"}) self.assertTrue(status) @@ -411,9 +411,9 @@ def test_iterate_group_members(self): results = list(itt) results.sort() - self.assertEquals(2, len(results)) - self.assertEquals("adminuser", results[0][0].id) - self.assertEquals("cool.user", results[1][0].id) + self.assertEqual(2, len(results)) + self.assertEqual("adminuser", results[0][0].id) + self.assertEqual("cool.user", results[1][0].id) if __name__ == "__main__": diff --git a/test/test_ldap.py b/test/test_ldap.py index 176ed8ee2e..919ef5bac4 100644 --- a/test/test_ldap.py +++ b/test/test_ldap.py @@ -304,97 +304,97 @@ def test_invalid_admin_password(self): # Try to login. (response, err_msg) = ldap.verify_and_link_user("someuser", "somepass") self.assertIsNone(response) - self.assertEquals("LDAP Admin dn or password is invalid", err_msg) + self.assertEqual("LDAP Admin dn or password is invalid", err_msg) def test_login(self): with mock_ldap() as ldap: # Verify we can login. (response, _) = ldap.verify_and_link_user("someuser", "somepass") - self.assertEquals(response.username, "someuser") + self.assertEqual(response.username, "someuser") self.assertTrue(model.user.has_user_prompt(response, "confirm_username")) # Verify we can confirm the user. (response, _) = ldap.confirm_existing_user("someuser", "somepass") - self.assertEquals(response.username, "someuser") + self.assertEqual(response.username, "someuser") def test_login_empty_password(self): with mock_ldap() as ldap: # Verify we cannot login. 
(response, err_msg) = ldap.verify_and_link_user("someuser", "") self.assertIsNone(response) - self.assertEquals(err_msg, "Anonymous binding not allowed") + self.assertEqual(err_msg, "Anonymous binding not allowed") # Verify we cannot confirm the user. (response, err_msg) = ldap.confirm_existing_user("someuser", "") self.assertIsNone(response) - self.assertEquals(err_msg, "Invalid user") + self.assertEqual(err_msg, "Invalid user") def test_login_whitespace_password(self): with mock_ldap() as ldap: # Verify we cannot login. (response, err_msg) = ldap.verify_and_link_user("someuser", " ") self.assertIsNone(response) - self.assertEquals(err_msg, "Invalid password") + self.assertEqual(err_msg, "Invalid password") # Verify we cannot confirm the user. (response, err_msg) = ldap.confirm_existing_user("someuser", " ") self.assertIsNone(response) - self.assertEquals(err_msg, "Invalid user") + self.assertEqual(err_msg, "Invalid user") def test_login_secondary(self): with mock_ldap() as ldap: # Verify we can login. (response, _) = ldap.verify_and_link_user("secondaryuser", "somepass") - self.assertEquals(response.username, "secondaryuser") + self.assertEqual(response.username, "secondaryuser") # Verify we can confirm the user. (response, _) = ldap.confirm_existing_user("secondaryuser", "somepass") - self.assertEquals(response.username, "secondaryuser") + self.assertEqual(response.username, "secondaryuser") def test_invalid_wildcard(self): with mock_ldap() as ldap: # Verify we cannot login with a wildcard. (response, err_msg) = ldap.verify_and_link_user("some*", "somepass") self.assertIsNone(response) - self.assertEquals(err_msg, "Username not found") + self.assertEqual(err_msg, "Username not found") # Verify we cannot confirm the user. (response, err_msg) = ldap.confirm_existing_user("some*", "somepass") self.assertIsNone(response) - self.assertEquals(err_msg, "Invalid user") + self.assertEqual(err_msg, "Invalid user") def test_invalid_password(self): with mock_ldap() as ldap: # Verify we cannot login with an invalid password. (response, err_msg) = ldap.verify_and_link_user("someuser", "invalidpass") self.assertIsNone(response) - self.assertEquals(err_msg, "Invalid password") + self.assertEqual(err_msg, "Invalid password") # Verify we cannot confirm the user. (response, err_msg) = ldap.confirm_existing_user("someuser", "invalidpass") self.assertIsNone(response) - self.assertEquals(err_msg, "Invalid user") + self.assertEqual(err_msg, "Invalid user") def test_missing_mail(self): with mock_ldap() as ldap: (response, err_msg) = ldap.get_user("nomail") self.assertIsNone(response) - self.assertEquals('Missing mail field "mail" in user record', err_msg) + self.assertEqual('Missing mail field "mail" in user record', err_msg) def test_missing_mail_allowed(self): with mock_ldap(requires_email=False) as ldap: (response, _) = ldap.get_user("nomail") - self.assertEquals(response.username, "nomail") + self.assertEqual(response.username, "nomail") def test_confirm_different_username(self): with mock_ldap() as ldap: # Verify that the user is logged in and their username was adjusted. (response, _) = ldap.verify_and_link_user("cool.user", "somepass") - self.assertEquals(response.username, "cool_user") + self.assertEqual(response.username, "cool_user") # Verify we can confirm the user's quay username. 
(response, _) = ldap.confirm_existing_user("cool_user", "somepass") - self.assertEquals(response.username, "cool_user") + self.assertEqual(response.username, "cool_user") # Verify that we *cannot* confirm the LDAP username. (response, _) = ldap.confirm_existing_user("cool.user", "somepass") @@ -403,11 +403,11 @@ def test_confirm_different_username(self): def test_referral(self): with mock_ldap() as ldap: (response, _) = ldap.verify_and_link_user("referred", "somepass") - self.assertEquals(response.username, "cool_user") + self.assertEqual(response.username, "cool_user") # Verify we can confirm the user's quay username. (response, _) = ldap.confirm_existing_user("cool_user", "somepass") - self.assertEquals(response.username, "cool_user") + self.assertEqual(response.username, "cool_user") def test_invalid_referral(self): with mock_ldap() as ldap: @@ -417,7 +417,7 @@ def test_invalid_referral(self): def test_multientry(self): with mock_ldap() as ldap: (response, _) = ldap.verify_and_link_user("multientry", "somepass") - self.assertEquals(response.username, "multientry") + self.assertEqual(response.username, "multientry") def test_login_empty_userdn(self): with mock_ldap(): @@ -442,11 +442,11 @@ def test_login_empty_userdn(self): # Verify we can login. (response, _) = ldap.verify_and_link_user("someuser", "somepass") - self.assertEquals(response.username, "someuser") + self.assertEqual(response.username, "someuser") # Verify we can confirm the user. (response, _) = ldap.confirm_existing_user("someuser", "somepass") - self.assertEquals(response.username, "someuser") + self.assertEqual(response.username, "someuser") def test_link_user(self): with mock_ldap() as ldap: @@ -454,16 +454,16 @@ def test_link_user(self): user, error_message = ldap.link_user("someuser") self.assertIsNone(error_message) self.assertIsNotNone(user) - self.assertEquals("someuser", user.username) + self.assertEqual("someuser", user.username) # Link again. Should return the same user record. user_again, _ = ldap.link_user("someuser") - self.assertEquals(user_again.id, user.id) + self.assertEqual(user_again.id, user.id) # Confirm someuser. result, _ = ldap.confirm_existing_user("someuser", "somepass") self.assertIsNotNone(result) - self.assertEquals("someuser", result.username) + self.assertEqual("someuser", result.username) self.assertTrue(model.user.has_user_prompt(user, "confirm_username")) def test_query(self): @@ -471,18 +471,18 @@ def test_query(self): # Lookup cool. (response, federated_id, error_message) = ldap.query_users("cool") self.assertIsNone(error_message) - self.assertEquals(1, len(response)) - self.assertEquals("ldap", federated_id) + self.assertEqual(1, len(response)) + self.assertEqual("ldap", federated_id) user_info = response[0] - self.assertEquals("cool.user", user_info.username) - self.assertEquals("foo@bar.com", user_info.email) + self.assertEqual("cool.user", user_info.username) + self.assertEqual("foo@bar.com", user_info.email) # Lookup unknown. 
(response, federated_id, error_message) = ldap.query_users("unknown") self.assertIsNone(error_message) - self.assertEquals(0, len(response)) - self.assertEquals("ldap", federated_id) + self.assertEqual(0, len(response)) + self.assertEqual("ldap", federated_id) def test_timeout(self): base_dn = ["dc=quay", "dc=io"] @@ -493,7 +493,7 @@ def test_timeout(self): email_attr = "mail" secondary_user_rdns = ["ou=otheremployees"] - with self.assertRaisesRegexp(Exception, "Can't contact LDAP server"): + with self.assertRaisesRegex(Exception, "Can't contact LDAP server"): ldap = LDAPUsers( "ldap://localhost", base_dn, @@ -516,7 +516,7 @@ def test_iterate_group_members(self): self.assertIsNone(err) results = list(it) - self.assertEquals(2, len(results)) + self.assertEqual(2, len(results)) first = results[0][0] second = results[1][0] @@ -526,13 +526,13 @@ def test_iterate_group_members(self): else: testy, someuser = second, first - self.assertEquals("testy", testy.id) - self.assertEquals("testy", testy.username) - self.assertEquals("bar@baz.com", testy.email) + self.assertEqual("testy", testy.id) + self.assertEqual("testy", testy.username) + self.assertEqual("bar@baz.com", testy.email) - self.assertEquals("someuser", someuser.id) - self.assertEquals("someuser", someuser.username) - self.assertEquals("foo@bar.com", someuser.email) + self.assertEqual("someuser", someuser.id) + self.assertEqual("someuser", someuser.username) + self.assertEqual("foo@bar.com", someuser.email) def test_iterate_group_members_with_pagination(self): with mock_ldap() as ldap: @@ -541,7 +541,7 @@ def test_iterate_group_members_with_pagination(self): self.assertIsNone(err) results = list(it) - self.assertEquals(2, len(results)) + self.assertEqual(2, len(results)) first = results[0][0] second = results[1][0] @@ -551,13 +551,13 @@ def test_iterate_group_members_with_pagination(self): else: testy, someuser = second, first - self.assertEquals("testy", testy.id) - self.assertEquals("testy", testy.username) - self.assertEquals("bar@baz.com", testy.email) + self.assertEqual("testy", testy.id) + self.assertEqual("testy", testy.username) + self.assertEqual("bar@baz.com", testy.email) - self.assertEquals("someuser", someuser.id) - self.assertEquals("someuser", someuser.username) - self.assertEquals("foo@bar.com", someuser.email) + self.assertEqual("someuser", someuser.id) + self.assertEqual("someuser", someuser.username) + self.assertEqual("foo@bar.com", someuser.email) def test_check_group_lookup_args(self): with mock_ldap() as ldap: @@ -599,7 +599,7 @@ def test_at_least_one_user_exists_invalid_creds(self): # Try to query with invalid credentials. (response, err_msg) = ldap.at_least_one_user_exists() self.assertFalse(response) - self.assertEquals("LDAP Admin dn or password is invalid", err_msg) + self.assertEqual("LDAP Admin dn or password is invalid", err_msg) def test_at_least_one_user_exists_no_users(self): base_dn = ["dc=quay", "dc=io"] diff --git a/test/test_oauth_login.py b/test/test_oauth_login.py index f6d6ced365..862bb7440c 100644 --- a/test/test_oauth_login.py +++ b/test/test_oauth_login.py @@ -1,7 +1,7 @@ import json as py_json import time import unittest -import urlparse +import urllib.parse import jwt @@ -52,7 +52,7 @@ def invoke_oauth_tests( ) # Delete the created user. - self.assertNotEquals(created.username, "devtable") + self.assertNotEqual(created.username, "devtable") model.user.delete_user(created, []) # Test attach. 
@@ -91,13 +91,13 @@ def invoke_oauth_test(self, endpoint_name, service_name, service_ident, username federated_login = model.user.lookup_federated_login(found_user, service_name) self.assertIsNotNone(federated_login) - self.assertEquals(federated_login.service_ident, service_ident) + self.assertEqual(federated_login.service_ident, service_ident) return found_user def test_google_oauth(self): @urlmatch(netloc=r"accounts.google.com", path="/o/oauth2/token") def account_handler(_, request): - parsed = dict(urlparse.parse_qsl(request.body)) + parsed = dict(urllib.parse.parse_qsl(request.body)) if parsed["code"] == "somecode": content = {"access_token": "someaccesstoken"} return py_json.dumps(content) @@ -121,7 +121,7 @@ def user_handler(_, __): def test_github_oauth(self): @urlmatch(netloc=r"github.com", path="/login/oauth/access_token") def account_handler(url, _): - parsed = dict(urlparse.parse_qsl(url.query)) + parsed = dict(urllib.parse.parse_qsl(url.query)) if parsed["code"] == "somecode": content = {"access_token": "someaccesstoken"} return py_json.dumps(content) @@ -167,7 +167,7 @@ def _get_oidc_mocks(self): @urlmatch(netloc=r"fakeoidc", path="/token") def token_handler(_, request): if request.body.find("code=somecode") >= 0: - content = {"access_token": "someaccesstoken", "id_token": id_token} + content = {"access_token": "someaccesstoken", "id_token": id_token.decode("ascii")} return py_json.dumps(content) else: return {"status_code": 400, "content": '{"message": "Invalid code"}'} diff --git a/test/test_secscan.py b/test/test_secscan.py index 9fdfd0b145..9bf182c1c9 100644 --- a/test/test_secscan.py +++ b/test/test_secscan.py @@ -45,10 +45,10 @@ def _delete_tag(namespace, repo, tag): class TestSecurityScanner(unittest.TestCase): def setUp(self): # Enable direct download in fake storage. - storage.put_content(["local_us"], "supports_direct_download", "true") + storage.put_content(["local_us"], "supports_direct_download", b"true") # Have fake storage say all files exist for the duration of the test. - storage.put_content(["local_us"], "all_files_exist", "true") + storage.put_content(["local_us"], "all_files_exist", b"true") # Setup the database with fake storage. setup_database_for_testing(self) @@ -76,8 +76,8 @@ def tearDown(self): self.ctx.__exit__(True, None, None) def assertAnalyzed(self, layer, security_scanner, isAnalyzed, engineVersion): - self.assertEquals(isAnalyzed, layer.security_indexed) - self.assertEquals(engineVersion, layer.security_indexed_engine) + self.assertEqual(isAnalyzed, layer.security_indexed) + self.assertEqual(engineVersion, layer.security_indexed_engine) if isAnalyzed: self.assertTrue(security_scanner.has_layer(security_scanner.layer_id(layer))) @@ -86,7 +86,7 @@ def assertAnalyzed(self, layer, security_scanner, isAnalyzed, engineVersion): parents = model.image.get_parent_images(ADMIN_ACCESS_USER, SIMPLE_REPO, layer) for parent in parents: self.assertTrue(parent.security_indexed) - self.assertEquals(engineVersion, parent.security_indexed_engine) + self.assertEqual(engineVersion, parent.security_indexed_engine) self.assertTrue(security_scanner.has_layer(security_scanner.layer_id(parent))) def test_get_layer(self): @@ -106,7 +106,7 @@ def test_get_layer(self): # Retrieve the results. 
result = self.api.get_layer_data(layer, include_vulnerabilities=True) self.assertIsNotNone(result) - self.assertEquals(result["Layer"]["Name"], security_scanner.layer_id(layer)) + self.assertEqual(result["Layer"]["Name"], security_scanner.layer_id(layer)) def test_analyze_layer_nodirectdownload_success(self): """ @@ -114,7 +114,7 @@ def test_analyze_layer_nodirectdownload_success(self): """ # Disable direct download in fake storage. - storage.put_content(["local_us"], "supports_direct_download", "false") + storage.put_content(["local_us"], "supports_direct_download", b"false") try: app.register_blueprint(v2_bp, url_prefix="/v2") @@ -124,7 +124,7 @@ def test_analyze_layer_nodirectdownload_success(self): layer = _get_legacy_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest", include_storage=True) self.assertFalse(layer.security_indexed) - self.assertEquals(-1, layer.security_indexed_engine) + self.assertEqual(-1, layer.security_indexed_engine) # Ensure that the download is a registry+JWT download. uri, auth_header = self.api._get_image_url_and_auth(layer) @@ -133,12 +133,12 @@ def test_analyze_layer_nodirectdownload_success(self): # Ensure the download doesn't work without the header. rv = self.app.head(uri) - self.assertEquals(rv.status_code, 401) + self.assertEqual(rv.status_code, 401) # Ensure the download works with the header. Note we use a HEAD here, as GET causes DB # access which messes with the test runner's rollback. rv = self.app.head(uri, headers=[("authorization", auth_header)]) - self.assertEquals(rv.status_code, 200) + self.assertEqual(rv.status_code, 200) # Ensure the code works when called via analyze. with fake_security_scanner() as security_scanner: @@ -155,7 +155,7 @@ def test_analyze_layer_success(self): layer = _get_legacy_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest", include_storage=True) self.assertFalse(layer.security_indexed) - self.assertEquals(-1, layer.security_indexed_engine) + self.assertEqual(-1, layer.security_indexed_engine) with fake_security_scanner() as security_scanner: analyzer = LayerAnalyzer(app.config, self.api) @@ -171,7 +171,7 @@ def test_analyze_layer_failure(self): layer = _get_legacy_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest", include_storage=True) self.assertFalse(layer.security_indexed) - self.assertEquals(-1, layer.security_indexed_engine) + self.assertEqual(-1, layer.security_indexed_engine) with fake_security_scanner() as security_scanner: security_scanner.set_fail_layer_id(security_scanner.layer_id(layer)) @@ -189,7 +189,7 @@ def test_analyze_layer_internal_error(self): layer = _get_legacy_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest", include_storage=True) self.assertFalse(layer.security_indexed) - self.assertEquals(-1, layer.security_indexed_engine) + self.assertEqual(-1, layer.security_indexed_engine) with fake_security_scanner() as security_scanner: security_scanner.set_internal_error_layer_id(security_scanner.layer_id(layer)) @@ -208,7 +208,7 @@ def test_analyze_layer_error(self): layer = _get_legacy_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest", include_storage=True) self.assertFalse(layer.security_indexed) - self.assertEquals(-1, layer.security_indexed_engine) + self.assertEqual(-1, layer.security_indexed_engine) with fake_security_scanner() as security_scanner: # Make is so trying to analyze the parent will fail with an error. 
@@ -229,7 +229,7 @@ def test_analyze_layer_unexpected_status(self): layer = _get_legacy_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest", include_storage=True) self.assertFalse(layer.security_indexed) - self.assertEquals(-1, layer.security_indexed_engine) + self.assertEqual(-1, layer.security_indexed_engine) with fake_security_scanner() as security_scanner: # Make is so trying to analyze the parent will fail with an error. @@ -251,7 +251,7 @@ def test_analyze_layer_missing_parent_handled(self): layer = _get_legacy_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest", include_storage=True) self.assertFalse(layer.security_indexed) - self.assertEquals(-1, layer.security_indexed_engine) + self.assertEqual(-1, layer.security_indexed_engine) with fake_security_scanner() as security_scanner: # Analyze the layer and its parents. @@ -284,7 +284,7 @@ def test_analyze_layer_invalid_parent(self): layer = _get_legacy_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest", include_storage=True) self.assertFalse(layer.security_indexed) - self.assertEquals(-1, layer.security_indexed_engine) + self.assertEqual(-1, layer.security_indexed_engine) with fake_security_scanner() as security_scanner: # Analyze the layer and its parents. @@ -320,7 +320,7 @@ def test_analyze_layer_unsupported_parent(self): layer = _get_legacy_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest", include_storage=True) self.assertFalse(layer.security_indexed) - self.assertEquals(-1, layer.security_indexed_engine) + self.assertEqual(-1, layer.security_indexed_engine) with fake_security_scanner() as security_scanner: # Make is so trying to analyze the parent will fail. @@ -340,7 +340,7 @@ def test_analyze_layer_missing_storage(self): layer = _get_legacy_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest", include_storage=True) self.assertFalse(layer.security_indexed) - self.assertEquals(-1, layer.security_indexed_engine) + self.assertEqual(-1, layer.security_indexed_engine) # Delete the storage for the layer. path = model.storage.get_layer_path(layer.storage) @@ -360,7 +360,7 @@ def assert_analyze_layer_notify( ): layer = _get_legacy_image(ADMIN_ACCESS_USER, SIMPLE_REPO, "latest", include_storage=True) self.assertFalse(layer.security_indexed) - self.assertEquals(-1, layer.security_indexed_engine) + self.assertEqual(-1, layer.security_indexed_engine) # Ensure there are no existing events. self.assertIsNone(notification_queue.get()) @@ -413,19 +413,19 @@ def assert_analyze_layer_notify( self.assertIsNotNone(queue_item) body = json.loads(queue_item.body) - self.assertEquals(set(["latest", "prod"]), set(body["event_data"]["tags"])) - self.assertEquals("CVE-2014-9471", body["event_data"]["vulnerability"]["id"]) - self.assertEquals("Low", body["event_data"]["vulnerability"]["priority"]) + self.assertEqual(set(["latest", "prod"]), set(body["event_data"]["tags"])) + self.assertEqual("CVE-2014-9471", body["event_data"]["vulnerability"]["id"]) + self.assertEqual("Low", body["event_data"]["vulnerability"]["priority"]) self.assertTrue(body["event_data"]["vulnerability"]["has_fix"]) - self.assertEquals("CVE-2014-9471", body["event_data"]["vulnerabilities"][0]["id"]) - self.assertEquals(2, len(body["event_data"]["vulnerabilities"])) + self.assertEqual("CVE-2014-9471", body["event_data"]["vulnerabilities"][0]["id"]) + self.assertEqual(2, len(body["event_data"]["vulnerabilities"])) # Ensure we get the correct event message out as well. 
event = VulnerabilityFoundEvent() msg = "1 Low and 1 more vulnerabilities were detected in repository devtable/simple in 2 tags" - self.assertEquals(msg, event.get_summary(body["event_data"], {})) - self.assertEquals("info", event.get_level(body["event_data"], {})) + self.assertEqual(msg, event.get_summary(body["event_data"], {})) + self.assertEqual("info", event.get_level(body["event_data"], {})) else: self.assertIsNone(queue_item) @@ -551,9 +551,9 @@ def test_notification_new_layers(self): self.assertIsNotNone(queue_item) item_body = json.loads(queue_item.body) - self.assertEquals(sorted(["prod", "latest"]), sorted(item_body["event_data"]["tags"])) - self.assertEquals("CVE-TEST", item_body["event_data"]["vulnerability"]["id"]) - self.assertEquals("Low", item_body["event_data"]["vulnerability"]["priority"]) + self.assertEqual(sorted(["prod", "latest"]), sorted(item_body["event_data"]["tags"])) + self.assertEqual("CVE-TEST", item_body["event_data"]["vulnerability"]["id"]) + self.assertEqual("Low", item_body["event_data"]["vulnerability"]["priority"]) self.assertTrue(item_body["event_data"]["vulnerability"]["has_fix"]) def test_notification_no_new_layers(self): @@ -649,9 +649,9 @@ def test_notification_no_new_layers_increased_severity(self): self.assertIsNotNone(queue_item) item_body = json.loads(queue_item.body) - self.assertEquals(sorted(["prod", "latest"]), sorted(item_body["event_data"]["tags"])) - self.assertEquals("CVE-TEST", item_body["event_data"]["vulnerability"]["id"]) - self.assertEquals("Critical", item_body["event_data"]["vulnerability"]["priority"]) + self.assertEqual(sorted(["prod", "latest"]), sorted(item_body["event_data"]["tags"])) + self.assertEqual("CVE-TEST", item_body["event_data"]["vulnerability"]["id"]) + self.assertEqual("Critical", item_body["event_data"]["vulnerability"]["priority"]) self.assertTrue(item_body["event_data"]["vulnerability"]["has_fix"]) # Verify that an event would be raised. 
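A note on two patterns that recur in the test changes above and below. First, the added .decode("ascii") calls on jwt.encode(...) results: assuming a PyJWT 1.x release (where encode() returns bytes on Python 3), interpolating the raw result into a header string would produce a b'...' literal and passing it to jsonify would fail to serialize. A minimal sketch of the difference, under that PyJWT 1.x assumption (PyJWT 2.x returns str directly and would not need the decode):

    import jwt  # assumes a PyJWT 1.x release

    token = jwt.encode({"sub": "cool.user"}, "secret", algorithm="HS256")
    # Python 3 + PyJWT 1.x: token is bytes, e.g. b'eyJhbGciOi...'
    header = "Bearer %s" % token.decode("ascii")  # decode before string interpolation

Second, the assertEquals/assertRaisesRegexp renames swap deprecated unittest aliases for the canonical assertEqual/assertRaisesRegex names, and the metaclass=_SpecTestBuilder rewrites in the next two files use the Python 3 class syntax: Python 3 ignores the old __metaclass__ class attribute, so without the keyword form the generated endpoint-security tests would silently never be created.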
diff --git a/test/test_v1_endpoint_security.py b/test/test_v1_endpoint_security.py index 189b5623a8..9d0b7fe3a6 100644 --- a/test/test_v1_endpoint_security.py +++ b/test/test_v1_endpoint_security.py @@ -5,7 +5,7 @@ from app import app from util.names import parse_namespace_repository from initdb import setup_database_for_testing, finished_database_for_testing -from specs import build_v1_index_specs +from .specs import build_v1_index_specs from endpoints.v1 import v1_bp @@ -89,36 +89,31 @@ def __new__(cls, name, bases, attrs): return type(name, bases, attrs) -class TestAnonymousAccess(EndpointTestCase): - __metaclass__ = _SpecTestBuilder +class TestAnonymousAccess(EndpointTestCase, metaclass=_SpecTestBuilder): spec_func = build_v1_index_specs result_attr = "anon_code" auth_username = None -class TestNoAccess(EndpointTestCase): - __metaclass__ = _SpecTestBuilder +class TestNoAccess(EndpointTestCase, metaclass=_SpecTestBuilder): spec_func = build_v1_index_specs result_attr = "no_access_code" auth_username = NO_ACCESS_USER -class TestReadAccess(EndpointTestCase): - __metaclass__ = _SpecTestBuilder +class TestReadAccess(EndpointTestCase, metaclass=_SpecTestBuilder): spec_func = build_v1_index_specs result_attr = "read_code" auth_username = READ_ACCESS_USER -class TestCreatorAccess(EndpointTestCase): - __metaclass__ = _SpecTestBuilder +class TestCreatorAccess(EndpointTestCase, metaclass=_SpecTestBuilder): spec_func = build_v1_index_specs result_attr = "creator_code" auth_username = CREATOR_ACCESS_USER -class TestAdminAccess(EndpointTestCase): - __metaclass__ = _SpecTestBuilder +class TestAdminAccess(EndpointTestCase, metaclass=_SpecTestBuilder): spec_func = build_v1_index_specs result_attr = "admin_code" auth_username = ADMIN_ACCESS_USER diff --git a/test/test_v2_endpoint_security.py b/test/test_v2_endpoint_security.py index 85b1a9e56f..a3a5b7465e 100644 --- a/test/test_v2_endpoint_security.py +++ b/test/test_v2_endpoint_security.py @@ -92,36 +92,31 @@ def __new__(cls, name, bases, attrs): return type(name, bases, attrs) -class TestAnonymousAccess(EndpointTestCase): - __metaclass__ = _SpecTestBuilder +class TestAnonymousAccess(EndpointTestCase, metaclass=_SpecTestBuilder): spec_func = build_v2_index_specs result_attr = "anon_code" auth_username = None -class TestNoAccess(EndpointTestCase): - __metaclass__ = _SpecTestBuilder +class TestNoAccess(EndpointTestCase, metaclass=_SpecTestBuilder): spec_func = build_v2_index_specs result_attr = "no_access_code" auth_username = NO_ACCESS_USER -class TestReadAccess(EndpointTestCase): - __metaclass__ = _SpecTestBuilder +class TestReadAccess(EndpointTestCase, metaclass=_SpecTestBuilder): spec_func = build_v2_index_specs result_attr = "read_code" auth_username = READ_ACCESS_USER -class TestCreatorAccess(EndpointTestCase): - __metaclass__ = _SpecTestBuilder +class TestCreatorAccess(EndpointTestCase, metaclass=_SpecTestBuilder): spec_func = build_v2_index_specs result_attr = "creator_code" auth_username = CREATOR_ACCESS_USER -class TestAdminAccess(EndpointTestCase): - __metaclass__ = _SpecTestBuilder +class TestAdminAccess(EndpointTestCase, metaclass=_SpecTestBuilder): spec_func = build_v2_index_specs result_attr = "admin_code" auth_username = ADMIN_ACCESS_USER diff --git a/test/testlogs.py b/test/testlogs.py index 51d2c7b3fa..de258bee2b 100644 --- a/test/testlogs.py +++ b/test/testlogs.py @@ -177,9 +177,9 @@ def get_message(): @staticmethod def _compute_total_completion(statuses, total_images): - percentage_with_sizes = float(len(statuses.values())) / 
total_images - sent_bytes = sum([status[u"current"] for status in statuses.values()]) - total_bytes = sum([status[u"total"] for status in statuses.values()]) + percentage_with_sizes = float(len(list(statuses.values()))) / total_images + sent_bytes = sum([status["current"] for status in list(statuses.values())]) + total_bytes = sum([status["total"] for status in list(statuses.values())]) return float(sent_bytes) / total_bytes * percentage_with_sizes @staticmethod diff --git a/tools/deleteinvalidlayers.py b/tools/deleteinvalidlayers.py new file mode 100644 index 0000000000..d53c005403 --- /dev/null +++ b/tools/deleteinvalidlayers.py @@ -0,0 +1,102 @@ +from data.database import ( + ImageStorage, + Image, + ImageStoragePlacement, + ImageStorageLocation, + RepositoryTag, +) +from data import model +from app import storage as storage_system +from tqdm import tqdm + + +def find_broken_storages(): + broken_storages = set() + + print("Checking storages...") + placement_count = ImageStoragePlacement.select().count() + placements = ( + ImageStoragePlacement.select() + .join(ImageStorage) + .switch(ImageStoragePlacement) + .join(ImageStorageLocation) + ) + + for placement in tqdm(placements, total=placement_count): + path = model.storage.get_layer_path(placement.storage) + if not storage_system.exists([placement.location.name], path): + broken_storages.add(placement.storage.id) + + return list(broken_storages) + + +def delete_broken_layers(): + result = input('Please make sure your registry is not running and enter "GO" to continue: ') + if result != "GO": + print("Declined to run") + return + + broken_storages = find_broken_storages() + if not broken_storages: + print("No broken layers found") + return + + # Find all the images referencing the broken layers. + print("Finding broken images...") + IMAGE_BATCH_SIZE = 100 + + all_images = [] + for i in tqdm(list(range(0, (len(broken_storages) + IMAGE_BATCH_SIZE - 1) // IMAGE_BATCH_SIZE))): + start = i * IMAGE_BATCH_SIZE + end = (i + 1) * IMAGE_BATCH_SIZE + + images = ( + Image.select().join(ImageStorage).where(Image.storage << broken_storages[start:end]) + ) + all_images.extend(images) + + if not all_images: + print("No broken layers found") + return + + # Find all the tags containing the images. + print("Finding associated tags for %s images..." % len(all_images)) + all_tags = {} + for image in tqdm(all_images): + query = model.tag.get_matching_tags( + image.docker_image_id, image.storage.uuid, RepositoryTag + ) + for tag in query: + all_tags[tag.id] = tag + + # Ask to delete them. + print("") + print("The following tags were found to reference invalid images:") + for tag in list(all_tags.values()): + print("%s/%s: %s" % (tag.repository.namespace_user.username, tag.repository.name, tag.name)) + + if not all_tags: + print("(Tags in time machine)") + + print("") + result = input( + 'Enter "DELETENOW" to delete these tags and ALL associated images (THIS IS PERMANENT): ' + ) + if result != "DELETENOW": + print("Declined to delete") + return + + print("") + print("Marking tags to be GCed...") + for tag in tqdm(list(all_tags.values())): + tag.lifetime_end_ts = 0 + tag.save() + + print("GCing all repositories...") + for tag in tqdm(list(all_tags.values())): + model.repository.garbage_collect_repo(tag.repository) + + print("All done! 
You may now restart your registry.") + + +delete_broken_layers() diff --git a/tools/emailinvoice.py b/tools/emailinvoice.py index 167ee74e2b..226824013d 100644 --- a/tools/emailinvoice.py +++ b/tools/emailinvoice.py @@ -15,19 +15,19 @@ def sendInvoice(invoice_id): invoice = stripe.Invoice.retrieve(invoice_id) if not invoice["customer"]: - print "No customer found" + print("No customer found") return customer_id = invoice["customer"] user = model.user.get_user_or_org_by_customer_id(customer_id) if not user: - print "No user found for customer %s" % (customer_id) + print("No user found for customer %s" % (customer_id)) return with app.app_context(): invoice_html = renderInvoiceToHtml(invoice, user) send_invoice_email(user.invoice_email_address or user.email, invoice_html) - print "Invoice sent to %s" % (user.invoice_email_address or user.email) + print("Invoice sent to %s" % (user.invoice_email_address or user.email)) parser = argparse.ArgumentParser(description="Email an invoice") diff --git a/tools/freeloaders.py b/tools/freeloaders.py index 2b179f836c..8b4e09cc2d 100644 --- a/tools/freeloaders.py +++ b/tools/freeloaders.py @@ -26,4 +26,4 @@ def get_private_allowed(customer): for username, used, allowed in usage: if used > allowed: - print("Violation: %s %s > %s" % (username, used, allowed)) + print(("Violation: %s %s > %s" % (username, used, allowed))) diff --git a/tools/generatekeypair.py b/tools/generatekeypair.py index 17beb55a2f..88c951ffb8 100644 --- a/tools/generatekeypair.py +++ b/tools/generatekeypair.py @@ -12,17 +12,17 @@ def generate_key_pair(filename, kid=None): if kid is None: kid = canonical_kid(jwk) - print("Writing public key to %s.jwk" % filename) + print(("Writing public key to %s.jwk" % filename)) with open("%s.jwk" % filename, mode="w") as f: f.truncate(0) f.write(json.dumps(jwk)) - print("Writing key ID to %s.kid" % filename) + print(("Writing key ID to %s.kid" % filename)) with open("%s.kid" % filename, mode="w") as f: f.truncate(0) f.write(kid) - print("Writing private key to %s.pem" % filename) + print(("Writing private key to %s.pem" % filename)) with open("%s.pem" % filename, mode="w") as f: f.truncate(0) f.write(private_key.exportKey()) diff --git a/tools/invoices.py b/tools/invoices.py index 3bad8c03df..38877c0d7e 100644 --- a/tools/invoices.py +++ b/tools/invoices.py @@ -10,7 +10,7 @@ from itertools import groupby from datetime import datetime, timedelta, date -from cStringIO import StringIO +from io import StringIO from app import billing as stripe @@ -130,8 +130,8 @@ def format_charge(charge): # Amount remaining to be accounted for remaining_charge_amount = charge.amount - discount_start = sys.maxint - discount_end = sys.maxint + discount_start = sys.maxsize + discount_end = sys.maxsize discount_percent = 0 try: if charge.invoice and charge.invoice.discount: @@ -139,7 +139,7 @@ def format_charge(charge): assert discount_obj.coupon.amount_off is None discount_start = discount_obj.start - discount_end = sys.maxint if not discount_obj.end else discount_obj.end + discount_end = sys.maxsize if not discount_obj.end else discount_obj.end discount_percent = discount_obj.coupon.percent_off / 100.0 assert discount_percent > 0 except AssertionError: @@ -245,7 +245,7 @@ def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds): def _encode_cell(cell): if cell is None: return cell - return unicode(cell).encode("utf-8") + return str(cell).encode("utf-8") def writerow(self, row): self.writer.writerow([self._encode_cell(s) for s in row]) diff --git 
a/tools/monthlyrevenue.py b/tools/monthlyrevenue.py index 76364a507d..f66526ff33 100644 --- a/tools/monthlyrevenue.py +++ b/tools/monthlyrevenue.py @@ -33,16 +33,16 @@ def empty_tuple(): def format_money(total_cents): - dollars = total_cents / 100 + dollars = total_cents // 100 cents = total_cents % 100 return dollars, cents total_monthly_revenue = 0 -for plan_id, (subs, rev) in plan_revenue.items(): +for plan_id, (subs, rev) in list(plan_revenue.items()): total_monthly_revenue += rev d, c = format_money(rev) - print "%s: $%d.%02d(%s)" % (plan_id, d, c, subs) + print("%s: $%d.%02d(%s)" % (plan_id, d, c, subs)) d, c = format_money(total_monthly_revenue) -print "Monthly revenue: $%d.%02d" % (d, c) +print("Monthly revenue: $%d.%02d" % (d, c)) diff --git a/tools/orphans.py b/tools/orphans.py index 61d98779e9..7fc41bfdd5 100644 --- a/tools/orphans.py +++ b/tools/orphans.py @@ -13,4 +13,4 @@ counter = 0 for orphan in orphaned: counter += 1 - print orphan.uuid + print(orphan.uuid) diff --git a/tools/parsebuildpack.py b/tools/parsebuildpack.py index 53c5976468..f73ec07ec9 100644 --- a/tools/parsebuildpack.py +++ b/tools/parsebuildpack.py @@ -7,7 +7,7 @@ resource_key = "5c0a985c-405d-4161-b0ac-603c3757b5f9" resource_url = user_files.get_file_url(resource_key, "127.0.0.1", requires_cors=False) -print resource_url +print(resource_url) docker_resource = requests.get(resource_url, stream=True) c_type = docker_resource.headers["content-type"] @@ -16,4 +16,4 @@ c_type = c_type.split(";")[0] build_dir = w._mime_processors[c_type](docker_resource) -print build_dir +print(build_dir) diff --git a/tools/renameuser.py b/tools/renameuser.py index 90398e77d7..557357fc54 100644 --- a/tools/renameuser.py +++ b/tools/renameuser.py @@ -16,9 +16,9 @@ def renameUser(username, new_name): if existing is None: raise Exception("Username %s does not exist" % username) - print "Renaming user..." 
+ print("Renaming user...") model.user.change_username(existing.id, new_name) - print "Rename complete" + print("Rename complete") parser = argparse.ArgumentParser(description="Rename a user") diff --git a/tools/renderinvoice.py b/tools/renderinvoice.py index 47ca7b11bb..20c944c030 100644 --- a/tools/renderinvoice.py +++ b/tools/renderinvoice.py @@ -11,13 +11,13 @@ def sendInvoice(invoice_id): invoice = stripe.Invoice.retrieve(invoice_id) if not invoice["customer"]: - print "No customer found" + print("No customer found") return customer_id = invoice["customer"] user = model.user.get_user_or_org_by_customer_id(customer_id) if not user: - print "No user found for customer %s" % (customer_id) + print("No user found for customer %s" % (customer_id)) return with app.app_context(): @@ -25,7 +25,7 @@ def sendInvoice(invoice_id): with open("invoice.pdf", "wb") as f: f.write(file_data) - print "Invoice output as invoice.pdf" + print("Invoice output as invoice.pdf") parser = argparse.ArgumentParser(description="Generate an invoice") diff --git a/tools/sendconfirmemail.py b/tools/sendconfirmemail.py index 3c939ac8c2..8efc45e2cb 100644 --- a/tools/sendconfirmemail.py +++ b/tools/sendconfirmemail.py @@ -13,13 +13,13 @@ def sendConfirmation(username): user = model.user.get_nonrobot_user(username) if not user: - print "No user found" + print("No user found") return with app.app_context(): confirmation_code = model.user.create_confirm_email_code(user) send_confirmation_email(user.username, user.email, confirmation_code) - print "Email sent to %s" % (user.email) + print("Email sent to %s" % (user.email)) parser = argparse.ArgumentParser(description="Sends a confirmation email") diff --git a/tools/sendresetemail.py b/tools/sendresetemail.py index f7bf2d98b8..aa589cacac 100644 --- a/tools/sendresetemail.py +++ b/tools/sendresetemail.py @@ -13,13 +13,13 @@ def sendReset(username): user = model.user.get_nonrobot_user(username) if not user: - print "No user found" + print("No user found") return with app.app_context(): confirmation_code = model.user.create_reset_password_email_code(user.email) send_recovery_email(user.email, confirmation_code) - print "Email sent to %s" % (user.email) + print("Email sent to %s" % (user.email)) parser = argparse.ArgumentParser(description="Sends a reset email") diff --git a/tools/sharedimagestorage.py b/tools/sharedimagestorage.py index c8cdbeafbf..c50d791b0f 100644 --- a/tools/sharedimagestorage.py +++ b/tools/sharedimagestorage.py @@ -10,5 +10,5 @@ saved_bytes += (storage.count - 1) * storage.image_size total_bytes += storage.count * storage.image_size -print "Saved: %s" % saved_bytes -print "Total: %s" % total_bytes +print("Saved: %s" % saved_bytes) +print("Total: %s" % total_bytes) diff --git a/tox.ini b/tox.ini index 144745aaf7..a7a2da171b 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py27-{unit,registry,mysql,psql} +envlist = py38-{unit,registry,mysql,psql} skipsdist = True [pytest] @@ -18,10 +18,11 @@ setenv = TEST=true registry: FILE=test/registry/registry_tests.py unit: FILE="" +# TODO(kleesc): Re-enable buildman tests after buildman rewrite commands = python --version alembic upgrade head - py.test --timeout=3600 --cov-report=html --cov-report=term-missing -x --color no --verbose {env:FILE} -vv {posargs} + py.test --timeout=3600 --cov-report=html --cov-report=term-missing -x --color no --verbose --ignore=buildman/ {env:FILE} -vv {posargs} [docker:mysql:5.7] healthcheck_cmd = mysql -uroot -D information_schema -e "SELECT * FROM plugins LIMIT 0;" 
@@ -31,7 +32,7 @@ healthcheck_retries = 3 healthcheck_start_period = 25 ports = 3306:3306/tcp -[testenv:py27-mysql] +[testenv:py38-mysql] setenv = PYTHONDONTWRITEBYTECODE = 1 PYTHONPATH={toxinidir}{:}{toxinidir} @@ -46,10 +47,11 @@ dockerenv = MYSQL_ALLOW_EMPTY_PASSWORD=1 MYSQL_USER=quay whitelist_internals = /bin/sh +# TODO(kleesc): Re-enable buildman tests after buildman rewrite commands = python --version /bin/sh -c "TEST_DATABASE_URI=mysql+pymysql://quay:quay@127.0.0.1:$MYSQL_3306_TCP_PORT/quay_ci alembic upgrade head" - /bin/sh -c "TEST_DATABASE_URI=mysql+pymysql://quay:quay@127.0.0.1:$MYSQL_3306_TCP_PORT/quay_ci py.test --timeout=3600 --cov-report=html --cov-report=term-missing -x --color no --verbose {env:FILE} -vv {posargs}" + /bin/sh -c "TEST_DATABASE_URI=mysql+pymysql://quay:quay@127.0.0.1:$MYSQL_3306_TCP_PORT/quay_ci py.test --timeout=3600 --cov-report=html --cov-report=term-missing -x --color no --verbose --ignore=buildman/ {env:FILE} -vv {posargs}" [docker:postgres:9.6] @@ -59,7 +61,8 @@ healthcheck_timeout = 10 healthcheck_retries = 3 healthcheck_start_period = 10 -[testenv:py27-psql] +[testenv:py38-psql] +# TODO(kleesc): Re-enable buildman tests after buildman rewrite setenv = PYTHONDONTWRITEBYTECODE = 1 PYTHONPATH={toxinidir}{:}{toxinidir} @@ -73,8 +76,9 @@ dockerenv = POSTGRES_PASSWORD=quay POSTGRES_USER=quay whitelist_internals = /bin/sh +# TODO(kleesc): Re-enable buildman tests after buildman rewrite commands = python --version /bin/sh -c "docker exec $(docker ps -q -n 1) psql -U quay -d quay_ci -c 'CREATE EXTENSION IF NOT EXISTS pg_trgm;'" /bin/sh -c "TEST_DATABASE_URI=postgresql://quay:quay@127.0.0.1:$POSTGRES_5432_TCP_PORT/quay_ci alembic upgrade head" - /bin/sh -c "TEST_DATABASE_URI=postgresql://quay:quay@127.0.0.1:$POSTGRES_5432_TCP_PORT/quay_ci py.test --timeout=3600 --cov-report=html --cov-report=term-missing -x --color no --verbose {env:FILE} -vv {posargs}" + /bin/sh -c "TEST_DATABASE_URI=postgresql://quay:quay@127.0.0.1:$POSTGRES_5432_TCP_PORT/quay_ci py.test --timeout=3600 --cov-report=html --cov-report=term-missing -x --color no --verbose --ignore=buildman/ {env:FILE} -vv {posargs}" diff --git a/util/audit.py b/util/audit.py index 7475f22dd7..e21a9a2862 100644 --- a/util/audit.py +++ b/util/audit.py @@ -2,7 +2,7 @@ import random from collections import namedtuple -from urlparse import urlparse +from urllib.parse import urlparse from flask import request diff --git a/util/backfillreplication.py b/util/backfillreplication.py index ec4736a320..8fb5909aaa 100644 --- a/util/backfillreplication.py +++ b/util/backfillreplication.py @@ -41,7 +41,7 @@ def backfill_replication(): existing_locations = set([p.location.name for p in query]) locations_missing = locations_required - existing_locations if locations_missing: - print "Enqueueing image storage %s to be replicated" % (image.storage.uuid) + print("Enqueueing image storage %s to be replicated" % (image.storage.uuid)) encountered.add(image.storage.uuid) if not image_replication_queue.alive([image.storage.uuid]): @@ -51,6 +51,6 @@ def backfill_replication(): if __name__ == "__main__": logging.basicConfig(level=logging.INFO) if not features.STORAGE_REPLICATION: - print "Storage replication is not enabled" + print("Storage replication is not enabled") else: backfill_replication() diff --git a/util/bytes.py b/util/bytes.py index 311788c259..ecc469a6c1 100644 --- a/util/bytes.py +++ b/util/bytes.py @@ -5,14 +5,14 @@ class Bytes(object): """ def __init__(self, data): - assert isinstance(data, str) + assert 
isinstance(data, bytes) self._encoded_data = data @classmethod def for_string_or_unicode(cls, input): # If the string is a unicode string, then encode its data as UTF-8. Note that # we don't catch any decode exceptions here, as we want those to be raised. - if isinstance(input, unicode): + if isinstance(input, str): return Bytes(input.encode("utf-8")) # Next, try decoding as UTF-8. If we have a utf-8 encoded string, then we have no diff --git a/util/config/configdocs/configdoc.py b/util/config/configdocs/configdoc.py index 04c9e5cddd..b826693acf 100644 --- a/util/config/configdocs/configdoc.py +++ b/util/config/configdocs/configdoc.py @@ -21,7 +21,7 @@ def make_custom_sort(orders): def process(stuff): if isinstance(stuff, dict): - l = [(k, process(v)) for (k, v) in stuff.iteritems()] + l = [(k, process(v)) for (k, v) in stuff.items()] keys = set(stuff) for order in orders: if keys.issubset(order) or keys.issuperset(order): diff --git a/util/config/database.py b/util/config/database.py index 5fe41d4f03..f604d006f6 100644 --- a/util/config/database.py +++ b/util/config/database.py @@ -8,7 +8,7 @@ def sync_database_with_config(config): This ensures all implicitly required reference table entries exist in the database. """ - location_names = config.get("DISTRIBUTED_STORAGE_CONFIG", {}).keys() + location_names = list(config.get("DISTRIBUTED_STORAGE_CONFIG", {}).keys()) if location_names: model.image.ensure_image_locations(*location_names) blob.ensure_blob_locations(NEW_MODELS, *location_names) diff --git a/util/config/provider/baseprovider.py b/util/config/provider/baseprovider.py index 959a5036ea..2a34af2bcc 100644 --- a/util/config/provider/baseprovider.py +++ b/util/config/provider/baseprovider.py @@ -37,7 +37,7 @@ def import_yaml(config_obj, config_file): if isinstance(c, str): raise Exception("Invalid YAML config file: " + str(c)) - for key in c.iterkeys(): + for key in c.keys(): if key.isupper(): config_obj[key] = c[key] @@ -54,7 +54,7 @@ def import_yaml(config_obj, config_file): def get_yaml(config_obj): - return yaml.safe_dump(config_obj, encoding="utf-8", allow_unicode=True) + return yaml.safe_dump(config_obj, allow_unicode=True) def export_yaml(config_obj, config_file): diff --git a/util/config/provider/k8sprovider.py b/util/config/provider/k8sprovider.py index eebf48bf6d..9647473380 100644 --- a/util/config/provider/k8sprovider.py +++ b/util/config/provider/k8sprovider.py @@ -4,7 +4,7 @@ import base64 import time -from cStringIO import StringIO +from io import StringIO from requests import Request, Session from util.config.provider.baseprovider import CannotWriteConfigException, get_yaml @@ -157,7 +157,9 @@ def _update_secret_file(self, relative_file_path, value=None): secret["data"] = secret.get("data", {}) if value is not None: - secret["data"][relative_file_path] = base64.b64encode(value) + secret["data"][relative_file_path] = base64.b64encode(value.encode("ascii")).decode( + "ascii" + ) else: secret["data"].pop(relative_file_path) @@ -168,8 +170,8 @@ def _update_secret_file(self, relative_file_path, value=None): # consistency. 
while True: matching_files = set() - for secret_filename, encoded_value in secret["data"].iteritems(): - expected_value = base64.b64decode(encoded_value) + for secret_filename, encoded_value in secret["data"].items(): + expected_value = base64.b64decode(encoded_value).decode("utf-8") try: with self.get_volume_file(secret_filename) as f: contents = f.read() diff --git a/util/config/provider/test/test_k8sprovider.py b/util/config/provider/test/test_k8sprovider.py index 727b4d5ee3..45d11185c2 100644 --- a/util/config/provider/test/test_k8sprovider.py +++ b/util/config/provider/test/test_k8sprovider.py @@ -41,10 +41,12 @@ def write_file(config_dir, filepath, value): config_dir = tmpdir_factory.mktemp("config") if files: - for filepath, value in files.iteritems(): + for filepath, value in files.items(): normalized_path = normalize_path(filepath) write_file(config_dir, filepath, value) - secret["data"][normalized_path] = base64.b64encode(value) + secret["data"][normalized_path] = base64.b64encode(value.encode("utf-8")).decode( + "ascii" + ) @urlmatch( netloc=hostname, @@ -61,10 +63,12 @@ def get_secret(_, __): ) def put_secret(_, request): updated_secret = json.loads(request.body) - for filepath, value in updated_secret["data"].iteritems(): + for filepath, value in updated_secret["data"].items(): if filepath not in secret["data"]: # Add - write_file(config_dir, filepath, base64.b64decode(value)) + write_file( + config_dir, filepath, base64.b64decode(value.encode("utf-8")).decode("ascii") + ) for filepath in secret["data"]: if filepath not in updated_secret["data"]: @@ -81,7 +85,7 @@ def get_namespace(_, __): @urlmatch(netloc=hostname) def catch_all(url, _): - print url + print(url) return {"status_code": 404, "content": "{}"} with HTTMock(get_secret, put_secret, get_namespace, catch_all): @@ -94,7 +98,7 @@ def catch_all(url, _): ) # Validate all the files. - for filepath, value in files.iteritems(): + for filepath, value in files.items(): normalized_path = normalize_path(filepath) assert provider.volume_file_exists(normalized_path) with provider.get_volume_file(normalized_path) as f: diff --git a/util/config/superusermanager.py b/util/config/superusermanager.py index 17c4a53d3d..ec5dc4067e 100644 --- a/util/config/superusermanager.py +++ b/util/config/superusermanager.py @@ -15,13 +15,13 @@ def __init__(self, app): self._max_length = len(usernames_str) + MAX_USERNAME_LENGTH + 1 self._array = Array("c", self._max_length, lock=True) - self._array.value = usernames_str + self._array.value = usernames_str.encode("utf8") def is_superuser(self, username): """ Returns if the given username represents a super user. """ - usernames = self._array.value.split(",") + usernames = self._array.value.decode("utf8").split(",") return username in usernames def register_superuser(self, username): @@ -30,12 +30,12 @@ def register_superuser(self, username): Note that this does *not* change any underlying config files. """ - usernames = self._array.value.split(",") + usernames = self._array.value.decode("utf8").split(",") usernames.append(username) new_string = ",".join(usernames) if len(new_string) <= self._max_length: - self._array.value = new_string + self._array.value = new_string.encode("utf8") else: raise Exception("Maximum superuser count reached. 
Please report this to support.") diff --git a/util/config/validators/test/test_validate_redis.py b/util/config/validators/test/test_validate_redis.py index 9e521a9af8..16441e0ae0 100644 --- a/util/config/validators/test/test_validate_redis.py +++ b/util/config/validators/test/test_validate_redis.py @@ -3,7 +3,7 @@ from mock import patch -from mockredis import mock_strict_redis_client +from fakeredis import FakeStrictRedis from util.config.validator import ValidatorContext from util.config.validators import ConfigValidationException @@ -23,7 +23,7 @@ ], ) def test_validate_redis(unvalidated_config, user, user_password, use_mock, expected, app): - with patch("redis.StrictRedis" if use_mock else "redis.None", mock_strict_redis_client): + with patch("redis.StrictRedis" if use_mock else "redis.None", FakeStrictRedis): validator = RedisValidator() unvalidated_config = ValidatorContext(unvalidated_config) diff --git a/util/config/validators/test/test_validate_ssl.py b/util/config/validators/test/test_validate_ssl.py index 387556f920..f143b9a6fa 100644 --- a/util/config/validators/test/test_validate_ssl.py +++ b/util/config/validators/test/test_validate_ssl.py @@ -7,6 +7,7 @@ from util.config.validators import ConfigValidationException from util.config.validators.validate_ssl import SSLValidator, SSL_FILENAMES from util.security.test.test_ssl_util import generate_test_cert +from util.bytes import Bytes from test.fixtures import * from app import config_provider @@ -29,7 +30,7 @@ def test_skip_validate_ssl(unvalidated_config, app): "cert, server_hostname, expected_error, error_message", [ ( - "invalidcert", + ("invalidcert", "invalidkey"), "someserver", ConfigValidationException, "Could not load SSL certificate: no start line", ), @@ -60,22 +61,22 @@ def test_skip_validate_ssl(unvalidated_config, app): ) def test_validate_ssl(cert, server_hostname, expected_error, error_message, app): with NamedTemporaryFile(delete=False) as cert_file: - cert_file.write(cert[0]) + cert_file.write(Bytes.for_string_or_unicode(cert[0]).as_encoded_str()) cert_file.seek(0) with NamedTemporaryFile(delete=False) as key_file: - key_file.write(cert[1]) + key_file.write(Bytes.for_string_or_unicode(cert[1]).as_encoded_str()) key_file.seek(0) def return_true(filename): return True - def get_volume_file(filename): + def get_volume_file(filename, mode="r"): if filename == SSL_FILENAMES[0]: - return open(cert_file.name) + return open(cert_file.name, mode=mode) if filename == SSL_FILENAMES[1]: - return open(key_file.name) + return open(key_file.name, mode=mode) return None diff --git a/util/config/validators/validate_oidc.py b/util/config/validators/validate_oidc.py index 3b06a94109..aecd91ed78 100644 --- a/util/config/validators/validate_oidc.py +++ b/util/config/validators/validate_oidc.py @@ -33,5 +33,5 @@ def validate(cls, validator_context): msg = "Could not validate OIDC service %s" % service.service_id() raise ConfigValidationException(msg) except DiscoveryFailureException as dfe: - msg = "Could not validate OIDC service %s: %s" % (service.service_id(), dfe.message) + msg = "Could not validate OIDC service %s: %s" % (service.service_id(), str(dfe)) raise ConfigValidationException(msg) diff --git a/util/config/validators/validate_signer.py b/util/config/validators/validate_signer.py index 633c0f4c9e..4b31fc58f6 100644 --- a/util/config/validators/validate_signer.py +++ b/util/config/validators/validate_signer.py @@ -1,4 +1,4 @@ -from StringIO import StringIO +from io import BytesIO from util.config.validators import 
BaseValidator, ConfigValidationException from util.security.signing import SIGNING_ENGINES @@ -22,4 +22,4 @@ def validate(cls, validator_context): raise ConfigValidationException("Unknown signing engine: %s" % config["SIGNING_ENGINE"]) engine = SIGNING_ENGINES[config["SIGNING_ENGINE"]](config, config_provider) - engine.detached_sign(StringIO("test string")) + engine.detached_sign(BytesIO(b"test string")) diff --git a/util/config/validators/validate_ssl.py b/util/config/validators/validate_ssl.py index 5a438a791e..d0052f2f1f 100644 --- a/util/config/validators/validate_ssl.py +++ b/util/config/validators/validate_ssl.py @@ -29,7 +29,7 @@ def validate(cls, validator_context): raise ConfigValidationException("Missing required SSL file: %s" % filename) # Read the contents of the SSL certificate. - with config_provider.get_volume_file(SSL_FILENAMES[0]) as f: + with config_provider.get_volume_file(SSL_FILENAMES[0], mode="rb") as f: cert_contents = f.read() # Validate the certificate. diff --git a/util/config/validators/validate_storage.py b/util/config/validators/validate_storage.py index cd2a97e0a4..006188a35a 100644 --- a/util/config/validators/validate_storage.py +++ b/util/config/validators/validate_storage.py @@ -17,7 +17,7 @@ def validate(cls, validator_context): replication_enabled = config.get("FEATURE_STORAGE_REPLICATION", False) - providers = _get_storage_providers(config, ip_resolver, config_provider).items() + providers = list(_get_storage_providers(config, ip_resolver, config_provider).items()) if not providers: raise ConfigValidationException("Storage configuration required") @@ -50,7 +50,7 @@ def _get_storage_providers(config, ip_resolver, config_provider): drivers = {} try: - for name, parameters in storage_config.items(): + for name, parameters in list(storage_config.items()): driver = get_storage_driver(None, None, config_provider, ip_resolver, parameters) drivers[name] = (parameters[0], driver) except TypeError: diff --git a/util/config/validators/validate_timemachine.py b/util/config/validators/validate_timemachine.py index 5ef647a58c..4c4c9ba56c 100644 --- a/util/config/validators/validate_timemachine.py +++ b/util/config/validators/validate_timemachine.py @@ -20,7 +20,7 @@ def validate(cls, validator_context): try: convert_to_timedelta(config["DEFAULT_TAG_EXPIRATION"]).total_seconds() except ValueError as ve: - raise ConfigValidationException("Invalid default expiration: %s" % ve.message) + raise ConfigValidationException("Invalid default expiration: %s" % str(ve)) if not config["DEFAULT_TAG_EXPIRATION"] in config.get("TAG_EXPIRATION_OPTIONS", []): raise ConfigValidationException("Default expiration must be in expiration options set") diff --git a/util/dict_wrappers.py b/util/dict_wrappers.py index c08002d972..0ddedf0f55 100644 --- a/util/dict_wrappers.py +++ b/util/dict_wrappers.py @@ -72,10 +72,10 @@ def __getitem__(self, path): return self.get(path) def __iter__(self): - return self._object.itervalues() + return iter(self._object.values()) def iterkeys(self): - return self._object.iterkeys() + return iter(self._object.keys()) def get(self, path, not_found_handler=None): """ @@ -105,4 +105,4 @@ def get(self, path, not_found_handler=None): return match def keys(self): - return self._object.keys() + return list(self._object.keys()) diff --git a/util/disableabuser.py b/util/disableabuser.py index ff10f6e7b6..cb36b4bd86 100644 --- a/util/disableabuser.py +++ b/util/disableabuser.py @@ -21,7 +21,7 @@ def ask_disable_namespace(username, queue_name): raise Exception("Unknown 
user or organization %s" % username) if not user.enabled: - print "NOTE: Namespace %s is already disabled" % username + print("NOTE: Namespace %s is already disabled" % username) queue_prefix = "%s/%s/%%" % (queue_name, username) existing_queue_item_count = ( @@ -42,23 +42,23 @@ def ask_disable_namespace(username, queue_name): .count() ) - print "=============================================" - print "For namespace %s" % username - print "=============================================" + print("=============================================") + print("For namespace %s" % username) + print("=============================================") - print "User %s has email address %s" % (username, user.email) - print "User %s has %s queued builds in their namespace" % (username, existing_queue_item_count) - print "User %s has %s build triggers in their namespace" % (username, repository_trigger_count) + print("User %s has email address %s" % (username, user.email)) + print("User %s has %s queued builds in their namespace" % (username, existing_queue_item_count)) + print("User %s has %s build triggers in their namespace" % (username, repository_trigger_count)) confirm_msg = ( "Would you like to disable this user and delete their triggers and builds? [y/N]> " ) - letter = str(raw_input(confirm_msg)) + letter = str(input(confirm_msg)) if letter.lower() != "y": - print "Action canceled" + print("Action canceled") return - print "=============================================" + print("=============================================") triggers = [] count_removed = 0 @@ -105,7 +105,10 @@ def ask_disable_namespace(username, queue_name): count_removed = dockerfile_build_queue.delete_namespaced_items(user.username) info = (user.username, len(triggers), count_removed, len(mirrors)) - print "Namespace %s disabled, %s triggers deleted, %s queued builds removed, %s mirrors deleted" % info + print( + "Namespace %s disabled, %s triggers deleted, %s queued builds removed, %s mirrors deleted" + % info + ) return user diff --git a/util/dockerfileparse.py b/util/dockerfileparse.py index af04c40bb3..2301396d71 100644 --- a/util/dockerfileparse.py +++ b/util/dockerfileparse.py @@ -88,11 +88,8 @@ def join_continued_lines(contents): def parse_dockerfile(contents): # If we receive ASCII, translate into unicode. - try: + if isinstance(contents, bytes): contents = contents.decode("utf-8") - except ValueError: - # Already unicode or unable to convert. 
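This parse_dockerfile hunk replaces the old EAFP try/except decode with an explicit isinstance check. A minimal standalone sketch of the same normalization pattern (the helper name is illustrative, not part of the patch):

```python
def ensure_text(value, encoding="utf-8"):
    # Python 3 never coerces bytes to str implicitly, so decode exactly once
    # at the boundary and let genuinely invalid data raise UnicodeDecodeError.
    if isinstance(value, bytes):
        return value.decode(encoding)
    return value
```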
- pass contents = join_continued_lines(strip_comments(contents)) lines = [line.strip() for line in contents.split("\n") if len(line) > 0] diff --git a/util/expiresdict.py b/util/expiresdict.py index 88fb643572..8ecd270d83 100644 --- a/util/expiresdict.py +++ b/util/expiresdict.py @@ -67,10 +67,10 @@ def _rebuild(self): return items def _alive_items(self): - return {k: entry.value for (k, entry) in self._items.items() if not entry.expired} + return {k: entry.value for (k, entry) in list(self._items.items()) if not entry.expired} def items(self): - return self._alive_items().items() + return list(self._alive_items().items()) def iteritems(self): return iteritems(self._alive_items()) diff --git a/util/fixuseradmin.py b/util/fixuseradmin.py index 25d7e6d8cb..15f2fa3abd 100644 --- a/util/fixuseradmin.py +++ b/util/fixuseradmin.py @@ -35,18 +35,18 @@ def get_users(all_users=False, users_list=None): if all_users: return get_active_users(disabled=False) - return map(get_nonrobot_user, users_list) + return list(map(get_nonrobot_user, users_list)) def ensure_admin(user, repos, dry_run=False): repos = [repo for repo in repos if not has_admin(user, repo)] for repo in repos: - print("User {} missing admin on: {}".format(user.username, repo.name)) + print(("User {} missing admin on: {}".format(user.username, repo.name))) if not dry_run: RepositoryPermission.create(user=user, repository=repo, role=ADMIN) - print("Granted {} admin on: {}".format(user.username, repo.name)) + print(("Granted {} admin on: {}".format(user.username, repo.name))) return len(repos) @@ -63,7 +63,7 @@ def main(): repos = repos_for_namespace(user.username) found += ensure_admin(user, repos, dry_run=args.dry_run) - print("\nFound {} user repos missing admin" " permissions for owner.".format(found)) + print(("\nFound {} user repos missing admin" " permissions for owner.".format(found))) if __name__ == "__main__": diff --git a/util/generatepresharedkey.py b/util/generatepresharedkey.py index 4e73363a46..a8fbb6c0c0 100644 --- a/util/generatepresharedkey.py +++ b/util/generatepresharedkey.py @@ -47,4 +47,4 @@ def valid_date(s): args = parser.parse_args() generated, _ = generate_key(args.service, args.name, args.expiration, args.notes) - print generated.exportKey("PEM") + print(generated.exportKey("PEM")) diff --git a/util/invoice.py b/util/invoice.py index d6adab60d5..fb43abb80e 100644 --- a/util/invoice.py +++ b/util/invoice.py @@ -2,7 +2,7 @@ from jinja2 import Environment, FileSystemLoader from xhtml2pdf import pisa -import StringIO +import io from app import app @@ -18,7 +18,7 @@ def renderInvoiceToPdf(invoice, user): Renders a nice PDF display for the given invoice. 
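For hunks like the invoice.py one here, the relevant Python 3 distinction is that io.StringIO accepts only str, while anything that emits binary data (a rendered PDF, gzip output, tar members) needs io.BytesIO. A quick illustration, independent of the patch:

```python
import io

text_buf = io.StringIO()
text_buf.write("text only")          # str in, str out

binary_buf = io.BytesIO()
binary_buf.write(b"%PDF-1.4 ...")    # bytes in, bytes out
# binary_buf.write("oops")           # TypeError: a bytes-like object is required
```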
""" sourceHtml = renderInvoiceToHtml(invoice, user) - output = StringIO.StringIO() + output = io.StringIO() pisaStatus = pisa.CreatePDF(sourceHtml, dest=output) if pisaStatus.err: return None diff --git a/util/ipresolver/test/test_ipresolver.py b/util/ipresolver/test/test_ipresolver.py index 012e339ce0..6545130efd 100644 --- a/util/ipresolver/test/test_ipresolver.py +++ b/util/ipresolver/test/test_ipresolver.py @@ -46,10 +46,10 @@ def test_resolved(aws_ip_range_data, test_ip_range_cache, test_aws_ip, app): provider="aws", service=None, sync_token=123456789, country_iso_code=None ) assert ipresolver.resolve_ip("6.0.0.2") == ResolvedLocation( - provider="aws", service=None, sync_token=123456789, country_iso_code=u"US" + provider="aws", service=None, sync_token=123456789, country_iso_code="US" ) assert ipresolver.resolve_ip("1.2.3.4") == ResolvedLocation( - provider="internet", service=u"US", sync_token=123456789, country_iso_code=u"US" + provider="internet", service="US", sync_token=123456789, country_iso_code="US" ) assert ipresolver.resolve_ip("127.0.0.1") == ResolvedLocation( provider="internet", service=None, sync_token=123456789, country_iso_code=None diff --git a/util/jsontemplate.py b/util/jsontemplate.py index ccb6368ec6..0ea71b4d68 100644 --- a/util/jsontemplate.py +++ b/util/jsontemplate.py @@ -32,12 +32,12 @@ def apply(self, data): def apply_data_to_obj(obj, data, missing="(none)"): - if isinstance(obj, basestring): + if isinstance(obj, str): return _process_string(obj, data, missing) elif isinstance(obj, dict): return { _process_string(key, data, missing): apply_data_to_obj(value, data, missing) - for key, value in obj.iteritems() + for key, value in obj.items() } elif isinstance(obj, list): return [apply_data_to_obj(item, data, missing) for item in obj] diff --git a/util/metrics/prometheus.py b/util/metrics/prometheus.py index 7c35667680..328c0a44d1 100644 --- a/util/metrics/prometheus.py +++ b/util/metrics/prometheus.py @@ -4,7 +4,7 @@ import sys import threading import time -import urllib2 +import urllib.request, urllib.error, urllib.parse from cachetools.func import lru_cache @@ -92,7 +92,7 @@ def run(self): agg_url, process_grouping_key(), ) - except urllib2.URLError: + except urllib.error.URLError: # There are many scenarios when the gateway might not be running. # These could be testing scenarios or simply processes racing to start. # Rather than try to guess all of them, keep it simple and let it fail. diff --git a/util/morecollections.py b/util/morecollections.py index df0ba5a834..8508d8ba97 100644 --- a/util/morecollections.py +++ b/util/morecollections.py @@ -6,7 +6,7 @@ def __init__(self, *args, **kwargs): @classmethod def deep_copy(cls, attr_dict): copy = AttrDict(attr_dict) - for key, value in copy.items(): + for key, value in list(copy.items()): if isinstance(value, AttrDict): copy[key] = cls.deep_copy(value) return copy @@ -199,7 +199,7 @@ def push_new(self, stream_values): assert counter <= self._reports_per_stream # Process them all to see if anything has changed. - for value in self._new_stream.values(): + for value in list(self._new_stream.values()): old_index = self._old_stream.index(value) if old_index is not None: # The item is present, so we cannot report it. 
However, since we've reached this point, diff --git a/util/names.py b/util/names.py index e623e7c124..7cb0c9403e 100644 --- a/util/names.py +++ b/util/names.py @@ -1,7 +1,7 @@ -import urllib +import urllib.request, urllib.parse, urllib.error import re -import anunidecode # Don't listen to pylint's lies. This import is required for unidecode below. +from text_unidecode import unidecode from uuid import uuid4 @@ -10,7 +10,7 @@ VALID_TAG_PATTERN = r"[\w][\w.-]{0,127}" FULL_TAG_PATTERN = r"^[\w][\w.-]{0,127}$" -TAG_REGEX = re.compile(FULL_TAG_PATTERN) +TAG_REGEX = re.compile(FULL_TAG_PATTERN, re.ASCII) TAG_ERROR = ( 'Invalid tag: must match [A-Za-z0-9_.-], NOT start with "." or "-", ' "and can contain 1-128 characters" @@ -40,7 +40,7 @@ def escape_tag(tag, default="latest"): def parse_namespace_repository( repository, library_namespace, include_tag=False, allow_library=True ): - repository = repository.encode("unidecode", "ignore") + repository = unidecode(repository) parts = repository.rstrip("/").split("/", 1) if len(parts) < 2: @@ -58,7 +58,7 @@ def parse_namespace_repository( else: (repository, tag) = parts - repository = urllib.quote_plus(repository) + repository = urllib.parse.quote_plus(repository) if include_tag: return (namespace, repository, tag) return (namespace, repository) diff --git a/util/registry/aufs.py b/util/registry/aufs.py index e6a6f98e08..c40158dde4 100644 --- a/util/registry/aufs.py +++ b/util/registry/aufs.py @@ -1,7 +1,7 @@ import os -AUFS_METADATA = u".wh..wh." -AUFS_WHITEOUT = u".wh." +AUFS_METADATA = ".wh..wh." +AUFS_WHITEOUT = ".wh." AUFS_WHITEOUT_PREFIX_LENGTH = len(AUFS_WHITEOUT) diff --git a/util/registry/filelike.py b/util/registry/filelike.py index 2e33366d8e..48a7536cff 100644 --- a/util/registry/filelike.py +++ b/util/registry/filelike.py @@ -48,6 +48,9 @@ def seek(self, index, whence=WHENCE_ABSOLUTE): self._cursor_position += bytes_forward return bytes_forward + def readable(self): + return self._fileobj.readable() + class SocketReader(BaseStreamFilelike): def __init__(self, fileobj): @@ -72,13 +75,13 @@ def wrap_with_handler(in_fp, handler): class FilelikeStreamConcat(object): """ - A file-like object which concats all the file-like objects in the specified generator into a + A buffered (binary) file-like object which concats all the file-like objects in the specified generator into a single stream. 
""" def __init__(self, file_generator): self._file_generator = file_generator - self._current_file = file_generator.next() + self._current_file = next(file_generator) self._current_position = 0 self._closed = False @@ -89,7 +92,7 @@ def close(self): self._closed = True def read(self, size=READ_UNTIL_END): - buf = "" + buf = b"" current_size = size while size == READ_UNTIL_END or len(buf) < size: @@ -104,7 +107,7 @@ def read(self, size=READ_UNTIL_END): # That file was out of data, prime a new one self._current_file.close() try: - self._current_file = self._file_generator.next() + self._current_file = next(self._file_generator) except StopIteration: return buf diff --git a/util/registry/generatorfile.py b/util/registry/generatorfile.py index 6a42669079..bd671b501b 100644 --- a/util/registry/generatorfile.py +++ b/util/registry/generatorfile.py @@ -1,6 +1,6 @@ def _complain_ifclosed(closed): if closed: - raise ValueError, "I/O operation on closed file" + raise ValueError("I/O operation on closed file") class GeneratorFile(object): @@ -13,7 +13,7 @@ class GeneratorFile(object): def __init__(self, generator): self._generator = generator self._closed = False - self._buf = "" + self._buf = b"" self._position = 0 def __iter__(self): @@ -26,7 +26,7 @@ def tell(self): _complain_ifclosed(self._closed) return self._position - def next(self): + def __next__(self): """ A file object is its own iterator, for example iter(f) returns f (unless f is closed). @@ -48,8 +48,8 @@ def readline(self): while True: c = self.read(size=1) buf.append(c) - if c == "\n" or c == "": - return "".join(buf) + if c == b"\n" or c == b"": + return b"".join(buf) def flush(self): _complain_ifclosed(self._closed) @@ -67,16 +67,16 @@ def read(self, size=-1): buf = self._buf while size < 0 or len(buf) < size: try: - buf = buf + self._generator.next() + buf = buf + next(self._generator) except StopIteration: break - returned = "" + returned = b"" if size >= 1: self._buf = buf[size:] returned = buf[:size] else: - self._buf = "" + self._buf = b"" returned = buf self._position = self._position + len(returned) diff --git a/util/registry/gzipinputstream.py b/util/registry/gzipinputstream.py index 5b918c8902..15995021b1 100644 --- a/util/registry/gzipinputstream.py +++ b/util/registry/gzipinputstream.py @@ -27,7 +27,7 @@ def __init__(self, fileobj): self._file = fileobj self._zip = zlib.decompressobj(WINDOW_BUFFER_SIZE) self._offset = 0 # position in unzipped stream - self._data = "" + self._data = b"" def __fill(self, num_bytes): """ @@ -77,12 +77,12 @@ def read(self, size=0): self._data = self._data[size:] else: data = self._data - self._data = "" + self._data = b"" self._offset = self._offset + len(data) return data - def next(self): + def __next__(self): line = self.readline() if not line: raise StopIteration() @@ -90,10 +90,10 @@ def next(self): def readline(self): # make sure we have an entire line - while self._zip and "\n" not in self._data: + while self._zip and b"\n" not in self._data: self.__fill(len(self._data) + 512) - pos = string.find(self._data, "\n") + 1 + pos = self._data.find(b"\n") + 1 if pos <= 0: return self.read() diff --git a/util/registry/gzipwrap.py b/util/registry/gzipwrap.py index df15396d85..06c00ca889 100644 --- a/util/registry/gzipwrap.py +++ b/util/registry/gzipwrap.py @@ -7,7 +7,7 @@ class GzipWrap(object): def __init__(self, input, filename=None, compresslevel=1): self.input = iter(input) - self.buffer = "" + self.buffer = b"" self.zipper = GzipFile( filename, mode="wb", fileobj=self, 
compresslevel=compresslevel, mtime=0 ) @@ -30,10 +30,10 @@ def read(self, size=-1): is_done = False input_size = 0 - input_buffer = "" + input_buffer = b"" while input_size < GZIP_BUFFER_SIZE: try: - s = self.input.next() + s = next(self.input) input_buffer += s input_size = input_size + len(s) except StopIteration: diff --git a/util/registry/queuefile.py b/util/registry/queuefile.py index 5ba7392f97..dac0687014 100644 --- a/util/registry/queuefile.py +++ b/util/registry/queuefile.py @@ -11,7 +11,7 @@ def __init__(self, queue, name=None, timeout=None): self._queue = queue self._closed = False self._done = False - self._buffer = "" + self._buffer = b"" self._total_size = 0 self._name = name self.raised_exception = False @@ -26,7 +26,7 @@ def read(self, size=-1): if self._closed or self._done: if size == -1: buf = self._buffer - self._buffer = "" + self._buffer = b"" return buf buf = self._buffer[0:size] @@ -55,7 +55,7 @@ def read(self, size=-1): handled = True if handled: - return "" + return b"" else: raise exception @@ -72,7 +72,7 @@ def read(self, size=-1): # Return the requested slice of the buffer. if size == -1: buf = self._buffer - self._buffer = "" + self._buffer = b"" return buf buf = self._buffer[0:size] diff --git a/util/registry/queueprocess.py b/util/registry/queueprocess.py index 43cb1b3497..eab6524597 100644 --- a/util/registry/queueprocess.py +++ b/util/registry/queueprocess.py @@ -31,7 +31,7 @@ def create_queue(self): Any queues added will have the data produced appended. """ - queue = Queue(self._max_size / self._chunk_size) + queue = Queue(self._max_size // self._chunk_size) self._queues.append(queue) return queue @@ -62,7 +62,7 @@ def _run(get_producer, queues, chunk_size, args): try: result = QueueResult(producer(chunk_size) or None, None) except Exception as ex: - message = "%s\n%s" % (ex.message, "".join(traceback.format_exception(*sys.exc_info()))) + message = "%s\n%s" % (str(ex), "".join(traceback.format_exception(*sys.exc_info()))) result = QueueResult(None, Exception(message)) for queue in queues: diff --git a/util/registry/streamlayerformat.py b/util/registry/streamlayerformat.py index e32d3109d2..39c05ebfb7 100644 --- a/util/registry/streamlayerformat.py +++ b/util/registry/streamlayerformat.py @@ -30,10 +30,10 @@ def after_tar_layer(self): @staticmethod def _normalize_path(path): - return os.path.relpath(path.decode("utf-8"), "./") + return os.path.relpath(path, "./") def _check_deleted(self, absolute): - ubsolute = unicode(absolute) + ubsolute = str(absolute) for prefix in self.deleted_prefix_trie.iter_prefixes(ubsolute): if not os.path.relpath(ubsolute, prefix).startswith(".."): return True @@ -53,7 +53,7 @@ def is_skipped_file(self, filename): # Check if this file has already been encountered somewhere. If so, # skip it. - ubsolute = unicode(absolute) + ubsolute = str(absolute) if ubsolute in self.path_trie: return True diff --git a/util/registry/tarlayerformat.py b/util/registry/tarlayerformat.py index 1452d03c96..08d7bd7527 100644 --- a/util/registry/tarlayerformat.py +++ b/util/registry/tarlayerformat.py @@ -147,8 +147,8 @@ def get_generator(self): self.reporter.report_pass(2 if len(dangling_hard_links) > 0 else 1) # Last two records are empty in TAR spec. - yield "\0" * 512 - yield "\0" * 512 + yield b"\0" * 512 + yield b"\0" * 512 @abstractmethod def is_skipped_file(self, filename): @@ -199,4 +199,4 @@ def _emit_file(tar_file, tar_info): # Files must be padding to 512 byte multiples. 
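The _emit_file hunk here pads each tar member to a full 512-byte block with NUL bytes, now as byte strings. A worked example of the same arithmetic (the constant and helper name are illustrative, not part of the patch):

```python
BLOCK_SIZE = 512

def tar_padding(length):
    # NUL padding needed to round a member of `length` bytes up to a full tar block.
    remainder = length % BLOCK_SIZE
    return b"" if remainder == 0 else b"\0" * (BLOCK_SIZE - remainder)

assert tar_padding(6) == b"\0" * 506
assert tar_padding(1024) == b""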
if length % 512 != 0: - yield "\0" * (512 - (length % 512)) + yield b"\0" * (512 - (length % 512)) diff --git a/util/registry/test/test_filelike.py b/util/registry/test/test_filelike.py index 2170cfcdbc..e4e65f3d1b 100644 --- a/util/registry/test/test_filelike.py +++ b/util/registry/test/test_filelike.py @@ -1,107 +1,107 @@ -from StringIO import StringIO +from io import BytesIO, StringIO from util.registry.filelike import FilelikeStreamConcat, LimitingStream, StreamSlice def somegenerator(): - yield "some" - yield "cool" - yield "file-contents" + yield b"some" + yield b"cool" + yield b"file-contents" def test_parts(): - gens = iter([StringIO(s) for s in somegenerator()]) + gens = iter([BytesIO(s) for s in somegenerator()]) fileobj = FilelikeStreamConcat(gens) - assert fileobj.read(2) == "so" - assert fileobj.read(3) == "mec" - assert fileobj.read(7) == "oolfile" - assert fileobj.read(-1) == "-contents" + assert fileobj.read(2) == b"so" + assert fileobj.read(3) == b"mec" + assert fileobj.read(7) == b"oolfile" + assert fileobj.read(-1) == b"-contents" def test_entire(): - gens = iter([StringIO(s) for s in somegenerator()]) + gens = iter([BytesIO(s) for s in somegenerator()]) fileobj = FilelikeStreamConcat(gens) - assert fileobj.read(-1) == "somecoolfile-contents" + assert fileobj.read(-1) == b"somecoolfile-contents" def test_nolimit(): - fileobj = StringIO("this is a cool test") + fileobj = BytesIO(b"this is a cool test") stream = LimitingStream(fileobj) - assert stream.read(-1) == "this is a cool test" - assert len("this is a cool test") == stream.tell() + assert stream.read(-1) == b"this is a cool test" + assert len(b"this is a cool test") == stream.tell() def test_simplelimit(): - fileobj = StringIO("this is a cool test") + fileobj = BytesIO(b"this is a cool test") stream = LimitingStream(fileobj, 4) - assert stream.read(-1) == "this" + assert stream.read(-1) == b"this" assert 4 == stream.tell() def test_simplelimit_readdefined(): - fileobj = StringIO("this is a cool test") + fileobj = BytesIO(b"this is a cool test") stream = LimitingStream(fileobj, 4) - assert stream.read(2) == "th" + assert stream.read(2) == b"th" assert 2 == stream.tell() def test_nolimit_readdefined(): - fileobj = StringIO("this is a cool test") + fileobj = BytesIO(b"this is a cool test") stream = LimitingStream(fileobj, -1) - assert stream.read(2) == "th" + assert stream.read(2) == b"th" assert 2 == stream.tell() def test_limit_multiread(): - fileobj = StringIO("this is a cool test") + fileobj = BytesIO(b"this is a cool test") stream = LimitingStream(fileobj, 7) - assert stream.read(4) == "this" - assert stream.read(3) == " is" - assert stream.read(2) == "" + assert stream.read(4) == b"this" + assert stream.read(3) == b" is" + assert stream.read(2) == b"" assert 7 == stream.tell() def test_limit_multiread2(): - fileobj = StringIO("this is a cool test") + fileobj = BytesIO(b"this is a cool test") stream = LimitingStream(fileobj, 7) - assert stream.read(4) == "this" - assert stream.read(-1) == " is" + assert stream.read(4) == b"this" + assert stream.read(-1) == b" is" assert 7 == stream.tell() def test_seek(): - fileobj = StringIO("this is a cool test") + fileobj = BytesIO(b"this is a cool test") stream = LimitingStream(fileobj) stream.seek(2) - assert stream.read(2) == "is" + assert stream.read(2) == b"is" assert 4 == stream.tell() def test_seek_withlimit(): - fileobj = StringIO("this is a cool test") + fileobj = BytesIO(b"this is a cool test") stream = LimitingStream(fileobj, 3) stream.seek(2) - assert stream.read(2) 
== "i" + assert stream.read(2) == b"i" assert 3 == stream.tell() def test_seek_pastlimit(): - fileobj = StringIO("this is a cool test") + fileobj = BytesIO(b"this is a cool test") stream = LimitingStream(fileobj, 3) stream.seek(4) - assert stream.read(1) == "" + assert stream.read(1) == b"" assert 3 == stream.tell() def test_seek_to_tell(): - fileobj = StringIO("this is a cool test") + fileobj = BytesIO(b"this is a cool test") stream = LimitingStream(fileobj, 3) stream.seek(stream.tell()) - assert stream.read(4) == "thi" + assert stream.read(4) == b"thi" assert 3 == stream.tell() @@ -116,36 +116,36 @@ def read(self, size=None): def test_noslice(): - fileobj = StringIO("this is a cool test") + fileobj = BytesIO(b"this is a cool test") stream = StreamSlice(fileobj, 0) - assert stream.read(-1) == "this is a cool test" - assert len("this is a cool test") == stream.tell() + assert stream.read(-1) == b"this is a cool test" + assert len(b"this is a cool test") == stream.tell() def test_startindex(): - fileobj = StringIO("this is a cool test") + fileobj = BytesIO(b"this is a cool test") stream = StreamSlice(fileobj, 5) - assert stream.read(-1) == "is a cool test" - assert len("is a cool test") == stream.tell() + assert stream.read(-1) == b"is a cool test" + assert len(b"is a cool test") == stream.tell() def test_startindex_limitedread(): - fileobj = StringIO("this is a cool test") + fileobj = BytesIO(b"this is a cool test") stream = StreamSlice(fileobj, 5) - assert stream.read(4) == "is a" + assert stream.read(4) == b"is a" assert 4 == stream.tell() def test_slice(): - fileobj = StringIO("this is a cool test") + fileobj = BytesIO(b"this is a cool test") stream = StreamSlice(fileobj, 5, 9) - assert stream.read(-1) == "is a" - assert len("is a") == stream.tell() + assert stream.read(-1) == b"is a" + assert len(b"is a") == stream.tell() def test_slice_explictread(): - fileobj = StringIO("this is a cool test") + fileobj = BytesIO(b"this is a cool test") stream = StreamSlice(fileobj, 5, 9) - assert stream.read(2) == "is" - assert stream.read(5) == " a" - assert len("is a") == stream.tell() + assert stream.read(2) == b"is" + assert stream.read(5) == b" a" + assert len(b"is a") == stream.tell() diff --git a/util/registry/test/test_generatorfile.py b/util/registry/test/test_generatorfile.py index 552446c0c2..4e6cef02e1 100644 --- a/util/registry/test/test_generatorfile.py +++ b/util/registry/test/test_generatorfile.py @@ -1,4 +1,4 @@ -from _pyio import BufferedReader +from _pyio import BufferedReader, TextIOWrapper import magic @@ -6,92 +6,92 @@ def sample_generator(): - yield "this" - yield "is" - yield "a" - yield "test" + yield b"this" + yield b"is" + yield b"a" + yield b"test" def test_basic_generator(): with GeneratorFile(sample_generator()) as f: assert f.tell() == 0 - assert f.read() == "thisisatest" - assert f.tell() == len("thisisatest") + assert f.read() == b"thisisatest" + assert f.tell() == len(b"thisisatest") def test_same_lengths(): with GeneratorFile(sample_generator()) as f: - assert f.read(4) == "this" + assert f.read(4) == b"this" assert f.tell() == 4 - assert f.read(2) == "is" + assert f.read(2) == b"is" assert f.tell() == 6 - assert f.read(1) == "a" + assert f.read(1) == b"a" assert f.tell() == 7 - assert f.read(4) == "test" + assert f.read(4) == b"test" assert f.tell() == 11 def test_indexed_lengths(): with GeneratorFile(sample_generator()) as f: - assert f.read(6) == "thisis" + assert f.read(6) == b"thisis" assert f.tell() == 6 - assert f.read(5) == "atest" + assert f.read(5) == 
b"atest" assert f.tell() == 11 def test_misindexed_lengths(): with GeneratorFile(sample_generator()) as f: - assert f.read(6) == "thisis" + assert f.read(6) == b"thisis" assert f.tell() == 6 - assert f.read(3) == "ate" + assert f.read(3) == b"ate" assert f.tell() == 9 - assert f.read(2) == "st" + assert f.read(2) == b"st" assert f.tell() == 11 - assert f.read(2) == "" + assert f.read(2) == b"" assert f.tell() == 11 def test_misindexed_lengths_2(): with GeneratorFile(sample_generator()) as f: - assert f.read(8) == "thisisat" + assert f.read(8) == b"thisisat" assert f.tell() == 8 - assert f.read(1) == "e" + assert f.read(1) == b"e" assert f.tell() == 9 - assert f.read(2) == "st" + assert f.read(2) == b"st" assert f.tell() == 11 - assert f.read(2) == "" + assert f.read(2) == b"" assert f.tell() == 11 def test_overly_long(): with GeneratorFile(sample_generator()) as f: - assert f.read(60) == "thisisatest" + assert f.read(60) == b"thisisatest" assert f.tell() == 11 def test_with_bufferedreader(): with GeneratorFile(sample_generator()) as f: buffered = BufferedReader(f) - assert buffered.peek(10) == "thisisatest" - assert buffered.read(10) == "thisisates" + assert buffered.peek(10) == b"thisisatest" + assert buffered.read(10) == b"thisisates" def mimed_html_generator(): - yield "" - yield "" - yield "sometext" * 1024 - yield "" - yield "" + yield b"" + yield b"" + yield b"sometext" * 1024 + yield b"" + yield b"" def test_magic(): diff --git a/util/registry/test/test_queuefile.py b/util/registry/test/test_queuefile.py index b359aefb83..0595121acc 100644 --- a/util/registry/test/test_queuefile.py +++ b/util/registry/test/test_queuefile.py @@ -19,22 +19,22 @@ def put(self, data): def test_basic(): queue = FakeQueue() - queue.put(QueueResult("hello world", None)) - queue.put(QueueResult("! how goes there?", None)) + queue.put(QueueResult(b"hello world", None)) + queue.put(QueueResult(b"! how goes there?", None)) queue.put(QueueResult(None, None)) queuefile = QueueFile(queue) - assert queuefile.read() == "hello world! how goes there?" + assert queuefile.read() == b"hello world! how goes there?" def test_chunk_reading(): queue = FakeQueue() - queue.put(QueueResult("hello world", None)) - queue.put(QueueResult("! how goes there?", None)) + queue.put(QueueResult(b"hello world", None)) + queue.put(QueueResult(b"! how goes there?", None)) queue.put(QueueResult(None, None)) queuefile = QueueFile(queue) - data = "" + data = b"" while True: result = queuefile.read(size=2) @@ -43,14 +43,14 @@ def test_chunk_reading(): data += result - assert data == "hello world! how goes there?" + assert data == b"hello world! how goes there?" def test_unhandled_exception(): queue = FakeQueue() - queue.put(QueueResult("hello world", None)) + queue.put(QueueResult(b"hello world", None)) queue.put(QueueResult(None, IOError("some exception"))) - queue.put(QueueResult("! how goes there?", None)) + queue.put(QueueResult(b"! how goes there?", None)) queue.put(QueueResult(None, None)) queuefile = QueueFile(queue) @@ -61,9 +61,9 @@ def test_unhandled_exception(): def test_handled_exception(): queue = FakeQueue() - queue.put(QueueResult("hello world", None)) + queue.put(QueueResult(b"hello world", None)) queue.put(QueueResult(None, IOError("some exception"))) - queue.put(QueueResult("! how goes there?", None)) + queue.put(QueueResult(b"! 
how goes there?", None)) queue.put(QueueResult(None, None)) ex_found = [None] @@ -87,7 +87,7 @@ def test_binary_data(): queue.put(QueueResult(None, None)) queuefile = QueueFile(queue) - found_data = "" + found_data = b"" while True: current_data = queuefile.read(size=37) if len(current_data) == 0: @@ -102,12 +102,12 @@ def test_empty_data(): queue = FakeQueue() # Generate some empty binary data. - binary_data = "\0" * 1024 + binary_data = b"\0" * 1024 queue.put(QueueResult(binary_data, None)) queue.put(QueueResult(None, None)) queuefile = QueueFile(queue) - found_data = "" + found_data = b"" while True: current_data = queuefile.read(size=37) if len(current_data) == 0: diff --git a/util/registry/test/test_streamlayerformat.py b/util/registry/test/test_streamlayerformat.py index 7ba46475af..d329f3e9b2 100644 --- a/util/registry/test/test_streamlayerformat.py +++ b/util/registry/test/test_streamlayerformat.py @@ -2,14 +2,14 @@ import pytest -from StringIO import StringIO +from io import BytesIO from util.registry.streamlayerformat import StreamLayerMerger from util.registry.aufs import AUFS_WHITEOUT from util.registry.tarlayerformat import TarLayerReadException def create_layer(*file_pairs): - output = StringIO() + output = BytesIO() with tarfile.open(fileobj=output, mode="w:gz") as tar: for current_filename, current_contents in file_pairs: if current_contents is None: @@ -23,45 +23,45 @@ def create_layer(*file_pairs): else: current_filename = AUFS_WHITEOUT + parts[-1] - current_contents = "" + current_contents = b"" - if current_contents.startswith("linkto:"): + if current_contents.startswith(b"linkto:"): info = tarfile.TarInfo(name=current_filename) - info.linkname = current_contents[len("linkto:") :] + info.linkname = current_contents[len(b"linkto:") :].decode("utf-8") info.type = tarfile.LNKTYPE tar.addfile(info) else: info = tarfile.TarInfo(name=current_filename) info.size = len(current_contents) - tar.addfile(info, fileobj=StringIO(current_contents)) + tar.addfile(info, fileobj=BytesIO(current_contents)) return output.getvalue() def create_empty_layer(): - return "" + return b"" def squash_layers(layers, path_prefix=None): def getter_for_layer(layer): - return lambda: StringIO(layer) + return lambda: BytesIO(layer) def layer_stream_getter(): return [getter_for_layer(layer) for layer in layers] merger = StreamLayerMerger(layer_stream_getter, path_prefix=path_prefix) - merged_data = "".join(merger.get_generator()) + merged_data = b"".join(list(merger.get_generator())) return merged_data def assertHasFile(squashed, filename, contents): - with tarfile.open(fileobj=StringIO(squashed), mode="r:*") as tar: + with tarfile.open(fileobj=BytesIO(squashed), mode="r:*") as tar: member = tar.getmember(filename) - assert contents == "\n".join(tar.extractfile(member).readlines()) + assert contents == b"\n".join(tar.extractfile(member).readlines()) def assertDoesNotHaveFile(squashed, filename): - with tarfile.open(fileobj=StringIO(squashed), mode="r:*") as tar: + with tarfile.open(fileobj=BytesIO(squashed), mode="r:*") as tar: try: member = tar.getmember(filename) except Exception as ex: @@ -71,134 +71,140 @@ def assertDoesNotHaveFile(squashed, filename): def test_single_layer(): - tar_layer = create_layer(("some_file", "foo"), ("another_file", "bar"), ("third_file", "meh")) + tar_layer = create_layer( + ("some_file", b"foo"), ("another_file", b"bar"), ("third_file", b"meh") + ) squashed = squash_layers([tar_layer]) - assertHasFile(squashed, "some_file", "foo") - assertHasFile(squashed, "another_file", 
"bar") - assertHasFile(squashed, "third_file", "meh") + assertHasFile(squashed, "some_file", b"foo") + assertHasFile(squashed, "another_file", b"bar") + assertHasFile(squashed, "third_file", b"meh") def test_multiple_layers(): second_layer = create_layer( - ("some_file", "foo"), ("another_file", "bar"), ("third_file", "meh") + ("some_file", b"foo"), ("another_file", b"bar"), ("third_file", b"meh") ) - first_layer = create_layer(("top_file", "top")) + first_layer = create_layer(("top_file", b"top")) squashed = squash_layers([first_layer, second_layer]) - assertHasFile(squashed, "some_file", "foo") - assertHasFile(squashed, "another_file", "bar") - assertHasFile(squashed, "third_file", "meh") - assertHasFile(squashed, "top_file", "top") + assertHasFile(squashed, "some_file", b"foo") + assertHasFile(squashed, "another_file", b"bar") + assertHasFile(squashed, "third_file", b"meh") + assertHasFile(squashed, "top_file", b"top") def test_multiple_layers_dot(): second_layer = create_layer( - ("./some_file", "foo"), ("another_file", "bar"), ("./third_file", "meh") + ("./some_file", b"foo"), ("another_file", b"bar"), ("./third_file", b"meh") ) - first_layer = create_layer(("top_file", "top")) + first_layer = create_layer(("top_file", b"top")) squashed = squash_layers([first_layer, second_layer]) - assertHasFile(squashed, "./some_file", "foo") - assertHasFile(squashed, "another_file", "bar") - assertHasFile(squashed, "./third_file", "meh") - assertHasFile(squashed, "top_file", "top") + assertHasFile(squashed, "./some_file", b"foo") + assertHasFile(squashed, "another_file", b"bar") + assertHasFile(squashed, "./third_file", b"meh") + assertHasFile(squashed, "top_file", b"top") def test_multiple_layers_overwrite(): second_layer = create_layer( - ("some_file", "foo"), ("another_file", "bar"), ("third_file", "meh") + ("some_file", b"foo"), ("another_file", b"bar"), ("third_file", b"meh") ) - first_layer = create_layer(("another_file", "top")) + first_layer = create_layer(("another_file", b"top")) squashed = squash_layers([first_layer, second_layer]) - assertHasFile(squashed, "some_file", "foo") - assertHasFile(squashed, "third_file", "meh") - assertHasFile(squashed, "another_file", "top") + assertHasFile(squashed, "some_file", b"foo") + assertHasFile(squashed, "third_file", b"meh") + assertHasFile(squashed, "another_file", b"top") def test_multiple_layers_overwrite_base_dot(): second_layer = create_layer( - ("some_file", "foo"), ("./another_file", "bar"), ("third_file", "meh") + ("some_file", b"foo"), ("./another_file", b"bar"), ("third_file", b"meh") ) - first_layer = create_layer(("another_file", "top")) + first_layer = create_layer(("another_file", b"top")) squashed = squash_layers([first_layer, second_layer]) - assertHasFile(squashed, "some_file", "foo") - assertHasFile(squashed, "third_file", "meh") - assertHasFile(squashed, "another_file", "top") + assertHasFile(squashed, "some_file", b"foo") + assertHasFile(squashed, "third_file", b"meh") + assertHasFile(squashed, "another_file", b"top") assertDoesNotHaveFile(squashed, "./another_file") def test_multiple_layers_overwrite_top_dot(): second_layer = create_layer( - ("some_file", "foo"), ("another_file", "bar"), ("third_file", "meh") + ("some_file", b"foo"), ("another_file", b"bar"), ("third_file", b"meh") ) - first_layer = create_layer(("./another_file", "top")) + first_layer = create_layer(("./another_file", b"top")) squashed = squash_layers([first_layer, second_layer]) - assertHasFile(squashed, "some_file", "foo") - assertHasFile(squashed, 
"third_file", "meh") - assertHasFile(squashed, "./another_file", "top") + assertHasFile(squashed, "some_file", b"foo") + assertHasFile(squashed, "third_file", b"meh") + assertHasFile(squashed, "./another_file", b"top") assertDoesNotHaveFile(squashed, "another_file") def test_deleted_file(): second_layer = create_layer( - ("some_file", "foo"), ("another_file", "bar"), ("third_file", "meh") + ("some_file", b"foo"), ("another_file", b"bar"), ("third_file", b"meh") ) first_layer = create_layer(("another_file", None)) squashed = squash_layers([first_layer, second_layer]) - assertHasFile(squashed, "some_file", "foo") - assertHasFile(squashed, "third_file", "meh") + assertHasFile(squashed, "some_file", b"foo") + assertHasFile(squashed, "third_file", b"meh") assertDoesNotHaveFile(squashed, "another_file") def test_deleted_readded_file(): - third_layer = create_layer(("another_file", "bar")) + third_layer = create_layer(("another_file", b"bar")) - second_layer = create_layer(("some_file", "foo"), ("another_file", None), ("third_file", "meh")) + second_layer = create_layer( + ("some_file", b"foo"), ("another_file", None), ("third_file", b"meh") + ) - first_layer = create_layer(("another_file", "newagain")) + first_layer = create_layer(("another_file", b"newagain")) squashed = squash_layers([first_layer, second_layer, third_layer]) - assertHasFile(squashed, "some_file", "foo") - assertHasFile(squashed, "third_file", "meh") - assertHasFile(squashed, "another_file", "newagain") + assertHasFile(squashed, "some_file", b"foo") + assertHasFile(squashed, "third_file", b"meh") + assertHasFile(squashed, "another_file", b"newagain") def test_deleted_in_lower_layer(): - third_layer = create_layer(("deleted_file", "bar")) + third_layer = create_layer(("deleted_file", b"bar")) - second_layer = create_layer(("some_file", "foo"), ("deleted_file", None), ("third_file", "meh")) + second_layer = create_layer( + ("some_file", b"foo"), ("deleted_file", None), ("third_file", b"meh") + ) - first_layer = create_layer(("top_file", "top")) + first_layer = create_layer(("top_file", b"top")) squashed = squash_layers([first_layer, second_layer, third_layer]) - assertHasFile(squashed, "some_file", "foo") - assertHasFile(squashed, "third_file", "meh") - assertHasFile(squashed, "top_file", "top") + assertHasFile(squashed, "some_file", b"foo") + assertHasFile(squashed, "third_file", b"meh") + assertHasFile(squashed, "top_file", b"top") assertDoesNotHaveFile(squashed, "deleted_file") def test_deleted_in_lower_layer_with_added_dot(): - third_layer = create_layer(("./deleted_file", "something")) + third_layer = create_layer(("./deleted_file", b"something")) second_layer = create_layer(("deleted_file", None)) @@ -207,7 +213,7 @@ def test_deleted_in_lower_layer_with_added_dot(): def test_deleted_in_lower_layer_with_deleted_dot(): - third_layer = create_layer(("./deleted_file", "something")) + third_layer = create_layer(("./deleted_file", b"something")) second_layer = create_layer(("./deleted_file", None)) @@ -216,29 +222,29 @@ def test_deleted_in_lower_layer_with_deleted_dot(): def test_directory(): - second_layer = create_layer(("foo/some_file", "foo"), ("foo/another_file", "bar")) + second_layer = create_layer(("foo/some_file", b"foo"), ("foo/another_file", b"bar")) - first_layer = create_layer(("foo/some_file", "top")) + first_layer = create_layer(("foo/some_file", b"top")) squashed = squash_layers([first_layer, second_layer]) - assertHasFile(squashed, "foo/some_file", "top") - assertHasFile(squashed, "foo/another_file", "bar") + 
assertHasFile(squashed, "foo/some_file", b"top") + assertHasFile(squashed, "foo/another_file", b"bar") def test_sub_directory(): - second_layer = create_layer(("foo/some_file", "foo"), ("foo/bar/another_file", "bar")) + second_layer = create_layer(("foo/some_file", b"foo"), ("foo/bar/another_file", b"bar")) - first_layer = create_layer(("foo/some_file", "top")) + first_layer = create_layer(("foo/some_file", b"top")) squashed = squash_layers([first_layer, second_layer]) - assertHasFile(squashed, "foo/some_file", "top") - assertHasFile(squashed, "foo/bar/another_file", "bar") + assertHasFile(squashed, "foo/some_file", b"top") + assertHasFile(squashed, "foo/bar/another_file", b"bar") def test_delete_directory(): - second_layer = create_layer(("foo/some_file", "foo"), ("foo/another_file", "bar")) + second_layer = create_layer(("foo/some_file", b"foo"), ("foo/another_file", b"bar")) first_layer = create_layer(("foo/", None)) @@ -249,29 +255,29 @@ def test_delete_directory(): def test_delete_sub_directory(): - second_layer = create_layer(("foo/some_file", "foo"), ("foo/bar/another_file", "bar")) + second_layer = create_layer(("foo/some_file", b"foo"), ("foo/bar/another_file", b"bar")) first_layer = create_layer(("foo/bar/", None)) squashed = squash_layers([first_layer, second_layer]) assertDoesNotHaveFile(squashed, "foo/bar/another_file") - assertHasFile(squashed, "foo/some_file", "foo") + assertHasFile(squashed, "foo/some_file", b"foo") def test_delete_sub_directory_with_dot(): - second_layer = create_layer(("foo/some_file", "foo"), ("foo/bar/another_file", "bar")) + second_layer = create_layer(("foo/some_file", b"foo"), ("foo/bar/another_file", b"bar")) first_layer = create_layer(("./foo/bar/", None)) squashed = squash_layers([first_layer, second_layer]) assertDoesNotHaveFile(squashed, "foo/bar/another_file") - assertHasFile(squashed, "foo/some_file", "foo") + assertHasFile(squashed, "foo/some_file", b"foo") def test_delete_sub_directory_with_subdot(): - second_layer = create_layer(("./foo/some_file", "foo"), ("./foo/bar/another_file", "bar")) + second_layer = create_layer(("./foo/some_file", b"foo"), ("./foo/bar/another_file", b"bar")) first_layer = create_layer(("foo/bar/", None)) @@ -279,46 +285,46 @@ def test_delete_sub_directory_with_subdot(): assertDoesNotHaveFile(squashed, "foo/bar/another_file") assertDoesNotHaveFile(squashed, "./foo/bar/another_file") - assertHasFile(squashed, "./foo/some_file", "foo") + assertHasFile(squashed, "./foo/some_file", b"foo") def test_delete_directory_recreate(): - third_layer = create_layer(("foo/some_file", "foo"), ("foo/another_file", "bar")) + third_layer = create_layer(("foo/some_file", b"foo"), ("foo/another_file", b"bar")) second_layer = create_layer(("foo/", None)) - first_layer = create_layer(("foo/some_file", "baz")) + first_layer = create_layer(("foo/some_file", b"baz")) squashed = squash_layers([first_layer, second_layer, third_layer]) - assertHasFile(squashed, "foo/some_file", "baz") + assertHasFile(squashed, "foo/some_file", b"baz") assertDoesNotHaveFile(squashed, "foo/another_file") def test_delete_directory_prefix(): - third_layer = create_layer(("foobar/some_file", "foo"), ("foo/another_file", "bar")) + third_layer = create_layer(("foobar/some_file", b"foo"), ("foo/another_file", b"bar")) second_layer = create_layer(("foo/", None)) squashed = squash_layers([second_layer, third_layer]) - assertHasFile(squashed, "foobar/some_file", "foo") + assertHasFile(squashed, "foobar/some_file", b"foo") assertDoesNotHaveFile(squashed, 
"foo/another_file") def test_delete_directory_pre_prefix(): - third_layer = create_layer(("foobar/baz/some_file", "foo"), ("foo/another_file", "bar")) + third_layer = create_layer(("foobar/baz/some_file", b"foo"), ("foo/another_file", b"bar")) second_layer = create_layer(("foo/", None)) squashed = squash_layers([second_layer, third_layer]) - assertHasFile(squashed, "foobar/baz/some_file", "foo") + assertHasFile(squashed, "foobar/baz/some_file", b"foo") assertDoesNotHaveFile(squashed, "foo/another_file") def test_delete_root_directory(): - third_layer = create_layer(("build/first_file", "foo"), ("build/second_file", "bar")) + third_layer = create_layer(("build/first_file", b"foo"), ("build/second_file", b"bar")) second_layer = create_layer(("build", None)) @@ -329,65 +335,67 @@ def test_delete_root_directory(): def test_tar_empty_layer(): - third_layer = create_layer(("build/first_file", "foo"), ("build/second_file", "bar")) + third_layer = create_layer(("build/first_file", b"foo"), ("build/second_file", b"bar")) empty_layer = create_layer() squashed = squash_layers([empty_layer, third_layer]) - assertHasFile(squashed, "build/first_file", "foo") - assertHasFile(squashed, "build/second_file", "bar") + assertHasFile(squashed, "build/first_file", b"foo") + assertHasFile(squashed, "build/second_file", b"bar") def test_data_empty_layer(): - third_layer = create_layer(("build/first_file", "foo"), ("build/second_file", "bar")) + third_layer = create_layer(("build/first_file", b"foo"), ("build/second_file", b"bar")) empty_layer = create_empty_layer() squashed = squash_layers([empty_layer, third_layer]) - assertHasFile(squashed, "build/first_file", "foo") - assertHasFile(squashed, "build/second_file", "bar") + assertHasFile(squashed, "build/first_file", b"foo") + assertHasFile(squashed, "build/second_file", b"bar") def test_broken_layer(): - third_layer = create_layer(("build/first_file", "foo"), ("build/second_file", "bar")) + third_layer = create_layer(("build/first_file", b"foo"), ("build/second_file", b"bar")) - broken_layer = "not valid data" + broken_layer = b"not valid data" with pytest.raises(TarLayerReadException): squash_layers([broken_layer, third_layer]) def test_single_layer_with_prefix(): - tar_layer = create_layer(("some_file", "foo"), ("another_file", "bar"), ("third_file", "meh")) + tar_layer = create_layer( + ("some_file", b"foo"), ("another_file", b"bar"), ("third_file", b"meh") + ) squashed = squash_layers([tar_layer], path_prefix="foo/") - assertHasFile(squashed, "foo/some_file", "foo") - assertHasFile(squashed, "foo/another_file", "bar") - assertHasFile(squashed, "foo/third_file", "meh") + assertHasFile(squashed, "foo/some_file", b"foo") + assertHasFile(squashed, "foo/another_file", b"bar") + assertHasFile(squashed, "foo/third_file", b"meh") def test_multiple_layers_overwrite_with_prefix(): second_layer = create_layer( - ("some_file", "foo"), ("another_file", "bar"), ("third_file", "meh") + ("some_file", b"foo"), ("another_file", b"bar"), ("third_file", b"meh") ) - first_layer = create_layer(("another_file", "top")) + first_layer = create_layer(("another_file", b"top")) squashed = squash_layers([first_layer, second_layer], path_prefix="foo/") - assertHasFile(squashed, "foo/some_file", "foo") - assertHasFile(squashed, "foo/third_file", "meh") - assertHasFile(squashed, "foo/another_file", "top") + assertHasFile(squashed, "foo/some_file", b"foo") + assertHasFile(squashed, "foo/third_file", b"meh") + assertHasFile(squashed, "foo/another_file", b"top") def test_superlong_filename(): 
tar_layer = create_layer( ( "this_is_the_filename_that_never_ends_it_goes_on_and_on_my_friend_some_people_started", - "meh", + b"meh", ) ) @@ -395,12 +403,14 @@ def test_superlong_filename(): assertHasFile( squashed, "foo/this_is_the_filename_that_never_ends_it_goes_on_and_on_my_friend_some_people_started", - "meh", + b"meh", ) def test_superlong_prefix(): - tar_layer = create_layer(("some_file", "foo"), ("another_file", "bar"), ("third_file", "meh")) + tar_layer = create_layer( + ("some_file", b"foo"), ("another_file", b"bar"), ("third_file", b"meh") + ) squashed = squash_layers( [tar_layer], @@ -410,50 +420,50 @@ def test_superlong_prefix(): assertHasFile( squashed, "foo/bar/baz/something/foo/bar/baz/anotherthing/whatever/this/is/a/really/long/filename/that/goes/here/some_file", - "foo", + b"foo", ) assertHasFile( squashed, "foo/bar/baz/something/foo/bar/baz/anotherthing/whatever/this/is/a/really/long/filename/that/goes/here/another_file", - "bar", + b"bar", ) assertHasFile( squashed, "foo/bar/baz/something/foo/bar/baz/anotherthing/whatever/this/is/a/really/long/filename/that/goes/here/third_file", - "meh", + b"meh", ) def test_hardlink_to_deleted_file(): first_layer = create_layer( - ("tobedeletedfile", "somecontents"), - ("link_to_deleted_file", "linkto:tobedeletedfile"), - ("third_file", "meh"), + ("tobedeletedfile", b"somecontents"), + ("link_to_deleted_file", b"linkto:tobedeletedfile"), + ("third_file", b"meh"), ) second_layer = create_layer(("tobedeletedfile", None)) squashed = squash_layers([second_layer, first_layer], path_prefix="foo/") - assertHasFile(squashed, "foo/third_file", "meh") - assertHasFile(squashed, "foo/link_to_deleted_file", "somecontents") + assertHasFile(squashed, "foo/third_file", b"meh") + assertHasFile(squashed, "foo/link_to_deleted_file", b"somecontents") assertDoesNotHaveFile(squashed, "foo/tobedeletedfile") def test_multiple_hardlink_to_deleted_file(): first_layer = create_layer( - ("tobedeletedfile", "somecontents"), - ("link_to_deleted_file", "linkto:tobedeletedfile"), - ("another_link_to_deleted_file", "linkto:tobedeletedfile"), - ("third_file", "meh"), + ("tobedeletedfile", b"somecontents"), + ("link_to_deleted_file", b"linkto:tobedeletedfile"), + ("another_link_to_deleted_file", b"linkto:tobedeletedfile"), + ("third_file", b"meh"), ) second_layer = create_layer(("tobedeletedfile", None)) squashed = squash_layers([second_layer, first_layer], path_prefix="foo/") - assertHasFile(squashed, "foo/third_file", "meh") - assertHasFile(squashed, "foo/link_to_deleted_file", "somecontents") - assertHasFile(squashed, "foo/another_link_to_deleted_file", "somecontents") + assertHasFile(squashed, "foo/third_file", b"meh") + assertHasFile(squashed, "foo/link_to_deleted_file", b"somecontents") + assertHasFile(squashed, "foo/another_link_to_deleted_file", b"somecontents") assertDoesNotHaveFile(squashed, "foo/tobedeletedfile") diff --git a/util/repomirror/api.py b/util/repomirror/api.py index 68f7b658fc..f76d3148b7 100644 --- a/util/repomirror/api.py +++ b/util/repomirror/api.py @@ -168,18 +168,17 @@ def ping(self): return self._call("GET", _API_METHOD_PING) except requests.exceptions.Timeout as tie: logger.exception("Timeout when trying to connect to repository mirror endpoint") - msg = "Timeout when trying to connect to repository mirror endpoint: %s" % tie.message + msg = "Timeout when trying to connect to repository mirror endpoint: %s" % str(tie) raise Exception(msg) except requests.exceptions.ConnectionError as ce: logger.exception( "Connection error when trying to 
connect to repository mirror endpoint" ) - msg = ( - "Connection error when trying to connect to repository mirror endpoint: %s" - % ce.message + msg = "Connection error when trying to connect to repository mirror endpoint: %s" % str( + ce ) raise Exception(msg) except (requests.exceptions.RequestException, ValueError) as ve: logger.exception("Exception when trying to connect to repository mirror endpoint") - msg = "Exception when trying to connect to repository mirror endpoint: %s" % ve + msg = "Exception when trying to connect to repository mirror endpoint: %s" % str(ve) raise Exception(msg) diff --git a/util/saas/analytics.py b/util/saas/analytics.py index ec14e26385..a49a288d1e 100644 --- a/util/saas/analytics.py +++ b/util/saas/analytics.py @@ -1,7 +1,7 @@ import json import logging -from Queue import Queue +from queue import Queue from threading import Thread from mixpanel import BufferedConsumer, Mixpanel diff --git a/util/saas/cloudwatch.py b/util/saas/cloudwatch.py index ecede40d32..5cd4496fa7 100644 --- a/util/saas/cloudwatch.py +++ b/util/saas/cloudwatch.py @@ -2,7 +2,7 @@ import time import random -from Queue import Empty +from queue import Empty from threading import Thread import boto diff --git a/util/secscan/api.py b/util/secscan/api.py index 5cbbeba0e8..1fb9e747ec 100644 --- a/util/secscan/api.py +++ b/util/secscan/api.py @@ -3,7 +3,7 @@ from abc import ABCMeta, abstractmethod from six import add_metaclass -from urlparse import urljoin +from urllib.parse import urljoin import requests @@ -283,7 +283,7 @@ def _get_image_url_and_auth(self, image): auth_token = generate_bearer_token( audience, subject, context, access, TOKEN_VALIDITY_LIFETIME_S, self._instance_keys ) - auth_header = "Bearer " + auth_token + auth_header = "Bearer " + auth_token.decode("ascii") uri = self._uri_creator(repository_and_namespace, image.storage.content_checksum) diff --git a/util/secscan/blob.py b/util/secscan/blob.py index 0048bff5a5..adb73697fb 100644 --- a/util/secscan/blob.py +++ b/util/secscan/blob.py @@ -1,4 +1,4 @@ -from urlparse import urljoin +from urllib.parse import urljoin from flask import url_for @@ -72,4 +72,4 @@ def headers_for_download(self, repository_ref, blob, timeout=60): audience, subject, context, access, timeout, self._instance_keys ) - return {"Authorization": ["Bearer " + auth_token]} + return {"Authorization": ["Bearer " + auth_token.decode("ascii")]} diff --git a/util/secscan/fake.py b/util/secscan/fake.py index e63af9bf53..1cba85d354 100644 --- a/util/secscan/fake.py +++ b/util/secscan/fake.py @@ -1,7 +1,7 @@ import json import copy import uuid -import urlparse +import urllib.parse from contextlib import contextmanager from httmock import urlmatch, HTTMock, all_requests @@ -364,7 +364,7 @@ def get_notification(url, _): "content": json.dumps({"Error": {"Message": "Unknown notification"}}), } - query_params = urlparse.parse_qs(url.query) + query_params = urllib.parse.parse_qs(url.query) limit = int(query_params.get("limit", [2])[0]) page = int(query_params.get("page", [0])[0]) diff --git a/util/secscan/notifier.py b/util/secscan/notifier.py index d817d8a539..9d1db5df10 100644 --- a/util/secscan/notifier.py +++ b/util/secscan/notifier.py @@ -50,12 +50,12 @@ def send_notifications(self): new_vuln = self.vulnerability_info new_severity = PRIORITY_LEVELS.get( - new_vuln.get("Severity", "Unknown"), {"index": sys.maxint} + new_vuln.get("Severity", "Unknown"), {"index": sys.maxsize} ) # For each of the tags found, issue a notification. 
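Two Python 3 changes recur throughout these mirror and security-scanner hunks: exceptions no longer carry a .message attribute (str(exc) is the replacement), and sys.maxint is gone in favor of sys.maxsize. A short illustration, separate from the patch:

```python
import sys

try:
    raise ValueError("bad manifest hash")
except ValueError as exc:
    detail = str(exc)        # .message was removed in Python 3
    assert detail == "bad manifest hash"

UNKNOWN_SEVERITY_INDEX = sys.maxsize   # sys.maxint no longer exists
```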
with notification_batch() as spawn_notification: - for repository_id, tags in self.tags_by_repository_map.iteritems(): + for repository_id, tags in self.tags_by_repository_map.items(): event_data = { "tags": list(tags), "vulnerability": { @@ -93,10 +93,10 @@ def process_notification_page_data(self, notification_page_data): old_layer_ids = old_data.get("LayersIntroducingVulnerability", []) new_severity = PRIORITY_LEVELS.get( - new_vuln.get("Severity", "Unknown"), {"index": sys.maxint} + new_vuln.get("Severity", "Unknown"), {"index": sys.maxsize} ) old_severity = PRIORITY_LEVELS.get( - old_vuln.get("Severity", "Unknown"), {"index": sys.maxint} + old_vuln.get("Severity", "Unknown"), {"index": sys.maxsize} ) # Check if the severity of the vulnerability has increased. If so, then we report this diff --git a/util/secscan/secscan_util.py b/util/secscan/secscan_util.py index dbd8a3ecd4..ad31034657 100644 --- a/util/secscan/secscan_util.py +++ b/util/secscan/secscan_util.py @@ -1,4 +1,4 @@ -from urlparse import urljoin +from urllib.parse import urljoin from flask import url_for diff --git a/util/secscan/v4/api.py b/util/secscan/v4/api.py index e96e336d7d..695159d5ca 100644 --- a/util/secscan/v4/api.py +++ b/util/secscan/v4/api.py @@ -8,7 +8,7 @@ from abc import ABCMeta, abstractmethod from six import add_metaclass -from urlparse import urljoin +from urllib.parse import urljoin from jsonschema import validate, RefResolver from data.registry_model.datatypes import Manifest as ManifestDataType @@ -103,7 +103,7 @@ def state(self): try: resp = self._perform(actions["IndexState"]()) except (Non200ResponseException, IncompatibleAPIResponse) as ex: - raise APIRequestFailure(ex.message) + raise APIRequestFailure(ex) return resp.json() @@ -137,7 +137,7 @@ def _join(first, second): try: resp = self._perform(actions["Index"](body)) except (Non200ResponseException, IncompatibleAPIResponse) as ex: - raise APIRequestFailure(ex.message) + raise APIRequestFailure(ex) return (resp.json(), resp.headers["etag"]) @@ -145,11 +145,11 @@ def index_report(self, manifest_hash): try: resp = self._perform(actions["GetIndexReport"](manifest_hash)) except IncompatibleAPIResponse as ex: - raise APIRequestFailure(ex.message) + raise APIRequestFailure(ex) except Non200ResponseException as ex: if ex.response.status_code == 404: return None - raise APIRequestFailure(ex.message) + raise APIRequestFailure(ex) return resp.json() @@ -157,11 +157,11 @@ def vulnerability_report(self, manifest_hash): try: resp = self._perform(actions["GetVulnerabilityReport"](manifest_hash)) except IncompatibleAPIResponse as ex: - raise APIRequestFailure(ex.message) + raise APIRequestFailure(ex) except Non200ResponseException as ex: if ex.response.status_code == 404: return None - raise APIRequestFailure(ex.message) + raise APIRequestFailure(ex) return resp.json() @@ -174,9 +174,8 @@ def _perform(self, action): resp = self._client.request(method, url, json=body) except requests.exceptions.ConnectionError as ce: logger.exception("Connection error when trying to connect to security scanner endpoint") - msg = ( - "Connection error when trying to connect to security scanner endpoint: %s" - % ce.message + msg = "Connection error when trying to connect to security scanner endpoint: %s" % str( + ce ) raise APIRequestFailure(msg) diff --git a/util/security/aes.py b/util/security/aes.py index 5017615e21..5b8935201e 100644 --- a/util/security/aes.py +++ b/util/security/aes.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - import base64 import hashlib from 
Crypto import Random @@ -18,6 +16,7 @@ def __init__(self, key): self.key = key def encrypt(self, raw): + assert isinstance(raw, bytes) raw = self._pad(raw) iv = Random.new().read(AES.block_size) cipher = AES.new(self.key, AES.MODE_CBC, iv) @@ -30,7 +29,7 @@ def decrypt(self, enc): return self._unpad(cipher.decrypt(enc[AES.block_size :])).decode("utf-8") def _pad(self, s): - return s + (self.bs - len(s) % self.bs) * chr(self.bs - len(s) % self.bs) + return s + (self.bs - len(s) % self.bs) * chr(self.bs - len(s) % self.bs).encode("ascii") @staticmethod def _unpad(s): diff --git a/util/security/crypto.py b/util/security/crypto.py index ff1648d487..8b6d58ee55 100644 --- a/util/security/crypto.py +++ b/util/security/crypto.py @@ -10,7 +10,14 @@ def encrypt_string(string, key): The key must be 32 raw bytes. """ f = Fernet(key) - return f.encrypt(string) + + # Fernet() works only on byte objects. Convert the string to bytes. + unencrypted_bytes = string.encode() + encrypted_bytes = f.encrypt(unencrypted_bytes) + + # Fernet() returns a byte object. Convert it to a string before returning. + encrypted_string = encrypted_bytes.decode() + return encrypted_string def decrypt_string(string, key, ttl=None): @@ -20,9 +27,21 @@ def decrypt_string(string, key, ttl=None): The key must be 32 raw bytes. """ f = Fernet(key) + + # Fernet() works only on byte objects. Convert the string to bytes before decrypting. + encrypted_bytes = string.encode() # str -> bytes + try: - return f.decrypt(str(string), ttl=ttl) + decrypted_bytes = f.decrypt(encrypted_bytes, ttl=ttl) except InvalidToken: - return None - except TypeError: - return None + """ + From the cryptography library's documentation: + + If the token is in any way invalid, this exception is raised. + A token may be invalid for a number of reasons: it is older than the + ttl, it is malformed, or it does not have a valid signature. + """ + return None # TODO(kmullins): Shall we log this case? Is it expected? + + decrypted_string = decrypted_bytes.decode() # bytes -> str + return decrypted_string diff --git a/util/security/fingerprint.py b/util/security/fingerprint.py index dd56bbc314..b2f33efbd1 100644 --- a/util/security/fingerprint.py +++ b/util/security/fingerprint.py @@ -14,4 +14,4 @@ def canonical_kid(jwk): Returns: string: the unique kid for the given JWK. """ - return sha256(json.dumps(canonicalize(jwk), separators=(",", ":"))).hexdigest() + return sha256(json.dumps(canonicalize(jwk), separators=(",", ":")).encode("utf-8")).hexdigest() diff --git a/util/security/registry_jwt.py b/util/security/registry_jwt.py index 2110cb175f..d12da12896 100644 --- a/util/security/registry_jwt.py +++ b/util/security/registry_jwt.py @@ -180,7 +180,7 @@ def build_context_and_subject(auth_context=None, tuf_roots=None): # TODO: remove once Apostille has been upgraded to not use the single root. single_root = ( - tuf_roots.values()[0] + list(tuf_roots.values())[0] if tuf_roots is not None and len(tuf_roots) == 1 else DISABLED_TUF_ROOT ) diff --git a/util/security/secret.py b/util/security/secret.py index 0c65782782..96ee7302d8 100644 --- a/util/security/secret.py +++ b/util/security/secret.py @@ -11,7 +11,7 @@ def convert_secret_key(config_secret_key): # First try parsing the key as an int. 
try: big_int = int(config_secret_key) - secret_key = str(bytearray.fromhex("{:02x}".format(big_int))) + secret_key = bytearray.fromhex("{:02x}".format(big_int)) except ValueError: pass @@ -23,8 +23,9 @@ def convert_secret_key(config_secret_key): pass if secret_key is None: - secret_key = str(bytearray(map(ord, config_secret_key))) + secret_key = bytearray(list(map(ord, config_secret_key))) # Otherwise, use the bytes directly. - assert len(secret_key) - return "".join(itertools.islice(itertools.cycle(secret_key), 32)) + assert len(secret_key) > 0 + + return b"".join(itertools.islice(itertools.cycle([bytes([b]) for b in secret_key]), 32)) diff --git a/util/security/signing.py b/util/security/signing.py index 7fd7f39041..2eb7c6880a 100644 --- a/util/security/signing.py +++ b/util/security/signing.py @@ -1,11 +1,10 @@ -import gpgme -import os +import gpg import features import logging +from io import BytesIO -logger = logging.getLogger(__name__) -from StringIO import StringIO +logger = logging.getLogger(__name__) class GPG2Signer(object): @@ -23,7 +22,7 @@ def __init__(self, config, config_provider): if not config.get("GPG2_PUBLIC_KEY_FILENAME"): raise Exception("Missing configuration key GPG2_PUBLIC_KEY_FILENAME") - self._ctx = gpgme.Context() + self._ctx = gpg.Context() self._ctx.armor = True self._private_key_name = config["GPG2_PRIVATE_KEY_NAME"] self._public_key_filename = config["GPG2_PUBLIC_KEY_FILENAME"] @@ -33,7 +32,7 @@ def __init__(self, config, config_provider): raise Exception("Missing key file %s" % config["GPG2_PRIVATE_KEY_FILENAME"]) with config_provider.get_volume_file(config["GPG2_PRIVATE_KEY_FILENAME"], mode="rb") as fp: - self._ctx.import_(fp) + self._ctx.op_import(fp) @property def name(self): @@ -44,18 +43,20 @@ def open_public_key_file(self): def detached_sign(self, stream): """ - Signs the given stream, returning the signature. + Signs the given byte-like stream, returning the signature. 
""" ctx = self._ctx try: - ctx.signers = [ctx.get_key(self._private_key_name)] + ctx.signers = [ctx.get_key(self._private_key_name, 0)] except: raise Exception("Invalid private key name") - signature = StringIO() - new_sigs = ctx.sign(stream, signature, gpgme.SIG_MODE_DETACH) - signature.seek(0) - return signature.getvalue() + data = stream.read() + if not isinstance(data, bytes): + raise TypeError("Stream is not byte-like") + + sign_res = ctx.sign(data, mode=gpg.constants.sig.mode.DETACH) + return sign_res[0] class Signer(object): diff --git a/util/security/ssh.py b/util/security/ssh.py index 6dc0598a56..c269e164aa 100644 --- a/util/security/ssh.py +++ b/util/security/ssh.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - from Crypto.PublicKey import RSA diff --git a/util/security/ssl.py b/util/security/ssl.py index fd6c9f409d..cc1323bfe0 100644 --- a/util/security/ssl.py +++ b/util/security/ssl.py @@ -31,7 +31,7 @@ def load_certificate(cert_contents): raise CertInvalidException(ex.args[0][0][2]) -_SUBJECT_ALT_NAME = "subjectAltName" +_SUBJECT_ALT_NAME = b"subjectAltName" class SSLCertificate(object): diff --git a/util/security/test/test_secret.py b/util/security/test/test_secret.py new file mode 100644 index 0000000000..ff2e375a8f --- /dev/null +++ b/util/security/test/test_secret.py @@ -0,0 +1,29 @@ +import uuid +import pytest + +from util.security.secret import convert_secret_key + + +@pytest.mark.parametrize( + "config_secret_key, expected_secret_key", + [ + pytest.param("somesecretkey", b"somesecretkeysomesecretkeysomese", id="Some string"), + pytest.param("255", b"\xff" * 32, id="Some int that can be represented as a byte",), + pytest.param( + "256", + b"25625625625625625625625625625625", + id="Some int that can't be represented as a byte multiple (256 is 100 in hex -> 12 bits)", + ), + pytest.param( + "123e4567-e89b-12d3-a456-426655440000", + uuid.UUID("123e4567-e89b-12d3-a456-426655440000").bytes * 2, + id="Some 16bit UUID", + ), + ], +) +def test_convert_secret_key(config_secret_key, expected_secret_key): + converted_secret_key = convert_secret_key(config_secret_key) + + assert len(converted_secret_key) == 32 + assert isinstance(converted_secret_key, bytes) + assert converted_secret_key == expected_secret_key diff --git a/util/security/test/test_signing.py b/util/security/test/test_signing.py new file mode 100644 index 0000000000..2965dae024 --- /dev/null +++ b/util/security/test/test_signing.py @@ -0,0 +1,28 @@ +import pytest +from io import StringIO, BytesIO + +from app import app, config_provider +from util.security.signing import Signer + + +@pytest.fixture(params=["gpg2"]) +def signer(request): + app.config["SIGNING_ENGINE"] = request.param + return Signer(app, config_provider) + + +@pytest.mark.parametrize( + "data, expected_exception", + [ + ("Unicode strings not allowed", AttributeError), + (StringIO("Not OK, because this does not implement buffer protocol"), TypeError), + (b"bytes are not ok. 
It should be wrapped in a file-like object", AttributeError), + (BytesIO(b"Thisisfine"), None), + ], +) +def test_detached_sign(data, expected_exception, signer): + if expected_exception is not None: + with pytest.raises(expected_exception): + signer.detached_sign(data) + else: + signer.detached_sign(data) diff --git a/util/security/test/test_ssl_util.py b/util/security/test/test_ssl_util.py index 7e7b06f3b1..d24e052b3f 100644 --- a/util/security/test/test_ssl_util.py +++ b/util/security/test/test_ssl_util.py @@ -23,7 +23,7 @@ def generate_test_cert(hostname="somehostname", san_list=None, expires=1000000): # Add the subjectAltNames (if necessary). if san_list is not None: - cert.add_extensions([crypto.X509Extension("subjectAltName", False, ", ".join(san_list))]) + cert.add_extensions([crypto.X509Extension(b"subjectAltName", False, b", ".join(san_list))]) cert.set_serial_number(1000) cert.gmtime_adj_notBefore(0) @@ -62,7 +62,7 @@ def test_expired_certificate(): def test_hostnames(): - (public_key_data, _) = generate_test_cert(hostname="foo", san_list=["DNS:bar", "DNS:baz"]) + (public_key_data, _) = generate_test_cert(hostname="foo", san_list=[b"DNS:bar", b"DNS:baz"]) cert = load_certificate(public_key_data) assert cert.names == set(["foo", "bar", "baz"]) @@ -71,7 +71,7 @@ def test_hostnames(): def test_wildcard_hostnames(): - (public_key_data, _) = generate_test_cert(hostname="foo", san_list=["DNS:*.bar"]) + (public_key_data, _) = generate_test_cert(hostname="foo", san_list=[b"DNS:*.bar"]) cert = load_certificate(public_key_data) assert cert.names == set(["foo", "*.bar"]) @@ -85,7 +85,7 @@ def test_wildcard_hostnames(): def test_nondns_hostnames(): - (public_key_data, _) = generate_test_cert(hostname="foo", san_list=["URI:yarg"]) + (public_key_data, _) = generate_test_cert(hostname="foo", san_list=[b"URI:yarg"]) cert = load_certificate(public_key_data) assert cert.names == set(["foo"]) @@ -105,7 +105,7 @@ def test_invalid_private_key(): (public_key_data, _) = generate_test_cert() private_key = NamedTemporaryFile(delete=True) - private_key.write("somerandomdata") + private_key.write(b"somerandomdata") private_key.seek(0) cert = load_certificate(public_key_data) diff --git a/util/security/token.py b/util/security/token.py index 8db6395f45..cab82bf2fd 100644 --- a/util/security/token.py +++ b/util/security/token.py @@ -1,7 +1,8 @@ from collections import namedtuple - import base64 +from util.bytes import Bytes + DELIMITER = ":" DecodedToken = namedtuple("DecodedToken", ["public_code", "private_token"]) @@ -13,18 +14,21 @@ def encode_public_private_token(public_code, private_token, allow_public_only=Fa assert allow_public_only return public_code - assert isinstance(private_token, basestring) - return base64.b64encode("%s%s%s" % (public_code, DELIMITER, private_token)) + assert isinstance(private_token, str) and isinstance(public_code, str) + b = ("%s%s%s" % (public_code, DELIMITER, private_token)).encode("utf-8") + + return base64.b64encode(b) def decode_public_private_token(encoded, allow_public_only=False): + token = Bytes.for_string_or_unicode(encoded) try: - decoded = base64.b64decode(encoded) + decoded = base64.b64decode(token.as_encoded_str()).decode("utf-8") except (ValueError, TypeError): if not allow_public_only: return None - return DecodedToken(encoded, None) + return DecodedToken(token.as_unicode(), None) parts = decoded.split(DELIMITER, 2) if len(parts) != 2: diff --git a/util/streamingjsonencoder.py b/util/streamingjsonencoder.py index a38244ac7d..0ccd091477 100644 --- 
a/util/streamingjsonencoder.py +++ b/util/streamingjsonencoder.py @@ -30,10 +30,13 @@ import collections import json -from json.encoder import encode_basestring, encode_basestring_ascii, FLOAT_REPR, INFINITY +from json.encoder import encode_basestring, encode_basestring_ascii, INFINITY from types import GeneratorType +FLOAT_REPR = str + + class StreamingJSONEncoder(json.JSONEncoder): def iterencode(self, o, _one_shot=False): """ @@ -57,12 +60,6 @@ def iterencode(self, o, _one_shot=False): _encoder = encode_basestring_ascii else: _encoder = encode_basestring - if self.encoding != "utf-8": - - def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding): - if isinstance(o, str): - o = o.decode(_encoding) - return _orig_encoder(o) def floatstr( o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY @@ -110,10 +107,7 @@ def _make_iterencode( _sort_keys, _skipkeys, _one_shot, - False=False, - True=True, ValueError=ValueError, - basestring=basestring, dict=dict, float=float, GeneratorType=GeneratorType, @@ -121,7 +115,7 @@ def _make_iterencode( int=int, isinstance=isinstance, list=list, - long=long, + long=int, str=str, tuple=tuple, ): @@ -156,7 +150,7 @@ def _iterencode_list(lst, _current_indent_level): first = False else: buf = separator - if isinstance(value, basestring): + if isinstance(value, str): yield buf + _encoder(value) elif value is None: yield buf + "null" @@ -164,7 +158,7 @@ def _iterencode_list(lst, _current_indent_level): yield buf + "true" elif value is False: yield buf + "false" - elif isinstance(value, (int, long)): + elif isinstance(value, int): yield buf + str(value) elif isinstance(value, float): yield buf + _floatstr(value) @@ -208,18 +202,18 @@ def _iterencode_dict(dct, _current_indent_level): item_separator = _item_separator first = True if _sort_keys: - items = dct.items() + items = list(dct.items()) items.sort(key=lambda kv: kv[0]) else: - items = dct.iteritems() + items = iter(dct.items()) for key, value in items: - if isinstance(key, basestring): + if isinstance(key, str): pass # JavaScript is weakly typed for these, so it makes sense to # also allow them. Many encoders seem to do something like this. 
elif isinstance(key, float): key = _floatstr(key) - elif isinstance(key, (int, long)): + elif isinstance(key, int): key = str(key) elif key is True: key = "true" @@ -237,7 +231,7 @@ def _iterencode_dict(dct, _current_indent_level): yield item_separator yield _encoder(key) yield _key_separator - if isinstance(value, basestring): + if isinstance(value, str): yield _encoder(value) elif value is None: yield "null" @@ -245,7 +239,7 @@ def _iterencode_dict(dct, _current_indent_level): yield "true" elif value is False: yield "false" - elif isinstance(value, (int, long)): + elif isinstance(value, int): yield str(value) elif isinstance(value, float): yield _floatstr(value) @@ -266,7 +260,7 @@ def _iterencode_dict(dct, _current_indent_level): del markers[markerid] def _iterencode(o, _current_indent_level): - if isinstance(o, basestring): + if isinstance(o, str): yield _encoder(o) elif o is None: yield "null" @@ -274,7 +268,7 @@ def _iterencode(o, _current_indent_level): yield "true" elif o is False: yield "false" - elif isinstance(o, (int, long)): + elif isinstance(o, int): yield str(o) elif isinstance(o, float): yield _floatstr(o) diff --git a/util/test/test_dockerfileparse.py b/util/test/test_dockerfileparse.py index 7ec34821cb..199162a0ff 100644 --- a/util/test/test_dockerfileparse.py +++ b/util/test/test_dockerfileparse.py @@ -69,9 +69,7 @@ def test_unicode_parse_as_unicode(): """ FROM someimage:latest MAINTAINER José Schorr - """.decode( - "utf-8" - ) + """ ) assert parsed.get_image_and_tag() == ("someimage", "latest") diff --git a/util/test/test_failover.py b/util/test/test_failover.py index 970cee78bd..8df9056fa8 100644 --- a/util/test/test_failover.py +++ b/util/test/test_failover.py @@ -40,7 +40,7 @@ def test_readonly_failover(stop_on, exception): """ counter = Counter() arg_sets = [] - for i in xrange(stop_on): + for i in range(stop_on): should_raise = exception if exception is not None and i == stop_on - 1 else None arg_sets.append(((counter,), {"should_raise": should_raise})) diff --git a/util/test/test_morecollections.py b/util/test/test_morecollections.py index 27d02be29e..3c562ce100 100644 --- a/util/test/test_morecollections.py +++ b/util/test/test_morecollections.py @@ -6,36 +6,36 @@ def test_fastindexlist_basic_usage(): # Add 1 indexlist.add(1) - assert indexlist.values() == [1] + assert list(indexlist.values()) == [1] assert indexlist.index(1) == 0 # Add 2 indexlist.add(2) - assert indexlist.values() == [1, 2] + assert list(indexlist.values()) == [1, 2] assert indexlist.index(1) == 0 assert indexlist.index(2) == 1 # Pop nothing. indexlist.pop_until(-1) - assert indexlist.values() == [1, 2] + assert list(indexlist.values()) == [1, 2] assert indexlist.index(1) == 0 assert indexlist.index(2) == 1 # Pop 1. assert indexlist.pop_until(0) == [1] - assert indexlist.values() == [2] + assert list(indexlist.values()) == [2] assert indexlist.index(1) is None assert indexlist.index(2) == 0 # Add 3. indexlist.add(3) - assert indexlist.values() == [2, 3] + assert list(indexlist.values()) == [2, 3] assert indexlist.index(2) == 0 assert indexlist.index(3) == 1 # Pop 2, 3. 
assert indexlist.pop_until(1) == [2, 3] - assert indexlist.values() == [] + assert list(indexlist.values()) == [] assert indexlist.index(1) is None assert indexlist.index(2) is None assert indexlist.index(3) is None diff --git a/util/test/test_names.py b/util/test/test_names.py index 4df6d3f26a..6ee1602045 100644 --- a/util/test/test_names.py +++ b/util/test/test_names.py @@ -28,7 +28,7 @@ def test_escape_tag(input_tag, expected): ("dev-table", True), # Hyphens allowed ("dev_table", True), # Underscores allowed ("devtable123", True), # Numbers allowed - (u"🌸", False), # Non-ASCII NOT allowed + ("🌸", False), # Non-ASCII NOT allowed (".foo", False), # Cannot start with a dot ("_foo", False), # Cannot start with an underscore ("-foo", False), # Cannot start with a dash diff --git a/util/test/test_validation.py b/util/test/test_validation.py index 3e51ace79d..4c6f02a84b 100644 --- a/util/test/test_validation.py +++ b/util/test/test_validation.py @@ -23,7 +23,7 @@ ("_test", False), ("Test", False), ("hello world", False), - (u"hello→world", False), + ("hello→world", False), ("te---st", False), ], ) @@ -96,19 +96,20 @@ def test_validate_label_key(key, is_valid): ("abc", "abc"), ("abcdefghijklmnopqrstuvwxyz1234567890", "abcdefghijklmnopqrstuvwxyz1234567890"), ("c" * 256, "c" * 255), - (u"\xc6neid", "aeneid"), - (u"\xe9tude", "etude"), - (u"\u5317\u4eb0", "bei_jing"), - (u"\u1515\u14c7\u14c7", "shanana"), - (u"\u13d4\u13b5\u13c6", "taliqua"), - (u"\u0726\u071b\u073d\u0710\u073a", "ptu_i"), - (u"\u0905\u092d\u093f\u091c\u0940\u0924", "abhijiit"), - (u"\u0985\u09ad\u09bf\u099c\u09c0\u09a4", "abhijiit"), - (u"\u0d05\u0d2d\u0d3f\u0d1c\u0d40\u0d24", "abhijiit"), - (u"\u0d2e\u0d32\u0d2f\u0d3e\u0d32\u0d2e\u0d4d", "mlyaalm"), - (u"\ue000", "00"), - (u"\u03ff", "00"), - (u"\u0d2e\u0d32\u03ff\u03ff\u0d2e\u0d32", "mlml"), + ("\xc6neid", "aeneid"), + ("\xe9tude", "etude"), + ("\u5317\u4eb0", "bei_jing"), + ("\u1515\u14c7\u14c7", "shanana"), + ("\u13d4\u13b5\u13c6", "taliqua"), + ("\u0726\u071b\u073d\u0710\u073a", "ptu_i"), + ("\u0905\u092d\u093f\u091c\u0940\u0924", "abhijiit"), + ("\u0985\u09ad\u09bf\u099c\u09c0\u09a4", "abhijiit"), + ("\u0d05\u0d2d\u0d3f\u0d1c\u0d40\u0d24", "abhijiit"), + ("\u0d2e\u0d32\u0d2f\u0d3e\u0d32\u0d2e\u0d4d", "mlyaalm"), + ("\ue000", "00"), + ("\u03ff", "00"), + ("\u0d2e\u0d32\u03ff\u03ff\u0d2e\u0d32", "ml_ml"), + ("\u0d2e\u0d32\u0d2e\u0d32", "mlml"), ], ) def test_generate_valid_usernames(input_username, expected_output): diff --git a/util/tufmetadata/api.py b/util/tufmetadata/api.py index df0b214c9e..b173b29f7f 100644 --- a/util/tufmetadata/api.py +++ b/util/tufmetadata/api.py @@ -1,6 +1,6 @@ import logging -from urlparse import urljoin +from urllib.parse import urljoin from posixpath import join from abc import ABCMeta, abstractmethod @@ -278,7 +278,7 @@ def _auth_header(self, gun, actions): TOKEN_VALIDITY_LIFETIME_S, self._instance_keys, ) - return {"Authorization": "Bearer %s" % token} + return {"Authorization": "Bearer %s" % token.decode("ascii")} def _get(self, gun, metadata_file): return self._call( diff --git a/util/useremails.py b/util/useremails.py index 444685704f..670c1142ab 100644 --- a/util/useremails.py +++ b/util/useremails.py @@ -70,7 +70,7 @@ def send_email(recipient, subject, template_file, parameters, action=None): mail.send(msg) except Exception as ex: logger.exception("Error while trying to send email to %s", recipient) - raise CannotSendEmailException(ex.message) + raise CannotSendEmailException(str(ex)) def render_email(app_title, app_url, recipient, 
subject, template_file, parameters, action=None): diff --git a/util/validation.py b/util/validation.py index 55a40eb41a..d16b7bf66f 100644 --- a/util/validation.py +++ b/util/validation.py @@ -2,14 +2,14 @@ import re import json -import anunidecode # Don't listen to pylint's lies. This import is required. +from text_unidecode import unidecode from peewee import OperationalError INVALID_PASSWORD_MESSAGE = ( "Invalid password, password must be at least " + "8 characters and contain no whitespace." ) -VALID_CHARACTERS = string.digits + string.lowercase +VALID_CHARACTERS = string.digits + string.ascii_lowercase MIN_USERNAME_LENGTH = 2 MAX_USERNAME_LENGTH = 255 @@ -68,7 +68,7 @@ def _gen_filler_chars(num_filler_chars): def generate_valid_usernames(input_username): - normalized = input_username.encode("unidecode", "ignore").strip().lower() + normalized = unidecode(input_username).strip().lower() prefix = re.sub(INVALID_USERNAME_CHARACTERS, "_", normalized)[:MAX_USERNAME_LENGTH] prefix = re.sub(r"_{2,}", "_", prefix) diff --git a/util/vendor/paxtarfile.py b/util/vendor/paxtarfile.py index 2c40091eed..5e13c93035 100644 --- a/util/vendor/paxtarfile.py +++ b/util/vendor/paxtarfile.py @@ -47,7 +47,7 @@ # --------- # Imports # --------- -from __builtin__ import open as bltn_open +from builtins import open as bltn_open import sys import os import shutil @@ -146,26 +146,26 @@ # --------------------------------------------------------- # Bits used in the mode field, values in octal. # --------------------------------------------------------- -S_IFLNK = 0120000 # symbolic link -S_IFREG = 0100000 # regular file -S_IFBLK = 0060000 # block device -S_IFDIR = 0040000 # directory -S_IFCHR = 0020000 # character device -S_IFIFO = 0010000 # fifo - -TSUID = 04000 # set UID on execution -TSGID = 02000 # set GID on execution -TSVTX = 01000 # reserved - -TUREAD = 0400 # read by owner -TUWRITE = 0200 # write by owner -TUEXEC = 0100 # execute/search by owner -TGREAD = 0040 # read by group -TGWRITE = 0020 # write by group -TGEXEC = 0010 # execute/search by group -TOREAD = 0004 # read by other -TOWRITE = 0002 # write by other -TOEXEC = 0001 # execute/search by other +S_IFLNK = 0o120000 # symbolic link +S_IFREG = 0o100000 # regular file +S_IFBLK = 0o060000 # block device +S_IFDIR = 0o040000 # directory +S_IFCHR = 0o020000 # character device +S_IFIFO = 0o010000 # fifo + +TSUID = 0o4000 # set UID on execution +TSGID = 0o2000 # set GID on execution +TSVTX = 0o1000 # reserved + +TUREAD = 0o400 # read by owner +TUWRITE = 0o200 # write by owner +TUEXEC = 0o100 # execute/search by owner +TGREAD = 0o040 # read by group +TGWRITE = 0o020 # write by group +TGEXEC = 0o010 # execute/search by group +TOREAD = 0o004 # read by other +TOWRITE = 0o002 # write by other +TOEXEC = 0o001 # execute/search by other # --------------------------------------------------------- # initialization @@ -203,14 +203,14 @@ def nti(s): """ # There are two possible encodings for a number field, see # itn() below. 
- if s[0] != chr(0200): + if s[0] != chr(0o200): try: n = int(nts(s).strip() or "0", 8) except ValueError: raise InvalidHeaderError("invalid header") else: - n = 0L - for i in xrange(len(s) - 1): + n = 0 + for i in range(len(s) - 1): n <<= 8 n += ord(s[i + 1]) return n @@ -238,10 +238,10 @@ def itn(n, digits=8, format=DEFAULT_FORMAT): n = struct.unpack("L", struct.pack("l", n))[0] s = "" - for i in xrange(digits - 1): - s = chr(n & 0377) + s + for i in range(digits - 1): + s = chr(n & 0o377) + s n >>= 8 - s = chr(0200) + s + s = chr(0o200) + s return s @@ -299,7 +299,7 @@ def copyfileobj(src, dst, length=None): BUFSIZE = 16 * 1024 blocks, remainder = divmod(length, BUFSIZE) - for b in xrange(blocks): + for b in range(blocks): buf = src.read(BUFSIZE) if len(buf) < BUFSIZE: raise IOError("end of file reached") @@ -454,7 +454,7 @@ def __init__(self, name, mode): mode = {"r": os.O_RDONLY, "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,}[mode] if hasattr(os, "O_BINARY"): mode |= os.O_BINARY - self.fd = os.open(name, mode, 0666) + self.fd = os.open(name, mode, 0o666) def close(self): os.close(self.fd) @@ -497,7 +497,7 @@ def __init__(self, name, mode, comptype, fileobj, bufsize): self.fileobj = fileobj self.bufsize = bufsize self.buf = "" - self.pos = 0L + self.pos = 0 self.closed = False try: @@ -507,7 +507,7 @@ def __init__(self, name, mode, comptype, fileobj, bufsize): except ImportError: raise CompressionError("zlib module is not available") self.zlib = zlib - self.crc = zlib.crc32("") & 0xFFFFFFFFL + self.crc = zlib.crc32("") & 0xFFFFFFFF if mode == "r": self._init_read_gz() else: @@ -540,9 +540,9 @@ def _init_write_gz(self): self.cmp = self.zlib.compressobj( 9, self.zlib.DEFLATED, -self.zlib.MAX_WBITS, self.zlib.DEF_MEM_LEVEL, 0 ) - timestamp = struct.pack("= 0: blocks, remainder = divmod(pos - self.pos, self.bufsize) - for i in xrange(blocks): + for i in range(blocks): self.read(self.bufsize) self.read(remainder) else: @@ -1061,7 +1061,7 @@ def __init__(self, name=""): name is the optional name of the member. """ self.name = name # member name - self.mode = 0644 # file permissions + self.mode = 0o644 # file permissions self.uid = 0 # user id self.gid = 0 # group id self.size = 0 # file size @@ -1106,7 +1106,7 @@ def get_info(self, encoding, errors): """ info = { "name": self.name, - "mode": self.mode & 07777, + "mode": self.mode & 0o7777, "uid": self.uid, "gid": self.gid, "size": self.size, @@ -1124,7 +1124,7 @@ def get_info(self, encoding, errors): info["name"] += "/" for key in ("name", "linkname", "uname", "gname"): - if type(info[key]) is unicode: + if type(info[key]) is str: info[key] = info[key].encode(encoding, errors) return info @@ -1218,7 +1218,7 @@ def create_pax_header(self, info, encoding, errors): val = info[name] if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float): - pax_headers[name] = unicode(val) + pax_headers[name] = str(val) info[name] = 0 # Create a pax extended header if necessary. @@ -1260,7 +1260,7 @@ def _create_header(info, format): """ parts = [ stn(info.get("name", ""), 100), - itn(info.get("mode", 0) & 07777, 8, format), + itn(info.get("mode", 0) & 0o7777, 8, format), itn(info.get("uid", 0), 8, format), itn(info.get("gid", 0), 8, format), itn(info.get("size", 0), 12, format), @@ -1316,7 +1316,7 @@ def _create_pax_generic_header(cls, pax_headers, type=XHDTYPE): The values must be unicode objects. 
""" records = [] - for keyword, value in pax_headers.iteritems(): + for keyword, value in pax_headers.items(): keyword = keyword.encode("utf8") value = value.encode("utf8") l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n' @@ -1467,11 +1467,11 @@ def _proc_sparse(self, tarfile): buf = self.buf sp = _ringbuffer() pos = 386 - lastpos = 0L - realpos = 0L + lastpos = 0 + realpos = 0 # There are 4 possible sparse structs in the # first header. - for i in xrange(4): + for i in range(4): try: offset = nti(buf[pos : pos + 12]) numbytes = nti(buf[pos + 12 : pos + 24]) @@ -1492,7 +1492,7 @@ def _proc_sparse(self, tarfile): while isextended == 1: buf = tarfile.fileobj.read(BLOCKSIZE) pos = 0 - for i in xrange(21): + for i in range(21): try: offset = nti(buf[pos : pos + 12]) numbytes = nti(buf[pos + 12 : pos + 24]) @@ -1586,7 +1586,7 @@ def _apply_pax_info(self, pax_headers, encoding, errors): """ Replace fields with supplemental information from a previous pax extended or global header. """ - for keyword, value in pax_headers.iteritems(): + for keyword, value in pax_headers.items(): if keyword not in PAX_FIELDS: continue @@ -1761,7 +1761,7 @@ def __init__( try: if self.mode == "r": self.firstmember = None - self.firstmember = self.next() + self.firstmember = next(self) if self.mode == "a": # Move to the end of the archive, @@ -1774,7 +1774,7 @@ def __init__( except EOFHeaderError: self.fileobj.seek(self.offset) break - except HeaderError, e: + except HeaderError as e: raise ReadError(str(e)) if self.mode in "aw": @@ -1850,7 +1850,7 @@ def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs): saved_pos = fileobj.tell() try: return func(name, "r", fileobj, **kwargs) - except (ReadError, CompressionError), e: + except (ReadError, CompressionError) as e: if fileobj is not None: fileobj.seek(saved_pos) continue @@ -2117,7 +2117,7 @@ def gettarinfo(self, name=None, arcname=None, fileobj=None): if type == REGTYPE: tarinfo.size = statres.st_size else: - tarinfo.size = 0L + tarinfo.size = 0 tarinfo.mtime = statres.st_mtime tarinfo.type = type tarinfo.linkname = linkname @@ -2149,22 +2149,24 @@ def list(self, verbose=True): for tarinfo in self: if verbose: - print filemode(tarinfo.mode), - print "%s/%s" % (tarinfo.uname or tarinfo.uid, tarinfo.gname or tarinfo.gid), + print(filemode(tarinfo.mode), end=" ") + print( + "%s/%s" % (tarinfo.uname or tarinfo.uid, tarinfo.gname or tarinfo.gid), end=" " + ) if tarinfo.ischr() or tarinfo.isblk(): - print "%10s" % ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor)), + print("%10s" % ("%d,%d" % (tarinfo.devmajor, tarinfo.devminor)), end=" ") else: - print "%10d" % tarinfo.size, - print "%d-%02d-%02d %02d:%02d:%02d" % time.localtime(tarinfo.mtime)[:6], + print("%10d" % tarinfo.size, end=" ") + print("%d-%02d-%02d %02d:%02d:%02d" % time.localtime(tarinfo.mtime)[:6], end=" ") - print tarinfo.name + ("/" if tarinfo.isdir() else ""), + print(tarinfo.name + ("/" if tarinfo.isdir() else ""), end=" ") if verbose: if tarinfo.issym(): - print "->", tarinfo.linkname, + print("->", tarinfo.linkname, end=" ") if tarinfo.islnk(): - print "link to", tarinfo.linkname, - print + print("link to", tarinfo.linkname, end=" ") + print() def add(self, name, arcname=None, recursive=True, exclude=None, filter=None): """ @@ -2207,7 +2209,7 @@ def add(self, name, arcname=None, recursive=True, exclude=None, filter=None): # Change or exclude the TarInfo object. 
if filter is not None: - tarinfo = filter(tarinfo) + tarinfo = filter(tarinfo) if tarinfo is None: self._dbg(2, "tarfile: Excluded %r" % name) return @@ -2273,7 +2275,7 @@ def extractall(self, path=".", members=None): # Extract directories with a safe mode. directories.append(tarinfo) tarinfo = copy.copy(tarinfo) - tarinfo.mode = 0700 + tarinfo.mode = 0o700 self.extract(tarinfo, path) # Reverse sort directories. @@ -2287,7 +2289,7 @@ self.chown(tarinfo, dirpath) self.utime(tarinfo, dirpath) self.chmod(tarinfo, dirpath) - except ExtractError, e: + except ExtractError as e: if self.errorlevel > 1: raise else: @@ -2302,7 +2304,7 @@ def extract(self, member, path=""): """ self._check("r") - if isinstance(member, basestring): + if isinstance(member, str): tarinfo = self.getmember(member) else: tarinfo = member @@ -2313,7 +2315,7 @@ def extract(self, member, path=""): try: self._extract_member(tarinfo, os.path.join(path, tarinfo.name)) - except EnvironmentError, e: + except EnvironmentError as e: if self.errorlevel > 0: raise else: @@ -2321,7 +2323,7 @@ def extract(self, member, path=""): self._dbg(1, "tarfile: %s" % e.strerror) else: self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) - except ExtractError, e: + except ExtractError as e: if self.errorlevel > 1: raise else: @@ -2341,7 +2343,7 @@ def extractfile(self, member): """ self._check("r") - if isinstance(member, basestring): + if isinstance(member, str): tarinfo = self.getmember(member) else: tarinfo = member @@ -2422,8 +2424,8 @@ def makedir(self, tarinfo, targetpath): try: # Use a safe mode for the directory, the real mode is set # later in _extract_member(). - os.mkdir(targetpath, 0700) - except EnvironmentError, e: + os.mkdir(targetpath, 0o700) + except EnvironmentError as e: if e.errno != errno.EEXIST: raise @@ -2516,7 +2518,7 @@ def chown(self, tarinfo, targetpath): else: if sys.platform != "os2emx": os.chown(targetpath, u, g) - except EnvironmentError, e: + except EnvironmentError as e: raise ExtractError("could not change owner") def chmod(self, tarinfo, targetpath): @@ -2526,7 +2528,7 @@ def chmod(self, tarinfo, targetpath): if hasattr(os, "chmod"): try: os.chmod(targetpath, tarinfo.mode) - except EnvironmentError, e: + except EnvironmentError as e: raise ExtractError("could not change mode") def utime(self, tarinfo, targetpath): @@ -2537,11 +2539,11 @@ def utime(self, tarinfo, targetpath): return try: os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime)) - except EnvironmentError, e: + except EnvironmentError as e: raise ExtractError("could not change modification time") # -------------------------------------------------------------------------- - def next(self): + def __next__(self): """ Return the next member of the archive as a TarInfo object, when TarFile is opened for reading. 
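The next() -> __next__() rename in the hunk above follows the Python 3 iterator protocol, where the builtin next(obj) dispatches to obj.__next__(). A minimal sketch of that protocol (the Countdown class is illustrative and not part of this diff):

    class Countdown:
        # Illustrative iterator: yields n, n-1, ..., 1 under the Python 3 protocol.
        def __init__(self, n):
            self.n = n

        def __iter__(self):
            return self

        def __next__(self):  # Python 2 spelled this next(); Python 3 only looks up __next__()
            if self.n <= 0:
                raise StopIteration
            value = self.n
            self.n -= 1
            return value

    print(list(Countdown(3)))  # [3, 2, 1]; next(obj) calls obj.__next__(), as in the next(self) call sites elsewhere in this file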
@@ -2565,12 +2567,12 @@ def next(self): while True: try: tarinfo = self.tarinfo.fromtarfile(self) - except EOFHeaderError, e: + except EOFHeaderError as e: if self.ignore_zeros: self._dbg(2, "0x%X: %s" % (self.offset, e)) self.offset += BLOCKSIZE continue - except InvalidHeaderError, e: + except InvalidHeaderError as e: if self.ignore_zeros: self._dbg(2, "0x%X: %s" % (self.offset, e)) self.offset += BLOCKSIZE @@ -2580,10 +2582,10 @@ def next(self): except EmptyHeaderError: if self.offset == 0: raise ReadError("empty file") - except TruncatedHeaderError, e: + except TruncatedHeaderError as e: if self.offset == 0: raise ReadError(str(e)) - except SubsequentHeaderError, e: + except SubsequentHeaderError as e: raise ReadError(str(e)) break @@ -2627,7 +2629,7 @@ def _load(self): Read through the entire archive file and look for readable members. """ while True: - tarinfo = self.next() + tarinfo = next(self) if tarinfo is None: break self._loaded = True @@ -2647,7 +2649,9 @@ def _find_link_target(self, tarinfo): """ if tarinfo.issym(): # Always search the entire archive. - linkname = "/".join(filter(None, (os.path.dirname(tarinfo.name), tarinfo.linkname))) + linkname = "/".join( + [_f for _f in (os.path.dirname(tarinfo.name), tarinfo.linkname) if _f] + ) limit = None else: # Search the archive before the link, because a hard link is @@ -2674,7 +2678,7 @@ def _dbg(self, level, msg): Write debugging output to sys.stderr. """ if level <= self.debug: - print >> sys.stderr, msg + print(msg, file=sys.stderr) def __enter__(self): self._check() @@ -2714,7 +2718,7 @@ def __iter__(self): """ return self - def next(self): + def __next__(self): """ Return the next item using TarFile's next() method. @@ -2725,11 +2729,11 @@ def next(self): # which will cause TarIter to stop prematurely. 
if self.index == 0 and self.tarfile.firstmember is not None: - tarinfo = self.tarfile.next() + tarinfo = next(self.tarfile) elif self.index < len(self.tarfile.members): tarinfo = self.tarfile.members[self.index] elif not self.tarfile._loaded: - tarinfo = self.tarfile.next() + tarinfo = next(self.tarfile) if not tarinfo: self.tarfile._loaded = True raise StopIteration @@ -2825,10 +2829,10 @@ def __init__(self, file, mode="r", compression=TAR_PLAIN): m.date_time = time.gmtime(m.mtime)[:6] def namelist(self): - return map(lambda m: m.name, self.infolist()) + return [m.name for m in self.infolist()] def infolist(self): - return filter(lambda m: m.type in REGULAR_TYPES, self.tarfile.getmembers()) + return [m for m in self.tarfile.getmembers() if m.type in REGULAR_TYPES] def printdir(self): self.tarfile.list() @@ -2847,9 +2851,9 @@ def write(self, filename, arcname=None, compress_type=None): def writestr(self, zinfo, bytes): try: - from cStringIO import StringIO + from io import StringIO except ImportError: - from StringIO import StringIO + from io import StringIO import calendar tinfo = TarInfo(zinfo.filename) diff --git a/util/verifybackfill.py b/util/verifybackfill.py index 8a00b6dcc8..04bc4bf984 100644 --- a/util/verifybackfill.py +++ b/util/verifybackfill.py @@ -49,10 +49,10 @@ def verify_backfill(namespace_name): assert tag.repository == repo_tag.repository, _vs(tag.repository_id, repo_tag.repository_id) assert tag.reversion == repo_tag.reversion, _vs(tag.reversion, repo_tag.reversion) - start_check = int(tag.lifetime_start_ms / 1000) == repo_tag.lifetime_start_ts + start_check = int(tag.lifetime_start_ms // 1000) == repo_tag.lifetime_start_ts assert start_check, _vs(tag.lifetime_start_ms, repo_tag.lifetime_start_ts) if repo_tag.lifetime_end_ts is not None: - end_check = int(tag.lifetime_end_ms / 1000) == repo_tag.lifetime_end_ts + end_check = int(tag.lifetime_end_ms // 1000) == repo_tag.lifetime_end_ts assert end_check, _vs(tag.lifetime_end_ms, repo_tag.lifetime_end_ts) else: assert tag.lifetime_end_ms is None diff --git a/web.py b/web.py index a684e462b7..064f7dcd0e 100644 --- a/web.py +++ b/web.py @@ -1,8 +1,3 @@ -# NOTE: Must be before we import or call anything that may be synchronous. -from gevent import monkey - -monkey.patch_all() - from app import app as application from endpoints.api import api_bp from endpoints.bitbuckettrigger import bitbuckettrigger diff --git a/workers/blobuploadcleanupworker/blobuploadcleanupworker.py b/workers/blobuploadcleanupworker/blobuploadcleanupworker.py index 10fb0ba339..bb5edaf288 100644 --- a/workers/blobuploadcleanupworker/blobuploadcleanupworker.py +++ b/workers/blobuploadcleanupworker/blobuploadcleanupworker.py @@ -60,7 +60,7 @@ def _cleanup_uploads(self): logger.debug( "Got error when trying to cancel chunked upload %s: %s", stale_upload.uuid, - ex.message, + str(ex), ) # Delete the stale upload's row. 
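The ex.message -> str(ex) substitution in the hunk above recurs throughout this migration (secscan api, useremails, notificationworker, queueworker): Python 3 removed the BaseException.message attribute, so handlers format the exception object itself. A minimal sketch, using an illustrative error message that is not taken from the code:

    try:
        raise ValueError("upload already cancelled")  # illustrative error, not from the diff
    except ValueError as ex:
        # str(ex) renders the exception's arguments; ex.message would raise AttributeError on Python 3.
        print("Got error when trying to cancel chunked upload: %s" % str(ex))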
diff --git a/workers/buildlogsarchiver/buildlogsarchiver.py b/workers/buildlogsarchiver/buildlogsarchiver.py index 51493dacce..62ffe25a53 100644 --- a/workers/buildlogsarchiver/buildlogsarchiver.py +++ b/workers/buildlogsarchiver/buildlogsarchiver.py @@ -49,7 +49,7 @@ def _archive_redis_buildlogs(self): with SpooledTemporaryFile(MEMORY_TEMPFILE_SIZE) as tempfile: with GzipFile("testarchive", fileobj=tempfile) as zipstream: for chunk in StreamingJSONEncoder().iterencode(to_encode): - zipstream.write(chunk) + zipstream.write(chunk.encode("utf-8")) tempfile.seek(0) log_archive.store_file( diff --git a/workers/chunkcleanupworker.py b/workers/chunkcleanupworker.py index 5dde25ea9d..e631971f2c 100644 --- a/workers/chunkcleanupworker.py +++ b/workers/chunkcleanupworker.py @@ -37,7 +37,7 @@ def process_queue_item(self, job_details): logging.config.fileConfig(logfile_path(debug=False), disable_existing_loggers=False) engines = set( - [config[0] for config in app.config.get("DISTRIBUTED_STORAGE_CONFIG", {}).values()] + [config[0] for config in list(app.config.get("DISTRIBUTED_STORAGE_CONFIG", {}).values())] ) if "SwiftStorage" not in engines: logger.debug("Swift storage not detected; sleeping") diff --git a/workers/exportactionlogsworker.py b/workers/exportactionlogsworker.py index 9a2332d247..fde1c57edc 100644 --- a/workers/exportactionlogsworker.py +++ b/workers/exportactionlogsworker.py @@ -117,7 +117,7 @@ def _process_queue_item(self, job_details, storage): upload_id, 0, -1, - BytesIO(str(prefix_data)), + BytesIO(prefix_data.encode("utf-8")), upload_metadata, ) uploaded_byte_count = len(prefix_data) @@ -157,7 +157,7 @@ def _process_queue_item(self, job_details, storage): upload_id, 0, -1, - BytesIO(str(suffix_data)), + BytesIO(suffix_data.encode("utf-8")), upload_metadata, ) if upload_error is not None: diff --git a/workers/logrotateworker.py b/workers/logrotateworker.py index d91c3d173b..2cfd6dffcd 100644 --- a/workers/logrotateworker.py +++ b/workers/logrotateworker.py @@ -112,7 +112,7 @@ def _write_logs(filename, logs, log_archive): with SpooledTemporaryFile(MEMORY_TEMPFILE_SIZE) as tempfile: with GzipFile("temp_action_log_rotate", fileobj=tempfile, compresslevel=1) as zipstream: for chunk in StreamingJSONEncoder().iterencode(logs): - zipstream.write(chunk) + zipstream.write(chunk.encode("utf-8")) tempfile.seek(0) log_archive.store_file(tempfile, JSON_MIMETYPE, content_encoding="gzip", file_id=filename) diff --git a/workers/notificationworker/notificationworker.py b/workers/notificationworker/notificationworker.py index e88c8b3e6a..3a57f2af4d 100644 --- a/workers/notificationworker/notificationworker.py +++ b/workers/notificationworker/notificationworker.py @@ -22,11 +22,11 @@ def process_queue_item(self, job_details): event_handler = NotificationEvent.get_event(event_name) method_handler = NotificationMethod.get_method(method_name) except InvalidNotificationMethodException as ex: - logger.exception("Cannot find notification method: %s", ex.message) - raise JobException("Cannot find notification method: %s" % ex.message) + logger.exception("Cannot find notification method: %s", str(ex)) + raise JobException("Cannot find notification method: %s" % str(ex)) except InvalidNotificationEventException as ex: - logger.exception("Cannot find notification event: %s", ex.message) - raise JobException("Cannot find notification event: %s" % ex.message) + logger.exception("Cannot find notification event: %s", str(ex)) + raise JobException("Cannot find notification event: %s" % str(ex)) if 
event_handler.should_perform(job_details["event_data"], notification): try: diff --git a/workers/queueworker.py b/workers/queueworker.py index 3f7ffd4cb4..1c3aa884d6 100644 --- a/workers/queueworker.py +++ b/workers/queueworker.py @@ -94,7 +94,7 @@ def run_watchdog(self): logger.error( "The worker has encountered an error via watchdog and will not take new jobs" ) - logger.error(exc.message) + logger.error(str(exc)) self.mark_current_incomplete(restore_retry=True) self._stop.set() @@ -131,7 +131,7 @@ def poll_queue(self): logger.error( "The worker has encountered an error via the job and will not take new jobs" ) - logger.error(exc.message) + logger.error(str(exc)) self.mark_current_incomplete(restore_retry=True) self._stop.set() diff --git a/workers/repomirrorworker/__init__.py b/workers/repomirrorworker/__init__.py index e73389035f..bac4fc98e8 100644 --- a/workers/repomirrorworker/__init__.py +++ b/workers/repomirrorworker/__init__.py @@ -116,7 +116,7 @@ def perform_mirror(skopeo, mirror): "repo_mirror_sync_failed", "end", "'%s' with tag pattern '%s': %s" - % (mirror.external_reference, ",".join(mirror.root_rule.rule_value), e.message), + % (mirror.external_reference, ",".join(mirror.root_rule.rule_value), str(e)), tags=", ".join(tags), stdout=e.stdout, stderr=e.stderr, @@ -277,7 +277,7 @@ def tags_to_mirror(skopeo, mirror): matching_tags = [] for pattern in mirror.root_rule.rule_value: - matching_tags = matching_tags + filter(lambda tag: fnmatch.fnmatch(tag, pattern), all_tags) + matching_tags = matching_tags + [tag for tag in all_tags if fnmatch.fnmatch(tag, pattern)] matching_tags = list(set(matching_tags)) matching_tags.sort() return matching_tags @@ -351,12 +351,14 @@ def rollback(mirror, since_ms): if tag.lifetime_end_ms: # If a future entry exists with a start time equal to the end time for this tag, # then the action was a move, rather than a delete and a create. 
- newer_tag = filter( - lambda t: tag != t - and tag.name == t.name - and tag.lifetime_end_ms - and t.lifetime_start_ms == tag.lifetime_end_ms, - tags, + newer_tag = list( + filter( + lambda t: tag != t + and tag.name == t.name + and tag.lifetime_end_ms + and t.lifetime_start_ms == tag.lifetime_end_ms, + tags, + ) )[0] if newer_tag: logger.debug("Repo mirroring rollback revert tag '%s'" % tag) @@ -373,7 +375,7 @@ def rollback(mirror, since_ms): def delete_obsolete_tags(mirror, tags): existing_tags = lookup_alive_tags_shallow(mirror.repository.id) - obsolete_tags = list(filter(lambda tag: tag.name not in tags, existing_tags)) + obsolete_tags = list([tag for tag in existing_tags if tag.name not in tags]) for tag in obsolete_tags: delete_tag(mirror.repository, tag.name) diff --git a/workers/repomirrorworker/test/test_repomirrorworker.py b/workers/repomirrorworker/test/test_repomirrorworker.py index 4a3095342d..cd9646f5ab 100644 --- a/workers/repomirrorworker/test/test_repomirrorworker.py +++ b/workers/repomirrorworker/test/test_repomirrorworker.py @@ -43,18 +43,18 @@ def _create_tag(repo, name): app_config = {"TESTING": True} config_json = json.dumps( { - "config": {"author": u"Repo Mirror",}, + "config": {"author": "Repo Mirror",}, "rootfs": {"type": "layers", "diff_ids": []}, "history": [ { "created": "2019-07-30T18:37:09.284840891Z", "created_by": "base", - "author": u"Repo Mirror", + "author": "Repo Mirror", }, ], } ) - upload.upload_chunk(app_config, BytesIO(config_json)) + upload.upload_chunk(app_config, BytesIO(config_json.encode("utf-8"))) blob = upload.commit_to_blob(app_config) builder = DockerSchema2ManifestBuilder() builder.set_config_digest(blob.digest, blob.compressed_size) @@ -83,7 +83,7 @@ def test_successful_mirror(run_skopeo_mock, initialized_db, app): "/usr/bin/skopeo", "inspect", "--tls-verify=False", - u"docker://registry.example.com/namespace/repository:latest", + "docker://registry.example.com/namespace/repository:latest", ], "results": SkopeoResults(True, [], '{"RepoTags": ["latest"]}', ""), }, @@ -96,8 +96,8 @@ def test_successful_mirror(run_skopeo_mock, initialized_db, app): "--dest-creds", "%s:%s" % (mirror.internal_robot.username, retrieve_robot_token(mirror.internal_robot)), - u"docker://registry.example.com/namespace/repository:latest", - u"docker://localhost:5000/mirror/repo:latest", + "docker://registry.example.com/namespace/repository:latest", + "docker://localhost:5000/mirror/repo:latest", ], "results": SkopeoResults(True, [], "stdout", "stderr"), }, @@ -140,7 +140,7 @@ def test_successful_disabled_sync_now(run_skopeo_mock, initialized_db, app): "/usr/bin/skopeo", "inspect", "--tls-verify=True", - u"docker://registry.example.com/namespace/repository:latest", + "docker://registry.example.com/namespace/repository:latest", ], "results": SkopeoResults(True, [], '{"RepoTags": ["latest"]}', ""), }, @@ -153,8 +153,8 @@ def test_successful_disabled_sync_now(run_skopeo_mock, initialized_db, app): "--dest-creds", "%s:%s" % (mirror.internal_robot.username, retrieve_robot_token(mirror.internal_robot)), - u"docker://registry.example.com/namespace/repository:latest", - u"docker://localhost:5000/mirror/repo:latest", + "docker://registry.example.com/namespace/repository:latest", + "docker://localhost:5000/mirror/repo:latest", ], "results": SkopeoResults(True, [], "stdout", "stderr"), }, @@ -195,7 +195,7 @@ def test_successful_mirror_verbose_logs(run_skopeo_mock, initialized_db, app, mo "--debug", "inspect", "--tls-verify=True", - 
u"docker://registry.example.com/namespace/repository:latest", + "docker://registry.example.com/namespace/repository:latest", ], "results": SkopeoResults(True, [], '{"RepoTags": ["latest"]}', ""), }, @@ -209,8 +209,8 @@ def test_successful_mirror_verbose_logs(run_skopeo_mock, initialized_db, app, mo "--dest-creds", "%s:%s" % (mirror.internal_robot.username, retrieve_robot_token(mirror.internal_robot)), - u"docker://registry.example.com/namespace/repository:latest", - u"docker://localhost:5000/mirror/repo:latest", + "docker://registry.example.com/namespace/repository:latest", + "docker://localhost:5000/mirror/repo:latest", ], "results": SkopeoResults(True, [], "Success", ""), }, @@ -257,7 +257,7 @@ def test_rollback(run_skopeo_mock, initialized_db, app): "/usr/bin/skopeo", "inspect", "--tls-verify=True", - u"docker://registry.example.com/namespace/repository:updated", + "docker://registry.example.com/namespace/repository:updated", ], "results": SkopeoResults( True, [], '{"RepoTags": ["latest", "updated", "created", "zzerror"]}', "" @@ -272,8 +272,8 @@ def test_rollback(run_skopeo_mock, initialized_db, app): "--dest-creds", "%s:%s" % (mirror.internal_robot.username, retrieve_robot_token(mirror.internal_robot)), - u"docker://registry.example.com/namespace/repository:created", - u"docker://localhost:5000/mirror/repo:created", + "docker://registry.example.com/namespace/repository:created", + "docker://localhost:5000/mirror/repo:created", ], "results": SkopeoResults(True, [], "Success", ""), }, @@ -286,8 +286,8 @@ def test_rollback(run_skopeo_mock, initialized_db, app): "--dest-creds", "%s:%s" % (mirror.internal_robot.username, retrieve_robot_token(mirror.internal_robot)), - u"docker://registry.example.com/namespace/repository:updated", - u"docker://localhost:5000/mirror/repo:updated", + "docker://registry.example.com/namespace/repository:updated", + "docker://localhost:5000/mirror/repo:updated", ], "results": SkopeoResults(True, [], "Success", ""), }, @@ -300,8 +300,8 @@ def test_rollback(run_skopeo_mock, initialized_db, app): "--dest-creds", "%s:%s" % (mirror.internal_robot.username, retrieve_robot_token(mirror.internal_robot)), - u"docker://registry.example.com/namespace/repository:zzerror", - u"docker://localhost:5000/mirror/repo:zzerror", + "docker://registry.example.com/namespace/repository:zzerror", + "docker://localhost:5000/mirror/repo:zzerror", ], "results": SkopeoResults(False, [], "", "ERROR"), }, @@ -347,7 +347,7 @@ def test_remove_obsolete_tags(initialized_db): incoming_tags = ["one", "two"] deleted_tags = delete_obsolete_tags(mirror, incoming_tags) - assert [tag.name for tag in deleted_tags] == [tag.name] + assert [tag.name for tag in deleted_tags] == ["oldtag"] @disable_existing_mirrors @@ -366,7 +366,7 @@ def test_mirror_config_server_hostname(run_skopeo_mock, initialized_db, app, mon "--debug", "inspect", "--tls-verify=True", - u"docker://registry.example.com/namespace/repository:latest", + "docker://registry.example.com/namespace/repository:latest", ], "results": SkopeoResults(True, [], '{"RepoTags": ["latest"]}', ""), }, @@ -380,8 +380,8 @@ def test_mirror_config_server_hostname(run_skopeo_mock, initialized_db, app, mon "--dest-creds", "%s:%s" % (mirror.internal_robot.username, retrieve_robot_token(mirror.internal_robot)), - u"docker://registry.example.com/namespace/repository:latest", - u"docker://config_server_hostname/mirror/repo:latest", + "docker://registry.example.com/namespace/repository:latest", + "docker://config_server_hostname/mirror/repo:latest", ], "results": 
             SkopeoResults(True, [], "Success", ""),
         },
@@ -429,8 +429,8 @@ def test_quote_params(run_skopeo_mock, initialized_db, app):
                 "inspect",
                 "--tls-verify=True",
                 "--creds",
-                u"`rm -rf /`",
-                u"'docker://& rm -rf /;/namespace/repository:latest'",
+                "`rm -rf /`",
+                "'docker://& rm -rf /;/namespace/repository:latest'",
             ],
             "results": SkopeoResults(True, [], '{"RepoTags": ["latest"]}', ""),
         },
@@ -444,9 +444,9 @@ def test_quote_params(run_skopeo_mock, initialized_db, app):
                 "%s:%s"
                 % (mirror.internal_robot.username, retrieve_robot_token(mirror.internal_robot)),
                 "--src-creds",
-                u"`rm -rf /`",
-                u"'docker://& rm -rf /;/namespace/repository:latest'",
-                u"docker://localhost:5000/mirror/repo:latest",
+                "`rm -rf /`",
+                "'docker://& rm -rf /;/namespace/repository:latest'",
+                "docker://localhost:5000/mirror/repo:latest",
             ],
             "results": SkopeoResults(True, [], "stdout", "stderr"),
         },
@@ -491,8 +491,8 @@ def test_quote_params_password(run_skopeo_mock, initialized_db, app):
                 "inspect",
                 "--tls-verify=True",
                 "--creds",
-                u'`rm -rf /`:""$PATH\\"',
-                u"'docker://& rm -rf /;/namespace/repository:latest'",
+                '`rm -rf /`:""$PATH\\"',
+                "'docker://& rm -rf /;/namespace/repository:latest'",
             ],
             "results": SkopeoResults(True, [], '{"RepoTags": ["latest"]}', ""),
         },
@@ -503,12 +503,12 @@ def test_quote_params_password(run_skopeo_mock, initialized_db, app):
                 "--src-tls-verify=True",
                 "--dest-tls-verify=True",
                 "--dest-creds",
-                u"%s:%s"
+                "%s:%s"
                 % (mirror.internal_robot.username, retrieve_robot_token(mirror.internal_robot)),
                 "--src-creds",
-                u'`rm -rf /`:""$PATH\\"',
-                u"'docker://& rm -rf /;/namespace/repository:latest'",
-                u"docker://localhost:5000/mirror/repo:latest",
+                '`rm -rf /`:""$PATH\\"',
+                "'docker://& rm -rf /;/namespace/repository:latest'",
+                "docker://localhost:5000/mirror/repo:latest",
             ],
             "results": SkopeoResults(True, [], "stdout", "stderr"),
         },
@@ -566,7 +566,7 @@ def skopeo_test(args, proxy):
                 "/usr/bin/skopeo",
                 "inspect",
                 "--tls-verify=True",
-                u"docker://registry.example.com/namespace/repository:7.1",
+                "docker://registry.example.com/namespace/repository:7.1",
             ],
             "results": SkopeoResults(
                 False,
@@ -580,7 +580,7 @@ def skopeo_test(args, proxy):
                 "/usr/bin/skopeo",
                 "inspect",
                 "--tls-verify=True",
-                u"docker://registry.example.com/namespace/repository:latest",
+                "docker://registry.example.com/namespace/repository:latest",
             ],
             "results": SkopeoResults(
                 False,
@@ -602,7 +602,7 @@ def skopeo_test(args, proxy):
                 "/usr/bin/skopeo",
                 "inspect",
                 "--tls-verify=True",
-                u"docker://registry.example.com/namespace/repository:7.1",
+                "docker://registry.example.com/namespace/repository:7.1",
             ],
             "results": SkopeoResults(
                 False,
@@ -616,7 +616,7 @@ def skopeo_test(args, proxy):
                 "/usr/bin/skopeo",
                 "inspect",
                 "--tls-verify=True",
-                u"docker://registry.example.com/namespace/repository:latest",
+                "docker://registry.example.com/namespace/repository:latest",
             ],
             "results": SkopeoResults(
                 False,
@@ -638,7 +638,7 @@ def skopeo_test(args, proxy):
                 "/usr/bin/skopeo",
                 "inspect",
                 "--tls-verify=True",
-                u"docker://registry.example.com/namespace/repository:7.1",
+                "docker://registry.example.com/namespace/repository:7.1",
             ],
             "results": SkopeoResults(
                 False,
@@ -652,7 +652,7 @@ def skopeo_test(args, proxy):
                 "/usr/bin/skopeo",
                 "inspect",
                 "--tls-verify=True",
-                u"docker://registry.example.com/namespace/repository:latest",
+                "docker://registry.example.com/namespace/repository:latest",
             ],
             "results": SkopeoResults(
                 False,
@@ -674,7 +674,7 @@ def skopeo_test(args, proxy):
                 "/usr/bin/skopeo",
                 "inspect",
                 "--tls-verify=True",
-                u"docker://registry.example.com/namespace/repository:7.1",
+                "docker://registry.example.com/namespace/repository:7.1",
             ],
             "results": SkopeoResults(
                 False,
@@ -688,7 +688,7 @@ def skopeo_test(args, proxy):
                 "/usr/bin/skopeo",
                 "inspect",
                 "--tls-verify=True",
-                u"docker://registry.example.com/namespace/repository:latest",
+                "docker://registry.example.com/namespace/repository:latest",
             ],
             "results": SkopeoResults(
                 False,
diff --git a/workers/storagereplication.py b/workers/storagereplication.py
index e9717594e3..953f018466 100644
--- a/workers/storagereplication.py
+++ b/workers/storagereplication.py
@@ -176,7 +176,7 @@ def replicate_storage(self, namespace, storage_uuid, storage, backoff_check=True
         has_local_storage = False
 
         if features.STORAGE_REPLICATION:
-            for storage_type, _ in app.config.get("DISTRIBUTED_STORAGE_CONFIG", {}).values():
+            for storage_type, _ in list(app.config.get("DISTRIBUTED_STORAGE_CONFIG", {}).values()):
                 if storage_type == "LocalStorage":
                     has_local_storage = True
                     break
diff --git a/workers/test/test_exportactionlogsworker.py b/workers/test/test_exportactionlogsworker.py
index fd51face11..3889d99be7 100644
--- a/workers/test/test_exportactionlogsworker.py
+++ b/workers/test/test_exportactionlogsworker.py
@@ -46,7 +46,7 @@ def storage_engine(request):
 
 def test_export_logs_failure(initialized_db):
     # Make all uploads fail.
-    test_storage.put_content("local_us", "except_upload", "true")
+    test_storage.put_content("local_us", "except_upload", b"true")
 
     repo = model.repository.get_repository("devtable", "simple")
     user = model.user.get_user("devtable")
@@ -83,8 +83,8 @@ def format_date(datetime):
     test_storage.remove("local_us", "except_upload")
 
     assert called[0]
-    assert called[0][u"export_id"] == "someid"
-    assert called[0][u"status"] == "failed"
+    assert called[0]["export_id"] == "someid"
+    assert called[0]["status"] == "failed"
 
 
 @pytest.mark.parametrize("has_logs", [True, False,])
@@ -137,17 +137,19 @@ def format_date(datetime):
     )
 
     assert called[0]
-    assert called[0][u"export_id"] == "someid"
-    assert called[0][u"status"] == "success"
+    assert called[0]["export_id"] == "someid"
+    assert called[0]["status"] == "success"
 
-    url = called[0][u"exported_data_url"]
+    url = called[0]["exported_data_url"]
     if url.find("http://localhost:5000/exportedlogs/") == 0:
         storage_id = url[len("http://localhost:5000/exportedlogs/") :]
     else:
-        assert url.find("https://some_bucket.s3.amazonaws.com/some/path/exportedactionlogs/") == 0
+        assert (
+            url.find("https://some_bucket.s3.amazonaws.com:443/some/path/exportedactionlogs/") == 0
+        )
         storage_id, _ = url[
-            len("https://some_bucket.s3.amazonaws.com/some/path/exportedactionlogs/") :
+            len("https://some_bucket.s3.amazonaws.com:443/some/path/exportedactionlogs/") :
         ].split("?")
 
     created = storage_engine.get_content(
diff --git a/workers/test/test_logrotateworker.py b/workers/test/test_logrotateworker.py
index c88022d080..738f687a3d 100644
--- a/workers/test/test_logrotateworker.py
+++ b/workers/test/test_logrotateworker.py
@@ -135,7 +135,7 @@ def test_logrotateworker_with_cutoff(logs_model):
     assert len(logs) == days
 
     # Set the cutoff datetime to be the midpoint of the logs
-    midpoint = logs[0 : len(logs) / 2]
+    midpoint = logs[0 : len(logs) // 2]
     assert midpoint
     assert len(midpoint) < len(logs)
 
@@ -155,7 +155,7 @@ def test_logrotateworker_with_cutoff(logs_model):
 
     # If current model uses ES, check that the indices were also deleted
     if isinstance(logs_model, DocumentLogsModel):
-        assert len(logs_model.list_indices()) == days - (len(logs) / 2)
+        assert len(logs_model.list_indices()) == days - (len(logs) // 2)
         for index in logs_model.list_indices():
             dt = datetime.strptime(index[len(INDEX_NAME_PREFIX) :], INDEX_DATE_FORMAT)
             assert dt >= cutoff_date
diff --git a/workers/test/test_storagereplication.py b/workers/test/test_storagereplication.py
index a1a3cc5839..058dc4b6c0 100644
--- a/workers/test/test_storagereplication.py
+++ b/workers/test/test_storagereplication.py
@@ -48,13 +48,13 @@ def test_storage_replication_v1(storage_user, storage_paths, replication_worker,
     # Add a storage entry with a V1 path.
     v1_storage = model.storage.create_v1_storage("local_us")
     content_path = storage_paths.v1_image_layer_path(v1_storage.uuid)
-    storage.put_content(["local_us"], content_path, "some content")
+    storage.put_content(["local_us"], content_path, b"some content")
 
     # Call replicate on it and verify it replicates.
     replication_worker.replicate_storage(storage_user, v1_storage.uuid, storage)
 
     # Ensure that the data was replicated to the other "region".
-    assert storage.get_content(["local_eu"], content_path) == "some content"
+    assert storage.get_content(["local_eu"], content_path) == b"some content"
 
     locations = model.storage.get_storage_locations(v1_storage.uuid)
     assert len(locations) == 2
@@ -62,20 +62,20 @@ def test_storage_replication_cas(storage_user, storage_paths, replication_worker, storage, app):
 
 def test_storage_replication_cas(storage_user, storage_paths, replication_worker, storage, app):
     # Add a storage entry with a CAS path.
-    content_checksum = "sha256:" + hashlib.sha256("some content").hexdigest()
+    content_checksum = "sha256:" + hashlib.sha256(b"some content").hexdigest()
     cas_storage = database.ImageStorage.create(cas_path=True, content_checksum=content_checksum)
 
     location = database.ImageStorageLocation.get(name="local_us")
     database.ImageStoragePlacement.create(storage=cas_storage, location=location)
 
     content_path = storage_paths.blob_path(cas_storage.content_checksum)
-    storage.put_content(["local_us"], content_path, "some content")
+    storage.put_content(["local_us"], content_path, b"some content")
 
     # Call replicate on it and verify it replicates.
     replication_worker.replicate_storage(storage_user, cas_storage.uuid, storage)
 
     # Ensure that the data was replicated to the other "region".
-    assert storage.get_content(["local_eu"], content_path) == "some content"
+    assert storage.get_content(["local_eu"], content_path) == b"some content"
 
     locations = model.storage.get_storage_locations(cas_storage.uuid)
     assert len(locations) == 2
@@ -85,7 +85,7 @@ def test_storage_replication_missing_base(
     storage_user, storage_paths, replication_worker, storage, app
 ):
     # Add a storage entry with a CAS path.
-    content_checksum = "sha256:" + hashlib.sha256("some content").hexdigest()
+    content_checksum = "sha256:" + hashlib.sha256(b"some content").hexdigest()
     cas_storage = database.ImageStorage.create(cas_path=True, content_checksum=content_checksum)
 
     location = database.ImageStorageLocation.get(name="local_us")
@@ -108,17 +108,17 @@ def test_storage_replication_copy_error(
     storage_user, storage_paths, replication_worker, storage, app
 ):
     # Add a storage entry with a CAS path.
-    content_checksum = "sha256:" + hashlib.sha256("some content").hexdigest()
+    content_checksum = "sha256:" + hashlib.sha256(b"some content").hexdigest()
     cas_storage = database.ImageStorage.create(cas_path=True, content_checksum=content_checksum)
 
     location = database.ImageStorageLocation.get(name="local_us")
     database.ImageStoragePlacement.create(storage=cas_storage, location=location)
 
     content_path = storage_paths.blob_path(cas_storage.content_checksum)
-    storage.put_content(["local_us"], content_path, "some content")
+    storage.put_content(["local_us"], content_path, b"some content")
 
     # Tell storage to break copying.
-    storage.put_content(["local_us"], "break_copying", "true")
+    storage.put_content(["local_us"], "break_copying", b"true")
 
     # Attempt to replicate storage. This should fail because the write fails.
     with pytest.raises(JobException):
@@ -135,17 +135,17 @@ def test_storage_replication_copy_didnot_copy(
     storage_user, storage_paths, replication_worker, storage, app
 ):
     # Add a storage entry with a CAS path.
-    content_checksum = "sha256:" + hashlib.sha256("some content").hexdigest()
+    content_checksum = "sha256:" + hashlib.sha256(b"some content").hexdigest()
     cas_storage = database.ImageStorage.create(cas_path=True, content_checksum=content_checksum)
 
     location = database.ImageStorageLocation.get(name="local_us")
     database.ImageStoragePlacement.create(storage=cas_storage, location=location)
 
     content_path = storage_paths.blob_path(cas_storage.content_checksum)
-    storage.put_content(["local_us"], content_path, "some content")
+    storage.put_content(["local_us"], content_path, b"some content")
 
     # Tell storage to fake copying (i.e. not actually copy the data).
-    storage.put_content(["local_us"], "fake_copying", "true")
+    storage.put_content(["local_us"], "fake_copying", b"true")
 
     # Attempt to replicate storage. This should fail because the copy doesn't actually do the copy.
     with pytest.raises(JobException):
@@ -162,17 +162,17 @@ def test_storage_replication_copy_unhandled_exception(
     storage_user, storage_paths, replication_worker, storage, app
 ):
     # Add a storage entry with a CAS path.
-    content_checksum = "sha256:" + hashlib.sha256("some content").hexdigest()
+    content_checksum = "sha256:" + hashlib.sha256(b"some content").hexdigest()
     cas_storage = database.ImageStorage.create(cas_path=True, content_checksum=content_checksum)
 
     location = database.ImageStorageLocation.get(name="local_us")
     database.ImageStoragePlacement.create(storage=cas_storage, location=location)
 
     content_path = storage_paths.blob_path(cas_storage.content_checksum)
-    storage.put_content(["local_us"], content_path, "some content")
+    storage.put_content(["local_us"], content_path, b"some content")
 
     # Tell storage to raise an exception when copying.
-    storage.put_content(["local_us"], "except_copying", "true")
+    storage.put_content(["local_us"], "except_copying", b"true")
 
     # Attempt to replicate storage. This should fail because the copy raises an unhandled exception.
     with pytest.raises(WorkerUnhealthyException):