diff --git a/Dockerfile b/Dockerfile
index 7ae8d32b..c903d87c 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -97,7 +97,7 @@ EXPOSE 8000
 # run as non priviledged user
 USER app
 
-RUN poetry install --with remotesettings,taskcluster --without dev --no-ansi --no-interaction --verbose
+RUN poetry install --with remotesettings --without dev --no-ansi --no-interaction --verbose
 
 ENTRYPOINT ["/app/bin/run.sh"]
 CMD ["server"]
diff --git a/Makefile b/Makefile
index 96d7c17c..c32c9a9f 100644
--- a/Makefile
+++ b/Makefile
@@ -19,7 +19,7 @@ install: $(INSTALL_STAMP) ## Install dependencies
 $(INSTALL_STAMP): pyproject.toml poetry.lock
 	@if [ -z $(POETRY) ]; then echo "Poetry could not be found. See https://python-poetry.org/docs/"; exit 2; fi
 	$(POETRY) --version
-	$(POETRY) install --with remotesettings,taskcluster --no-ansi --no-interaction --verbose
+	$(POETRY) install --with remotesettings --no-ansi --no-interaction --verbose
 	touch $(INSTALL_STAMP)
 
 clean: ## Delete cache files
diff --git a/bin/run.sh b/bin/run.sh
index 10532045..fd7c5422 100755
--- a/bin/run.sh
+++ b/bin/run.sh
@@ -6,7 +6,7 @@
 elif [ $1 == "check" ]; then
     exec poetry run python -m telescope $@
 
 elif [ $1 == "test" ]; then
-    poetry install --with remotesettings,taskcluster --no-ansi --no-interaction --verbose
+    poetry install --with remotesettings --no-ansi --no-interaction --verbose
     poetry run pytest tests
 else
diff --git a/checks/taskcluster/__init__.py b/checks/taskcluster/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/checks/taskcluster/create_task.py b/checks/taskcluster/create_task.py
deleted file mode 100644
index 0407c1f7..00000000
--- a/checks/taskcluster/create_task.py
+++ /dev/null
@@ -1,82 +0,0 @@
-"""
-A check to verify that tasks can be created.
-
-Information about the lastest created task is returned.
-"""
-
-import logging
-import shlex
-import textwrap
-from datetime import datetime, timedelta
-
-import taskcluster
-import taskcluster.aio
-import taskcluster.exceptions
-
-from telescope import config
-from telescope.typings import CheckResult
-
-from . import utils as tc_utils
-
-
-logger = logging.getLogger(__name__)
-
-
-TASK_METADATA = {
-    "owner": config.CONTACT_EMAIL,
-    "source": config.SOURCE_URL,
-    "description": textwrap.dedent(
-        """
-        This task is a test and is generated routinely by {config.SERVICE_NAME}
-        in order to monitor the Taskcluster Queue services. It ensures that tasks
-        are able to be created, and they intentionally have a short expiry
-        to reduce resource usage.
-        """
-    ),
-}
-
-
-async def run(
-    root_url: str,
-    queue_id: str = "proj-taskcluster/gw-ci-ubuntu-18-04",
-    command: str = "/bin/echo 'hola mundo!'",
-    task_source_url: str = "",
-    deadline_seconds: int = 3 * 60 * 60,
-    expires_seconds: int = 24 * 60 * 60,
-    max_run_time: int = 10 * 60,
-    client_id: str = "",
-    access_token: str = "",
-    certificate: str = "",
-) -> CheckResult:
-    # Build connection infos from parameters.
-    options = tc_utils.options_from_params(
-        root_url, client_id, access_token, certificate
-    )
-    queue = taskcluster.aio.Queue(options)
-
-    name = "task-test"
-    task_id = taskcluster.stableSlugId()(name)  # type: ignore
-
-    now = datetime.utcnow()
-    deadline = now + timedelta(seconds=deadline_seconds)
-    expires = now + timedelta(seconds=expires_seconds)
-
-    payload = {
-        "taskQueueId": queue_id,
-        "created": now.isoformat(),
-        "deadline": deadline.isoformat(),
-        "expires": expires.isoformat(),
-        "payload": {
-            "command": [shlex.split(cmd) for cmd in command.splitlines()],
-            "maxRunTime": max_run_time,
-        },
-        "metadata": {
-            **TASK_METADATA,
-            "name": name,
-            "source": task_source_url or config.SOURCE_URL,
-        },
-    }
-
-    status = await queue.createTask(task_id, payload)
-
-    return True, status["status"]
diff --git a/checks/taskcluster/latest_indexed.py b/checks/taskcluster/latest_indexed.py
deleted file mode 100644
index c14437c3..00000000
--- a/checks/taskcluster/latest_indexed.py
+++ /dev/null
@@ -1,97 +0,0 @@
-"""
-A check to verify that a task exists for the specified index path, that it ran recently,
-and published the expected artifact.
-
-This check can be used to verify that a certain hook (defined elsewhere) is
-regularly triggered as expected.
-
-Information about the lastest indexed task is returned.
-"""
-
-import logging
-from typing import List
-
-import taskcluster
-import taskcluster.aio
-import taskcluster.exceptions
-
-from telescope import utils
-from telescope.typings import CheckResult
-
-from . import utils as tc_utils
-
-
-logger = logging.getLogger(__name__)
-
-# List which check parameters are visible in the UI.
-EXPOSED_PARAMETERS = ["index_path", "max_age"]
-
-
-async def run(
-    max_age: int,
-    index_path: str,
-    artifacts_names: List[str],
-    root_url: str,
-    client_id: str = "",
-    access_token: str = "",
-    certificate: str = "",
-) -> CheckResult:
-    """
-    Example configuration:
-
-    .. code-block:: toml
-
-        [checks.queue.latest-indexed]
-        description = ""
-        module = "checks.taskcluster.latest_indexed"
-        params.root_url = "${TASKCLUSTER_ROOT_URL}"
-        params.client_id = "${TASKCLUSTER_CLIENT_ID}"
-        params.access_token = "${TASKCLUSTER_ACCESS_TOKEN}"
-        params.max_age = 360
-        params.index_path = "project.taskcluster.telescope.periodic-task"
-        params.artifacts_names = ["public/results/status.json"]
-
-    """
-    # Build connection infos from parameters.
-    options = tc_utils.options_from_params(
-        root_url, client_id, access_token, certificate
-    )
-
-    # 1. Get the task id from the index.
-    index = taskcluster.aio.Index(options)
-    try:
-        indexed_task = await index.findTask(index_path)
-        task_id = indexed_task["taskId"]
-    except taskcluster.exceptions.TaskclusterRestFailure as e:
-        if getattr(e, "status_code") != 404:
-            raise
-        # No indexed task found. Failing.
-        return False, f"No task found at {index_path!r}"
-
-    # 2. Inspect the task using the queue.
-    queue = taskcluster.aio.Queue(options)
-    futures = [queue.latestArtifactInfo(task_id, a) for a in artifacts_names]
-    try:
-        artifacts = await utils.run_parallel(*futures)
-    except taskcluster.exceptions.TaskclusterRestFailure as e:
-        failed_call = e.body["requestInfo"]["params"]
-        return False, "Artifact {name!r} of task {taskId!r} not available".format(
-            **failed_call
-        )
-
-    # 3. Verify that latest run is not too old.
-    status = await queue.status(task_id)
-    last_run = status["status"]["runs"][-1]
-    resolved_at = utils.utcfromisoformat(last_run["resolved"])
-    age_task = utils.utcnow() - resolved_at
-    if age_task.seconds > max_age:
-        return (
-            False,
-            f"Latest task at {index_path!r} ({task_id!r}) is {age_task.seconds} seconds old",
-        )
-
-    # 4. Success! Return status info.
-    return True, {
-        **status,
-        "artifacts": artifacts,
-    }
diff --git a/checks/taskcluster/utils.py b/checks/taskcluster/utils.py
deleted file mode 100644
index 65ec8d6e..00000000
--- a/checks/taskcluster/utils.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from telescope import config
-
-
-def options_from_params(root_url, client_id, access_token, certificate):
-    return {
-        "rootUrl": root_url,
-        "credentials": (
-            {"clientId": client_id.strip(), "accessToken": access_token.strip()}
-            if client_id and access_token
-            else {"certificate": certificate.strip()}
-        ),
-        "maxRetries": config.REQUESTS_MAX_RETRIES,
-    }
diff --git a/checks/taskcluster/write_secrets.py b/checks/taskcluster/write_secrets.py
deleted file mode 100644
index 92d52e4b..00000000
--- a/checks/taskcluster/write_secrets.py
+++ /dev/null
@@ -1,63 +0,0 @@
-"""
-A check to verify that the Secrets service is operational.
-
-Information about the lastest indexed task is returned.
-"""
-
-import logging
-from datetime import timedelta
-
-import taskcluster
-import taskcluster.aio
-import taskcluster.exceptions
-
-from telescope import utils
-from telescope.typings import CheckResult
-
-from . import utils as tc_utils
-
-
-logger = logging.getLogger(__name__)
-
-
-DEFAULT_NAME = "project/taskcluster/secrets-test"
-DEFAULT_EXPIRES_SECONDS = 600
-EXPOSED_PARAMETERS = ["secret_name"]
-
-
-async def run(
-    root_url: str,
-    secret_name: str = DEFAULT_NAME,
-    expires_seconds: int = DEFAULT_EXPIRES_SECONDS,
-    client_id: str = "",
-    access_token: str = "",
-    certificate: str = "",
-) -> CheckResult:
-    # Build connection infos from parameters.
-    options = tc_utils.options_from_params(
-        root_url, client_id, access_token, certificate
-    )
-
-    secrets = taskcluster.aio.Secrets(options)
-
-    # 1. Write and read.
-    payload = {
-        "expires": (utils.utcnow() + timedelta(seconds=expires_seconds)).isoformat(),
-        "secret": {"hello": "beautiful world"},
-    }
-    await secrets.set(secret_name, payload)
-    try:
-        await secrets.get(secret_name)
-    except taskcluster.exceptions.TaskclusterRestFailure:
-        return False, f"Secret {secret_name!r} could not be retrieved"
-
-    # 2. Remove and check.
-    await secrets.remove(secret_name)
-    try:
-        await secrets.get(secret_name)
-        return False, f"Secret {secret_name!r} was not removed"
-    except taskcluster.exceptions.TaskclusterRestFailure as e:
-        if getattr(e, "status_code") != 404:
-            raise
-
-    return True, {}
diff --git a/poetry.lock b/poetry.lock
index 1b54a748..10618535 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
 
 [[package]]
 name = "aiohappyeyeballs"
@@ -1100,20 +1100,6 @@ files = [
     {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
 ]
 
-[[package]]
-name = "mohawk"
-version = "1.1.0"
-description = "Library for Hawk HTTP authorization"
-optional = false
-python-versions = "*"
-files = [
-    {file = "mohawk-1.1.0-py3-none-any.whl", hash = "sha256:3ed296a30453d0b724679e0fd41e4e940497f8e461a9a9c3b7f36e43bab0fa09"},
-    {file = "mohawk-1.1.0.tar.gz", hash = "sha256:d2a0e3ab10a209cc79e95e28f2dd54bd4a73fd1998ffe27b7ba0f962b6be9723"},
-]
-
-[package.dependencies]
-six = "*"
-
 [[package]]
 name = "multidict"
 version = "6.0.2"
@@ -1659,17 +1645,6 @@ files = [
     {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
 ]
 
-[[package]]
-name = "slugid"
-version = "2.0.0"
-description = "Base64 encoded uuid v4 slugs"
-optional = false
-python-versions = "*"
-files = [
-    {file = "slugid-2.0.0-py2.py3-none-any.whl", hash = "sha256:aec8b0e01c4ad32e38e12d609eab3ec912fd129aaf6b2ded0199b56a5f8fd67c"},
-    {file = "slugid-2.0.0.tar.gz", hash = "sha256:a950d98b72691178bdd4d6c52743c4a2aa039207cf7a97d71060a111ff9ba297"},
-]
-
 [[package]]
 name = "soupsieve"
 version = "2.3.2.post1"
@@ -1695,41 +1670,6 @@ files = [
 [package.dependencies]
 pbr = ">=2.0.0,<2.1.0 || >2.1.0"
 
-[[package]]
-name = "taskcluster"
-version = "68.0.4"
-description = "Python client for Taskcluster"
-optional = false
-python-versions = "*"
-files = [
-    {file = "taskcluster-68.0.4-py3-none-any.whl", hash = "sha256:b911708471416c47225110134dbb0b02fcd8a8d96d8f03a4467f5beb554e9cf5"},
-    {file = "taskcluster-68.0.4.tar.gz", hash = "sha256:f401f0c30b5f4d0c783b633ce55daabc78c5d004508a9b976756924e7a67727e"},
-]
-
-[package.dependencies]
-aiohttp = ">=3.7.4"
-async-timeout = ">=2.0.0"
-mohawk = ">=0.3.4"
-python-dateutil = ">=2.8.2"
-requests = ">=2.4.3"
-slugid = ">=2"
-taskcluster-urls = ">=12.1.0"
-
-[package.extras]
-test = ["aiofiles", "coverage", "flake8", "httmock", "httptest", "hypothesis", "mock", "psutil", "pytest", "pytest-asyncio", "pytest-cov", "pytest-mock", "setuptools-lint", "tox"]
-
-[[package]]
-name = "taskcluster-urls"
-version = "13.0.1"
-description = "Standardized url generator for taskcluster resources."
-optional = false
-python-versions = "*"
-files = [
-    {file = "taskcluster-urls-13.0.1.tar.gz", hash = "sha256:b25e122ecec249c4299ac7b20b08db76e3e2025bdaeb699a9d444556de5fd367"},
-    {file = "taskcluster_urls-13.0.1-py2-none-any.whl", hash = "sha256:5e25e7e6818e8877178b175ff43d2e6548afad72694aa125f404a7329ece0973"},
-    {file = "taskcluster_urls-13.0.1-py3-none-any.whl", hash = "sha256:f66dcbd6572a6216ab65949f0fa0b91f2df647918028436c384e6af5cd12ae2b"},
-]
-
 [[package]]
 name = "termcolor"
 version = "2.4.0"
@@ -2015,4 +1955,4 @@ multidict = ">=4.0"
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.11,<3.12"
-content-hash = "192928940314623d216ec48f2aac775c9a1c6415564409e48c7557caea2ffc2f"
+content-hash = "e8acbe1141d7ed81228260181137f58e29128a63acf5574b70ae42830fafee1a"
diff --git a/pyproject.toml b/pyproject.toml
index 827af915..8795fc91 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -35,9 +35,6 @@ detect-secrets = "^1.5.0"
 [tool.poetry.group.remotesettings]
 optional = true
 
-[tool.poetry.group.taskcluster]
-optional = true
-
 [tool.poetry.group.remotesettings.dependencies]
 kinto-http = "^11.2.0"
 cryptography = "^43.0.1"
@@ -47,9 +44,6 @@ beautifulsoup4 = "^4.12.3"
 autograph-utils = "^0.3.0"
 canonicaljson-rs = "^0.6.0"
 
-[tool.poetry.group.taskcluster.dependencies]
-taskcluster = "^68.0.4"
-
 [tool.pytest.ini_options]
 asyncio_mode = "auto"
 
diff --git a/tests/checks/taskcluster/test_create_task.py b/tests/checks/taskcluster/test_create_task.py
deleted file mode 100644
index 894ac9a6..00000000
--- a/tests/checks/taskcluster/test_create_task.py
+++ /dev/null
@@ -1,43 +0,0 @@
-from unittest import mock
-
-import pytest
-import taskcluster.exceptions
-
-from checks.taskcluster.create_task import run
-
-
-MODULE = "checks.taskcluster.create_task"
-
-PARAMS = {
-    "root_url": "http://server",
-}
-
-
-async def test_positive():
-    class FakeQueue:
-        async def createTask(self, *args, **kwargs):
-            self.called_with = args, kwargs
-            return {"status": {"taskId": 42}}
-
-    fake_queue = FakeQueue()
-
-    with mock.patch(f"{MODULE}.taskcluster.aio.Queue", return_value=fake_queue):
-        status, data = await run(**PARAMS)
-
-    assert status is True
-    assert data == {"taskId": 42}
-    _, definition = fake_queue.called_with[0]
-    assert definition["payload"]["command"] == [["/bin/echo", "hola mundo!"]]
-
-
-async def test_negative():
-    class FakeQueue:
-        async def createTask(self, *args, **kwargs):
-            e = taskcluster.exceptions.TaskclusterRestFailure("", None)
-            raise e
-
-    fake_queue = FakeQueue()
-
-    with mock.patch(f"{MODULE}.taskcluster.aio.Queue", return_value=fake_queue):
-        with pytest.raises(taskcluster.exceptions.TaskclusterRestFailure):
-            await run(**PARAMS)
diff --git a/tests/checks/taskcluster/test_latest_indexed.py b/tests/checks/taskcluster/test_latest_indexed.py
deleted file mode 100644
index 550ac3fe..00000000
--- a/tests/checks/taskcluster/test_latest_indexed.py
+++ /dev/null
@@ -1,120 +0,0 @@
-from datetime import timedelta
-from unittest import mock
-
-import pytest
-import taskcluster.exceptions
-
-from checks.taskcluster.latest_indexed import run
-from telescope.utils import utcnow
-
-
-MODULE = "checks.taskcluster.latest_indexed"
-
-PARAMS = {
-    "root_url": "http://server",
-    "index_path": "project.myproject.task",
-    "artifacts_names": ["public/status.json"],
-    "max_age": 10,
-}
-
-
-@pytest.fixture
-def fake_index():
-    class FakeIndex:
-        async def findTask(self, *args, **kwargs):
-            return {"taskId": "task-42"}
-
-    fake_index = FakeIndex()
-    with mock.patch(f"{MODULE}.taskcluster.aio.Index", return_value=fake_index) as m:
-        yield m
-
-
-@pytest.fixture
-def fake_queue():
-    class FakeQueue:
-        async def latestArtifactInfo(self, task_id, a):
-            return {}
-
-        async def status(self, task_id):
-            task_status = {
-                "runs": [{"resolved": (utcnow() - timedelta(seconds=5)).isoformat()}]
-            }
-            return {"status": task_status}
-
-    fake_queue = FakeQueue()
-    with mock.patch(f"{MODULE}.taskcluster.aio.Queue", return_value=fake_queue) as m:
-        yield m
-
-
-async def test_positive(fake_index, fake_queue):
-    status, data = await run(**PARAMS)
-
-    assert status is True
-    assert "artifacts" in data
-    assert "status" in data
-
-
-async def test_index_errors_are_raised():
-    class FakeIndex:
-        async def findTask(self, *args, **kwargs):
-            e = taskcluster.exceptions.TaskclusterRestFailure("", None)
-            raise e
-
-    fake_index = FakeIndex()
-    with mock.patch(f"{MODULE}.taskcluster.aio.Index", return_value=fake_index):
-        with pytest.raises(taskcluster.exceptions.TaskclusterRestFailure):
-            await run(**PARAMS)
-
-
-async def test_negative_missing_task():
-    class FakeIndex:
-        async def findTask(self, *args, **kwargs):
-            e = taskcluster.exceptions.TaskclusterRestFailure("", None)
-            e.status_code = 404
-            raise e
-
-    fake_index = FakeIndex()
-    with mock.patch(f"{MODULE}.taskcluster.aio.Index", return_value=fake_index):
-        status, data = await run(**PARAMS)
-
-    assert status is False
-    assert data == "No task found at 'project.myproject.task'"
-
-
-async def test_negative_fail_artifact(fake_index):
-    class FakeQueue:
-        async def latestArtifactInfo(self, task_id, a):
-            e = taskcluster.exceptions.TaskclusterRestFailure("", None)
-            e.body = {"requestInfo": {"params": {"name": a, "taskId": task_id}}}
-            raise e
-
-    fake_queue = FakeQueue()
-    with mock.patch(f"{MODULE}.taskcluster.aio.Queue", return_value=fake_queue):
-        status, data = await run(**PARAMS)
-
-    assert status is False
-    assert data == "Artifact 'public/status.json' of task 'task-42' not available"
-
-
-async def test_negative_task_too_told(fake_index):
-    class FakeQueue:
-        async def latestArtifactInfo(self, task_id, a):
-            return {}
-
-        async def status(self, task_id):
-            return {
-                "status": {
-                    "runs": [
-                        {"resolved": (utcnow() - timedelta(seconds=11)).isoformat()}
-                    ]
-                }
-            }
-
-    fake_queue = FakeQueue()
-    with mock.patch(f"{MODULE}.taskcluster.aio.Queue", return_value=fake_queue):
-        status, data = await run(**PARAMS)
-
-    assert status is False
-    assert (
-        "Latest task at 'project.myproject.task' ('task-42') is 11 seconds old" in data
-    )
diff --git a/tests/checks/taskcluster/test_write_secrets.py b/tests/checks/taskcluster/test_write_secrets.py
deleted file mode 100644
index dfb21f7d..00000000
--- a/tests/checks/taskcluster/test_write_secrets.py
+++ /dev/null
@@ -1,83 +0,0 @@
-from typing import Any, Dict
-from unittest import mock
-
-import pytest
-import taskcluster.exceptions
-
-from checks.taskcluster.write_secrets import run
-
-
-MODULE = "checks.taskcluster.write_secrets"
-
-PARAMS = {
-    "root_url": "http://server",
-}
-
-
-class FakeSecrets:
-    _content: Dict[str, Any] = {}
-
-    async def set(self, name, payload):
-        self._content[name] = payload
-
-    async def get(self, name):
-        try:
-            return self._content[name]
-        except KeyError:
-            e = taskcluster.exceptions.TaskclusterRestFailure("", None)
-            e.status_code = 404
-            raise e
-
-    async def remove(self, name):
-        del self._content[name]
-
-
-async def test_positive():
-    fake_secrets = FakeSecrets()
-    with mock.patch(f"{MODULE}.taskcluster.aio.Secrets", return_value=fake_secrets):
-        status, data = await run(**PARAMS)
-
-    assert status is True
-    assert data == {}
-
-
-async def test_negative_cannot_write():
-    class FailingSecrets(FakeSecrets):
-        async def set(self, name, payload):
-            pass  # Do not store.
-
-    fake_secrets = FailingSecrets()
-    with mock.patch(f"{MODULE}.taskcluster.aio.Secrets", return_value=fake_secrets):
-        status, data = await run(**PARAMS)
-
-    assert status is False
-    assert "could not be retrieved" in data
-
-
-async def test_negative_cannot_remove():
-    class FailingSecrets(FakeSecrets):
-        async def remove(self, name):
-            pass  # Do not remove.
-
-    fake_secrets = FailingSecrets()
-    with mock.patch(f"{MODULE}.taskcluster.aio.Secrets", return_value=fake_secrets):
-        status, data = await run(**PARAMS)
-
-    assert status is False
-    assert "was not removed" in data
-
-
-async def test_secrets_errors_are_raised():
-    class FailingSecrets(FakeSecrets):
-        async def get(self, name):
-            try:
-                return self._content[name]
-            except KeyError:
-                e = taskcluster.exceptions.TaskclusterRestFailure("", None)
-                e.status_code = 503
-                raise e
-
-    fake_secrets = FailingSecrets()
-    with mock.patch(f"{MODULE}.taskcluster.aio.Secrets", return_value=fake_secrets):
-        with pytest.raises(taskcluster.exceptions.TaskclusterRestFailure):
-            await run(**PARAMS)