Framework for Nebari deployment via pytest for extensive testing #1867

Merged: 26 commits, Aug 3, 2023
Changes from all commits
52 changes: 52 additions & 0 deletions .github/workflows/test_integration.yaml
@@ -0,0 +1,52 @@
name: "Deploy on Digital Ocean"

on:
schedule:
- cron: "0 0 * * MON"
workflow_dispatch:

jobs:
test-integration:
name: "Pytest Integration"
runs-on: ubuntu-latest
permissions:
id-token: write
contents: read
steps:
- name: "Checkout Infrastructure"
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: 3.11

- name: Retrieve secret from Vault
uses: hashicorp/vault-action@v2.5.0
with:
method: jwt
url: "https://quansight-vault-public-vault-b2379fa7.d415e30e.z1.hashicorp.cloud:8200"
namespace: "admin/quansight"
role: "repository-nebari-dev-nebari-role"
secrets: |
kv/data/repository/nebari-dev/nebari/amazon_web_services/nebari-dev-ci role_name | AWS_ROLE_ARN;
kv/data/repository/nebari-dev/nebari/google_cloud_platform/nebari-dev-ci/github-nebari-dev-repo-ci project_id | PROJECT_ID;
kv/data/repository/nebari-dev/nebari/google_cloud_platform/nebari-dev-ci/github-nebari-dev-repo-ci workload_identity_provider | GCP_WORKFLOW_PROVIDER;
kv/data/repository/nebari-dev/nebari/google_cloud_platform/nebari-dev-ci/github-nebari-dev-repo-ci service_account_name | GCP_SERVICE_ACCOUNT;
kv/data/repository/nebari-dev/nebari/shared_secrets DIGITALOCEAN_TOKEN | DIGITALOCEAN_TOKEN;
kv/data/repository/nebari-dev/nebari/cloudflare/internal-devops@quansight.com/nebari-dev-ci token | CLOUDFLARE_TOKEN;

- name: Install Nebari
run: |
pip install .[dev]
conda install --quiet --yes conda-build

- name: Integration Tests
run: |
pytest --version
pytest tests_integration/ -vvv -s
env:
NEBARI_K8S_VERSION: 1.25.12-do.0
SPACES_ACCESS_KEY_ID: ${{ secrets.SPACES_ACCESS_KEY_ID }}
SPACES_SECRET_ACCESS_KEY: ${{ secrets.SPACES_SECRET_ACCESS_KEY }}
3 changes: 3 additions & 0 deletions .gitignore
@@ -52,3 +52,6 @@ nebari-config.yaml
.ipynb_checkpoints
.DS_Store
/.ruff_cache

+# Integration tests deployments
+_test_deploy
2 changes: 1 addition & 1 deletion src/_nebari/constants.py
@@ -5,7 +5,7 @@
# 04-kubernetes-ingress
DEFAULT_TRAEFIK_IMAGE_TAG = "2.9.1"

HIGHEST_SUPPORTED_K8S_VERSION = "1.24.13"
HIGHEST_SUPPORTED_K8S_VERSION = "1.25.12"
DEFAULT_GKE_RELEASE_CHANNEL = "UNSPECIFIED"

DEFAULT_NEBARI_DASK_VERSION = CURRENT_RELEASE
4 changes: 3 additions & 1 deletion src/_nebari/deploy.py
@@ -256,6 +256,7 @@ def guided_install(
print(
"Additional administration docs can be found at https://docs.nebari.dev/en/stable/source/admin_guide/"
)
+return stage_outputs


def deploy_configuration(
@@ -293,7 +294,7 @@ def deploy_configuration(

with timer(logger, "deploying Nebari"):
try:
-guided_install(
+return guided_install(
config,
dns_provider,
dns_auto_provision,
@@ -302,5 +303,6 @@
skip_remote_state_provision,
)
except subprocess.CalledProcessError as e:
logger.error("subprocess command failed")
logger.error(e.output)
raise e
7 changes: 5 additions & 2 deletions src/_nebari/provider/cloud/digital_ocean.py
@@ -44,6 +44,9 @@ def regions():
def kubernetes_versions(region):
"""Return list of available kubernetes supported by cloud provider. Sorted from oldest to latest."""
supported_kubernetes_versions = sorted(
[_["slug"] for _ in _kubernetes_options()["options"]["versions"]]
[_["slug"].split("-")[0] for _ in _kubernetes_options()["options"]["versions"]]
)
return filter_by_highest_supported_k8s_version(supported_kubernetes_versions)
filtered_versions = filter_by_highest_supported_k8s_version(
supported_kubernetes_versions
)
return [f"{v}-do.0" for v in filtered_versions]
3 changes: 2 additions & 1 deletion src/_nebari/stages/checks.py
@@ -198,7 +198,8 @@ def _attempt_keycloak_connection(
)
print(f"Attempt {i+1} succeeded connecting to keycloak master realm")
return True
-except KeycloakError:
+except KeycloakError as e:
+print(e)
print(f"Attempt {i+1} failed connecting to keycloak master realm")
time.sleep(timeout)
return False
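The change here only widens the except clause so the caught KeycloakError is printed before the next retry. A generic sketch of that retry-with-logging pattern, with a placeholder connection callable rather than the real KeycloakAdmin client:

```python
import time


def attempt_connection(connect, num_attempts=5, timeout=10):
    """Retry connect(), printing each failure instead of silently swallowing it."""
    for i in range(num_attempts):
        try:
            connect()
            print(f"Attempt {i+1} succeeded")
            return True
        except Exception as exc:  # the real code catches KeycloakError specifically
            print(exc)  # surface the underlying error, as the added print(e) does
            print(f"Attempt {i+1} failed")
        time.sleep(timeout)
    return False
```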
1 change: 1 addition & 0 deletions src/_nebari/utils.py
@@ -106,6 +106,7 @@ def kill_process():
if timeout_timer is not None:
timeout_timer.cancel()

+process.stdout.close()
return process.wait(
timeout=10
) # Should already have finished because we have drained stdout
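For context, the added process.stdout.close() follows the usual pattern of draining a streamed subprocess's stdout, closing the pipe, and then waiting; a self-contained sketch of that pattern (not the actual Nebari helper shown above):

```python
import subprocess

# Minimal sketch: stream a child process's output, close stdout, then wait.
proc = subprocess.Popen(
    ["echo", "hello"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True
)
for line in proc.stdout:  # drain stdout so the child can exit
    print(line, end="")
proc.stdout.close()  # mirrors the line added in this diff
exit_code = proc.wait(timeout=10)  # should return promptly since stdout was drained
```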
27 changes: 1 addition & 26 deletions tests/conftest.py
@@ -1,33 +1,8 @@
-from functools import partial
from unittest.mock import Mock

import pytest

-from _nebari.initialize import render_config

-INIT_INPUTS = [
-# project, namespace, domain, cloud_provider, ci_provider, auth_provider
-("pytestdo", "dev", "do.nebari.dev", "do", "github-actions", "github"),
-("pytestaws", "dev", "aws.nebari.dev", "aws", "github-actions", "github"),
-("pytestgcp", "dev", "gcp.nebari.dev", "gcp", "github-actions", "github"),
-("pytestazure", "dev", "azure.nebari.dev", "azure", "github-actions", "github"),
-]

-NEBARI_CONFIG_FN = "nebari-config.yaml"
-PRESERVED_DIR = "preserved_dir"
-DEFAULT_GH_REPO = "github.com/test/test"
-DEFAULT_TERRAFORM_STATE = "remote"


-# use this partial function for all tests that need to call `render_config`
-render_config_partial = partial(
-render_config,
-repository=DEFAULT_GH_REPO,
-repository_auto_provision=False,
-auth_auto_provision=False,
-terraform_state=DEFAULT_TERRAFORM_STATE,
-disable_prompt=True,
-)
+from tests.utils import INIT_INPUTS, NEBARI_CONFIG_FN, PRESERVED_DIR


@pytest.fixture(params=INIT_INPUTS)
2 changes: 1 addition & 1 deletion tests/test_init.py
@@ -1,6 +1,6 @@
import pytest

-from .conftest import render_config_partial
+from .utils import render_config_partial


@pytest.mark.parametrize(
2 changes: 1 addition & 1 deletion tests/test_render.py
@@ -6,7 +6,7 @@

from _nebari.render import render_template, set_env_vars_in_config

-from .conftest import PRESERVED_DIR, render_config_partial
+from .utils import PRESERVED_DIR, render_config_partial


@pytest.fixture
2 changes: 1 addition & 1 deletion tests/test_schema.py
@@ -1,6 +1,6 @@
import _nebari.schema

-from .conftest import render_config_partial
+from .utils import render_config_partial


def test_schema(setup_fixture):
25 changes: 25 additions & 0 deletions tests/utils.py
@@ -0,0 +1,25 @@
from functools import partial

from _nebari.initialize import render_config

DEFAULT_TERRAFORM_STATE = "remote"

DEFAULT_GH_REPO = "github.com/test/test"
render_config_partial = partial(
    render_config,
    repository=DEFAULT_GH_REPO,
    repository_auto_provision=False,
    auth_auto_provision=False,
    terraform_state=DEFAULT_TERRAFORM_STATE,
    disable_prompt=True,
)
INIT_INPUTS = [
    # project, namespace, domain, cloud_provider, ci_provider, auth_provider
    ("pytestdo", "dev", "do.nebari.dev", "do", "github-actions", "github"),
    ("pytestaws", "dev", "aws.nebari.dev", "aws", "github-actions", "github"),
    ("pytestgcp", "dev", "gcp.nebari.dev", "gcp", "github-actions", "github"),
    ("pytestazure", "dev", "azure.nebari.dev", "azure", "github-actions", "github"),
]

NEBARI_CONFIG_FN = "nebari-config.yaml"
PRESERVED_DIR = "preserved_dir"
19 changes: 0 additions & 19 deletions tests_e2e/playwright/navigator.py
@@ -1,12 +1,10 @@
import contextlib
import datetime as dt
import logging
-import os
import re
import time
import urllib

-import dotenv
from playwright.sync_api import expect, sync_playwright

logger = logging.getLogger()
@@ -410,20 +408,3 @@ def write_file(self, filepath, content):
self.run_terminal_command(f"ls {filepath}")
logger.debug(f"time to complete {dt.datetime.now() - start}")
time.sleep(2)


if __name__ == "__main__":
dotenv.load_dotenv()
nav = Navigator(
nebari_url="https://nebari.quansight.dev/",
username=os.environ["KEYCLOAK_USERNAME"],
password=os.environ["KEYCLOAK_PASSWORD"],
auth="password",
instance_name="small-instance",
headless=False,
slow_mo=100,
)
nav.login()
nav.start_server()
nav.reset_workspace()
nav.teardown()
37 changes: 2 additions & 35 deletions tests_e2e/playwright/run_notebook.py
@@ -1,22 +1,18 @@
import contextlib
import logging
-import os
from pathlib import Path

-import dotenv
from navigator import Navigator

logger = logging.getLogger()


-class RunNotebook:
+class Notebook:
def __init__(self, navigator: Navigator):
self.nav = navigator
self.nav.initialize

-def run_notebook(
-self, path, expected_output_text, conda_env, runtime=30000, retry=2
-):
+def run(self, path, expected_output_text, conda_env, runtime=30000, retry=2):
"""Run jupyter notebook and check for expected output text anywhere on
the page.

@@ -84,32 +80,3 @@ def _restart_run_all(self):
)
if restart_dialog_button.is_visible():
restart_dialog_button.click()


if __name__ == "__main__":
dotenv.load_dotenv()
nav = Navigator(
nebari_url="https://nebari.quansight.dev/",
username=os.environ["KEYCLOAK_USERNAME"],
password=os.environ["KEYCLOAK_PASSWORD"],
auth="password",
instance_name="small-instance",
headless=False,
slow_mo=100,
)
nav.login()
nav.start_server()
nav.reset_workspace()
test_app = RunNotebook(navigator=nav)
notebook_filepath_in_repo = "test_data/test_notebook_output.ipynb"
notebook_filepath_on_nebari = "test_notebook_output.ipynb"
with open(notebook_filepath_in_repo, "r") as notebook:
test_app.nav.write_file(
filepath=notebook_filepath_on_nebari, content=notebook.read()
)
test_app.run_notebook(
path="nebari/tests_e2e/playwright/test_data/test_notebook_output.ipynb",
expected_output_text="success: 6",
conda_env="conda-env-default-py",
)
nav.teardown()
6 changes: 3 additions & 3 deletions tests_e2e/playwright/test_playwright.py
@@ -1,12 +1,12 @@
-from run_notebook import RunNotebook
+from run_notebook import Notebook


def test_notebook(navigator, test_data_root):
-test_app = RunNotebook(navigator=navigator)
+test_app = Notebook(navigator=navigator)
notebook_name = "test_notebook_output.ipynb"
with open(test_data_root / notebook_name, "r") as notebook:
test_app.nav.write_file(filepath=notebook_name, content=notebook.read())
-test_app.run_notebook(
+test_app.run(
path=notebook_name,
expected_output_text="success: 6",
conda_env="conda-env-default-py",
26 changes: 26 additions & 0 deletions tests_integration/README.md
@@ -0,0 +1,26 @@
# Integration Testing via Pytest

These tests are designed to run against Nebari deployed on a cloud provider.
At the moment they only deploy to DigitalOcean.

You need the following environment variables set to run these tests:

```bash
DIGITALOCEAN_TOKEN
NEBARI_K8S_VERSION
SPACES_ACCESS_KEY_ID
SPACES_SECRET_ACCESS_KEY
CLOUDFLARE_TOKEN
```

For instructions on how to obtain these variables, check the DigitalOcean
deployment documentation.

Running Tests:

```bash
pytest tests_integration -vvv -s
```

This deploys Nebari on DigitalOcean, runs the tests against the deployment,
and then tears down the cluster.
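A small pre-flight check can catch a missing variable before an expensive deployment starts; the snippet below is only an illustration built from the variable list above and is not part of this PR:

```python
# Hypothetical helper: verify the environment before running the integration suite.
import os

REQUIRED_VARS = [
    "DIGITALOCEAN_TOKEN",
    "NEBARI_K8S_VERSION",
    "SPACES_ACCESS_KEY_ID",
    "SPACES_SECRET_ACCESS_KEY",
    "CLOUDFLARE_TOKEN",
]

missing = [name for name in REQUIRED_VARS if not os.environ.get(name)]
if missing:
    raise SystemExit(f"Missing environment variables: {', '.join(missing)}")
print("Environment looks complete; run: pytest tests_integration -vvv -s")
```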
Empty file added tests_integration/__init__.py
Empty file.
1 change: 1 addition & 0 deletions tests_integration/conftest.py
@@ -0,0 +1 @@
pytest_plugins = ["tests_integration.deployment_fixtures"]
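The deployment_fixtures module registered here is not part of this view; a heavily hypothetical sketch of the shape such a session-scoped plugin could take (names and steps assumed, not taken from the PR):

```python
# Hypothetical sketch of tests_integration/deployment_fixtures.py; the real module
# referenced by pytest_plugins above is not shown in this diff.
import pytest


@pytest.fixture(scope="session")
def nebari_deployment():
    deployment = {"cloud": "do", "status": "deployed"}  # placeholder for deploy outputs
    try:
        yield deployment  # integration tests would run against the live deployment here
    finally:
        deployment["status"] = "destroyed"  # placeholder for the teardown/destroy step
```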