[pre-commit.ci] Apply automatic pre-commit fixes
pre-commit-ci[bot] committed Jan 20, 2025
1 parent d517a1d commit 0c96971
Showing 1 changed file with 11 additions and 6 deletions.
@@ -10,10 +10,10 @@

from kubespawner import KubeSpawner # noqa: E402


# conda-store default page size
DEFAULT_PAGE_SIZE_LIMIT = 100


@gen.coroutine
def get_username_hook(spawner):
    auth_state = yield spawner.user.get_auth_state()
@@ -30,14 +30,17 @@ def get_username_hook(spawner):
# adding tests in a traditional sense is not possible. See https://github.com/soapy1/nebari/tree/try-unit-test-spawner
# for a demo on one approach to adding test.
def get_conda_store_environments(user_info: dict):
-    import urllib3
-    import yarl
    import math
    import os

+    import urllib3
+    import yarl
+
    # Check for the environment variable `CONDA_STORE_API_PAGE_SIZE_LIMIT`. Fall
    # back to using the default page size limit if not set.
-    page_size = os.environ.get("CONDA_STORE_API_PAGE_SIZE_LIMIT", DEFAULT_PAGE_SIZE_LIMIT)
+    page_size = os.environ.get(
+        "CONDA_STORE_API_PAGE_SIZE_LIMIT", DEFAULT_PAGE_SIZE_LIMIT
+    )

    external_url = z2jh.get_config("custom.conda-store-service-name")
    token = z2jh.get_config("custom.conda-store-jhub-apps-token")
@@ -59,8 +62,10 @@ def get_conda_store_environments(user_info: dict):
    if total_records > page_size:
        # Already pulled the first page of results, start looping through
        # the envs starting on the 2nd page
-        for page in range(2, math.ceil(total_records/page_size)+1):
-            url = yarl.URL(f"http://{external_url}/{endpoint}/?size={page_size}&page={page}")
+        for page in range(2, math.ceil(total_records / page_size) + 1):
+            url = yarl.URL(
+                f"http://{external_url}/{endpoint}/?size={page_size}&page={page}"
+            )
            response = http.request(
                "GET", str(url), headers={"Authorization": f"Bearer {token}"}
            )
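
For readers skimming the diff, the paging logic above boils down to the pattern sketched below. This is a standalone illustration, not the file's actual code: the helper name fetch_all_environments and the response keys "count" and "data" are assumptions about the conda-store API response shape rather than something shown in this commit.

import json
import math
import os

import urllib3
import yarl

DEFAULT_PAGE_SIZE_LIMIT = 100


def fetch_all_environments(external_url, endpoint, token):
    """Sketch of the paging pattern above; names and response keys are assumed."""
    # Same env-var override with a default fallback as in the diff; cast to int
    # so the ceil arithmetic below works when the value comes from the environment.
    page_size = int(
        os.environ.get("CONDA_STORE_API_PAGE_SIZE_LIMIT", DEFAULT_PAGE_SIZE_LIMIT)
    )

    http = urllib3.PoolManager()
    headers = {"Authorization": f"Bearer {token}"}

    # First request: fetch page 1 and learn how many records exist in total.
    url = yarl.URL(f"http://{external_url}/{endpoint}/?size={page_size}&page=1")
    payload = json.loads(
        http.request("GET", str(url), headers=headers).data.decode("utf-8")
    )
    records = payload["data"]  # assumed key for the result list
    total_records = payload["count"]  # assumed key for the total record count

    # Remaining pages run from 2 through ceil(total / size), inclusive.
    if total_records > page_size:
        for page in range(2, math.ceil(total_records / page_size) + 1):
            url = yarl.URL(
                f"http://{external_url}/{endpoint}/?size={page_size}&page={page}"
            )
            payload = json.loads(
                http.request("GET", str(url), headers=headers).data.decode("utf-8")
            )
            records += payload["data"]

    return records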
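
On the inline note about testing: even though the spawner hook itself is hard to unit test, the page-count arithmetic can be exercised in isolation. A minimal sketch, not the approach from the linked try-unit-test-spawner branch, with a made-up helper name:

import math


def expected_extra_pages(total_records: int, page_size: int) -> list:
    """Pages still to fetch after the first request (hypothetical helper)."""
    if total_records <= page_size:
        return []
    return list(range(2, math.ceil(total_records / page_size) + 1))


# 250 records at the default page size of 100 leave pages 2 and 3 to fetch.
assert expected_extra_pages(250, 100) == [2, 3]
# Exactly one full page means nothing further to fetch.
assert expected_extra_pages(100, 100) == []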
