Commit: add failing tests
valsteen committed Oct 11, 2023
1 parent 8780c9c commit fc3e650
Showing 6 changed files with 156 additions and 55 deletions.
5 changes: 4 additions & 1 deletion pyproject.toml
@@ -92,7 +92,10 @@ ignore_missing_imports = true
[tool.pytest.ini_options]
addopts = ["-rxXs", "--strict-config", "--strict-markers"]
markers = ["copied_from(source, changes=None): mark test as copied from somewhere else, along with a description of changes made to accodomate e.g. our test setup"]
filterwarnings = ["error"]
filterwarnings = [
"error",
"ignore::trio.TrioDeprecationWarning", # due to trio.MultiError deprecation
]

[tool.coverage.run]
omit = [
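
Since the last matching filterwarnings entry wins, the ignore rule overrides
"error" for TrioDeprecationWarning alone. A minimal sketch of the configured
behaviour (a hypothetical test, not part of this commit):

import warnings

import trio


def test_trio_deprecation_is_not_escalated() -> None:
    # Under filterwarnings = ["error"] alone this warning would be raised as
    # an exception; the ignore::trio.TrioDeprecationWarning entry downgrades
    # it back to a plain warning, so the test passes.
    warnings.warn("trio.MultiError is deprecated", trio.TrioDeprecationWarning)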
53 changes: 42 additions & 11 deletions tests/_async/test_connection_pool.py
@@ -4,9 +4,10 @@
import hpack
import hyperframe.frame
import pytest

import httpcore
from tests.concurrency import async_open_nursery as open_nursery
from tests.time import async_current_time as current_time, async_sleep as sleep


@pytest.mark.anyio
@@ -515,7 +516,7 @@ async def fetch(pool, domain, info_list):
max_connections=1, network_backend=network_backend
) as pool:
info_list: typing.List[str] = []
async with open_nursery() as nursery:
for domain in ["a.com", "b.com", "c.com", "d.com", "e.com"]:
nursery.start_soon(fetch, pool, domain, info_list)

@@ -561,7 +562,7 @@ async def fetch(pool, domain, info_list):
max_connections=1, network_backend=network_backend, http2=True
) as pool:
info_list: typing.List[str] = []
async with open_nursery() as nursery:
for domain in ["a.com", "a.com", "a.com", "a.com", "a.com"]:
nursery.start_soon(fetch, pool, domain, info_list)

@@ -603,7 +604,7 @@ async def fetch(pool, domain, info_list):
max_connections=1, network_backend=network_backend, http2=True
) as pool:
info_list: typing.List[str] = []
async with open_nursery() as nursery:
for domain in ["a.com", "a.com", "a.com", "a.com", "a.com"]:
nursery.start_soon(fetch, pool, domain, info_list)

@@ -662,7 +663,14 @@ async def test_connection_pool_closed_while_request_in_flight():
async def test_connection_pool_timeout():
"""
Ensure that exceeding max_connections can cause a request to time out.
100 concurrent requests are sent with a limit of 1 connection.
Each request holds the connection for a fixed 6ms, and the pool timeout
is 50ms, so fewer than 9 requests can succeed; the rest never get a
connection within 50ms and raise PoolTimeout instead.
"""

network_backend = httpcore.AsyncMockBackend(
[
b"HTTP/1.1 200 OK\r\n",
@@ -671,18 +679,41 @@
b"\r\n",
b"Hello, world!",
]
* 100
)

successes: int = 0
max_wait: float = 0

async def fetch(pool):
nonlocal successes, max_wait

start_time = current_time()
extensions = {"timeout": {"pool": 0.05}}

try:
async with pool.stream(
"GET", "https://example.com/", extensions=extensions
) as response:
await sleep(0.006)  # hold the connection for the fixed 6ms request time
await response.aread()
successes += 1
except httpcore.PoolTimeout:
pass
finally:
max_wait = max(current_time() - start_time, max_wait)

async with httpcore.AsyncConnectionPool(
network_backend=network_backend, max_connections=1
) as pool:
async with open_nursery() as nursery:
for _ in range(100):
nursery.start_soon(fetch, pool)

assert 1 < successes < 9, "fewer than 9 requests should succeed"

# Theoretically the whole operation takes about 50ms + 6ms; 200ms leaves a generous margin for scheduling overhead
assert max_wait <= 0.2
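
To see where the 9 in the assertion comes from, here is a back-of-the-envelope
check (a sketch, not part of the commit): the single connection frees up every
6ms, waiters acquire it in turn, and any waiter still queued at 50ms raises
PoolTimeout.

pool_timeout = 0.05  # seconds a waiter is willing to queue for the connection
hold_time = 0.006    # seconds each request holds the single connection

# Acquisitions happen at roughly t = 0, 6ms, 12ms, ..., so at most
# floor(50 / 6) + 1 = 9 requests can grab the connection in theory; scheduling
# overhead makes the test expect strictly fewer, hence `1 < successes < 9`.
max_theoretical_successes = int(pool_timeout // hold_time) + 1
assert max_theoretical_successes == 9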


@pytest.mark.anyio
53 changes: 42 additions & 11 deletions tests/_sync/test_connection_pool.py
@@ -4,9 +4,10 @@
import hpack
import hyperframe.frame
import pytest

import httpcore
from tests.concurrency import sync_open_nursery as open_nursery
from tests.time import sync_current_time as current_time, sync_sleep as sleep



@@ -515,7 +516,7 @@ def fetch(pool, domain, info_list):
max_connections=1, network_backend=network_backend
) as pool:
info_list: typing.List[str] = []
with open_nursery() as nursery:
for domain in ["a.com", "b.com", "c.com", "d.com", "e.com"]:
nursery.start_soon(fetch, pool, domain, info_list)

@@ -561,7 +562,7 @@ def fetch(pool, domain, info_list):
max_connections=1, network_backend=network_backend, http2=True
) as pool:
info_list: typing.List[str] = []
with open_nursery() as nursery:
for domain in ["a.com", "a.com", "a.com", "a.com", "a.com"]:
nursery.start_soon(fetch, pool, domain, info_list)

@@ -603,7 +604,7 @@ def fetch(pool, domain, info_list):
max_connections=1, network_backend=network_backend, http2=True
) as pool:
info_list: typing.List[str] = []
with open_nursery() as nursery:
for domain in ["a.com", "a.com", "a.com", "a.com", "a.com"]:
nursery.start_soon(fetch, pool, domain, info_list)

@@ -662,7 +663,14 @@ def test_connection_pool_closed_while_request_in_flight():
def test_connection_pool_timeout():
"""
Ensure that exceeding max_connections can cause a request to time out.
100 concurrent requests are sent with a limit of 1 connection.
Each request holds the connection for a fixed 6ms, and the pool timeout
is 50ms, so fewer than 9 requests can succeed; the rest never get a
connection within 50ms and raise PoolTimeout instead.
"""

network_backend = httpcore.MockBackend(
[
b"HTTP/1.1 200 OK\r\n",
@@ -671,18 +679,41 @@
b"\r\n",
b"Hello, world!",
]
* 100
)

successes: int = 0
max_wait: float = 0

def fetch(pool):
nonlocal successes, max_wait

start_time = current_time()
extensions = {"timeout": {"pool": 0.05}}

try:
with pool.stream(
"GET", "https://example.com/", extensions=extensions
) as response:
sleep(0.006)  # hold the connection for the fixed 6ms request time
response.read()
successes += 1
except httpcore.PoolTimeout:
pass
finally:
max_wait = max(current_time() - start_time, max_wait)

with httpcore.ConnectionPool(
network_backend=network_backend, max_connections=1
) as pool:
with open_nursery() as nursery:
for _ in range(100):
nursery.start_soon(fetch, pool)

assert 1 < successes < 9, "fewer than 9 requests should succeed"

# Theoretically the whole operation takes about 50ms + 6ms; 200ms leaves a generous margin for scheduling overhead
assert max_wait <= 0.2
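
One caveat worth noting (an observation, not part of the commit): in this sync
variant fetch() runs on real threads, so the bare `successes += 1` is a
read-modify-write that is not atomic in principle. A single increment per
thread keeps the race window negligible here, but a hypothetical lock-guarded
counter would make it unambiguous:

import threading

_lock = threading.Lock()
successes = 0


def record_success() -> None:
    # Guard the read-modify-write so concurrent threads cannot interleave
    # between the load and the store.
    global successes
    with _lock:
        successes += 1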



9 changes: 8 additions & 1 deletion tests/concurrency.py
@@ -12,6 +12,9 @@
from types import TracebackType
from typing import Any, Callable, List, Optional, Type

from anyio import create_task_group
from anyio.abc import TaskGroup


class Nursery:
def __init__(self) -> None:
@@ -36,5 +39,9 @@ def start_soon(self, func: Callable[..., object], *args: Any) -> None:
self._threads.append(thread)


def sync_open_nursery() -> Nursery:
return Nursery()


def async_open_nursery() -> TaskGroup:
return create_task_group()
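
A usage sketch (assuming the elided Nursery body starts and joins its threads
on __exit__, which is what the sync tests rely on): both nursery flavours
expose the same start_soon() surface, so unasync only has to rewrite the
import line.

from typing import List

from tests.concurrency import sync_open_nursery

results: List[int] = []
with sync_open_nursery() as nursery:
    for i in range(3):
        nursery.start_soon(results.append, i)
# Every task has run by the time the with block exits.
assert sorted(results) == [0, 1, 2]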
19 changes: 19 additions & 0 deletions tests/time.py
@@ -0,0 +1,19 @@
import time

import anyio


def async_current_time() -> float:
return anyio.current_time()


def sync_current_time() -> float:
return time.monotonic()


async def async_sleep(delay: float):
await anyio.sleep(delay)


def sync_sleep(delay: float):
time.sleep(delay)
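
A minimal usage sketch (hypothetical snippet, not in the commit): the paired
helpers have identical signatures, so unasync can swap a single import line
while the timing logic in the test body stays untouched.

from tests.time import sync_current_time as current_time, sync_sleep as sleep

start = current_time()
sleep(0.01)
elapsed = current_time() - start
assert elapsed >= 0.009  # monotonic clock; allow a sliver of scheduler slack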
72 changes: 41 additions & 31 deletions unasync.py
@@ -5,33 +5,43 @@
from pprint import pprint

SUBS = [
(
"from .._backends.auto import AutoBackend",
"from .._backends.sync import SyncBackend",
),
(
"from tests.time import async_current_time as current_time, async_sleep as sleep",
"from tests.time import sync_current_time as current_time, sync_sleep as sleep",
),
(
"from tests.concurrency import async_open_nursery as open_nursery",
"from tests.concurrency import sync_open_nursery as open_nursery",
),
("AsyncIterator", "Iterator"),
("Async([A-Z][A-Za-z0-9_]*)", r"\2"),
("async def", "def"),
("async with", "with"),
("async for", "for"),
("await ", ""),
("handle_async_request", "handle_request"),
("aclose", "close"),
("aiter_stream", "iter_stream"),
("aread", "read"),
("asynccontextmanager", "contextmanager"),
("__aenter__", "__enter__"),
("__aexit__", "__exit__"),
("__aiter__", "__iter__"),
("@pytest.mark.anyio", ""),
("@pytest.mark.trio", ""),
("AutoBackend", "SyncBackend"),
]
COMPILED_SUBS = [
(re.compile(r"(^|\b)" + regex + r"($|\b)"), repl) for regex, repl in SUBS
]
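
How the wrapping shifts group numbers (an illustration, not part of the
commit): group 1 of each compiled pattern is the (^|\b) boundary, so the name
captured by "Async([A-Z][A-Za-z0-9_]*)" lands in group 2, which is why that
entry's replacement is r"\2".

import re

pattern = re.compile(r"(^|\b)Async([A-Z][A-Za-z0-9_]*)($|\b)")
assert pattern.sub(r"\2", "class AsyncConnectionPool:") == "class ConnectionPool:"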

USED_SUBS = set()


def unasync_line(line):
for index, (regex, repl) in enumerate(COMPILED_SUBS):
old_line = line
@@ -55,30 +65,30 @@ def unasync_file_check(in_path, out_path):
for in_line, out_line in zip(in_file.readlines(), out_file.readlines()):
expected = unasync_line(in_line)
if out_line != expected:
print(f"unasync mismatch between {in_path!r} and {out_path!r}")
print(f"Async code: {in_line!r}")
print(f"Expected sync code: {expected!r}")
print(f"Actual sync code: {out_line!r}")
sys.exit(1)


def unasync_dir(in_dir, out_dir, check_only=False):
for dirpath, dirnames, filenames in os.walk(in_dir):
for filename in filenames:
if not filename.endswith(".py"):
continue
rel_dir = os.path.relpath(dirpath, in_dir)
in_path = os.path.normpath(os.path.join(in_dir, rel_dir, filename))
out_path = os.path.normpath(os.path.join(out_dir, rel_dir, filename))
print(in_path, "->", out_path)
if check_only:
unasync_file_check(in_path, out_path)
else:
unasync_file(in_path, out_path)


def main():
check_only = "--check" in sys.argv
unasync_dir("httpcore/_async", "httpcore/_sync", check_only=check_only)
unasync_dir("tests/_async", "tests/_sync", check_only=check_only)

@@ -87,8 +97,8 @@ def main():

print("These patterns were not used:")
pprint(unused_subs)
exit(1)


if __name__ == "__main__":
main()
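
An end-to-end sketch (assuming it runs from the repository root, where this
unasync.py shadows any installed package of the same name): a single async
test line is rewritten by applying the substitution table in order.

from unasync import unasync_line

line = "    async with open_nursery() as nursery:\n"
assert unasync_line(line) == "    with open_nursery() as nursery:\n"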
