Fix flaky tests with unclosed warnings (aio-libs#8391)
Dreamsorcerer authored Apr 29, 2024
1 parent 04b1212 commit 0ba6cf2
88 changes: 30 additions & 58 deletions tests/test_proxy_functional.py
@@ -17,18 +17,6 @@
 from aiohttp import web
 from aiohttp.client_exceptions import ClientConnectionError
 
-pytestmark = [
-    pytest.mark.filterwarnings(
-        "ignore:unclosed <socket.socket fd=.*:ResourceWarning",
-    ),
-    pytest.mark.filterwarnings(
-        "ignore:"
-        "unclosed transport <_SelectorSocketTransport closing fd=.*"
-        ":ResourceWarning",
-    ),
-]
-
-
 ASYNCIO_SUPPORTS_TLS_IN_TLS = sys.version_info >= (3, 11)
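The change repeated throughout this diff is to consume sessions and responses as async context managers instead of filtering the ResourceWarnings they leave behind. A minimal sketch of that pattern outside the test suite, against a hypothetical URL (not code from this commit):

import asyncio

import aiohttp


async def main() -> None:
    async with aiohttp.ClientSession() as session:
        # Exiting the response block releases the connection back to the
        # connector; exiting the session block closes the connector and its
        # sockets, so nothing is left for the GC to emit "unclosed ..." about.
        async with session.get("https://example.com") as response:  # hypothetical URL
            assert response.status == 200
            body = await response.text()  # read the body before the block exits
    print(len(body))


asyncio.run(main())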


@@ -120,16 +108,14 @@ async def test_secure_https_proxy_absolute_path(
     conn = aiohttp.TCPConnector()
     sess = aiohttp.ClientSession(connector=conn)
 
-    response = await sess.get(
+    async with sess.get(
         web_server_endpoint_url,
         proxy=secure_proxy_url,
         ssl=client_ssl_ctx,  # used for both proxy and endpoint connections
-    )
-
-    assert response.status == 200
-    assert await response.text() == web_server_endpoint_payload
+    ) as response:
+        assert response.status == 200
+        assert await response.text() == web_server_endpoint_payload
 
-    response.close()
     await sess.close()
     await conn.close()

@@ -195,7 +181,8 @@ async def test_https_proxy_unsupported_tls_in_tls(
         ClientConnectionError,
         match=expected_exception_reason,
     ) as conn_err:
-        await sess.get(url, proxy=secure_proxy_url, ssl=client_ssl_ctx)
+        async with sess.get(url, proxy=secure_proxy_url, ssl=client_ssl_ctx):
+            pass
 
     assert isinstance(conn_err.value.__cause__, TypeError)
     assert match_regex(f"^{type_err!s}$", str(conn_err.value.__cause__))
@@ -255,13 +242,11 @@ async def proxy_server():
 def get_request(loop: Any):
     async def _request(method="GET", *, url, trust_env=False, **kwargs):
         connector = aiohttp.TCPConnector(ssl=False)
-        client = aiohttp.ClientSession(connector=connector, trust_env=trust_env)
-        try:
-            resp = await client.request(method, url, **kwargs)
-            await resp.release()
-            return resp
-        finally:
-            await client.close()
+        async with aiohttp.ClientSession(
+            connector=connector, trust_env=trust_env
+        ) as client:
+            async with client.request(method, url, **kwargs) as resp:
+                return resp
 
     return _request
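Note that the rewritten fixture returns the response only after both async with blocks have exited, so callers receive an already-released response. A sketch of the same shape as a standalone helper (hypothetical function name and URL, not part of the commit); unlike the fixture, it reads the body first so the data stays available after release:

import asyncio

import aiohttp


async def fetch_released(url: str) -> aiohttp.ClientResponse:
    async with aiohttp.ClientSession() as client:
        async with client.request("GET", url) as resp:
            await resp.read()  # cache the body while the connection is still live
            return resp  # both __aexit__ hooks still run before the caller gets resp


async def main() -> None:
    resp = await fetch_released("https://example.com")  # hypothetical URL
    # Status, headers and the cached body remain readable after release.
    print(resp.status, len(await resp.read()))


asyncio.run(main())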

@@ -411,11 +396,8 @@ async def test_proxy_http_acquired_cleanup_force(
     assert 0 == len(conn._acquired)
 
     async def request():
-        resp = await sess.get(url, proxy=proxy.url)
-
-        assert 1 == len(conn._acquired)
-
-        await resp.release()
+        async with sess.get(url, proxy=proxy.url):
+            assert 1 == len(conn._acquired)
 
     await request()
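The invariant these acquired-cleanup tests check is that a connection counts as acquired only while the response context is open. A small sketch of that behaviour, assuming a hypothetical URL and poking the same private _acquired attribute the test uses:

import asyncio

import aiohttp


async def main() -> None:
    conn = aiohttp.TCPConnector()
    async with aiohttp.ClientSession(connector=conn) as sess:
        assert len(conn._acquired) == 0
        async with sess.get("https://example.com"):  # hypothetical URL
            # The connection stays acquired for the lifetime of the block...
            assert len(conn._acquired) == 1
        # ...and is handed back to the connector as soon as the block exits.
        assert len(conn._acquired) == 0


asyncio.run(main())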

@@ -439,13 +421,11 @@ async def request(pid):
         # process requests only one by one
         nonlocal current_pid
 
-        resp = await sess.get(url, proxy=proxy.url)
-
-        current_pid = pid
-        await asyncio.sleep(0.2)
-        assert current_pid == pid
+        async with sess.get(url, proxy=proxy.url) as resp:
+            current_pid = pid
+            await asyncio.sleep(0.2)
+            assert current_pid == pid
 
-        await resp.release()
         return resp
 
     requests = [request(pid) for pid in range(multi_conn_num)]
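This assertion only holds because the connector behind sess allows a single connection, so the request bodies run one at a time: while one response context is open, the lone connection stays acquired and every other request() waits inside sess.get(). A self-contained sketch of that serialization, assuming a throwaway local server rather than the proxy fixture used above:

import asyncio

import aiohttp
from aiohttp import web


async def handler(request: web.Request) -> web.Response:
    return web.Response(text="ok")


async def main() -> None:
    # Throwaway local server so the sketch runs on its own (port chosen arbitrarily).
    app = web.Application()
    app.router.add_get("/", handler)
    runner = web.AppRunner(app)
    await runner.setup()
    await web.TCPSite(runner, "127.0.0.1", 8080).start()

    current_pid = None
    conn = aiohttp.TCPConnector(limit=1)  # one pooled connection in total

    async with aiohttp.ClientSession(connector=conn) as sess:

        async def request(pid: int) -> None:
            nonlocal current_pid
            # Holding the response keeps the only connection acquired, so no
            # other request() can reach this block until it is released.
            async with sess.get("http://127.0.0.1:8080/") as resp:
                current_pid = pid
                await asyncio.sleep(0.05)
                assert current_pid == pid
                assert resp.status == 200

        await asyncio.gather(*(request(pid) for pid in range(5)))

    await runner.cleanup()


asyncio.run(main())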
@@ -498,9 +478,8 @@ async def xtest_proxy_https_send_body(proxy_test_server: Any, loop: Any) -> None
     proxy.return_value = {"status": 200, "body": b"1" * (2**20)}
     url = "https://www.google.com.ua/search?q=aiohttp proxy"
 
-    resp = await sess.get(url, proxy=proxy.url)
-    body = await resp.read()
-    await resp.release()
+    async with sess.get(url, proxy=proxy.url) as resp:
+        body = await resp.read()
     await sess.close()
 
     assert body == b"1" * (2**20)
@@ -598,11 +577,8 @@ async def xtest_proxy_https_acquired_cleanup(proxy_test_server: Any, loop: Any)
     assert 0 == len(conn._acquired)
 
     async def request():
-        resp = await sess.get(url, proxy=proxy.url)
-
-        assert 1 == len(conn._acquired)
-
-        await resp.release()
+        async with sess.get(url, proxy=proxy.url):
+            assert 1 == len(conn._acquired)
 
     await request()

@@ -624,11 +600,8 @@ async def xtest_proxy_https_acquired_cleanup_force(
     assert 0 == len(conn._acquired)
 
     async def request():
-        resp = await sess.get(url, proxy=proxy.url)
-
-        assert 1 == len(conn._acquired)
-
-        await resp.release()
+        async with sess.get(url, proxy=proxy.url):
+            assert 1 == len(conn._acquired)
 
     await request()

@@ -652,13 +625,11 @@ async def request(pid):
         # process requests only one by one
         nonlocal current_pid
 
-        resp = await sess.get(url, proxy=proxy.url)
-
-        current_pid = pid
-        await asyncio.sleep(0.2)
-        assert current_pid == pid
+        async with sess.get(url, proxy=proxy.url) as resp:
+            current_pid = pid
+            await asyncio.sleep(0.2)
+            assert current_pid == pid
 
-        await resp.release()
         return resp
 
     requests = [request(pid) for pid in range(multi_conn_num)]
@@ -872,8 +843,9 @@ async def test_proxy_auth() -> None:
         with pytest.raises(
             ValueError, match=r"proxy_auth must be None or BasicAuth\(\) tuple"
         ):
-            await session.get(
+            async with session.get(
                 "http://python.org",
                 proxy="http://proxy.example.com",
                 proxy_auth=("user", "pass"),
-            )
+            ):
+                pass
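The same context-manager style also works when the call is expected to fail during request setup: the ValueError escapes from entering the block, pytest.raises catches it, and there is no half-open response left to clean up. A sketch of that shape as its own test (hypothetical test name, assuming an async-capable pytest runner such as aiohttp's own pytest plugin or pytest-asyncio):

import aiohttp
import pytest


async def test_tuple_proxy_auth_rejected() -> None:  # hypothetical test name
    async with aiohttp.ClientSession() as session:
        with pytest.raises(ValueError):
            async with session.get(
                "http://python.org",
                proxy="http://proxy.example.com",
                # proxy_auth must be aiohttp.BasicAuth, so a plain tuple raises
                # before any connection is attempted.
                proxy_auth=("user", "pass"),
            ):
                pass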
