High severity · CVSS 7.5 · NVD Advisory · Published Apr 1, 2026 · Updated Apr 6, 2026
CVE-2026-22815
Description
AIOHTTP is an asynchronous HTTP client/server framework for asyncio and Python. Prior to version 3.13.4, insufficient restrictions in header/trailer handling could cause uncapped memory usage. This issue has been patched in version 3.13.4.
Affected packages
Versions sourced from the GitHub Security Advisory.
| Package | Affected versions | Patched versions |
|---|---|---|
| aiohttp (PyPI) | < 3.13.4 | 3.13.4 |
Affected products
Patches (1)
1. 0c2e9da51126 — Add max_headers parameter (#11955) (#11959) (#11960)
12 files changed · +277 −130
aiohttp/client_proto.py+2 −0 modified@@ -230,6 +230,7 @@ def set_response_params( timeout_ceil_threshold: float = 5, max_line_size: int = 8190, max_field_size: int = 8190, + max_headers: int = 128, ) -> None: self._skip_payload = skip_payload @@ -248,6 +249,7 @@ def set_response_params( auto_decompress=auto_decompress, max_line_size=max_line_size, max_field_size=max_field_size, + max_headers=max_headers, ) if self._tail:
aiohttp/client.py+9 −0 modified@@ -195,6 +195,7 @@ class _RequestOptions(TypedDict, total=False): auto_decompress: Union[bool, None] max_line_size: Union[int, None] max_field_size: Union[int, None] + max_headers: Union[int, None] middlewares: Optional[Sequence[ClientMiddlewareType]] @@ -259,6 +260,7 @@ class ClientSession: "_read_bufsize", "_max_line_size", "_max_field_size", + "_max_headers", "_resolve_charset", "_default_proxy", "_default_proxy_auth", @@ -303,6 +305,7 @@ def __init__( read_bufsize: int = 2**16, max_line_size: int = 8190, max_field_size: int = 8190, + max_headers: int = 128, fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8", middlewares: Sequence[ClientMiddlewareType] = (), ssl_shutdown_timeout: Union[_SENTINEL, None, float] = sentinel, @@ -402,6 +405,7 @@ def __init__( self._read_bufsize = read_bufsize self._max_line_size = max_line_size self._max_field_size = max_field_size + self._max_headers = max_headers # Convert to list of tuples if headers: @@ -518,6 +522,7 @@ async def _request( auto_decompress: Optional[bool] = None, max_line_size: Optional[int] = None, max_field_size: Optional[int] = None, + max_headers: Optional[int] = None, middlewares: Optional[Sequence[ClientMiddlewareType]] = None, ) -> ClientResponse: @@ -607,6 +612,9 @@ async def _request( if max_field_size is None: max_field_size = self._max_field_size + if max_headers is None: + max_headers = self._max_headers + traces = [ Trace( self, @@ -750,6 +758,7 @@ async def _connect_and_send_request( timeout_ceil_threshold=self._connector._timeout_ceil_threshold, max_line_size=max_line_size, max_field_size=max_field_size, + max_headers=max_headers, ) try: resp = await req.send(conn)
aiohttp/http_exceptions.py+5 −4 modified@@ -80,11 +80,12 @@ class DecompressSizeError(PayloadEncodingError): class LineTooLong(BadHttpMessage): def __init__( - self, line: str, limit: str = "Unknown", actual_size: str = "Unknown" + self, + line: Union[str, bytes], + limit: Union[str, int] = "Unknown", + actual_size: str = "Unknown", ) -> None: - super().__init__( - f"Got more than {limit} bytes ({actual_size}) when reading {line}." - ) + super().__init__(f"Got more than {limit} bytes when reading: {line!r}.") self.args = (line, limit, actual_size)
aiohttp/http_parser.py+49 −35 modified@@ -169,20 +169,10 @@ def parse_headers( raise InvalidHeader(line) bvalue = bvalue.lstrip(b" \t") - if len(bname) > self.max_field_size: - raise LineTooLong( - "request header name {}".format( - bname.decode("utf8", "backslashreplace") - ), - str(self.max_field_size), - str(len(bname)), - ) name = bname.decode("utf-8", "surrogateescape") if not TOKENRE.fullmatch(name): raise InvalidHeader(bname) - header_length = len(bvalue) - # next line lines_idx += 1 line = lines[lines_idx] @@ -192,16 +182,14 @@ def parse_headers( # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding if continuation: + header_length = len(bvalue) bvalue_lst = [bvalue] while continuation: header_length += len(line) if header_length > self.max_field_size: + header_line = bname + b": " + b"".join(bvalue_lst) raise LineTooLong( - "request header field {}".format( - bname.decode("utf8", "backslashreplace") - ), - str(self.max_field_size), - str(header_length), + header_line[:100] + b"...", self.max_field_size ) bvalue_lst.append(line) @@ -215,15 +203,6 @@ def parse_headers( line = b"" break bvalue = b"".join(bvalue_lst) - else: - if header_length > self.max_field_size: - raise LineTooLong( - "request header field {}".format( - bname.decode("utf8", "backslashreplace") - ), - str(self.max_field_size), - str(header_length), - ) bvalue = bvalue.strip(b" \t") value = bvalue.decode("utf-8", "surrogateescape") @@ -254,7 +233,7 @@ def __init__( loop: Optional[asyncio.AbstractEventLoop] = None, limit: int = 2**16, max_line_size: int = 8190, - max_headers: int = 32768, + max_headers: int = 128, max_field_size: int = 8190, timer: Optional[BaseTimerContext] = None, code: Optional[int] = None, @@ -269,6 +248,7 @@ def __init__( self.max_line_size = max_line_size self.max_headers = max_headers self.max_field_size = max_field_size + self.max_headers = max_headers self.timer = timer self.code = code self.method = method @@ -327,6 +307,7 @@ def 
feed_data( data_len = len(data) start_pos = 0 loop = self.loop + max_line_length = self.max_line_size should_close = False while start_pos < data_len: @@ -348,11 +329,21 @@ def feed_data( line = data[start_pos:pos] if SEP == b"\n": # For lax response parsing line = line.rstrip(b"\r") + if len(line) > max_line_length: + raise LineTooLong(line[:100] + b"...", max_line_length) + self._lines.append(line) + # After processing the status/request line, everything is a header. + max_line_length = self.max_field_size + + if len(self._lines) > self.max_headers: + raise BadHttpMessage("Too many headers received") + start_pos = pos + len(SEP) # \r\n\r\n found if self._lines[-1] == EMPTY: + max_trailers = self.max_headers - len(self._lines) try: msg: _MsgT = self.parse_message(self._lines) finally: @@ -411,6 +402,9 @@ def get_content_length() -> Optional[int]: auto_decompress=self._auto_decompress, lax=self.lax, headers_parser=self._headers_parser, + max_line_size=self.max_line_size, + max_field_size=self.max_field_size, + max_trailers=max_trailers, ) if not payload_parser.done: self._payload_parser = payload_parser @@ -430,6 +424,9 @@ def get_content_length() -> Optional[int]: auto_decompress=self._auto_decompress, lax=self.lax, headers_parser=self._headers_parser, + max_line_size=self.max_line_size, + max_field_size=self.max_field_size, + max_trailers=max_trailers, ) elif not empty_body and length is None and self.read_until_eof: payload = StreamReader( @@ -449,6 +446,9 @@ def get_content_length() -> Optional[int]: auto_decompress=self._auto_decompress, lax=self.lax, headers_parser=self._headers_parser, + max_line_size=self.max_line_size, + max_field_size=self.max_field_size, + max_trailers=max_trailers, ) if not payload_parser.done: self._payload_parser = payload_parser @@ -459,6 +459,8 @@ def get_content_length() -> Optional[int]: should_close = msg.should_close else: self._tail = data[start_pos:] + if len(self._tail) > self.max_line_size: + raise 
LineTooLong(self._tail[:100] + b"...", self.max_line_size) data = EMPTY break @@ -594,11 +596,6 @@ def parse_message(self, lines: List[bytes]) -> RawRequestMessage: except ValueError: raise BadHttpMethod(line) from None - if len(path) > self.max_line_size: - raise LineTooLong( - "Status line is too long", str(self.max_line_size), str(len(path)) - ) - # method if not TOKENRE.fullmatch(method): raise BadHttpMethod(method) @@ -714,11 +711,6 @@ def parse_message(self, lines: List[bytes]) -> RawResponseMessage: status = status.strip() reason = "" - if len(reason) > self.max_line_size: - raise LineTooLong( - "Status line is too long", str(self.max_line_size), str(len(reason)) - ) - # version match = VERSRE.fullmatch(version) if match is None: @@ -783,6 +775,9 @@ def __init__( lax: bool = False, *, headers_parser: HeadersParser, + max_line_size: int = 8190, + max_field_size: int = 8190, + max_trailers: int = 128, ) -> None: self._length = 0 self._type = ParseState.PARSE_UNTIL_EOF @@ -792,6 +787,9 @@ def __init__( self._auto_decompress = auto_decompress self._lax = lax self._headers_parser = headers_parser + self._max_line_size = max_line_size + self._max_field_size = max_field_size + self._max_trailers = max_trailers self._trailer_lines: list[bytes] = [] self.done = False @@ -855,6 +853,15 @@ def feed_data( # Chunked transfer encoding parser elif self._type == ParseState.PARSE_CHUNKED: if self._chunk_tail: + # We should never have a tail if we're inside the payload body. + assert self._chunk != ChunkState.PARSE_CHUNKED_CHUNK + # We should check the length is sane. 
+ max_line_length = self._max_line_size + if self._chunk == ChunkState.PARSE_TRAILERS: + max_line_length = self._max_field_size + if len(self._chunk_tail) > max_line_length: + raise LineTooLong(self._chunk_tail[:100] + b"...", max_line_length) + chunk = self._chunk_tail + chunk self._chunk_tail = b"" @@ -938,8 +945,15 @@ def feed_data( chunk = chunk[pos + len(SEP) :] if SEP == b"\n": # For lax response parsing line = line.rstrip(b"\r") + + if len(line) > self._max_field_size: + raise LineTooLong(line[:100] + b"...", self._max_field_size) + self._trailer_lines.append(line) + if len(self._trailer_lines) > self._max_trailers: + raise BadHttpMessage("Too many trailers received") + # \r\n\r\n found, end of stream if self._trailer_lines[-1] == b"": # Headers and trailers are defined the same way,
aiohttp/_http_parser.pyx+18 −13 modified@@ -279,6 +279,7 @@ cdef class HttpParser: object _name bytes _raw_value bint _has_value + int _header_name_size object _protocol object _loop @@ -329,7 +330,7 @@ cdef class HttpParser: self, cparser.llhttp_type mode, object protocol, object loop, int limit, object timer=None, - size_t max_line_size=8190, size_t max_headers=32768, + size_t max_line_size=8190, size_t max_headers=128, size_t max_field_size=8190, payload_exception=None, bint response_with_body=True, bint read_until_eof=False, bint auto_decompress=True, @@ -352,6 +353,7 @@ cdef class HttpParser: self._raw_name = EMPTY_BYTES self._raw_value = EMPTY_BYTES self._has_value = False + self._header_name_size = 0 self._max_line_size = max_line_size self._max_headers = max_headers @@ -383,11 +385,14 @@ cdef class HttpParser: value = self._raw_value.decode('utf-8', 'surrogateescape') self._headers.append((name, value)) + if len(self._headers) > self._max_headers: + raise BadHttpMessage("Too many headers received") if name is CONTENT_ENCODING: self._content_encoding = value self._has_value = False + self._header_name_size = 0 self._raw_headers.append((self._raw_name, self._raw_value)) self._raw_name = EMPTY_BYTES self._raw_value = EMPTY_BYTES @@ -574,7 +579,7 @@ cdef class HttpRequestParser(HttpParser): def __init__( self, protocol, loop, int limit, timer=None, - size_t max_line_size=8190, size_t max_headers=32768, + size_t max_line_size=8190, size_t max_headers=128, size_t max_field_size=8190, payload_exception=None, bint response_with_body=True, bint read_until_eof=False, bint auto_decompress=True, @@ -638,7 +643,7 @@ cdef class HttpResponseParser(HttpParser): def __init__( self, protocol, loop, int limit, timer=None, - size_t max_line_size=8190, size_t max_headers=32768, + size_t max_line_size=8190, size_t max_headers=128, size_t max_field_size=8190, payload_exception=None, bint response_with_body=True, bint read_until_eof=False, bint auto_decompress=True @@ -677,8 
+682,8 @@ cdef int cb_on_url(cparser.llhttp_t* parser, cdef HttpParser pyparser = <HttpParser>parser.data try: if length > pyparser._max_line_size: - raise LineTooLong( - 'Status line is too long', pyparser._max_line_size, length) + status = pyparser._buf + at[:length] + raise LineTooLong(status[:100] + b"...", pyparser._max_line_size) extend(pyparser._buf, at, length) except BaseException as ex: pyparser._last_error = ex @@ -690,11 +695,10 @@ cdef int cb_on_url(cparser.llhttp_t* parser, cdef int cb_on_status(cparser.llhttp_t* parser, const char *at, size_t length) except -1: cdef HttpParser pyparser = <HttpParser>parser.data - cdef str reason try: if length > pyparser._max_line_size: - raise LineTooLong( - 'Status line is too long', pyparser._max_line_size, length) + reason = pyparser._buf + at[:length] + raise LineTooLong(reason[:100] + b"...", pyparser._max_line_size) extend(pyparser._buf, at, length) except BaseException as ex: pyparser._last_error = ex @@ -711,8 +715,9 @@ cdef int cb_on_header_field(cparser.llhttp_t* parser, pyparser._on_status_complete() size = len(pyparser._raw_name) + length if size > pyparser._max_field_size: - raise LineTooLong( - 'Header name is too long', pyparser._max_field_size, size) + name = pyparser._raw_name + at[:length] + raise LineTooLong(name[:100] + b"...", pyparser._max_field_size) + pyparser._header_name_size = size pyparser._on_header_field(at, length) except BaseException as ex: pyparser._last_error = ex @@ -727,9 +732,9 @@ cdef int cb_on_header_value(cparser.llhttp_t* parser, cdef Py_ssize_t size try: size = len(pyparser._raw_value) + length - if size > pyparser._max_field_size: - raise LineTooLong( - 'Header value is too long', pyparser._max_field_size, size) + if pyparser._header_name_size + size > pyparser._max_field_size: + value = pyparser._raw_value + at[:length] + raise LineTooLong(value[:100] + b"...", pyparser._max_field_size) pyparser._on_header_value(at, length) except BaseException as ex: pyparser._last_error 
= ex
aiohttp/web_protocol.py+1 −1 modified@@ -188,7 +188,7 @@ def __init__( access_log_format: str = AccessLogger.LOG_FORMAT, debug: bool = False, max_line_size: int = 8190, - max_headers: int = 32768, + max_headers: int = 128, max_field_size: int = 8190, lingering_time: float = 10.0, read_bufsize: int = 2**16,
CHANGES/11955.feature.rst+1 −0 added@@ -0,0 +1 @@ +Added ``max_headers`` parameter to limit the number of headers that should be read from a response -- by :user:`Dreamsorcerer`.
docs/client_reference.rst+13 −4 modified@@ -57,6 +57,7 @@ The client session supports the context manager protocol for self closing. read_bufsize=2**16, \ max_line_size=8190, \ max_field_size=8190, \ + max_headers=128, \ fallback_charset_resolver=lambda r, b: "utf-8", \ ssl_shutdown_timeout=0) @@ -245,7 +246,9 @@ The client session supports the context manager protocol for self closing. :param int max_line_size: Maximum allowed size of lines in responses. - :param int max_field_size: Maximum allowed size of header fields in responses. + :param int max_field_size: Maximum allowed size of header name and value combined in responses. + + :param int max_headers: Maximum number of headers and trailers combined in responses. :param Callable[[ClientResponse,bytes],str] fallback_charset_resolver: A :term:`callable` that accepts a :class:`ClientResponse` and the @@ -425,7 +428,8 @@ The client session supports the context manager protocol for self closing. read_bufsize=None, \ auto_decompress=None, \ max_line_size=None, \ - max_field_size=None) + max_field_size=None, \ + max_headers=None) :async: :noindexentry: @@ -589,7 +593,9 @@ The client session supports the context manager protocol for self closing. :param int max_line_size: Maximum allowed size of lines in responses. - :param int max_field_size: Maximum allowed size of header fields in responses. + :param int max_field_size: Maximum allowed size of header name and value combined in responses. + + :param int max_headers: Maximum number of headers and trailers combined in responses. :return ClientResponse: a :class:`client response <ClientResponse>` object. @@ -909,6 +915,7 @@ certification chaining. auto_decompress=None, \ max_line_size=None, \ max_field_size=None, \ + max_headers=None, \ version=aiohttp.HttpVersion11, \ connector=None) :async: @@ -1046,7 +1053,9 @@ certification chaining. :param int max_line_size: Maximum allowed size of lines in responses. 
- :param int max_field_size: Maximum allowed size of header fields in responses. + :param int max_field_size: Maximum allowed size of header name and value combined in responses. + + :param int max_headers: Maximum number of headers and trailers combined in responses. :param aiohttp.protocol.HttpVersion version: Request HTTP version, ``HTTP 1.1`` by default. (optional)
docs/web_reference.rst+3 −2 modified@@ -2837,9 +2837,10 @@ application on specific TCP or Unix socket, e.g.:: :attr:`helpers.AccessLogger.LOG_FORMAT`. :param int max_line_size: Optional maximum header line size. Default: ``8190``. - :param int max_headers: Optional maximum header size. Default: ``32768``. - :param int max_field_size: Optional maximum header field size. Default: + :param int max_field_size: Optional maximum header combined name and value size. Default: ``8190``. + :param int max_headers: Optional maximum number of headers and trailers combined. Default: + ``128``. :param float lingering_time: Maximum time during which the server reads and ignores additional data coming from the client when
tests/test_client_functional.py+85 −32 modified@@ -4417,17 +4417,17 @@ async def handler(request): assert resp.headers["Content-Type"] == "text/plain; charset=utf-8" -async def test_max_field_size_session_default(aiohttp_client) -> None: - async def handler(request): - return web.Response(headers={"Custom": "x" * 8190}) +async def test_max_field_size_session_default(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response(headers={"Custom": "x" * 8182}) app = web.Application() app.add_routes([web.get("/", handler)]) client = await aiohttp_client(app) - async with await client.get("/") as resp: - assert resp.headers["Custom"] == "x" * 8190 + async with client.get("/") as resp: + assert resp.headers["Custom"] == "x" * 8182 async def test_max_field_size_session_default_fail(aiohttp_client) -> None: @@ -4442,43 +4442,96 @@ async def handler(request): await client.get("/") -async def test_max_field_size_session_explicit(aiohttp_client) -> None: - async def handler(request): - return web.Response(headers={"Custom": "x" * 8191}) +async def test_max_field_size_session_explicit(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response(headers={"Custom": "x" * 8192}) app = web.Application() app.add_routes([web.get("/", handler)]) - client = await aiohttp_client(app, max_field_size=8191) + client = await aiohttp_client(app, max_field_size=8200) - async with await client.get("/") as resp: - assert resp.headers["Custom"] == "x" * 8191 + async with client.get("/") as resp: + assert resp.headers["Custom"] == "x" * 8192 -async def test_max_field_size_request_explicit(aiohttp_client) -> None: - async def handler(request): - return web.Response(headers={"Custom": "x" * 8191}) +async def test_max_headers_session_default(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: + return 
web.Response(headers={f"Custom-{i}": "x" for i in range(120)}) app = web.Application() app.add_routes([web.get("/", handler)]) client = await aiohttp_client(app) - async with await client.get("/", max_field_size=8191) as resp: - assert resp.headers["Custom"] == "x" * 8191 + async with client.get("/") as resp: + assert resp.headers["Custom-119"] == "x" -async def test_max_line_size_session_default(aiohttp_client) -> None: - async def handler(request): - return web.Response(status=200, reason="x" * 8190) +async def test_max_headers_session_default_fail( + aiohttp_client: AiohttpClient, +) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response(headers={f"Custom-{i}": "x" for i in range(129)}) app = web.Application() app.add_routes([web.get("/", handler)]) client = await aiohttp_client(app) + with pytest.raises(aiohttp.ClientResponseError): + await client.get("/") - async with await client.get("/") as resp: - assert resp.reason == "x" * 8190 + +async def test_max_headers_session_explicit(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response(headers={f"Custom-{i}": "x" for i in range(130)}) + + app = web.Application() + app.add_routes([web.get("/", handler)]) + + client = await aiohttp_client(app, max_headers=140) + + async with client.get("/") as resp: + assert resp.headers["Custom-129"] == "x" + + +async def test_max_headers_request_explicit(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response(headers={f"Custom-{i}": "x" for i in range(130)}) + + app = web.Application() + app.add_routes([web.get("/", handler)]) + + client = await aiohttp_client(app) + + async with client.get("/", max_headers=140) as resp: + assert resp.headers["Custom-129"] == "x" + + +async def test_max_field_size_request_explicit(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: + 
return web.Response(headers={"Custom": "x" * 8192}) + + app = web.Application() + app.add_routes([web.get("/", handler)]) + + client = await aiohttp_client(app) + + async with client.get("/", max_field_size=8200) as resp: + assert resp.headers["Custom"] == "x" * 8192 + + +async def test_max_line_size_session_default(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response(status=200, reason="x" * 8177) + + app = web.Application() + app.add_routes([web.get("/", handler)]) + + client = await aiohttp_client(app) + + async with client.get("/") as resp: + assert resp.reason == "x" * 8177 async def test_max_line_size_session_default_fail(aiohttp_client) -> None: @@ -4493,30 +4546,30 @@ async def handler(request): await client.get("/") -async def test_max_line_size_session_explicit(aiohttp_client) -> None: - async def handler(request): - return web.Response(status=200, reason="x" * 8191) +async def test_max_line_size_session_explicit(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response(status=200, reason="x" * 8197) app = web.Application() app.add_routes([web.get("/", handler)]) - client = await aiohttp_client(app, max_line_size=8191) + client = await aiohttp_client(app, max_line_size=8210) - async with await client.get("/") as resp: - assert resp.reason == "x" * 8191 + async with client.get("/") as resp: + assert resp.reason == "x" * 8197 -async def test_max_line_size_request_explicit(aiohttp_client) -> None: - async def handler(request): - return web.Response(status=200, reason="x" * 8191) +async def test_max_line_size_request_explicit(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: + return web.Response(status=200, reason="x" * 8197) app = web.Application() app.add_routes([web.get("/", handler)]) client = await aiohttp_client(app) - async with await client.get("/", max_line_size=8191) as 
resp: - assert resp.reason == "x" * 8191 + async with client.get("/", max_line_size=8210) as resp: + assert resp.reason == "x" * 8197 async def test_rejected_upload(
tests/test_http_exceptions.py+9 −9 modified@@ -69,32 +69,32 @@ def test_repr(self) -> None: class TestLineTooLong: def test_ctor(self) -> None: - err = http_exceptions.LineTooLong("spam", "10", "12") + err = http_exceptions.LineTooLong(b"spam", 10) assert err.code == 400 - assert err.message == "Got more than 10 bytes (12) when reading spam." + assert err.message == "Got more than 10 bytes when reading: b'spam'." assert err.headers is None def test_pickle(self) -> None: - err = http_exceptions.LineTooLong(line="spam", limit="10", actual_size="12") + err = http_exceptions.LineTooLong(line=b"spam", limit=10, actual_size="12") err.foo = "bar" for proto in range(pickle.HIGHEST_PROTOCOL + 1): pickled = pickle.dumps(err, proto) err2 = pickle.loads(pickled) assert err2.code == 400 - assert err2.message == ("Got more than 10 bytes (12) when reading spam.") + assert err2.message == ("Got more than 10 bytes when reading: b'spam'.") assert err2.headers is None assert err2.foo == "bar" def test_str(self) -> None: - err = http_exceptions.LineTooLong(line="spam", limit="10", actual_size="12") - expected = "400, message:\n Got more than 10 bytes (12) when reading spam." + err = http_exceptions.LineTooLong(line=b"spam", limit=10) + expected = "400, message:\n Got more than 10 bytes when reading: b'spam'." assert str(err) == expected def test_repr(self) -> None: - err = http_exceptions.LineTooLong(line="spam", limit="10", actual_size="12") + err = http_exceptions.LineTooLong(line=b"spam", limit=10) assert repr(err) == ( - "<LineTooLong: 400, message='Got more than " - "10 bytes (12) when reading spam.'>" + '<LineTooLong: 400, message="Got more than ' + "10 bytes when reading: b'spam'.\">" )
tests/test_http_parser.py+82 −30 modified@@ -23,6 +23,7 @@ HttpPayloadParser, HttpRequestParser, HttpRequestParserPy, + HttpResponseParser, HttpResponseParserPy, HttpVersion, ) @@ -75,7 +76,7 @@ def parser(loop: Any, protocol: Any, request: Any): loop, 2**16, max_line_size=8190, - max_headers=32768, + max_headers=128, max_field_size=8190, ) @@ -94,7 +95,7 @@ def response(loop: Any, protocol: Any, request: Any): loop, 2**16, max_line_size=8190, - max_headers=32768, + max_headers=128, max_field_size=8190, read_until_eof=True, ) @@ -270,15 +271,6 @@ def test_whitespace_before_header(parser: Any) -> None: parser.feed_data(text) -def test_parse_headers_longline(parser: Any) -> None: - invalid_unicode_byte = b"\xd9" - header_name = b"Test" + invalid_unicode_byte + b"Header" + b"A" * 8192 - text = b"GET /test HTTP/1.1\r\n" + header_name + b": test\r\n" + b"\r\n" + b"\r\n" - with pytest.raises((http_exceptions.LineTooLong, http_exceptions.BadHttpMessage)): - # FIXME: `LineTooLong` doesn't seem to actually be happening - parser.feed_data(text) - - @pytest.fixture def xfail_c_parser_status(request) -> None: if isinstance(request.getfixturevalue("parser"), HttpRequestParserPy): @@ -710,13 +702,14 @@ def test_max_header_field_size(parser, size) -> None: name = b"t" * size text = b"GET /test HTTP/1.1\r\n" + name + b":data\r\n\r\n" - match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading" + match = "400, message:\n Got more than 8190 bytes when reading" with pytest.raises(http_exceptions.LineTooLong, match=match): - parser.feed_data(text) + for i in range(0, len(text), 5000): # pragma: no branch + parser.feed_data(text[i : i + 5000]) -def test_max_header_field_size_under_limit(parser) -> None: - name = b"t" * 8190 +def test_max_header_size_under_limit(parser: HttpRequestParser) -> None: + name = b"t" * 8185 text = b"GET /test HTTP/1.1\r\n" + name + b":data\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) @@ -738,13 +731,67 @@ def 
test_max_header_value_size(parser, size) -> None: name = b"t" * size text = b"GET /test HTTP/1.1\r\ndata:" + name + b"\r\n\r\n" - match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading" + match = "400, message:\n Got more than 8190 bytes when reading" + with pytest.raises(http_exceptions.LineTooLong, match=match): + for i in range(0, len(text), 4000): # pragma: no branch + parser.feed_data(text[i : i + 4000]) + + +def test_max_header_combined_size(parser: HttpRequestParser) -> None: + k = b"t" * 4100 + text = b"GET /test HTTP/1.1\r\n" + k + b":" + k + b"\r\n\r\n" + + match = "400, message:\n Got more than 8190 bytes when reading" with pytest.raises(http_exceptions.LineTooLong, match=match): parser.feed_data(text) -def test_max_header_value_size_under_limit(parser) -> None: - value = b"A" * 8190 +@pytest.mark.parametrize("size", [40960, 8191]) +async def test_max_trailer_size(parser: HttpRequestParser, size: int) -> None: + value = b"t" * size + text = ( + b"GET /test HTTP/1.1\r\nTransfer-Encoding: chunked\r\n\r\n" + + hex(4000)[2:].encode() + + b"\r\n" + + b"b" * 4000 + + b"\r\n0\r\ntest: " + + value + + b"\r\n\r\n" + ) + + match = "400, message:\n Got more than 8190 bytes when reading" + with pytest.raises(http_exceptions.LineTooLong, match=match): + payload = None + for i in range(0, len(text), 3000): # pragma: no branch + messages, upgrade, tail = parser.feed_data(text[i : i + 3000]) + if messages: + payload = messages[0][-1] + # Trailers are not seen until payload is read. 
+ assert payload is not None + await payload.read() + + +@pytest.mark.parametrize("headers,trailers", ((129, 0), (0, 129), (64, 65))) +async def test_max_headers( + parser: HttpRequestParser, headers: int, trailers: int +) -> None: + text = ( + b"GET /test HTTP/1.1\r\nTransfer-Encoding: chunked" + + b"".join(b"\r\nHeader-%d: Value" % i for i in range(headers)) + + b"\r\n\r\n4\r\ntest\r\n0" + + b"".join(b"\r\nTrailer-%d: Value" % i for i in range(trailers)) + + b"\r\n\r\n" + ) + + match = "Too many (headers|trailers) received" + with pytest.raises(http_exceptions.BadHttpMessage, match=match): + messages, upgrade, tail = parser.feed_data(text) + # Trailers are not seen until payload is read. + await messages[0][-1].read() + + +def test_max_header_value_size_under_limit(parser: HttpRequestParser) -> None: + value = b"A" * 8185 text = b"GET /test HTTP/1.1\r\ndata:" + value + b"\r\n\r\n" messages, upgrade, tail = parser.feed_data(text) @@ -766,13 +813,16 @@ def test_max_header_value_size_continuation(response, size) -> None: name = b"T" * (size - 5) text = b"HTTP/1.1 200 Ok\r\ndata: test\r\n " + name + b"\r\n\r\n" - match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading" + match = "400, message:\n Got more than 8190 bytes when reading" with pytest.raises(http_exceptions.LineTooLong, match=match): - response.feed_data(text) + for i in range(0, len(text), 9000): # pragma: no branch + response.feed_data(text[i : i + 9000]) -def test_max_header_value_size_continuation_under_limit(response) -> None: - value = b"A" * 8185 +def test_max_header_value_size_continuation_under_limit( + response: HttpResponseParser, +) -> None: + value = b"A" * 8179 text = b"HTTP/1.1 200 Ok\r\ndata: test\r\n " + value + b"\r\n\r\n" messages, upgrade, tail = response.feed_data(text) @@ -1011,13 +1061,13 @@ def test_http_request_parser_bad_nonascii_uri(parser: Any) -> None: @pytest.mark.parametrize("size", [40965, 8191]) def test_http_request_max_status_line(parser, size) -> 
None: path = b"t" * (size - 5) - match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading" + match = "400, message:\n Got more than 8190 bytes when reading" with pytest.raises(http_exceptions.LineTooLong, match=match): parser.feed_data(b"GET /path" + path + b" HTTP/1.1\r\n\r\n") -def test_http_request_max_status_line_under_limit(parser) -> None: - path = b"t" * (8190 - 5) +def test_http_request_max_status_line_under_limit(parser: HttpRequestParser) -> None: + path = b"t" * 8172 messages, upgraded, tail = parser.feed_data( b"GET /path" + path + b" HTTP/1.1\r\n\r\n" ) @@ -1094,13 +1144,15 @@ def test_http_response_parser_strict_obs_line_folding(response: Any) -> None: @pytest.mark.parametrize("size", [40962, 8191]) def test_http_response_parser_bad_status_line_too_long(response, size) -> None: reason = b"t" * (size - 2) - match = f"400, message:\n Got more than 8190 bytes \\({size}\\) when reading" + match = "400, message:\n Got more than 8190 bytes when reading" with pytest.raises(http_exceptions.LineTooLong, match=match): response.feed_data(b"HTTP/1.1 200 Ok" + reason + b"\r\n\r\n") -def test_http_response_parser_status_line_under_limit(response) -> None: - reason = b"O" * 8190 +def test_http_response_parser_status_line_under_limit( + response: HttpResponseParser, +) -> None: + reason = b"O" * 8177 messages, upgraded, tail = response.feed_data( b"HTTP/1.1 200 " + reason + b"\r\n\r\n" ) @@ -1610,7 +1662,7 @@ def test_parse_bad_method_for_c_parser_raises(loop, protocol): loop, 2**16, max_line_size=8190, - max_headers=32768, + max_headers=128, max_field_size=8190, ) @@ -1943,7 +1995,7 @@ async def test_streaming_decompress_large_payload( dbuf = DeflateBuffer(buf, "deflate") # Feed compressed data in chunks (simulating network streaming) - for i in range(0, len(compressed), chunk_size): + for i in range(0, len(compressed), chunk_size): # pragma: no branch chunk = compressed[i : i + chunk_size] dbuf.feed_data(chunk, len(chunk))
Vulnerability mechanics
Generated by null/stub on May 9, 2026. Inputs: CWE entries + fix-commit diffs from this CVE's patches. Citations validated against bundle.
References
References (5)
- github.com/aio-libs/aiohttp/commit/0c2e9da51126238a421568eb7c5b53e5b5d17b36 — NVD · Patch · Web
- github.com/aio-libs/aiohttp/security/advisories/GHSA-w2fm-2cpv-w7v5 — NVD · Patch · Vendor Advisory · Web
- github.com/advisories/GHSA-w2fm-2cpv-w7v5 — GHSA · Advisory
- nvd.nist.gov/vuln/detail/CVE-2026-22815 — GHSA · Advisory
- github.com/aio-libs/aiohttp/releases/tag/v3.13.4 — NVD · Release Notes · Web
News mentions
No linked articles in our index yet.