CVE-2026-32873
Description
ewe is a Gleam web server. Versions 0.8.0 through 3.0.4 contain a bug in the handle_trailers function where rejected trailer headers (forbidden or undeclared) cause an infinite loop. When handle_trailers encounters such a trailer, three code paths (lines 520, 523, 526) recurse with the original buffer (rest) instead of advancing past the rejected header (Buffer(header_rest, 0)), causing decoder.decode_packet to re-parse the same header on every iteration. The resulting loop has no timeout or escape — the BEAM process permanently wedges at 100% CPU. Any application that calls ewe.read_body on chunked requests is affected, and this is exploitable by any unauthenticated remote client before control returns to application code, making an application-level workaround impossible. This issue is fixed in version 3.0.5.
Affected packages
Versions sourced from the GitHub Security Advisory.
| Package | Affected versions | Patched versions |
|---|---|---|
| ewe (Hex) | >= 0.8.0, < 3.0.5 | 3.0.5 |
Affected products
Patches
1 file changed · +32 −32
src/ewe/internal/http1.gleam+32 −32 modified@@ -33,7 +33,7 @@ import websocks // ----------------------------------------------------------------------------- /// Connection to a client. -/// +/// pub type Connection { Connection( transport: Transport, @@ -46,7 +46,7 @@ pub type Connection { } /// Transforms a glisten connection. -/// +/// pub fn transform_connection( conn: glisten.Connection(a), factory_name: process.Name(_), @@ -60,7 +60,7 @@ pub fn transform_connection( } /// Reads data from the socket with timeout and size limits. -/// +/// fn read_from_socket( transport transport: Transport, socket socket: Socket, @@ -86,7 +86,7 @@ fn read_from_socket( // ----------------------------------------------------------------------------- /// Errors that can occur when parsing a request. -/// +/// pub type ParseError { // request line InvalidMethod @@ -106,14 +106,14 @@ pub type ParseError { } /// HTTP version enumeration. -/// +/// pub type HttpVersion { Http10 Http11 } /// Result of parsing a request. -/// +/// pub type ParsedRequest { Http1Request(req: Request(Connection), version: HttpVersion) Http2Upgrade(upgrade: Http2Upgrade) @@ -127,7 +127,7 @@ pub type Http2Upgrade { } /// Parses an HTTP request from the given buffer. -/// +/// pub fn parse_request( conn: Connection, buffer: Buffer, @@ -234,7 +234,7 @@ pub fn parse_request( } /// Parses HTTP headers from the buffer. -/// +/// fn parse_headers( transport transport: Transport, socket socket: Socket, @@ -304,7 +304,7 @@ fn parse_headers( fn validate_field_value(value: BitArray) -> Result(String, Nil) /// Inserts a header into the headers dictionary. -/// +/// fn insert_header( headers: Dict(String, String), field: String, @@ -323,7 +323,7 @@ fn insert_header( } /// Finds an available key for set-cookie headers. 
-/// +/// fn available_cookie_key(headers: Dict(String, String), idx: Int) -> String { let key = case idx { 0 -> "set-cookie" @@ -340,11 +340,11 @@ fn available_cookie_key(headers: Dict(String, String), idx: Int) -> String { // ----------------------------------------------------------------------------- /// 2MB (2 million bytes). -/// +/// const max_reading_size = 2_000_000 /// Reads the request body from the socket. -/// +/// pub fn read_body( req: Request(Connection), size_limit: Int, @@ -413,7 +413,7 @@ pub fn read_body( } /// Reads a chunked transfer-encoded body. -/// +/// fn read_chunked_body( transport transport: Transport, socket socket: Socket, @@ -457,7 +457,7 @@ fn read_chunked_body( } /// Parses a single chunk from the chunked body. -/// +/// fn parse_body_chunk(buffer: Buffer) -> Result(BodyChunk, ParseError) { case split(buffer.data, <<"\r\n">>, []) { [<<"0">>, rest] -> Ok(FinalChunk(Buffer(rest, 0))) @@ -517,13 +517,13 @@ fn handle_trailers( request.set_header(req, field_name, value) |> handle_trailers(set, Buffer(header_rest, 0)) } - Error(Nil) -> handle_trailers(req, set, rest) + Error(Nil) -> handle_trailers(req, set, Buffer(header_rest, 0)) } } - False -> handle_trailers(req, set, rest) + False -> handle_trailers(req, set, Buffer(header_rest, 0)) } } - Error(Nil) -> handle_trailers(req, set, rest) + Error(Nil) -> handle_trailers(req, set, Buffer(header_rest, 0)) } } _ -> req @@ -550,28 +550,28 @@ fn is_forbidden_trailer(field: String) -> Bool { // ----------------------------------------------------------------------------- /// Possible results of consuming some amount of data from the request body. -/// +/// pub type Stream { Consumed(data: BitArray, next: fn(Int) -> Result(Stream, ParseError)) Done } /// Chunked body parsing result. -/// +/// type BodyChunk { Incomplete Chunk(BitArray, size: Int, rest: Buffer) FinalChunk(rest: Buffer) } /// State of the chunked body parsing. 
-/// +/// type ChunkedStreamState { ChunkedStreamState(data: Buffer, chunk: Buffer, done: Bool) } /// Streams the request body from the socket. -/// +/// pub fn stream_body(req: Request(Connection)) { use _ <- result.try( handle_continue(req) @@ -598,7 +598,7 @@ pub fn stream_body(req: Request(Connection)) { } } -/// Creates a consumer function that reads `N` amount of bytes from the chunked +/// Creates a consumer function that reads `N` amount of bytes from the chunked /// request body until it is fully consumed. fn do_stream_body_chunked( req: Request(Connection), @@ -691,7 +691,7 @@ fn read_from_socket_until( /// Creates a consumer function that reads `N` amount of bytes from the request /// body until it is fully consumed. -/// +/// fn do_stream_body( req: Request(Connection), buffer: Buffer, @@ -703,22 +703,22 @@ fn do_stream_body( // Request body is fully consumed 0, 0 -> Ok(Done) - // Request body is supposed to be fully consumed but there is more data + // Request body is supposed to be fully consumed but there is more data // in buffer 0, _ -> { let #(data, rest) = buffer.split(buffer, size) Ok(Consumed(data, do_stream_body(req, Buffer(rest, 0)))) } - // Request body is not fully consumed and there is enough data in buffer + // Request body is not fully consumed and there is enough data in buffer // to consume `size` bytes _, buffer_size if buffer_size >= size -> { let #(data, rest) = buffer.split(buffer, size) let new_buffer = Buffer(rest, buffer.pending) Ok(Consumed(data, do_stream_body(req, new_buffer))) } - // Request body is not fully consumed and there is not enough data in + // Request body is not fully consumed and there is not enough data in // buffer to consume `size` bytes _, _ -> { use read_buffer <- try(read_from_socket( @@ -745,7 +745,7 @@ fn do_stream_body( // ----------------------------------------------------------------------------- /// Errors that can occur when upgrading a WebSocket connection. 
-/// +/// pub type UpgradeWebsocketError { MethodNotGet MissingConnectionHeader @@ -757,7 +757,7 @@ pub type UpgradeWebsocketError { } /// Upgrades an HTTP connection to WebSocket. -/// +/// pub fn upgrade_websocket( req: Request(Connection), transport: Transport, @@ -833,7 +833,7 @@ pub fn upgrade_websocket( // ----------------------------------------------------------------------------- /// Response body variants. -/// +/// pub type ResponseBody { TextData(String) BytesData(BytesTree) @@ -847,7 +847,7 @@ pub type ResponseBody { } /// Appends default headers to HTTP responses. -/// +/// pub fn append_default_headers( resp: Response(a), req: Request(Connection), @@ -872,7 +872,7 @@ pub fn append_default_headers( } /// Sets the content length header if it is not already set. -/// +/// pub fn set_content_length(resp: Response(BitArray)) -> Response(BitArray) { case response.get_header(resp, "content-length") { Ok(_) -> resp @@ -884,7 +884,7 @@ pub fn set_content_length(resp: Response(BitArray)) -> Response(BitArray) { } /// Handles 100-continue expectations. -/// +/// pub fn handle_continue(req: Request(Connection)) -> Result(Nil, ParseError) { let expect = req.headers
4 files changed · +100 −62
src/ewe.gleam+1 −4 modified@@ -479,10 +479,7 @@ pub fn start( let information = information.worker(builder.information_name) let glisten_supervisor = - glisten.new( - fn(conn) { #(http_.transform_connection(conn), None) }, - handler_.loop(handler, on_crash), - ) + glisten.new(fn(_conn) { #(Nil, None) }, handler_.loop(handler, on_crash)) |> glisten.bind(builder.interface) |> fn(glisten_builder) { case builder.ipv6 {
src/ewe/internal/buffer.gleam+28 −0 added@@ -0,0 +1,28 @@ +import gleam/bit_array +import gleam/int + +pub type Buffer { + Buffer(data: BitArray, remaining: Int) +} + +pub fn new(initial: BitArray) -> Buffer { + Buffer(initial, 0) +} + +pub fn sized(buffer: Buffer, size: Int) -> Buffer { + Buffer(buffer.data, size) +} + +pub fn empty() -> Buffer { + Buffer(<<>>, 0) +} + +pub fn append(buffer: Buffer, data: BitArray) -> Buffer { + let remaining = int.max(0, buffer.remaining - bit_array.byte_size(data)) + Buffer(<<buffer.data:bits, data:bits>>, remaining) +} + +pub fn append_size(buffer: Buffer, data: BitArray, size: Int) -> Buffer { + let remaining = int.max(0, buffer.remaining - size) + Buffer(<<buffer.data:bits, data:bits>>, remaining) +}
src/ewe/internal/handler.gleam+6 −4 modified@@ -11,6 +11,7 @@ import gleam/result import glisten import glisten/transport +import ewe/internal/buffer import ewe/internal/encoder import ewe/internal/exception import ewe/internal/http.{ @@ -42,14 +43,15 @@ type Next { pub fn loop( handler: fn(Request(Connection)) -> Response(ResponseBody), on_crash: Response(ResponseBody), -) -> glisten.Loop(Connection, a) { - fn(http_conn, msg, _conn) { +) -> glisten.Loop(Nil, a) { + fn(state, msg, conn) { let assert glisten.Packet(msg) = msg + let http_conn = http_.transform_connection(conn) - http_.parse_request(http_conn, msg) + http_.parse_request(http_conn, buffer.new(msg)) |> result.map(fn(req) { case call_handler(req, handler, on_crash) { - Continue -> glisten.continue(http_conn) + Continue -> glisten.continue(state) Stop(Normal) -> glisten.stop() Stop(Abnormal(reason)) -> glisten.stop_abnormal(reason) }
src/ewe/internal/http.gleam+65 −54 modified@@ -28,6 +28,7 @@ import glisten/transport import gramps/websocket as ws +import ewe/internal/buffer.{type Buffer} import ewe/internal/decoder.{ AbsPath, HttpBin, HttpEoh, HttpHeader, HttpRequest, HttphBin, More, Packet, } @@ -74,7 +75,7 @@ pub type Connection { Connection( transport: transport.Transport, socket: socket.Socket, - buffer: BitArray, + buffer: buffer.Buffer, http_version: option.Option(HttpVersion), ) } @@ -99,9 +100,9 @@ pub type UpgradeWebsocketError { // Chunked body parsing result type BodyChunk { - Done(rest: BitArray) + Done(rest: Buffer) Incomplete - Chunk(BitArray, rest: BitArray) + Chunk(BitArray, rest: Buffer) } // ----------------------------------------------------------------------------- @@ -120,20 +121,20 @@ pub fn transform_connection(connection: glisten.Connection(a)) -> Connection { Connection( transport: connection.transport, socket: connection.socket, - buffer: <<>>, + buffer: buffer.empty(), http_version: option.None, ) } /// Parses an HTTP request from the given buffer pub fn parse_request( connection: Connection, - buffer: BitArray, + buffer: Buffer, ) -> Result(Request(Connection), ParseError) { let transport = connection.transport let socket = connection.socket - case decoder.decode_packet(HttpBin, buffer, []) { + case decoder.decode_packet(HttpBin, buffer.data, []) { Ok(Packet(HttpRequest(atom_method, AbsPath(target), version), rest)) -> { // Request Line use method <- try( @@ -157,7 +158,7 @@ pub fn parse_request( use #(headers, rest) <- try(parse_headers( transport, socket, - rest, + buffer.new(rest), dict.new(), )) @@ -185,7 +186,7 @@ pub fn parse_request( headers: dict.to_list(headers), body: Connection( ..connection, - buffer: rest, + buffer: buffer.new(rest), http_version: option.Some(version), ), scheme:, @@ -198,15 +199,16 @@ pub fn parse_request( Ok(More(size)) -> { let read_size = option.unwrap(size, 0) - use buffer <- try(read_from_socket( + let sized_buffer = 
buffer.sized(connection.buffer, read_size) + + use new_buffer <- try(read_from_socket( transport, socket, - amount: read_size, - buffer: connection.buffer, + sized_buffer, on_error: MalformedRequest, )) - parse_request(connection, buffer) + parse_request(connection, new_buffer) } _ -> Error(PacketDiscard) } @@ -228,7 +230,7 @@ pub fn read_body( case transfer_encoding { Ok("chunked") -> { - use #(body, rest) <- try(read_chunked_body( + use #(body, rest_buffer) <- try(read_chunked_body( transport, socket, req.body.buffer, @@ -248,7 +250,7 @@ pub fn read_body( set.insert(set, string.trim(field) |> string.lowercase()) }) - Ok(handle_trailers(req, set, rest)) + Ok(handle_trailers(req, set, rest_buffer)) } Error(Nil) -> Ok(req) } @@ -261,19 +263,19 @@ pub fn read_body( use <- bool.guard(content_length > size_limit, Error(BodyTooLarge)) - let left = content_length - bit_array.byte_size(req.body.buffer) + let left = content_length - bit_array.byte_size(req.body.buffer.data) case content_length, left { 0, 0 -> Ok(<<>>) - 0, _l | _cl, 0 -> Ok(req.body.buffer) + 0, _l | _cl, 0 -> Ok(req.body.buffer.data) _cl, _l -> read_from_socket( transport, socket, - amount: left, - buffer: req.body.buffer, + buffer: buffer.sized(req.body.buffer, left), on_error: InvalidBody, ) + |> result.map(fn(buffer) { buffer.data }) } |> result.map(request.set_body(req, _)) } @@ -379,23 +381,21 @@ pub fn append_default_headers( fn read_from_socket( transport: transport.Transport, socket: socket.Socket, - amount amount: Int, - buffer buffer: BitArray, + buffer buffer: Buffer, on_error on_error: ParseError, -) -> Result(BitArray, ParseError) { - let read_size = int.min(amount, max_reading_size) +) -> Result(Buffer, ParseError) { + let read_size = int.min(buffer.remaining, max_reading_size) use data <- try( transport.receive_timeout(transport, socket, read_size, 10_000) |> replace_error(on_error), ) - let amount = amount - read_size - let buffer = <<buffer:bits, data:bits>> + let new_buffer = 
buffer.append_size(buffer, data, read_size) - case amount > 0 { - True -> read_from_socket(transport, socket, amount:, buffer:, on_error:) - False -> Ok(buffer) + case new_buffer.remaining { + 0 -> Ok(new_buffer) + _ -> read_from_socket(transport, socket, new_buffer, on_error:) } } @@ -407,10 +407,10 @@ fn read_from_socket( fn parse_headers( transport: transport.Transport, socket: socket.Socket, - buffer: BitArray, + buffer: Buffer, headers: Dict(String, String), ) { - case decoder.decode_packet(HttphBin, buffer, []) { + case decoder.decode_packet(HttphBin, buffer.data, []) { Ok(Packet(HttpEoh, rest)) -> Ok(#(headers, rest)) Ok(Packet(HttpHeader(idx, field, value), rest)) -> { use field <- try(case decoder.formatted_field_by_idx(idx) { @@ -427,21 +427,24 @@ fn parse_headers( |> replace_error(InvalidHeaders), ) + let new_buffer = buffer.new(rest) + insert_header(headers, field, value) - |> parse_headers(transport, socket, rest, _) + |> parse_headers(transport, socket, new_buffer, _) } Ok(More(size)) -> { let read_size = option.unwrap(size, 0) - use buffer <- try(read_from_socket( + let sized_buffer = buffer.sized(buffer, read_size) + + use new_buffer <- try(read_from_socket( transport, socket, - amount: read_size, - buffer:, + sized_buffer, on_error: InvalidHeaders, )) - parse_headers(transport, socket, buffer, headers) + parse_headers(transport, socket, new_buffer, headers) } _ -> Error(InvalidHeaders) } @@ -510,25 +513,31 @@ fn handle_continue(req: Request(Connection)) -> Result(Nil, ParseError) { fn read_chunked_body( transport: transport.Transport, socket: socket.Socket, - buffer: BitArray, + buffer: Buffer, body: BitArray, size_limit: Int, total_size: Int, -) -> Result(#(BitArray, BitArray), ParseError) { +) -> Result(#(BitArray, Buffer), ParseError) { use <- bool.guard(total_size > size_limit, Error(BodyTooLarge)) case parse_body_chunk(buffer) { Ok(Done(rest)) -> Ok(#(body, rest)) Ok(Incomplete) -> { - use buffer <- try(read_from_socket( + use new_buffer <- 
try(read_from_socket( transport, socket, - amount: 0, - buffer:, + buffer, on_error: InvalidBody, )) - read_chunked_body(transport, socket, buffer, body, size_limit, total_size) + read_chunked_body( + transport, + socket, + new_buffer, + body, + size_limit, + total_size, + ) } Ok(Chunk(chunk, rest)) -> { let body = <<body:bits, chunk:bits>> @@ -541,9 +550,9 @@ fn read_chunked_body( } /// Parses a single chunk from the chunked body -fn parse_body_chunk(buffer: BitArray) -> Result(BodyChunk, ParseError) { - case split(buffer, <<"\r\n">>, []) { - [<<"0">>, rest] -> Ok(Done(rest)) +fn parse_body_chunk(buffer: Buffer) -> Result(BodyChunk, ParseError) { + case split(buffer.data, <<"\r\n">>, []) { + [<<"0">>, rest] -> Ok(Done(buffer.new(rest))) [chunk_size, rest] -> { use size <- try( bit_array.to_string(chunk_size) @@ -554,7 +563,7 @@ fn parse_body_chunk(buffer: BitArray) -> Result(BodyChunk, ParseError) { case split(rest, <<"\r\n">>, []) { [chunk, rest] -> { case bit_array.byte_size(chunk) == size { - True -> Ok(Chunk(chunk, rest)) + True -> Ok(Chunk(chunk, buffer.new(rest))) False -> Error(InvalidBody) } } @@ -573,27 +582,29 @@ fn parse_body_chunk(buffer: BitArray) -> Result(BodyChunk, ParseError) { fn handle_trailers( req: Request(BitArray), set: set.Set(String), - rest: BitArray, + rest: Buffer, ) -> Request(BitArray) { - case decoder.decode_packet(HttphBin, rest, []) { + case decoder.decode_packet(HttphBin, rest.data, []) { Ok(Packet(HttpEoh, _)) -> req - Ok(Packet(HttpHeader(idx, field, value), rest)) -> { - let field = case decoder.formatted_field_by_idx(idx) { - Ok(field) -> Ok(field) + Ok(Packet(HttpHeader(idx, field, value), header_rest)) -> { + let field_name = case decoder.formatted_field_by_idx(idx) { + Ok(field_name) -> Ok(field_name) Error(Nil) -> { bit_array.to_string(field) |> result.map(string.lowercase) } } - case field { - Ok(field) -> { - case set.contains(set, field) && !is_forbidden_trailer(field) { + case field_name { + Ok(field_name) -> { + case 
+ set.contains(set, field_name) && !is_forbidden_trailer(field_name) + { True -> { case bit_array.to_string(value) { Ok(value) -> { - request.set_header(req, field, value) - |> handle_trailers(set, rest) + request.set_header(req, field_name, value) + |> handle_trailers(set, buffer.new(header_rest)) } Error(Nil) -> handle_trailers(req, set, rest) }
Vulnerability mechanics
Generated by null/stub on May 9, 2026. Inputs: CWE entries + fix-commit diffs from this CVE's patches. Citations validated against bundle.
References
- github.com/vshakitskiy/ewe/commit/8513de9dcdd0005f727c0f6f15dd89f8d626f560 (NVD: Patch, Web)
- github.com/vshakitskiy/ewe/commit/d8b9b8a86470c0cb5696647997c2f34763506e37 (NVD: Patch, Web)
- github.com/vshakitskiy/ewe/security/advisories/GHSA-4w98-xf39-23gp (NVD: Exploit, Patch, Vendor Advisory, Web)
- github.com/advisories/GHSA-4w98-xf39-23gp (GHSA: Advisory)
- nvd.nist.gov/vuln/detail/CVE-2026-32873 (GHSA: Advisory)
News mentions
No linked articles in our index yet.