VYPR
Low severity · NVD Advisory · Published Sep 3, 2025 · Updated Sep 5, 2025

Netty is vulnerable to request smuggling due to incorrect parsing of chunk extensions

CVE-2025-58056

Description

Netty is an asynchronous event-driven network application framework for development of maintainable high performance protocol servers and clients. In versions 4.1.124.Final, and 4.2.0.Alpha3 through 4.2.4.Final, Netty incorrectly accepts standalone newline characters (LF) as a chunk-size line terminator, regardless of a preceding carriage return (CR), instead of requiring CRLF per HTTP/1.1 standards. When combined with reverse proxies that parse LF differently (treating it as part of the chunk extension), attackers can craft requests that the proxy sees as one request but Netty processes as two, enabling request smuggling attacks. This is fixed in versions 4.1.125.Final and 4.2.5.Final.

Affected packages

Versions sourced from the GitHub Security Advisory.

Package | Affected versions | Patched versions
io.netty:netty-codec-http (Maven) | < 4.1.125.Final | 4.1.125.Final
io.netty:netty-codec-http (Maven) | >= 4.2.0.Alpha1, < 4.2.5.Final | 4.2.5.Final

Affected products

1
  • Range: >= 4.2.0.Alpha3, < 4.2.5.Final

Patches

1
edb55fd8e0a3

Merge commit from fork (#15611)

https://github.com/netty/netty · Norman Maurer · Sep 3, 2025 · via GHSA
8 files changed · +398 −59
  • codec-http/src/main/java/io/netty/handler/codec/http/HttpDecoderConfig.java+30 0 modified
    @@ -34,6 +34,7 @@ public final class HttpDecoderConfig implements Cloneable {
         private int maxInitialLineLength = HttpObjectDecoder.DEFAULT_MAX_INITIAL_LINE_LENGTH;
         private int maxHeaderSize = HttpObjectDecoder.DEFAULT_MAX_HEADER_SIZE;
         private int initialBufferSize = HttpObjectDecoder.DEFAULT_INITIAL_BUFFER_SIZE;
    +    private boolean strictLineParsing = HttpObjectDecoder.DEFAULT_STRICT_LINE_PARSING;
     
         public int getInitialBufferSize() {
             return initialBufferSize;
    @@ -217,6 +218,35 @@ public HttpDecoderConfig setTrailersFactory(HttpHeadersFactory trailersFactory)
             return this;
         }
     
    +    public boolean isStrictLineParsing() {
    +        return strictLineParsing;
    +    }
    +
    +    /**
    +     * The RFC 9112 specification for the HTTP protocol says that the initial start-line, and the following header
    +     * field-lines, must be separated by a Carriage Return (CR) and Line Feed (LF) octet pair, but also offers that
    +     * implementations "MAY" accept just a Line Feed octet as a separator.
    +     * <p>
    +     * Parsing leniencies can increase compatibility with a wider range of implementations, but can also cause
    +     * security vulnerabilities, when multiple systems disagree on the meaning of leniently parsed messages.
    +     * <p>
    +     * When <em>strict line parsing</em> is enabled ({@code true}), then Netty will enforce that start- and header
     +     * field-lines MUST be separated by a CR LF octet pair, and will produce messages with failed
    +     * {@link io.netty.handler.codec.DecoderResult}s.
    +     * <p>
    +     * When <em>strict line parsing</em> is disabled ({@code false}), then Netty will accept lone LF octets as line
     +     * separators for the start- and header field-lines.
    +     * <p>
    +     * See <a href="https://datatracker.ietf.org/doc/html/rfc9112#name-message-format">RFC 9112 Section 2.1</a>.
    +     * @param strictLineParsing Whether <em>strict line parsing</em> should be enabled ({@code true}),
    +     * or not ({@code false}).
    +     * @return This decoder config.
    +     */
    +    public HttpDecoderConfig setStrictLineParsing(boolean strictLineParsing) {
    +        this.strictLineParsing = strictLineParsing;
    +        return this;
    +    }
    +
         @Override
         public HttpDecoderConfig clone() {
             try {
    
  • codec-http/src/main/java/io/netty/handler/codec/http/HttpObjectDecoder.java+42 19 modified
    @@ -26,6 +26,7 @@
     import io.netty.util.AsciiString;
     import io.netty.util.ByteProcessor;
     import io.netty.util.internal.StringUtil;
    +import io.netty.util.internal.SystemPropertyUtil;
     
     import java.util.List;
     import java.util.concurrent.atomic.AtomicBoolean;
    @@ -151,6 +152,23 @@ public abstract class HttpObjectDecoder extends ByteToMessageDecoder {
         public static final boolean DEFAULT_VALIDATE_HEADERS = true;
         public static final int DEFAULT_INITIAL_BUFFER_SIZE = 128;
         public static final boolean DEFAULT_ALLOW_DUPLICATE_CONTENT_LENGTHS = false;
    +    public static final boolean DEFAULT_STRICT_LINE_PARSING =
    +            SystemPropertyUtil.getBoolean("io.netty.handler.codec.http.defaultStrictLineParsing", true);
    +
    +    private static final Runnable THROW_INVALID_CHUNK_EXTENSION = new Runnable() {
    +        @Override
    +        public void run() {
    +            throw new InvalidChunkExtensionException();
    +        }
    +    };
    +
    +    private static final Runnable THROW_INVALID_LINE_SEPARATOR = new Runnable() {
    +        @Override
    +        public void run() {
    +            throw new InvalidLineSeparatorException();
    +        }
    +    };
    +
         private final int maxChunkSize;
         private final boolean chunkedSupported;
         private final boolean allowPartialChunks;
    @@ -163,6 +181,7 @@ public abstract class HttpObjectDecoder extends ByteToMessageDecoder {
         protected final HttpHeadersFactory trailersFactory;
         private final boolean allowDuplicateContentLengths;
         private final ByteBuf parserScratchBuffer;
    +    private final Runnable defaultStrictCRLFCheck;
         private final HeaderParser headerParser;
         private final LineParser lineParser;
     
    @@ -315,6 +334,7 @@ protected HttpObjectDecoder(HttpDecoderConfig config) {
             checkNotNull(config, "config");
     
             parserScratchBuffer = Unpooled.buffer(config.getInitialBufferSize());
    +        defaultStrictCRLFCheck = config.isStrictLineParsing() ? THROW_INVALID_LINE_SEPARATOR : null;
             lineParser = new LineParser(parserScratchBuffer, config.getMaxInitialLineLength());
             headerParser = new HeaderParser(parserScratchBuffer, config.getMaxHeaderSize());
             maxChunkSize = config.getMaxChunkSize();
    @@ -344,7 +364,7 @@ protected void decode(ChannelHandlerContext ctx, ByteBuf buffer, List<Object> ou
             case SKIP_CONTROL_CHARS:
                 // Fall-through
             case READ_INITIAL: try {
    -            ByteBuf line = lineParser.parse(buffer);
    +            ByteBuf line = lineParser.parse(buffer, defaultStrictCRLFCheck);
                 if (line == null) {
                     return;
                 }
    @@ -449,11 +469,11 @@ protected void decode(ChannelHandlerContext ctx, ByteBuf buffer, List<Object> ou
                 return;
             }
             /*
    -         * everything else after this point takes care of reading chunked content. basically, read chunk size,
    +         * Everything else after this point takes care of reading chunked content. Basically, read chunk size,
              * read chunk, read and ignore the CRLF and repeat until 0
              */
             case READ_CHUNK_SIZE: try {
    -            ByteBuf line = lineParser.parse(buffer);
    +            ByteBuf line = lineParser.parse(buffer, THROW_INVALID_CHUNK_EXTENSION);
                 if (line == null) {
                     return;
                 }
    @@ -491,16 +511,16 @@ protected void decode(ChannelHandlerContext ctx, ByteBuf buffer, List<Object> ou
                 // fall-through
             }
             case READ_CHUNK_DELIMITER: {
    -            final int wIdx = buffer.writerIndex();
    -            int rIdx = buffer.readerIndex();
    -            while (wIdx > rIdx) {
    -                byte next = buffer.getByte(rIdx++);
    -                if (next == HttpConstants.LF) {
    +            if (buffer.readableBytes() >= 2) {
    +                int rIdx = buffer.readerIndex();
    +                if (buffer.getByte(rIdx) == HttpConstants.CR &&
    +                        buffer.getByte(rIdx + 1) == HttpConstants.LF) {
    +                    buffer.skipBytes(2);
                         currentState = State.READ_CHUNK_SIZE;
    -                    break;
    +                } else {
    +                    out.add(invalidChunk(buffer, new InvalidChunkTerminationException()));
                     }
                 }
    -            buffer.readerIndex(rIdx);
                 return;
             }
             case READ_CHUNK_FOOTER: try {
    @@ -723,7 +743,7 @@ private State readHeaders(ByteBuf buffer) {
     
             final HeaderParser headerParser = this.headerParser;
     
    -        ByteBuf line = headerParser.parse(buffer);
    +        ByteBuf line = headerParser.parse(buffer, defaultStrictCRLFCheck);
             if (line == null) {
                 return null;
             }
    @@ -745,7 +765,7 @@ private State readHeaders(ByteBuf buffer) {
                     splitHeader(lineContent, startLine, lineLength);
                 }
     
    -            line = headerParser.parse(buffer);
    +            line = headerParser.parse(buffer, defaultStrictCRLFCheck);
                 if (line == null) {
                     return null;
                 }
    @@ -835,7 +855,7 @@ protected void handleTransferEncodingChunkedWithContentLength(HttpMessage messag
     
         private LastHttpContent readTrailingHeaders(ByteBuf buffer) {
             final HeaderParser headerParser = this.headerParser;
    -        ByteBuf line = headerParser.parse(buffer);
    +        ByteBuf line = headerParser.parse(buffer, defaultStrictCRLFCheck);
             if (line == null) {
                 return null;
             }
    @@ -878,7 +898,7 @@ private LastHttpContent readTrailingHeaders(ByteBuf buffer) {
                     name = null;
                     value = null;
                 }
    -            line = headerParser.parse(buffer);
    +            line = headerParser.parse(buffer, defaultStrictCRLFCheck);
                 if (line == null) {
                     return null;
                 }
    @@ -1147,7 +1167,7 @@ private static class HeaderParser {
                 this.maxLength = maxLength;
             }
     
    -        public ByteBuf parse(ByteBuf buffer) {
    +        public ByteBuf parse(ByteBuf buffer, Runnable strictCRLFCheck) {
                 final int readableBytes = buffer.readableBytes();
                 final int readerIndex = buffer.readerIndex();
                 final int maxBodySize = maxLength - size;
    @@ -1174,6 +1194,9 @@ public ByteBuf parse(ByteBuf buffer) {
                     // Drop CR if we had a CRLF pair
                     endOfSeqIncluded = indexOfLf - 1;
                 } else {
    +                if (strictCRLFCheck != null) {
    +                    strictCRLFCheck.run();
    +                }
                     endOfSeqIncluded = indexOfLf;
                 }
                 final int newSize = endOfSeqIncluded - readerIndex;
    @@ -1209,18 +1232,18 @@ private final class LineParser extends HeaderParser {
             }
     
             @Override
    -        public ByteBuf parse(ByteBuf buffer) {
    +        public ByteBuf parse(ByteBuf buffer, Runnable strictCRLFCheck) {
                 // Suppress a warning because HeaderParser.reset() is supposed to be called
                 reset();
                 final int readableBytes = buffer.readableBytes();
                 if (readableBytes == 0) {
                     return null;
                 }
    -            final int readerIndex = buffer.readerIndex();
    -            if (currentState == State.SKIP_CONTROL_CHARS && skipControlChars(buffer, readableBytes, readerIndex)) {
    +            if (currentState == State.SKIP_CONTROL_CHARS &&
    +                    skipControlChars(buffer, readableBytes, buffer.readerIndex())) {
                     return null;
                 }
    -            return super.parse(buffer);
    +            return super.parse(buffer, strictCRLFCheck);
             }
     
             private boolean skipControlChars(ByteBuf buffer, int readableBytes, int readerIndex) {
    
  • codec-http/src/main/java/io/netty/handler/codec/http/InvalidChunkExtensionException.java+45 0 added
    @@ -0,0 +1,45 @@
    +/*
    + * Copyright 2025 The Netty Project
    + *
    + * The Netty Project licenses this file to you under the Apache License,
    + * version 2.0 (the "License"); you may not use this file except in compliance
    + * with the License. You may obtain a copy of the License at:
    + *
    + *   https://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
    + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
    + * License for the specific language governing permissions and limitations
    + * under the License.
    + */
    +package io.netty.handler.codec.http;
    +
    +import io.netty.handler.codec.CorruptedFrameException;
    +
    +/**
    + * Thrown when HTTP chunk extensions could not be parsed, typically due to incorrect use of CR LF delimiters.
    + * <p>
    + * <a href="https://datatracker.ietf.org/doc/html/rfc9112#name-chunked-transfer-coding">RFC 9112</a>
    + * specifies that chunk header lines must be terminated in a CR LF pair,
    + * and that a lone LF octet is not allowed within the chunk header line.
    + */
    +public final class InvalidChunkExtensionException extends CorruptedFrameException {
    +    private static final long serialVersionUID = 536224937231200736L;
    +
    +    public InvalidChunkExtensionException() {
    +        super("Line Feed must be preceded by Carriage Return when terminating HTTP chunk header lines");
    +    }
    +
    +    public InvalidChunkExtensionException(String message, Throwable cause) {
    +        super(message, cause);
    +    }
    +
    +    public InvalidChunkExtensionException(String message) {
    +        super(message);
    +    }
    +
    +    public InvalidChunkExtensionException(Throwable cause) {
    +        super(cause);
    +    }
    +}
    
  • codec-http/src/main/java/io/netty/handler/codec/http/InvalidChunkTerminationException.java+45 0 added
    @@ -0,0 +1,45 @@
    +/*
    + * Copyright 2025 The Netty Project
    + *
    + * The Netty Project licenses this file to you under the Apache License,
    + * version 2.0 (the "License"); you may not use this file except in compliance
    + * with the License. You may obtain a copy of the License at:
    + *
    + *   https://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
    + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
    + * License for the specific language governing permissions and limitations
    + * under the License.
    + */
    +package io.netty.handler.codec.http;
    +
    +import io.netty.handler.codec.CorruptedFrameException;
    +
    +/**
    + * Thrown when HTTP chunks could not be parsed, typically due to incorrect use of CR LF delimiters.
    + * <p>
    + * <a href="https://datatracker.ietf.org/doc/html/rfc9112#name-chunked-transfer-coding">RFC 9112</a>
    + * specifies that chunk bodies must be terminated in a CR LF pair,
    + * and that the delimiter must follow the given chunk-size number of octets in chunk-data.
    + */
    +public final class InvalidChunkTerminationException extends CorruptedFrameException {
    +    private static final long serialVersionUID = 536224937231200736L;
    +
    +    public InvalidChunkTerminationException() {
    +        super("Chunk data sections must be terminated by a CR LF octet pair");
    +    }
    +
    +    public InvalidChunkTerminationException(String message, Throwable cause) {
    +        super(message, cause);
    +    }
    +
    +    public InvalidChunkTerminationException(String message) {
    +        super(message);
    +    }
    +
    +    public InvalidChunkTerminationException(Throwable cause) {
    +        super(cause);
    +    }
    +}
    
  • codec-http/src/main/java/io/netty/handler/codec/http/InvalidLineSeparatorException.java+48 0 added
    @@ -0,0 +1,48 @@
    +/*
    + * Copyright 2025 The Netty Project
    + *
    + * The Netty Project licenses this file to you under the Apache License,
    + * version 2.0 (the "License"); you may not use this file except in compliance
    + * with the License. You may obtain a copy of the License at:
    + *
    + *   https://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
    + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
    + * License for the specific language governing permissions and limitations
    + * under the License.
    + */
    +package io.netty.handler.codec.http;
    +
    +import io.netty.handler.codec.DecoderException;
    +
    +/**
    + * Thrown when {@linkplain HttpDecoderConfig#isStrictLineParsing() strict line parsing} is enabled,
     + * and HTTP start- and header field-lines are not separated by CR LF octet pairs.
    + * <p>
    + * Strict line parsing is enabled by default since Netty 4.1.124 and 4.2.4.
    + * This default can be overridden by setting the {@value HttpObjectDecoder#PROP_DEFAULT_STRICT_LINE_PARSING} system
    + * property to {@code false}.
    + * <p>
    + * See <a href="https://datatracker.ietf.org/doc/html/rfc9112#name-message-format">RFC 9112 Section 2.1</a>.
    + */
    +public final class InvalidLineSeparatorException extends DecoderException {
    +    private static final long serialVersionUID = 536224937231200736L;
    +
    +    public InvalidLineSeparatorException() {
    +        super("Line Feed must be preceded by Carriage Return when terminating HTTP start- and header field-lines");
    +    }
    +
    +    public InvalidLineSeparatorException(String message, Throwable cause) {
    +        super(message, cause);
    +    }
    +
    +    public InvalidLineSeparatorException(String message) {
    +        super(message);
    +    }
    +
    +    public InvalidLineSeparatorException(Throwable cause) {
    +        super(cause);
    +    }
    +}
    
  • codec-http/src/test/java/io/netty/handler/codec/http/HttpRequestDecoderTest.java+123 24 modified
    @@ -20,6 +20,7 @@
     import io.netty.channel.ChannelHandlerContext;
     import io.netty.channel.ChannelInboundHandlerAdapter;
     import io.netty.channel.embedded.EmbeddedChannel;
    +import io.netty.handler.codec.DecoderResult;
     import io.netty.util.AsciiString;
     import io.netty.util.CharsetUtil;
     import io.netty.util.ReferenceCountUtil;
    @@ -34,6 +35,7 @@
     
     import static io.netty.handler.codec.http.HttpHeaderNames.*;
     import static io.netty.handler.codec.http.HttpHeadersTestUtils.of;
    +import static org.assertj.core.api.Assertions.assertThat;
     import static org.junit.jupiter.api.Assertions.assertEquals;
     import static org.junit.jupiter.api.Assertions.assertFalse;
     import static org.junit.jupiter.api.Assertions.assertInstanceOf;
    @@ -85,31 +87,48 @@ public void testDecodeWholeRequestAtOnceMixedDelimiters() {
             testDecodeWholeRequestAtOnce(CONTENT_MIXED_DELIMITERS);
         }
     
    +    @Test
    +    public void testDecodeWholeRequestAtOnceFailesWithLFDelimiters() {
    +        testDecodeWholeRequestAtOnce(CONTENT_LF_DELIMITERS, HttpObjectDecoder.DEFAULT_MAX_HEADER_SIZE, true, true);
    +    }
    +
    +    @Test
    +    public void testDecodeWholeRequestAtOnceFailsWithMixedDelimiters() {
    +        testDecodeWholeRequestAtOnce(CONTENT_MIXED_DELIMITERS, HttpObjectDecoder.DEFAULT_MAX_HEADER_SIZE, true, true);
    +    }
    +
         @Test
         public void testDecodeWholeRequestAtOnceMixedDelimitersWithIntegerOverflowOnMaxBodySize() {
    -        testDecodeWholeRequestAtOnce(CONTENT_MIXED_DELIMITERS, Integer.MAX_VALUE);
    -        testDecodeWholeRequestAtOnce(CONTENT_MIXED_DELIMITERS, Integer.MAX_VALUE - 1);
    +        testDecodeWholeRequestAtOnce(CONTENT_MIXED_DELIMITERS, Integer.MAX_VALUE, false, false);
    +        testDecodeWholeRequestAtOnce(CONTENT_MIXED_DELIMITERS, Integer.MAX_VALUE - 1, false, false);
         }
     
         private static void testDecodeWholeRequestAtOnce(byte[] content) {
    -        testDecodeWholeRequestAtOnce(content, HttpRequestDecoder.DEFAULT_MAX_HEADER_SIZE);
    +        testDecodeWholeRequestAtOnce(content, HttpObjectDecoder.DEFAULT_MAX_HEADER_SIZE, false, false);
         }
     
    -    private static void testDecodeWholeRequestAtOnce(byte[] content, int maxHeaderSize) {
    -        EmbeddedChannel channel =
    -                new EmbeddedChannel(new HttpRequestDecoder(HttpObjectDecoder.DEFAULT_MAX_INITIAL_LINE_LENGTH,
    -                                                           maxHeaderSize,
    -                                                           HttpObjectDecoder.DEFAULT_MAX_CHUNK_SIZE));
    +    private static void testDecodeWholeRequestAtOnce(byte[] content, int maxHeaderSize, boolean strictLineParsing,
    +                                                     boolean expectFailure) {
    +        HttpDecoderConfig config = new HttpDecoderConfig()
    +                .setMaxHeaderSize(maxHeaderSize)
    +                .setStrictLineParsing(strictLineParsing);
    +        EmbeddedChannel channel = new EmbeddedChannel(new HttpRequestDecoder(config));
             assertTrue(channel.writeInbound(Unpooled.copiedBuffer(content)));
             HttpRequest req = channel.readInbound();
             assertNotNull(req);
    -        checkHeaders(req.headers());
    -        LastHttpContent c = channel.readInbound();
    -        assertEquals(CONTENT_LENGTH, c.content().readableBytes());
    -        assertEquals(
    -                Unpooled.wrappedBuffer(content, content.length - CONTENT_LENGTH, CONTENT_LENGTH),
    -                c.content().readSlice(CONTENT_LENGTH));
    -        c.release();
    +        if (expectFailure) {
    +            assertTrue(req.decoderResult().isFailure());
    +            assertThat(req.decoderResult().cause()).isInstanceOf(InvalidLineSeparatorException.class);
    +        } else {
    +            assertFalse(req.decoderResult().isFailure());
    +            checkHeaders(req.headers());
    +            LastHttpContent c = channel.readInbound();
    +            assertEquals(CONTENT_LENGTH, c.content().readableBytes());
    +            assertEquals(
    +                    Unpooled.wrappedBuffer(content, content.length - CONTENT_LENGTH, CONTENT_LENGTH),
    +                    c.content().readSlice(CONTENT_LENGTH));
    +            c.release();
    +        }
     
             assertFalse(channel.finish());
             assertNull(channel.readInbound());
    @@ -135,27 +154,41 @@ private static void checkHeader(HttpHeaders headers, String name, String value)
     
         @Test
         public void testDecodeWholeRequestInMultipleStepsCRLFDelimiters() {
    -        testDecodeWholeRequestInMultipleSteps(CONTENT_CRLF_DELIMITERS);
    +        testDecodeWholeRequestInMultipleSteps(CONTENT_CRLF_DELIMITERS, true, false);
         }
     
         @Test
         public void testDecodeWholeRequestInMultipleStepsLFDelimiters() {
    -        testDecodeWholeRequestInMultipleSteps(CONTENT_LF_DELIMITERS);
    +        testDecodeWholeRequestInMultipleSteps(CONTENT_LF_DELIMITERS, false, false);
         }
     
         @Test
         public void testDecodeWholeRequestInMultipleStepsMixedDelimiters() {
    -        testDecodeWholeRequestInMultipleSteps(CONTENT_MIXED_DELIMITERS);
    +        testDecodeWholeRequestInMultipleSteps(CONTENT_MIXED_DELIMITERS, false, false);
         }
     
    -    private static void testDecodeWholeRequestInMultipleSteps(byte[] content) {
    +    @Test
    +    public void testDecodeWholeRequestInMultipleStepsFailsWithLFDelimiters() {
    +        testDecodeWholeRequestInMultipleSteps(CONTENT_LF_DELIMITERS, true, true);
    +    }
    +
    +    @Test
    +    public void testDecodeWholeRequestInMultipleStepsFailsWithMixedDelimiters() {
    +        testDecodeWholeRequestInMultipleSteps(CONTENT_MIXED_DELIMITERS, true, true);
    +    }
    +
    +    private static void testDecodeWholeRequestInMultipleSteps(
    +            byte[] content, boolean strictLineParsing, boolean expectFailure) {
             for (int i = 1; i < content.length; i++) {
    -            testDecodeWholeRequestInMultipleSteps(content, i);
    +            testDecodeWholeRequestInMultipleSteps(content, i, strictLineParsing, expectFailure);
             }
         }
     
    -    private static void testDecodeWholeRequestInMultipleSteps(byte[] content, int fragmentSize) {
    -        EmbeddedChannel channel = new EmbeddedChannel(new HttpRequestDecoder());
    +    private static void testDecodeWholeRequestInMultipleSteps(
    +            byte[] content, int fragmentSize, boolean strictLineParsing, boolean expectFailure) {
    +        HttpDecoderConfig config = new HttpDecoderConfig()
    +                .setStrictLineParsing(strictLineParsing);
    +        EmbeddedChannel channel = new EmbeddedChannel(new HttpRequestDecoder(config));
             int headerLength = content.length - CONTENT_LENGTH;
     
             // split up the header
    @@ -177,6 +210,12 @@ private static void testDecodeWholeRequestInMultipleSteps(byte[] content, int fr
     
             HttpRequest req = channel.readInbound();
             assertNotNull(req);
    +        if (expectFailure) {
    +            assertTrue(req.decoderResult().isFailure());
    +            assertThat(req.decoderResult().cause()).isInstanceOf(InvalidLineSeparatorException.class);
    +            return; // No more messages will be produced.
    +        }
    +        assertFalse(req.decoderResult().isFailure());
             checkHeaders(req.headers());
     
             for (int i = CONTENT_LENGTH; i > 1; i --) {
    @@ -596,6 +635,66 @@ public void testContentLengthHeaderAndChunked() {
             assertFalse(channel.finish());
         }
     
    +    @Test
    +    void mustRejectImproperlyTerminatedChunkExtensions() throws Exception {
    +        // See full explanation: https://w4ke.info/2025/06/18/funky-chunks.html
    +        String requestStr = "GET /one HTTP/1.1\r\n" +
    +                "Host: localhost\r\n" +
    +                "Transfer-Encoding: chunked\r\n\r\n" +
     +                "2;\n" + // Chunk size followed by illegal single newline (not preceded by carriage return)
    +                "xx\r\n" +
    +                "45\r\n" +
    +                "0\r\n\r\n" +
    +                "GET /two HTTP/1.1\r\n" +
    +                "Host: localhost\r\n" +
    +                "Transfer-Encoding: chunked\r\n\r\n" +
    +                "0\r\n\r\n";
    +        EmbeddedChannel channel = new EmbeddedChannel(new HttpRequestDecoder());
    +        assertTrue(channel.writeInbound(Unpooled.copiedBuffer(requestStr, CharsetUtil.US_ASCII)));
    +        HttpRequest request = channel.readInbound();
    +        assertFalse(request.decoderResult().isFailure()); // We parse the headers just fine.
    +        assertTrue(request.headers().names().contains("Transfer-Encoding"));
    +        assertTrue(request.headers().contains("Transfer-Encoding", "chunked", false));
    +        HttpContent content = channel.readInbound();
    +        DecoderResult decoderResult = content.decoderResult();
    +        assertTrue(decoderResult.isFailure()); // But parsing the chunk must fail.
    +        assertThat(decoderResult.cause()).isInstanceOf(InvalidChunkExtensionException.class);
    +        content.release();
    +        assertFalse(channel.finish());
    +    }
    +
    +    @Test
    +    void mustRejectImproperlyTerminatedChunkBodies() throws Exception {
    +        // See full explanation: https://w4ke.info/2025/06/18/funky-chunks.html
    +        String requestStr = "GET /one HTTP/1.1\r\n" +
    +                "Host: localhost\r\n" +
    +                "Transfer-Encoding: chunked\r\n\r\n" +
    +                "5\r\n" +
    +                "AAAAAXX" + // Chunk body contains extra (XX) bytes, and no CRLF terminator.
    +                "45\r\n" +
    +                "0\r\n" +
    +                "GET /two HTTP/1.1\r\n" +
    +                "Host: localhost\r\n" +
    +                "Transfer-Encoding: chunked\r\n\r\n" +
    +                "0\r\n\r\n";
    +        EmbeddedChannel channel = new EmbeddedChannel(new HttpRequestDecoder());
    +        assertTrue(channel.writeInbound(Unpooled.copiedBuffer(requestStr, CharsetUtil.US_ASCII)));
    +        HttpRequest request = channel.readInbound();
    +        assertFalse(request.decoderResult().isFailure()); // We parse the headers just fine.
    +        assertTrue(request.headers().names().contains("Transfer-Encoding"));
    +        assertTrue(request.headers().contains("Transfer-Encoding", "chunked", false));
    +        HttpContent content = channel.readInbound();
    +        assertFalse(content.decoderResult().isFailure()); // We parse the content promised by the chunk length.
    +        content.release();
    +
    +        content = channel.readInbound();
    +        DecoderResult decoderResult = content.decoderResult();
    +        assertTrue(decoderResult.isFailure()); // But then parsing the chunk delimiter must fail.
    +        assertThat(decoderResult.cause()).isInstanceOf(InvalidChunkTerminationException.class);
    +        content.release();
    +        assertFalse(channel.finish());
    +    }
    +
         @Test
         public void testOrderOfHeadersWithContentLength() {
             String requestStr = "GET /some/path HTTP/1.1\r\n" +
    @@ -690,7 +789,7 @@ public void testChunkSizeOverflow() {
         public void testChunkSizeOverflow2() {
             String requestStr = "PUT /some/path HTTP/1.1\r\n" +
                     "Transfer-Encoding: chunked\r\n\r\n" +
    -                "bbbbbbbe;\n\r\n";
    +                "bbbbbbbe;\r\n\r\n";
             EmbeddedChannel channel = new EmbeddedChannel(new HttpRequestDecoder());
             assertTrue(channel.writeInbound(Unpooled.copiedBuffer(requestStr, CharsetUtil.US_ASCII)));
             HttpRequest request = channel.readInbound();
    @@ -723,7 +822,7 @@ public void testLeadingWhitespaceInFirstHeaderName() {
     
        @Test
         public void testNulInInitialLine() {
    -        testInvalidHeaders0("GET / HTTP/1.1\r\u0000\nHost: whatever\r\n\r\n");
    +        testInvalidHeaders0("GET / HTTP/1.1\u0000\r\nHost: whatever\r\n\r\n");
         }
     
         @Test
    
  • codec-http/src/test/java/io/netty/handler/codec/http/HttpResponseDecoderTest.java+61 3 modified
    @@ -18,6 +18,7 @@
     import io.netty.buffer.ByteBuf;
     import io.netty.buffer.Unpooled;
     import io.netty.channel.embedded.EmbeddedChannel;
    +import io.netty.handler.codec.DecoderResult;
     import io.netty.handler.codec.PrematureChannelClosureException;
     import io.netty.util.CharsetUtil;
     import io.netty.util.ReferenceCountUtil;
    @@ -996,6 +997,63 @@ public void testGarbageChunkAfterWhiteSpaces() {
             assertFalse(channel.finish());
         }
     
    +    @Test
    +    void mustRejectImproperlyTerminatedChunkExtensions() throws Exception {
    +        // See full explanation: https://w4ke.info/2025/06/18/funky-chunks.html
    +        String requestStr = "HTTP/1.1 200 OK\r\n" +
    +                "Transfer-Encoding: chunked\r\n" +
    +                "\r\n" +
    +                "2;\n" + // Chunk size followed by illegal single newline (not preceded by carriage return)
    +                "xx\r\n" +
    +                "1D\r\n" +
    +                "0\r\n\r\n" +
    +                "HTTP/1.1 200 OK\r\n" +
    +                "Transfer-Encoding: chunked\r\n\r\n" +
    +                "0\r\n\r\n";
    +        EmbeddedChannel channel = new EmbeddedChannel(new HttpResponseDecoder());
    +        assertTrue(channel.writeInbound(Unpooled.copiedBuffer(requestStr, CharsetUtil.US_ASCII)));
    +        HttpResponse response = channel.readInbound();
    +        assertFalse(response.decoderResult().isFailure()); // We parse the headers just fine.
    +        assertTrue(response.headers().names().contains("Transfer-Encoding"));
    +        assertTrue(response.headers().contains("Transfer-Encoding", "chunked", false));
    +        HttpContent content = channel.readInbound();
    +        DecoderResult decoderResult = content.decoderResult();
    +        assertTrue(decoderResult.isFailure()); // But parsing the chunk must fail.
    +        assertThat(decoderResult.cause()).isInstanceOf(InvalidChunkExtensionException.class);
    +        content.release();
    +        assertFalse(channel.finish());
    +    }
    +
    +    @Test
    +    void mustRejectImproperlyTerminatedChunkBodies() throws Exception {
    +        // See full explanation: https://w4ke.info/2025/06/18/funky-chunks.html
    +        String requestStr = "HTTP/1.1 200 OK\r\n" +
    +                "Transfer-Encoding: chunked\r\n\r\n" +
    +                "5\r\n" +
    +                "AAAAXX" + // Chunk body contains extra (XX) bytes, and no CRLF terminator.
    +                "1D\r\n" +
    +                "0\r\n" +
    +                "HTTP/1.1 200 OK\r\n" +
    +                "Transfer-Encoding: chunked\r\n\r\n" +
    +                "0\r\n\r\n";
    +        EmbeddedChannel channel = new EmbeddedChannel(new HttpResponseDecoder());
    +        assertTrue(channel.writeInbound(Unpooled.copiedBuffer(requestStr, CharsetUtil.US_ASCII)));
    +        HttpResponse response = channel.readInbound();
    +        assertFalse(response.decoderResult().isFailure()); // We parse the headers just fine.
    +        assertTrue(response.headers().names().contains("Transfer-Encoding"));
    +        assertTrue(response.headers().contains("Transfer-Encoding", "chunked", false));
    +        HttpContent content = channel.readInbound();
    +        assertFalse(content.decoderResult().isFailure()); // We parse the content promised by the chunk length.
    +        content.release();
    +
    +        content = channel.readInbound();
    +        DecoderResult decoderResult = content.decoderResult();
    +        assertTrue(decoderResult.isFailure()); // But then parsing the chunk delimiter must fail.
    +        assertThat(decoderResult.cause()).isInstanceOf(InvalidChunkTerminationException.class);
    +        content.release();
    +        assertFalse(channel.finish());
    +    }
    +
         @Test
         public void testConnectionClosedBeforeHeadersReceived() {
             EmbeddedChannel channel = new EmbeddedChannel(new HttpResponseDecoder());
    @@ -1043,7 +1101,7 @@ public void testWhitespace() {
             EmbeddedChannel channel = new EmbeddedChannel(new HttpResponseDecoder());
             String requestStr = "HTTP/1.1 200 OK\r\n" +
                     "Transfer-Encoding : chunked\r\n" +
    -                "Host: netty.io\n\r\n";
    +                "Host: netty.io\r\n\r\n";
     
             assertTrue(channel.writeInbound(Unpooled.copiedBuffer(requestStr, CharsetUtil.US_ASCII)));
             HttpResponse response = channel.readInbound();
    @@ -1146,7 +1204,7 @@ public void testHeaderNameEndsWithControlChar0c() {
             testHeaderNameEndsWithControlChar(0x0c);
         }
     
    -    private void testHeaderNameEndsWithControlChar(int controlChar) {
    +    private static void testHeaderNameEndsWithControlChar(int controlChar) {
             ByteBuf responseBuffer = Unpooled.buffer();
             responseBuffer.writeCharSequence("HTTP/1.1 200 OK\r\n" +
                     "Host: netty.io\r\n", CharsetUtil.US_ASCII);
    @@ -1161,7 +1219,7 @@ private void testHeaderNameEndsWithControlChar(int controlChar) {
                 "HTTP/1.11", "HTTP/11.1", "HTTP/A.1", "HTTP/1.B"})
         public void testInvalidVersion(String version) {
             testInvalidHeaders0(Unpooled.copiedBuffer(
    -                version + " 200 OK\n\r\nHost: whatever\r\n\r\n", CharsetUtil.US_ASCII));
    +                version + " 200 OK\r\nHost: whatever\r\n\r\n", CharsetUtil.US_ASCII));
         }
     
         private static void testInvalidHeaders0(ByteBuf responseBuffer) {
    
  • codec-http/src/test/java/io/netty/handler/codec/http/MultipleContentLengthHeadersTest.java+4 13 modified
    @@ -26,11 +26,6 @@
     import java.util.Collection;
     import java.util.List;
     
    -import static io.netty.handler.codec.http.HttpObjectDecoder.DEFAULT_INITIAL_BUFFER_SIZE;
    -import static io.netty.handler.codec.http.HttpObjectDecoder.DEFAULT_MAX_CHUNK_SIZE;
    -import static io.netty.handler.codec.http.HttpObjectDecoder.DEFAULT_MAX_HEADER_SIZE;
    -import static io.netty.handler.codec.http.HttpObjectDecoder.DEFAULT_MAX_INITIAL_LINE_LENGTH;
    -import static io.netty.handler.codec.http.HttpObjectDecoder.DEFAULT_VALIDATE_HEADERS;
     import static org.assertj.core.api.Assertions.assertThat;
     import static org.junit.jupiter.api.Assertions.assertEquals;
     import static org.junit.jupiter.api.Assertions.assertFalse;
    @@ -53,13 +48,9 @@ static Collection<Object[]> parameters() {
         }
     
         private static EmbeddedChannel newChannel(boolean allowDuplicateContentLengths) {
    -        HttpRequestDecoder decoder = new HttpRequestDecoder(
    -                DEFAULT_MAX_INITIAL_LINE_LENGTH,
    -                DEFAULT_MAX_HEADER_SIZE,
    -                DEFAULT_MAX_CHUNK_SIZE,
    -                DEFAULT_VALIDATE_HEADERS,
    -                DEFAULT_INITIAL_BUFFER_SIZE,
    -                allowDuplicateContentLengths);
    +        HttpDecoderConfig config = new HttpDecoderConfig()
    +                .setAllowDuplicateContentLengths(allowDuplicateContentLengths);
    +        HttpRequestDecoder decoder = new HttpRequestDecoder(config);
             return new EmbeddedChannel(decoder);
         }
     
    @@ -109,7 +100,7 @@ public void testDanglingComma() {
             EmbeddedChannel channel = newChannel(false);
             String requestStr = "GET /some/path HTTP/1.1\r\n" +
                                 "Content-Length: 1,\r\n" +
    -                            "Connection: close\n\n" +
    +                            "Connection: close\r\n\r\n" +
                                 "ab";
             assertTrue(channel.writeInbound(Unpooled.copiedBuffer(requestStr, CharsetUtil.US_ASCII)));
             HttpRequest request = channel.readInbound();
    

Vulnerability mechanics

Generated automatically on May 9, 2026. Inputs: CWE entries and fix-commit diffs from this CVE's patches. Citations validated against the advisory bundle.

References

10

News mentions

0

No linked articles in our index yet.