VYPR
High severity · CVSS 7.5 · OSV Advisory · Published Nov 10, 2025 · Updated Apr 15, 2026

CVE-2025-64508

CVE-2025-64508

Description

Bugsink is a self-hosted error tracking tool. In versions prior to 2.0.5, brotli "bombs" (highly compressed brotli streams, such as many zeros) can be sent to the server. Since the server will attempt to decompress these streams before applying various maximums, this can lead to exhaustion of the available memory and thus a Denial of Service. This can be done if the DSN is known, which it is in many common setups (JavaScript, Mobile Apps). The issue is patched in Bugsink version 2.0.5. The vulnerability is similar to, but distinct from, another brotli-related problem in Bugsink, GHSA-rrx3-2x4g-mq2h/CVE-2025-64509.

Affected packages

Versions sourced from the GitHub Security Advisory.

Package: bugsink (PyPI) · Affected versions: < 2.0.5 · Patched versions: 2.0.5

Affected products

1
  • Range: dev/null, go/brotli/v1.1.1-rc0, go/cbrotli/v1.1.0, …

Patches

2
3f65544aab3a

Merge pull request #266 from bugsink/brotli-bombs-and-1.2

https://github.com/bugsink/bugsink · Klaas van Schelven · Nov 8, 2025 · via GHSA
3 files changed · +75 −59
  • bugsink/streams.py · +28 −18 · modified
    @@ -3,7 +3,6 @@
     import brotli
     
     from bugsink.app_settings import get_settings
    -from bugsink.utils import assert_
     
     
     DEFAULT_CHUNK_SIZE = 8 * 1024
    @@ -38,43 +37,54 @@ def zlib_generator(input_stream, wbits, chunk_size=DEFAULT_CHUNK_SIZE):
     
     
     def brotli_generator(input_stream, chunk_size=DEFAULT_CHUNK_SIZE):
    -    decompressor = brotli.Decompressor()
    +    # implementation notes: in principle chunk_size for input and output could be different, we keep them the same here.
    +    # I've also seen that the actual output data may be quite a bit larger than the output_buffer_limit; a detail that
    +    # I do not fully understand (but I understand that at least it's not _unboundedly_ larger).
     
    -    while True:
    -        compressed_chunk = input_stream.read(chunk_size)
    -        if not compressed_chunk:
    -            break
    -
    -        yield decompressor.process(compressed_chunk)
    +    decompressor = brotli.Decompressor()
    +    input_is_finished = False
    +
    +    while not (decompressor.is_finished() and input_is_finished):
    +        if decompressor.can_accept_more_data():
    +            compressed_chunk = input_stream.read(chunk_size)
    +            if not compressed_chunk:
    +                input_is_finished = True
    +                data = decompressor.process(b"", output_buffer_limit=chunk_size)  # b"": no input available, "drain"
    +            else:
    +                data = decompressor.process(compressed_chunk, output_buffer_limit=chunk_size)
    +        else:
    +            data = decompressor.process(b"", output_buffer_limit=chunk_size)  # b"" compressor cannot accept more input
     
    -    assert_(decompressor.is_finished())
    +        if data:
    +            yield data
     
     
     class GeneratorReader:
    +    """Read from a generator (yielding bytes) as from a file-like object."""
     
         def __init__(self, generator):
             self.generator = generator
    -        self.unread = b""
    +        self.buffer = bytearray()
     
         def read(self, size=None):
             if size is None:
                 for chunk in self.generator:
    -                self.unread += chunk
    -
    -            result = self.unread
    -            self.unread = b""
    +                self.buffer.extend(chunk)
    +            result = bytes(self.buffer)
    +            self.buffer.clear()
                 return result
     
    -        while size > len(self.unread):
    +        while len(self.buffer) < size:
                 try:
                     chunk = next(self.generator)
    -                if chunk == b"":
    +                if not chunk:
                         break
    -                self.unread += chunk
    +                self.buffer.extend(chunk)
                 except StopIteration:
                     break
     
    -        self.unread, result = self.unread[size:], self.unread[:size]
    +        result = bytes(self.buffer[:size])
    +        del self.buffer[:size]
             return result
     
     
    
  • bugsink/tests.py · +46 −40 · modified
    @@ -43,68 +43,43 @@ class StreamsTestCase(RegularTestCase):
         def test_compress_decompress_gzip(self):
             with open(__file__, 'rb') as f:
                 myself_times_ten = f.read() * 10
    -        plain_stream = io.BytesIO(myself_times_ten)
     
    +        plain_stream = io.BytesIO(myself_times_ten)
             compressed_stream = io.BytesIO(compress_with_zlib(plain_stream, WBITS_PARAM_FOR_GZIP))
    -
    -        result = b""
             reader = GeneratorReader(zlib_generator(compressed_stream, WBITS_PARAM_FOR_GZIP))
     
    -        while True:
    -            chunk = reader.read(3)
    -            result += chunk
    -            if chunk == b"":
    -                break
    -
    -        self.assertEqual(myself_times_ten, result)
    +        self.assertEqual(myself_times_ten, reader.read())
     
         def test_compress_decompress_deflate(self):
             with open(__file__, 'rb') as f:
                 myself_times_ten = f.read() * 10
    -        plain_stream = io.BytesIO(myself_times_ten)
     
    +        plain_stream = io.BytesIO(myself_times_ten)
             compressed_stream = io.BytesIO(compress_with_zlib(plain_stream, WBITS_PARAM_FOR_DEFLATE))
    -
    -        result = b""
             reader = GeneratorReader(zlib_generator(compressed_stream, WBITS_PARAM_FOR_DEFLATE))
     
    -        while True:
    -            chunk = reader.read(3)
    -            result += chunk
    -            if chunk == b"":
    -                break
    -
    -        self.assertEqual(myself_times_ten, result)
    +        self.assertEqual(myself_times_ten, reader.read())
     
         def test_compress_decompress_brotli(self):
             with open(__file__, 'rb') as f:
                 myself_times_ten = f.read() * 10
     
             compressed_stream = io.BytesIO(brotli.compress(myself_times_ten))
    -
    -        result = b""
             reader = GeneratorReader(brotli_generator(compressed_stream))
     
    -        while True:
    -            chunk = reader.read(3)
    -            result += chunk
    -            if chunk == b"":
    -                break
    -
    -        self.assertEqual(myself_times_ten, result)
    +        self.assertEqual(myself_times_ten, reader.read())
     
    -    def test_compress_decompress_read_none(self):
    -        with open(__file__, 'rb') as f:
    -            myself_times_ten = f.read() * 10
    -        plain_stream = io.BytesIO(myself_times_ten)
    +    def test_decompress_brotli_tiny_bomb(self):
    +        # by picking something "sufficiently large" we can ensure all three code paths in brotli_generator are taken,
    +        # in particular the "cannot accept more input" path. (for it to be taken, we need a "big thing" on the output
    +        # side)
    +        compressed_stream = io.BytesIO(brotli.compress(b"\x00" * 15_000_000))
     
    -        compressed_stream = io.BytesIO(compress_with_zlib(plain_stream, WBITS_PARAM_FOR_DEFLATE))
    -
    -        result = b""
    -        reader = GeneratorReader(zlib_generator(compressed_stream, WBITS_PARAM_FOR_DEFLATE))
    -
    -        result = reader.read(None)
    -        self.assertEqual(myself_times_ten, result)
    +        size = 0
    +        generator = brotli_generator(compressed_stream)
    +        for chunk in generator:
    +            size += len(chunk)
    +        self.assertEqual(15_000_000, size)
     
         def test_max_data_reader(self):
             stream = io.BytesIO(b"hello" * 100)
    @@ -143,6 +118,37 @@ def test_max_data_writer(self):
             with self.assertRaises(ValueError):
                 writer.write(b"hellohello")
     
    +    def test_generator_reader(self):
    +
    +        def generator():
    +            yield b"hello "
    +            yield b"I am "
    +            yield b"a generator"
    +
    +        reader = GeneratorReader(generator())
    +
    +        self.assertEqual(b"hel", reader.read(3))
    +        self.assertEqual(b"lo ", reader.read(3))
    +        self.assertEqual(b"I a", reader.read(3))
    +        self.assertEqual(b"m a", reader.read(3))
    +        self.assertEqual(b" generator", reader.read(None))
    +
    +    def test_generator_reader_performance(self):
    +        # at least one test directly for GeneratorReader; doubles as a regression test for performance issue that showed
    +        # up when the underlying generator yielded relatively big chunks and the read() size was small. should run
    +        # easily under a second.
    +
    +        def yielding_big_chunks():
    +            yield b"x" * 500_000
    +
    +        read = []
    +        reader = GeneratorReader(yielding_big_chunks())
    +        while True:
    +            chunk = reader.read(1)
    +            if chunk == b"":
    +                break
    +            read.append(chunk)
    +
     
     @override_settings(DEBUG_CSRF=True)
     class CSRFViewsTestCase(DjangoTestCase):
    
  • requirements.txt · +1 −1 · modified
    @@ -7,7 +7,7 @@ semver==3.0.*
     django-admin-autocomplete-filter==0.7.*
     pygments==2.19.*
     inotify_simple==2.0.*
    -Brotli==1.1.*
    +Brotli==1.2.*
     python-dateutil==2.9.*
     whitenoise==6.11.*
     requests==2.32.*
    
67d78bc41db1

Merge pull request #1234 from robryk:sizelimit

https://github.com/google/brotli · Copybara-Service · Jan 8, 2025 · via GHSA
4 files changed · +197 −67
  • python/_brotli.c · +155 −67 · modified
    @@ -23,6 +23,7 @@ typedef struct {
         PyObject *list;
         /* Number of whole allocated size. */
         Py_ssize_t allocated;
    +    Py_ssize_t size_limit;
     } BlocksOutputBuffer;
     
     static const char unable_allocate_msg[] = "Unable to allocate output buffer.";
    @@ -69,11 +70,17 @@ static const Py_ssize_t BUFFER_BLOCK_SIZE[] =
        Return -1 on failure
     */
     static inline int
    -BlocksOutputBuffer_InitAndGrow(BlocksOutputBuffer *buffer,
    +BlocksOutputBuffer_InitAndGrow(BlocksOutputBuffer *buffer, Py_ssize_t size_limit,
                                    size_t *avail_out, uint8_t **next_out)
     {
         PyObject *b;
    -    const Py_ssize_t block_size = BUFFER_BLOCK_SIZE[0];
    +    Py_ssize_t block_size = BUFFER_BLOCK_SIZE[0];
    +
    +    assert(size_limit > 0);
    +
    +    if (size_limit < block_size) {
    +      block_size = size_limit;
    +    }
     
         // Ensure .list was set to NULL, for BlocksOutputBuffer_OnError().
         assert(buffer->list == NULL);
    @@ -94,6 +101,7 @@ BlocksOutputBuffer_InitAndGrow(BlocksOutputBuffer *buffer,
     
         // Set variables
         buffer->allocated = block_size;
    +    buffer->size_limit = size_limit;
     
         *avail_out = (size_t) block_size;
         *next_out = (uint8_t*) PyBytes_AS_STRING(b);
    @@ -122,10 +130,16 @@ BlocksOutputBuffer_Grow(BlocksOutputBuffer *buffer,
             block_size = BUFFER_BLOCK_SIZE[Py_ARRAY_LENGTH(BUFFER_BLOCK_SIZE) - 1];
         }
     
    -    // Check buffer->allocated overflow
    -    if (block_size > PY_SSIZE_T_MAX - buffer->allocated) {
    -        PyErr_SetString(PyExc_MemoryError, unable_allocate_msg);
    -        return -1;
    +    if (block_size > buffer->size_limit - buffer->allocated) {
    +      block_size = buffer->size_limit - buffer->allocated;
    +    }
    +
    +    if (block_size == 0) {
    +      // We are at the size_limit (either the provided one, in which case we
    +      // shouldn't have been called, or the implicit PY_SSIZE_T_MAX one, in
    +      // which case we wouldn't be able to concatenate the blocks at the end).
    +      PyErr_SetString(PyExc_MemoryError, "too long");
    +      return -1;
         }
     
         // Create the block
    @@ -291,7 +305,7 @@ static PyObject* compress_stream(BrotliEncoderState* enc, BrotliEncoderOperation
       BlocksOutputBuffer buffer = {.list=NULL};
       PyObject *ret;
     
    -  if (BlocksOutputBuffer_InitAndGrow(&buffer, &available_out, &next_out) < 0) {
    +  if (BlocksOutputBuffer_InitAndGrow(&buffer, PY_SSIZE_T_MAX, &available_out, &next_out) < 0) {
         goto error;
       }
     
    @@ -592,57 +606,6 @@ static PyTypeObject brotli_CompressorType = {
       brotli_Compressor_new,                 /* tp_new */
     };
     
    -static PyObject* decompress_stream(BrotliDecoderState* dec,
    -                                   uint8_t* input, size_t input_length) {
    -  BrotliDecoderResult result;
    -
    -  size_t available_in = input_length;
    -  const uint8_t* next_in = input;
    -
    -  size_t available_out;
    -  uint8_t* next_out;
    -  BlocksOutputBuffer buffer = {.list=NULL};
    -  PyObject *ret;
    -
    -  if (BlocksOutputBuffer_InitAndGrow(&buffer, &available_out, &next_out) < 0) {
    -    goto error;
    -  }
    -
    -  while (1) {
    -    Py_BEGIN_ALLOW_THREADS
    -    result = BrotliDecoderDecompressStream(dec,
    -                                           &available_in, &next_in,
    -                                           &available_out, &next_out, NULL);
    -    Py_END_ALLOW_THREADS
    -
    -    if (result == BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT) {
    -      if (available_out == 0) {
    -        if (BlocksOutputBuffer_Grow(&buffer, &available_out, &next_out) < 0) {
    -          goto error;
    -        }
    -      }
    -      continue;
    -    }
    -
    -    break;
    -  }
    -
    -  if (result == BROTLI_DECODER_RESULT_ERROR || available_in != 0) {
    -    goto error;
    -  }
    -
    -  ret = BlocksOutputBuffer_Finish(&buffer, available_out);
    -  if (ret != NULL) {
    -    goto finally;
    -  }
    -
    -error:
    -  BlocksOutputBuffer_OnError(&buffer);
    -  ret = NULL;
    -finally:
    -  return ret;
    -}
    -
     PyDoc_STRVAR(brotli_Decompressor_doc,
     "An object to decompress a byte string.\n"
     "\n"
    @@ -655,10 +618,14 @@ PyDoc_STRVAR(brotli_Decompressor_doc,
     typedef struct {
       PyObject_HEAD
       BrotliDecoderState* dec;
    +  uint8_t* unconsumed_data;
    +  size_t unconsumed_data_length;
     } brotli_Decompressor;
     
     static void brotli_Decompressor_dealloc(brotli_Decompressor* self) {
       BrotliDecoderDestroyInstance(self->dec);
    +  if (self->unconsumed_data)
    +    free(self->unconsumed_data);
       #if PY_MAJOR_VERSION >= 3
       Py_TYPE(self)->tp_free((PyObject*)self);
       #else
    @@ -674,6 +641,9 @@ static PyObject* brotli_Decompressor_new(PyTypeObject *type, PyObject *args, PyO
         self->dec = BrotliDecoderCreateInstance(0, 0, 0);
       }
     
    +  self->unconsumed_data = NULL;
    +  self->unconsumed_data_length = 0;
    +
       return (PyObject *)self;
     }
     
    @@ -692,35 +662,118 @@ static int brotli_Decompressor_init(brotli_Decompressor *self, PyObject *args, P
       return 0;
     }
     
    +static PyObject* decompress_stream(brotli_Decompressor* self,
    +                                   uint8_t* input, size_t input_length, Py_ssize_t max_output_length) {
    +  BrotliDecoderResult result;
    +
    +  size_t available_in = input_length;
    +  const uint8_t* next_in = input;
    +
    +  size_t available_out;
    +  uint8_t* next_out;
    +  uint8_t* new_tail;
    +  BlocksOutputBuffer buffer = {.list=NULL};
    +  PyObject *ret;
    +
    +  if (BlocksOutputBuffer_InitAndGrow(&buffer, max_output_length, &available_out, &next_out) < 0) {
    +    goto error;
    +  }
    +
    +  while (1) {
    +    Py_BEGIN_ALLOW_THREADS
    +    result = BrotliDecoderDecompressStream(self->dec,
    +                                           &available_in, &next_in,
    +                                           &available_out, &next_out, NULL);
    +    Py_END_ALLOW_THREADS
    +
    +    if (result == BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT) {
    +      if (available_out == 0) {
    +        if (buffer.allocated == PY_SSIZE_T_MAX) {
    +          PyErr_SetString(PyExc_MemoryError, unable_allocate_msg);
    +          goto error;
    +        }
    +        if (buffer.allocated == max_output_length) {
    +          // We've reached the output length limit.
    +          break;
    +        }
    +        if (BlocksOutputBuffer_Grow(&buffer, &available_out, &next_out) < 0) {
    +          goto error;
    +        }
    +      }
    +      continue;
    +    }
    +
    +    if (result == BROTLI_DECODER_RESULT_ERROR || available_in != 0) {
    +      available_in = 0;
    +      goto error;
    +    }
    +
    +    break;
    +  }
    +
    +  ret = BlocksOutputBuffer_Finish(&buffer, available_out);
    +  if (ret != NULL) {
    +    goto finally;
    +  }
    +
    +error:
    +  BlocksOutputBuffer_OnError(&buffer);
    +  ret = NULL;
    +
    +finally:
    +  new_tail = available_in > 0 ? malloc(available_in) : NULL;
    +  if (available_in > 0) {
    +    memcpy(new_tail, next_in, available_in);
    +  }
    +  if (self->unconsumed_data) {
    +    free(self->unconsumed_data);
    +  }
    +  self->unconsumed_data = new_tail;
    +  self->unconsumed_data_length = available_in;
    +
    +  return ret;
    +}
    +
    +
     PyDoc_STRVAR(brotli_Decompressor_process_doc,
     "Process \"string\" for decompression, returning a string that contains \n"
     "decompressed output data.  This data should be concatenated to the output \n"
     "produced by any preceding calls to the \"process()\" method. \n"
     "Some or all of the input may be kept in internal buffers for later \n"
     "processing, and the decompressed output data may be empty until enough input \n"
     "has been accumulated.\n"
    +"If max_output_length is set, no more than max_output_length bytes will be\n"
    +"returned. If the limit is reached, further calls to process (potentially with\n"
    +"empty input) will continue to yield more data. If, after returning a string of\n"
    +"the length equal to limit, can_accept_more_data() returns False, process()\n"
    +"must only be called with empty input until can_accept_more_data() once again\n"
    +"returns True.\n"
     "\n"
     "Signature:\n"
    -"  decompress(string)\n"
    +"  decompress(string, max_output_length=int)\n"
     "\n"
     "Args:\n"
     "  string (bytes): The input data\n"
    -"\n"
    -"Returns:\n"
    +"\n""Returns:\n"
     "  The decompressed output data (bytes)\n"
     "\n"
     "Raises:\n"
     "  brotli.error: If decompression fails\n");
     
    -static PyObject* brotli_Decompressor_process(brotli_Decompressor *self, PyObject *args) {
    +static PyObject* brotli_Decompressor_process(brotli_Decompressor *self, PyObject *args, PyObject* keywds) {
       PyObject* ret;
       Py_buffer input;
       int ok;
    +  Py_ssize_t max_output_length = PY_SSIZE_T_MAX;
    +  uint8_t* data;
    +  size_t data_length;
    +
    +  static char* kwlist[] = { "", "max_output_length", NULL };
     
     #if PY_MAJOR_VERSION >= 3
    -  ok = PyArg_ParseTuple(args, "y*:process", &input);
    +  ok = PyArg_ParseTupleAndKeywords(args, keywds, "y*|n:process", kwlist, &input, &max_output_length);
     #else
    -  ok = PyArg_ParseTuple(args, "s*:process", &input);
    +  ok = PyArg_ParseTupleAndKeywords(args, keywds, "s*|n:process", kwlist, &input, &max_output_length);
     #endif
     
       if (!ok) {
    @@ -731,7 +784,20 @@ static PyObject* brotli_Decompressor_process(brotli_Decompressor *self, PyObject
         goto error;
       }
     
    -  ret = decompress_stream(self->dec, (uint8_t*) input.buf, input.len);
    +  if (self->unconsumed_data_length > 0) {
    +    if (input.len > 0) {
    +      PyErr_SetString(BrotliError, "process called with data when accept_more_data is False");
    +      ret = NULL;
    +      goto finally;
    +    }
    +    data = self->unconsumed_data;
    +    data_length = self->unconsumed_data_length;
    +  } else {
    +    data = (uint8_t*)input.buf;
    +    data_length = input.len;
    +  }
    +
    +  ret = decompress_stream(self, data, data_length, max_output_length);
       if (ret != NULL) {
         goto finally;
       }
    @@ -773,13 +839,35 @@ static PyObject* brotli_Decompressor_is_finished(brotli_Decompressor *self) {
       }
     }
     
    +PyDoc_STRVAR(brotli_Decompressor_can_accept_more_data_doc,
    +"Checks if the decoder instance can accept more compressed data. If the decompress()\n"
    +"method on this instance of decompressor was never called with max_length,\n"
    +"this method will always return True.\n"
    +"\n"
    +"Signature:"
    +"  can_accept_more_data()\n"
    +"\n"
    +"Returns:\n"
    +"  True  if the decoder is ready to accept more compressed data via decompress()\n"
    +"  False if the decoder needs to output some data via decompress(b'') before\n"
    +"        being provided any more compressed data\n");
    +
    +static PyObject* brotli_Decompressor_can_accept_more_data(brotli_Decompressor* self) {
    +  if (self->unconsumed_data_length > 0) {
    +    Py_RETURN_FALSE;
    +  } else {
    +    Py_RETURN_TRUE;
    +  }
    +}
    +
     static PyMemberDef brotli_Decompressor_members[] = {
       {NULL}  /* Sentinel */
     };
     
     static PyMethodDef brotli_Decompressor_methods[] = {
    -  {"process", (PyCFunction)brotli_Decompressor_process, METH_VARARGS, brotli_Decompressor_process_doc},
    +  {"process", (PyCFunction)brotli_Decompressor_process, METH_VARARGS | METH_KEYWORDS, brotli_Decompressor_process_doc},
       {"is_finished", (PyCFunction)brotli_Decompressor_is_finished, METH_NOARGS, brotli_Decompressor_is_finished_doc},
    +  {"can_accept_more_data", (PyCFunction)brotli_Decompressor_can_accept_more_data, METH_NOARGS, brotli_Decompressor_can_accept_more_data_doc},
       {NULL}  /* Sentinel */
     };
     
    @@ -877,7 +965,7 @@ static PyObject* brotli_decompress(PyObject *self, PyObject *args, PyObject *key
       next_in = (uint8_t*) input.buf;
       available_in = input.len;
     
    -  if (BlocksOutputBuffer_InitAndGrow(&buffer, &available_out, &next_out) < 0) {
    +  if (BlocksOutputBuffer_InitAndGrow(&buffer, PY_SSIZE_T_MAX, &available_out, &next_out) < 0) {
         goto error;
       }
     
    
  • python/tests/decompressor_test.py · +42 −0 · modified
    @@ -4,6 +4,7 @@
     # See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
     
     import functools
    +import os
     import unittest
     
     from . import _test_utils
    @@ -39,10 +40,51 @@ def _decompress(self, test_data):
                         out_file.write(self.decompressor.process(data))
             self.assertTrue(self.decompressor.is_finished())
     
    +    def _decompress_with_limit(self, test_data, max_output_length):
    +        temp_uncompressed = _test_utils.get_temp_uncompressed_name(test_data)
    +        with open(temp_uncompressed, 'wb') as out_file:
    +            with open(test_data, 'rb') as in_file:
    +                chunk_iter = iter(functools.partial(in_file.read, 10 * 1024), b'')
    +                while not self.decompressor.is_finished():
    +                    data = b''
    +                    if self.decompressor.can_accept_more_data():
    +                        data = next(chunk_iter, b'')
    +                    decompressed_data = self.decompressor.process(data, max_output_length=max_output_length)
    +                    self.assertTrue(len(decompressed_data) <= max_output_length)
    +                    out_file.write(decompressed_data)
    +                self.assertTrue(next(chunk_iter, None) == None)
    +
         def _test_decompress(self, test_data):
             self._decompress(test_data)
             self._check_decompression(test_data)
     
    +    def _test_decompress_with_limit(self, test_data):
    +        self._decompress_with_limit(test_data, max_output_length=20)
    +        self._check_decompression(test_data)
    +
    +    def test_too_much_input(self):
    +        with open(os.path.join(_test_utils.TESTDATA_DIR, "zerosukkanooa.compressed"), 'rb') as in_file:
    +            compressed = in_file.read()
    +            self.decompressor.process(compressed[:-1], max_output_length=1)
    +            # the following assertion checks whether the test setup is correct
    +            self.assertTrue(not self.decompressor.can_accept_more_data())
    +            with self.assertRaises(brotli.error):
    +                self.decompressor.process(compressed[-1:])
    +
    +    def test_changing_limit(self):
    +        test_data = os.path.join(_test_utils.TESTDATA_DIR, "zerosukkanooa.compressed")
    +        temp_uncompressed = _test_utils.get_temp_uncompressed_name(test_data)
    +        with open(temp_uncompressed, 'wb') as out_file:
    +            with open(test_data, 'rb') as in_file:
    +                compressed = in_file.read()
    +                uncompressed = self.decompressor.process(compressed[:-1], max_output_length=1)
    +                self.assertTrue(len(uncompressed) <= 1)
    +                out_file.write(uncompressed)
    +                while not self.decompressor.can_accept_more_data():
    +                    out_file.write(self.decompressor.process(b''))
    +                out_file.write(self.decompressor.process(compressed[-1:]))
    +        self._check_decompression(test_data)
    +
         def test_garbage_appended(self):
             with self.assertRaises(brotli.error):
                 self.decompressor.process(brotli.compress(b'a') + b'a')
    
  • tests/testdata/zerosukkanooa · +0 −0 · added
  • tests/testdata/zerosukkanooa.compressed · +0 −0 · added

Vulnerability mechanics

Generated by null/stub on May 9, 2026. Inputs: CWE entries + fix-commit diffs from this CVE's patches. Citations validated against bundle.

References

10

News mentions

0

No linked articles in our index yet.