VYPR
High severity · NVD Advisory · Published Apr 9, 2026 · Updated Apr 16, 2026

CVE-2026-39972

CVE-2026-39972

Description

Mercure is a protocol for pushing data updates to web browsers and other HTTP clients in a battery-efficient way. Prior to 0.22.0, a cache key collision vulnerability in TopicSelectorStore allows an attacker to poison the match result cache, potentially causing private updates to be delivered to unauthorized subscribers or blocking delivery to authorized ones. The cache key was constructed by concatenating the topic selector and topic with an underscore separator. Because both topic selectors and topics can contain underscores, two distinct pairs can produce the same key. An attacker who can subscribe to the hub or publish updates with crafted topic names can exploit this to bypass authorization checks on private updates. This vulnerability is fixed in 0.22.0.

Affected packages

Versions sourced from the GitHub Security Advisory.

Package: github.com/dunglas/mercure (Go)
Affected versions: < 0.22.0
Patched versions: 0.22.0

Affected products

1

Patches

1
4964a69be904

Merge commit from fork

https://github.com/dunglas/mercure · Kévin Dunglas · Apr 8, 2026 · via GHSA
8 files changed · +138 −151
  • caddy/mercure.go · +25 −23 · modified
    @@ -66,8 +66,12 @@ type JWTConfig struct {
     }
     
     type TopicSelectorCacheConfig struct {
    -	MaxEntriesPerShard int    `json:"max_entries_per_shard,omitempty"`
    -	ShardCount         uint64 `json:"shard_count,omitempty"`
    +	// Deprecated: use Size instead.
    +	MaxEntriesPerShard int `json:"max_entries_per_shard,omitempty"`
    +	// Deprecated: no longer used.
    +	ShardCount uint64 `json:"shard_count,omitempty"`
    +	// Size is the maximum number of entries in the cache.
    +	Size int `json:"size,omitempty"`
     }
     
     // Mercure implements a Mercure hub as a Caddy module. Mercure is a protocol allowing to push data updates to web browsers and other HTTP clients in a convenient, fast, reliable and battery-efficient way.
    @@ -193,25 +197,28 @@ func (m *Mercure) Provision(ctx caddy.Context) (err error) { //nolint:funlen,goc
     		return err
     	}
     
    -	maxEntriesPerShard := mercure.DefaultTopicSelectorStoreCacheMaxEntriesPerShard
    -	shardCount := mercure.DefaultTopicSelectorStoreCacheShardCount
    +	cacheSize := mercure.DefaultTopicSelectorStoreCacheSize
     
     	if m.TopicSelectorCache != nil {
    -		maxEntriesPerShard = m.TopicSelectorCache.MaxEntriesPerShard
    -		shardCount = m.TopicSelectorCache.ShardCount
    -	}
    +		switch {
    +		case m.TopicSelectorCache.Size > 0:
    +			cacheSize = m.TopicSelectorCache.Size
    +		case m.TopicSelectorCache.MaxEntriesPerShard > 0:
    +			// Backward compat: convert old per-shard config
    +			shardCount := m.TopicSelectorCache.ShardCount
    +			if shardCount == 0 {
    +				shardCount = 256
    +			}
     
    -	if shardCount == 0 {
    -		shardCount = mercure.DefaultTopicSelectorStoreCacheShardCount
    +			cacheSize = m.TopicSelectorCache.MaxEntriesPerShard * int(shardCount)
    +		case m.TopicSelectorCache.MaxEntriesPerShard < 0:
    +			cacheSize = 0
    +		}
     	}
     
    -	var tss *mercure.TopicSelectorStore
    -	if maxEntriesPerShard < 0 {
    -		tss = &mercure.TopicSelectorStore{}
    -	} else {
    -		if tss, err = mercure.NewTopicSelectorStoreCache(maxEntriesPerShard, shardCount); err != nil {
    -			return err
    -		}
    +	tss, err := mercure.NewTopicSelectorStore(cacheSize)
    +	if err != nil {
    +		return err
     	}
     
     	ctx = ctx.WithValue(SubscriptionsContextKey, m.Subscriptions)
    @@ -497,17 +504,12 @@ func (m *Mercure) UnmarshalCaddyfile(d *caddyfile.Dispenser) (err error) { //nol
     					return d.ArgErr()
     				}
     
    -				maxEntriesPerShard, err := strconv.Atoi(d.Val())
    -				if err != nil {
    -					return d.WrapErr(err)
    -				}
    -
    -				shardCount, err := strconv.ParseUint(d.Val(), 10, 64)
    +				size, err := strconv.Atoi(d.Val())
     				if err != nil {
     					return d.WrapErr(err)
     				}
     
    -				m.TopicSelectorCache = &TopicSelectorCacheConfig{maxEntriesPerShard, shardCount}
    +				m.TopicSelectorCache = &TopicSelectorCacheConfig{Size: size}
     			case "subscriber_list_cache_size":
     				if !d.NextArg() {
     					return d.ArgErr()
    
  • config_deprecated.go · +3 −3 · modified
    @@ -104,7 +104,7 @@ func SetFlags(fs *pflag.FlagSet, v *viper.Viper) {
     	fs.BoolP("use-forwarded-headers", "f", false, "enable headers forwarding")
     	fs.BoolP("demo", "D", false, "enable the demo mode")
     	fs.BoolP("subscriptions", "s", false, "dispatch updates when subscriptions are created or terminated")
    -	fs.Int("tcsz", DefaultTopicSelectorStoreCacheMaxEntriesPerShard, "size of each shard in topic selector store cache")
    +	fs.Int("tcsz", DefaultTopicSelectorStoreCacheSize, "size of the topic selector store cache")
     
     	fs.Bool("metrics-enabled", false, "enable metrics")
     	fs.String("metrics-addr", "127.0.0.1:9764", "metrics HTTP server address")
    @@ -162,10 +162,10 @@ func NewHubFromViper(v *viper.Viper) (*Hub, error) { //nolint:funlen,gocognit
     
     	tcsz := v.GetInt("tcsz")
     	if tcsz == 0 {
    -		tcsz = DefaultTopicSelectorStoreCacheMaxEntriesPerShard
    +		tcsz = DefaultTopicSelectorStoreCacheSize
     	}
     
    -	tss, err = NewTopicSelectorStoreCache(tcsz, DefaultTopicSelectorStoreCacheShardCount)
    +	tss, err = NewTopicSelectorStore(tcsz)
     	if err != nil {
     		return nil, err
     	}
    
  • hub.go · +1 −1 · modified
    @@ -333,7 +333,7 @@ func NewHub(ctx context.Context, options ...Option) (*Hub, error) {
     	}
     
     	if opt.topicSelectorStore == nil {
    -		tss, err := NewTopicSelectorStoreCache(DefaultTopicSelectorStoreCacheMaxEntriesPerShard, DefaultTopicSelectorStoreCacheShardCount)
    +		tss, err := NewTopicSelectorStore(DefaultTopicSelectorStoreCacheSize)
     		if err != nil {
     			return nil, err
     		}
    
  • hub_test.go · +1 −1 · modified
    @@ -364,7 +364,7 @@ func TestWithSubscribeDisabled(t *testing.T) {
     func createDummy(tb testing.TB, options ...Option) *Hub {
     	tb.Helper()
     
    -	tss, err := NewTopicSelectorStoreCache(0, 0)
    +	tss, err := NewTopicSelectorStore(0)
     	require.NoError(tb, err)
     
     	options = append(
    
  • topicselectorcache.go · +0 −54 · removed
    @@ -1,54 +0,0 @@
    -package mercure
    -
    -import (
    -	"github.com/cespare/xxhash/v2"
    -	"github.com/maypok86/otter/v2"
    -)
    -
    -// Let's say that a topic selector is 100 bytes on average, a cache with
    -// 10,000 entries per shard and 256 shards will use about 256 * 10,000 * 100 = 256MB of RAM.
    -//
    -// nolint:godox
    -// TODO: gather stats to find the best default values.
    -const (
    -	DefaultTopicSelectorStoreCacheMaxEntriesPerShard = 10_000
    -	DefaultTopicSelectorStoreCacheShardCount         = uint64(256)
    -)
    -
    -// NewTopicSelectorStoreCache creates a TopicSelectorStore with a cache.
    -func NewTopicSelectorStoreCache(maxEntriesPerShard int, shardCount uint64) (*TopicSelectorStore, error) {
    -	if maxEntriesPerShard == 0 {
    -		return &TopicSelectorStore{}, nil
    -	}
    -
    -	if shardCount == 0 {
    -		shardCount = DefaultTopicSelectorStoreCacheShardCount
    -	}
    -
    -	cacheMap := make(shardedCache, shardCount)
    -	for i := range shardCount {
    -		cacheMap[i] = otter.Must(&otter.Options[string, any]{MaximumSize: maxEntriesPerShard})
    -	}
    -
    -	return &TopicSelectorStore{cache: &cacheMap, skipSelect: true}, nil
    -}
    -
    -type shardedCache map[uint64]*otter.Cache[string, any]
    -
    -func (c *shardedCache) Get(k string) (any, bool) {
    -	return c.getShard(k).GetIfPresent(k)
    -}
    -
    -func (c *shardedCache) Set(k string, v any, _ int64) bool {
    -	c.getShard(k).Set(k, v)
    -
    -	return true
    -}
    -
    -func (c *shardedCache) getShard(k string) *otter.Cache[string, any] {
    -	h := xxhash.New()
    -	_, _ = h.Write([]byte(k))
    -	s := h.Sum64()
    -
    -	return (*c)[s%uint64(len(*c))]
    -}
    
  • topicselectorcache_test.go · +0 −40 · removed
    @@ -1,40 +0,0 @@
    -package mercure
    -
    -import (
    -	"testing"
    -
    -	"github.com/stretchr/testify/assert"
    -	"github.com/stretchr/testify/require"
    -)
    -
    -func TestMatchCache(t *testing.T) {
    -	t.Parallel()
    -
    -	tss, err := NewTopicSelectorStoreCache(DefaultTopicSelectorStoreCacheMaxEntriesPerShard, DefaultTopicSelectorStoreCacheMaxEntriesPerShard)
    -	require.NoError(t, err)
    -
    -	assert.False(t, tss.match("foo", "bar"))
    -
    -	assert.True(t, tss.match("https://example.com/foo/bar", "https://example.com/{foo}/bar"))
    -
    -	_, found := tss.cache.Get("t_https://example.com/{foo}/bar")
    -	assert.True(t, found)
    -
    -	_, found = tss.cache.Get("m_https://example.com/{foo}/bar_https://example.com/foo/bar")
    -	assert.True(t, found)
    -
    -	assert.True(t, tss.match("https://example.com/foo/bar", "https://example.com/{foo}/bar"))
    -	assert.False(t, tss.match("https://example.com/foo/bar/baz", "https://example.com/{foo}/bar"))
    -
    -	_, found = tss.cache.Get("t_https://example.com/{foo}/bar")
    -	assert.True(t, found)
    -
    -	_, found = tss.cache.Get("m_https://example.com/{foo}/bar_https://example.com/foo/bar")
    -	assert.True(t, found)
    -
    -	assert.True(t, tss.match("https://example.com/kevin/dunglas", "https://example.com/{fistname}/{lastname}"))
    -	assert.True(t, tss.match("https://example.com/foo/bar", "*"))
    -	assert.True(t, tss.match("https://example.com/foo/bar", "https://example.com/foo/bar"))
    -	assert.True(t, tss.match("foo", "foo"))
    -	assert.False(t, tss.match("foo", "bar"))
    -}
    
  • topicselector.go · +44 −29 · modified
    @@ -4,18 +4,46 @@ import (
     	"regexp"
     	"strings"
     
    +	"github.com/maypok86/otter/v2"
     	"github.com/yosida95/uritemplate/v3"
     )
     
    -type TopicSelectorStoreCache interface {
    -	Get(key string) (any, bool)
    -	Set(key string, value any, n int64) bool
    +// DefaultTopicSelectorStoreCacheSize is the default maximum number of entries in the cache.
    +const DefaultTopicSelectorStoreCacheSize = 2_560_000
    +
    +type matchCacheKey struct {
    +	topicSelector string
    +	topic         string
     }
     
     // TopicSelectorStore caches compiled templates to improve memory and CPU usage.
     type TopicSelectorStore struct {
    -	cache      TopicSelectorStoreCache
    -	skipSelect bool
    +	matchCache    *otter.Cache[matchCacheKey, bool]
    +	templateCache *otter.Cache[string, *regexp.Regexp]
    +}
    +
    +// NewTopicSelectorStore creates a TopicSelectorStore.
    +// If cacheSize > 0, match results and compiled templates are cached.
    +func NewTopicSelectorStore(cacheSize int) (*TopicSelectorStore, error) {
    +	if cacheSize <= 0 {
    +		return &TopicSelectorStore{}, nil
    +	}
    +
    +	matchCache, err := otter.New[matchCacheKey, bool](&otter.Options[matchCacheKey, bool]{
    +		MaximumSize: cacheSize,
    +	})
    +	if err != nil {
    +		return nil, err //nolint:wrapcheck
    +	}
    +
    +	templateCache, err := otter.New[string, *regexp.Regexp](&otter.Options[string, *regexp.Regexp]{
    +		MaximumSize: cacheSize / 10, // Templates are fewer but larger
    +	})
    +	if err != nil {
    +		return nil, err //nolint:wrapcheck
    +	}
    +
    +	return &TopicSelectorStore{matchCache: matchCache, templateCache: templateCache}, nil
     }
     
     func (tss *TopicSelectorStore) match(topic, topicSelector string) bool {
    @@ -25,20 +53,11 @@ func (tss *TopicSelectorStore) match(topic, topicSelector string) bool {
     		return true
     	}
     
    -	var k string
    +	k := matchCacheKey{topicSelector: topicSelector, topic: topic}
     
    -	if tss.cache != nil {
    -		var b strings.Builder
    -		b.Grow(3 + len(topicSelector) + len(topic))
    -		b.WriteString("m_")
    -		b.WriteString(topicSelector)
    -		b.WriteByte('_')
    -		b.WriteString(topic)
    -		k = b.String()
    -
    -		value, found := tss.cache.Get(k)
    -		if found {
    -			return value.(bool)
    +	if tss.matchCache != nil {
    +		if value, found := tss.matchCache.GetIfPresent(k); found {
    +			return value
     		}
     	}
     
    @@ -50,8 +69,8 @@ func (tss *TopicSelectorStore) match(topic, topicSelector string) bool {
     	// Use template.Regexp() instead of template.Match() for performance
     	// See https://github.com/yosida95/uritemplate/pull/7
     	match := r.MatchString(topic)
    -	if tss.cache != nil {
    -		tss.cache.Set(k, match, 4)
    +	if tss.matchCache != nil {
    +		tss.matchCache.Set(k, match)
     	}
     
     	return match
    @@ -64,21 +83,17 @@ func (tss *TopicSelectorStore) getRegexp(topicSelector string) *regexp.Regexp {
     		return nil
     	}
     
    -	var k string
    -	if tss.cache != nil {
    -		k = "t_" + topicSelector
    -
    -		value, found := tss.cache.Get(k)
    -		if found {
    -			return value.(*regexp.Regexp)
    +	if tss.templateCache != nil {
    +		if r, found := tss.templateCache.GetIfPresent(topicSelector); found {
    +			return r
     		}
     	}
     
     	// If an error occurs, it's a raw string
     	if tpl, err := uritemplate.New(topicSelector); err == nil {
     		r := tpl.Regexp()
    -		if tss.cache != nil {
    -			tss.cache.Set(k, r, 19)
    +		if tss.templateCache != nil {
    +			tss.templateCache.Set(topicSelector, r)
     		}
     
     		return r
    
  • topicselector_test.go · +64 −0 · added
    @@ -0,0 +1,64 @@
    +package mercure
    +
    +import (
    +	"testing"
    +
    +	"github.com/stretchr/testify/assert"
    +	"github.com/stretchr/testify/require"
    +)
    +
    +func TestMatchCache(t *testing.T) {
    +	t.Parallel()
    +
    +	tss, err := NewTopicSelectorStore(DefaultTopicSelectorStoreCacheSize)
    +	require.NoError(t, err)
    +
    +	assert.False(t, tss.match("foo", "bar"))
    +
    +	assert.True(t, tss.match("https://example.com/foo/bar", "https://example.com/{foo}/bar"))
    +
    +	// Template should be cached
    +	_, found := tss.templateCache.GetIfPresent("https://example.com/{foo}/bar")
    +	assert.True(t, found)
    +
    +	// Match result should be cached (struct key, no collision possible)
    +	_, found = tss.matchCache.GetIfPresent(matchCacheKey{
    +		topicSelector: "https://example.com/{foo}/bar",
    +		topic:         "https://example.com/foo/bar",
    +	})
    +	assert.True(t, found)
    +
    +	assert.True(t, tss.match("https://example.com/foo/bar", "https://example.com/{foo}/bar"))
    +	assert.False(t, tss.match("https://example.com/foo/bar/baz", "https://example.com/{foo}/bar"))
    +
    +	assert.True(t, tss.match("https://example.com/kevin/dunglas", "https://example.com/{fistname}/{lastname}"))
    +	assert.True(t, tss.match("https://example.com/foo/bar", "*"))
    +	assert.True(t, tss.match("https://example.com/foo/bar", "https://example.com/foo/bar"))
    +	assert.True(t, tss.match("foo", "foo"))
    +	assert.False(t, tss.match("foo", "bar"))
    +}
    +
    +func TestMatchCacheKeyNoCollision(t *testing.T) {
    +	t.Parallel()
    +
    +	tss, err := NewTopicSelectorStore(DefaultTopicSelectorStoreCacheSize)
    +	require.NoError(t, err)
    +
    +	// With struct keys, these are naturally distinct — no encoding tricks needed.
    +	// Use URI templates to ensure results get cached.
    +	assert.True(t, tss.match("https://example.com/a", "https://example.com/{x}"))
    +	assert.False(t, tss.match("https://other.com/a", "https://example.com/{x}"))
    +
    +	// Verify independent cache entries exist
    +	_, found := tss.matchCache.GetIfPresent(matchCacheKey{
    +		topicSelector: "https://example.com/{x}",
    +		topic:         "https://example.com/a",
    +	})
    +	assert.True(t, found)
    +
    +	_, found = tss.matchCache.GetIfPresent(matchCacheKey{
    +		topicSelector: "https://example.com/{x}",
    +		topic:         "https://other.com/a",
    +	})
    +	assert.True(t, found)
    +}
    

Vulnerability mechanics

Generated by null/stub on May 9, 2026. Inputs: CWE entries + fix-commit diffs from this CVE's patches. Citations validated against bundle.

References

5

News mentions

0

No linked articles in our index yet.