VYPR
High severity · NVD Advisory · Published Oct 23, 2025 · Updated Oct 23, 2025

Vault Vulnerable to Denial of Service Due to Rate Limit Regression

CVE-2025-12044

Description

Vault and Vault Enterprise (“Vault”) are vulnerable to an unauthenticated denial of service when processing JSON payloads. This occurs due to a regression from a previous fix for [HCSEC-2025-24](https://discuss.hashicorp.com/t/hcsec-2025-24-vault-denial-of-service-though-complex-json-payloads/76393), which allowed JSON payloads to be processed before rate limits were applied. This vulnerability, CVE-2025-12044, is fixed in Vault Community Edition 1.21.0 and Vault Enterprise 1.16.27, 1.19.11, 1.20.5, and 1.21.0.

Affected packages

Versions sourced from the GitHub Security Advisory.

Package | Affected versions | Patched versions
github.com/hashicorp/vault (Go) | >= 1.20.3, < 1.21.0 | 1.21.0

Affected products

2 affected products
  • HashiCorp/Vault
    Range: 1.20.3
  • HashiCorp/Vault Enterprise v5
    Range: 1.20.3

Patches

2 patches
b19e74c29a33

JSON limits covering new use cases and MaxTokens (#9406) (#9596)

https://github.com/hashicorp/vault · Vault Automation · Sep 24, 2025 · via ghsa
7 files changed · +925 −98
  • http/handler.go · +10 −0 · modified
    @@ -93,6 +93,7 @@ const (
     	// recover from snapshot operation. This replaces the use of query parameters
     	// to pass the snapshot ID
     	VaultSnapshotRecoverHeader = "X-Vault-Recover-Snapshot-Id"
    +
     	// CustomMaxJSONDepth specifies the maximum nesting depth of a JSON object.
     	// This limit is designed to prevent stack exhaustion attacks from deeply
     	// nested JSON payloads, which could otherwise lead to a denial-of-service
    @@ -129,6 +130,15 @@ const (
     	// systems that require handling larger datasets, though pagination is the
     	// recommended practice for such cases.
     	CustomMaxJSONArrayElementCount = 10000
    +
    +	// CustomMaxJSONToken sets the maximum total number of tokens (e.g., keys, values,
    +	// braces, brackets) permitted in a single JSON payload. This limit is a crucial
    +	// defense against complexity-based denial-of-service (DoS) attacks, where a
    +	// payload could exhaust CPU and memory with an enormous number of small elements,
    +	// even while respecting all other individual limits. The default of 500,000
    +	// tokens provides a robust safeguard against malicious inputs without interfering
    +	// with legitimate, large-scale API operations. This value is configurable.
    +	CustomMaxJSONToken = 500000
     )
     
     var (
    
  • http/logical_test.go · +19 −6 · modified
    @@ -7,6 +7,7 @@ import (
     	"bytes"
     	"context"
     	"encoding/json"
    +	"fmt"
     	"io"
     	"io/ioutil"
     	"net/http"
    @@ -283,12 +284,24 @@ func TestLogical_RequestSizeLimit(t *testing.T) {
     	defer ln.Close()
     	TestServerAuth(t, addr, token)
     
    -	// Write a very large object, should fail. This test works because Go will
    -	// convert the byte slice to base64, which makes it significantly larger
    -	// than the default max request size.
    -	resp := testHttpPut(t, token, addr+"/v1/secret/foo", map[string]interface{}{
    -		"data": make([]byte, DefaultMaxRequestSize),
    -	})
    +	// To test the server's max request size limit (which returns 413),
    +	// we must create a payload that is larger than the limit in total, but
    +	// does not violate any of the JSON parser's individual limits (like max
    +	// string length), which would return a 500 error first.
    +	//
    +	// We do this by creating many key-value pairs, where each value is a
    +	// moderately sized string.
    +	const valueSize = 4096 // 4KB, well under the 1MB string limit
    +	numEntries := (DefaultMaxRequestSize / valueSize) + 1
    +	valueString := strings.Repeat("a", valueSize)
    +
    +	payload := make(map[string]interface{}, numEntries)
    +	for i := 0; i < numEntries; i++ {
    +		key := fmt.Sprintf("key_%d", i)
    +		payload[key] = valueString
    +	}
    +	resp := testHttpPut(t, token, addr+"/v1/secret/foo", payload)
    +
     	testResponseStatus(t, resp, http.StatusRequestEntityTooLarge)
     }
     
    
  • http/util.go · +6 −1 · modified
    @@ -25,14 +25,15 @@ var nonVotersAllowed = false
     
     func wrapMaxRequestSizeHandler(handler http.Handler, props *vault.HandlerProperties) http.Handler {
     	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
    -		var maxRequestSize, maxJSONDepth, maxStringValueLength, maxObjectEntryCount, maxArrayElementCount int64
    +		var maxRequestSize, maxJSONDepth, maxStringValueLength, maxObjectEntryCount, maxArrayElementCount, maxToken int64
     
     		if props.ListenerConfig != nil {
     			maxRequestSize = props.ListenerConfig.MaxRequestSize
     			maxJSONDepth = props.ListenerConfig.CustomMaxJSONDepth
     			maxStringValueLength = props.ListenerConfig.CustomMaxJSONStringValueLength
     			maxObjectEntryCount = props.ListenerConfig.CustomMaxJSONObjectEntryCount
     			maxArrayElementCount = props.ListenerConfig.CustomMaxJSONArrayElementCount
    +			maxToken = props.ListenerConfig.CustomMaxJSONToken
     		}
     
     		if maxRequestSize == 0 {
    @@ -50,12 +51,16 @@ func wrapMaxRequestSizeHandler(handler http.Handler, props *vault.HandlerPropert
     		if maxArrayElementCount == 0 {
     			maxArrayElementCount = CustomMaxJSONArrayElementCount
     		}
    +		if maxToken == 0 {
    +			maxToken = CustomMaxJSONToken
    +		}
     
     		jsonLimits := jsonutil.JSONLimits{
     			MaxDepth:             int(maxJSONDepth),
     			MaxStringValueLength: int(maxStringValueLength),
     			MaxObjectEntryCount:  int(maxObjectEntryCount),
     			MaxArrayElementCount: int(maxArrayElementCount),
    +			MaxTokens:            int(maxToken),
     		}
     
     		// If the payload is JSON, the VerifyMaxDepthStreaming function will perform validations.
    
  • internalshared/configutil/listener.go · +11 −0 · modified
    @@ -167,6 +167,10 @@ type Listener struct {
     	// CustomMaxJSONArrayElementCount determines the maximum number of elements in a JSON array.
     	CustomMaxJSONArrayElementCountRaw interface{} `hcl:"max_json_array_element_count"`
     	CustomMaxJSONArrayElementCount    int64       `hcl:"-"`
    +
    +	// CustomMaxJSONToken determines the maximum number of tokens in a JSON.
    +	CustomMaxJSONTokenRaw interface{} `hcl:"max_json_token"`
    +	CustomMaxJSONToken    int64       `hcl:"-"`
     }
     
     // AgentAPI allows users to select which parts of the Agent API they want enabled.
    @@ -762,5 +766,12 @@ func (l *Listener) parseJSONLimitsSettings() error {
     		return fmt.Errorf("max_json_array_element_count cannot be negative")
     	}
     
    +	if err := parseAndClearInt(&l.CustomMaxJSONTokenRaw, &l.CustomMaxJSONToken); err != nil {
    +		return fmt.Errorf("error parsing max_json_token: %w", err)
    +	}
    +	if l.CustomMaxJSONToken < 0 {
    +		return fmt.Errorf("max_json_token cannot be negative")
    +	}
    +
     	return nil
     }
    
  • internalshared/configutil/listener_test.go · +20 −0 · modified
    @@ -230,6 +230,8 @@ func TestListener_parseRequestSettings(t *testing.T) {
     		expectedCustomMaxJSONObjectEntryCount  int64
     		rawCustomMaxJSONArrayElementCount      any
     		expectedCustomMaxJSONArrayElementCount int64
    +		rawCustomMaxJSONToken                  any
    +		expectedCustomMaxJSONToken             int64
     		isErrorExpected                        bool
     		errorMessage                           string
     	}{
    @@ -306,6 +308,21 @@ func TestListener_parseRequestSettings(t *testing.T) {
     			expectedCustomMaxJSONArrayElementCount: 500,
     			isErrorExpected:                        false,
     		},
    +		"max-json-token-bad": {
    +			rawCustomMaxJSONToken: "badvalue",
    +			isErrorExpected:       true,
    +			errorMessage:          "error parsing max_json_token",
    +		},
    +		"max-json-token-negative": {
    +			rawCustomMaxJSONToken: "-1",
    +			isErrorExpected:       true,
    +			errorMessage:          "max_json_token cannot be negative",
    +		},
    +		"max-json-token-good": {
    +			rawCustomMaxJSONToken:      "500000",
    +			expectedCustomMaxJSONToken: 500000,
    +			isErrorExpected:            false,
    +		},
     	}
     
     	for name, tc := range tests {
    @@ -323,6 +340,7 @@ func TestListener_parseRequestSettings(t *testing.T) {
     				CustomMaxJSONStringValueLengthRaw: tc.rawCustomMaxJSONStringValueLength,
     				CustomMaxJSONObjectEntryCountRaw:  tc.rawCustomMaxJSONObjectEntryCount,
     				CustomMaxJSONArrayElementCountRaw: tc.rawCustomMaxJSONArrayElementCount,
    +				CustomMaxJSONTokenRaw:             tc.rawCustomMaxJSONToken,
     			}
     
     			err := l.parseRequestSettings()
    @@ -338,6 +356,7 @@ func TestListener_parseRequestSettings(t *testing.T) {
     				require.Equal(t, tc.expectedCustomMaxJSONStringValueLength, l.CustomMaxJSONStringValueLength)
     				require.Equal(t, tc.expectedCustomMaxJSONObjectEntryCount, l.CustomMaxJSONObjectEntryCount)
     				require.Equal(t, tc.expectedCustomMaxJSONArrayElementCount, l.CustomMaxJSONArrayElementCount)
    +				require.Equal(t, tc.expectedCustomMaxJSONToken, l.CustomMaxJSONToken)
     				require.Equal(t, tc.expectedRequireRequestHeader, l.RequireRequestHeader)
     				require.Equal(t, tc.expectedDisableRequestLimiter, l.DisableRequestLimiter)
     				require.Equal(t, tc.expectedDuration, l.MaxRequestDuration)
    @@ -347,6 +366,7 @@ func TestListener_parseRequestSettings(t *testing.T) {
     				require.Nil(t, l.CustomMaxJSONStringValueLengthRaw)
     				require.Nil(t, l.CustomMaxJSONObjectEntryCountRaw)
     				require.Nil(t, l.CustomMaxJSONArrayElementCountRaw)
    +				require.Nil(t, l.CustomMaxJSONTokenRaw)
     				require.Nil(t, l.MaxRequestDurationRaw)
     				require.Nil(t, l.RequireRequestHeaderRaw)
     				require.Nil(t, l.DisableRequestLimiterRaw)
    
  • sdk/helper/jsonutil/json.go · +399 −89 · modified
    @@ -10,6 +10,9 @@ import (
     	"encoding/json"
     	"fmt"
     	"io"
    +	"strconv"
    +	"strings"
    +	"unicode"
     
     	"github.com/hashicorp/errwrap"
     	"github.com/hashicorp/vault/sdk/helper/compressutil"
    @@ -105,8 +108,18 @@ func DecodeJSONFromReader(r io.Reader, out interface{}) error {
     
     // containerState holds information about an open JSON container (object or array).
     type containerState struct {
    -	Type  json.Delim // '{' or '['
    -	Count int        // Number of entries (for objects) or elements for arrays)
    +	// '{' or '['
    +	Type json.Delim
    +
    +	// Number of entries (for objects) or elements for arrays)
    +	Count int
    +
    +	// isKey is true if the next expected token in an object is a key.
    +	isKey bool
    +
    +	// keys tracks the keys seen in an object to detect duplicates.
    +	// It is only initialized for objects ('{').
    +	keys map[string]struct{}
     }
     
     // JSONLimits defines the configurable limits for JSON validation.
    @@ -115,127 +128,250 @@ type JSONLimits struct {
     	MaxStringValueLength int
     	MaxObjectEntryCount  int
     	MaxArrayElementCount int
    +	MaxTokens            int
     }
     
     // isWhitespace checks if a byte is a JSON whitespace character.
     func isWhitespace(b byte) bool {
    -	return b == ' ' || b == '\t' || b == '\n' || b == '\r'
    +	// Standard JSON whitespace characters (RFC 8259)
    +	if b == ' ' || b == '\t' || b == '\n' || b == '\r' {
    +		return true
    +	}
    +	// Custom support for non-standard Unit Separator character (ASCII 31)
    +	if b == 31 || b == 139 {
    +		return true
    +	}
    +	return false
     }
     
    -// VerifyMaxDepthStreaming scans the JSON stream to determine its maximum nesting depth
    -// and enforce various limits. It first checks if the stream is likely JSON before proceeding.
    +// VerifyMaxDepthStreaming scans the JSON stream to enforce nesting depth, counts,
    +// and other limits without decoding the full structure into memory.
     func VerifyMaxDepthStreaming(jsonReader io.Reader, limits JSONLimits) (int, error) {
     	// Use a buffered reader to peek at the stream without consuming it from the original reader.
     	bufReader := bufio.NewReader(jsonReader)
     
    -	// Find the first non-whitespace character.
    -	var firstByte byte
    -	var err error
    -	for {
    -		firstByte, err = bufReader.ReadByte()
    -		if err != nil {
    -			// If we hit EOF before finding a real character, it's an empty or whitespace-only payload.
    -			if err == io.EOF {
    -				return 0, nil
    -			}
    -			return 0, err // A different I/O error occurred.
    -		}
    -		if !isWhitespace(firstByte) {
    -			break // Found the first significant character.
    -		}
    +	bom, err := bufReader.Peek(3)
    +	if err == nil && bytes.Equal(bom, []byte{0xEF, 0xBB, 0xBF}) {
    +		_, _ = bufReader.Discard(3)
     	}
     
    -	// If the payload doesn't start with '{' or '[', assume it's not a JSON object or array
    -	// and that our limits do not apply.
    -	if firstByte != '{' && firstByte != '[' {
    -		return 0, nil
    -	}
    -
    -	fullStreamReader := io.MultiReader(bytes.NewReader([]byte{firstByte}), bufReader)
    -	decoder := json.NewDecoder(fullStreamReader)
    -	decoder.UseNumber()
    -
    +	// We use a manual token loop instead of json.Decoder to gain low-level
    +	// control over the stream. This is necessary to fix a vulnerability where
    +	// the raw byte length of strings with escape sequences was not correctly limited.
     	var (
    -		maxDepth      = 0
    -		currentDepth  = 0
    -		isKeyExpected bool
    +		maxDepth          int
    +		currentDepth      int
    +		tokenCount        int
    +		lastTokenWasComma bool
     	)
     	containerInfoStack := make([]containerState, 0, limits.MaxDepth)
     
    -	for {
    -		t, err := decoder.Token()
    +	// Prime the loop by finding the first non-whitespace character.
    +	if err := skipWhitespace(bufReader); err != nil {
     		if err == io.EOF {
    -			break
    +			// An empty payload or one with only whitespace is valid. Skip verification.
    +			return 0, nil
     		}
    +		return 0, err
    +	}
    +
    +	b, err := bufReader.Peek(1)
    +	if err != nil {
    +		// This can happen if there's an I/O error after skipping whitespace.
    +		return 0, err
    +	}
    +
    +	// If the payload doesn't start with a JSON container ('{' or '['), skip
    +	// verification. The limits are intended for structured data, not primitives
    +	// or other formats.
    +	if b[0] != '{' && b[0] != '[' {
    +		return 0, nil
    +	}
    +
    +	for {
    +		// Check for EOF before peeking.
    +		b, err := bufReader.Peek(1)
    +		// Any error from the decoder is now considered a real error.
     		if err != nil {
    -			// Any error from the decoder is now considered a real error.
    +			if err == io.EOF {
    +				break
    +			}
     			return 0, fmt.Errorf("error reading JSON token: %w", err)
     		}
     
    -		switch v := t.(type) {
    -		case json.Delim:
    -			switch v {
    -			case '{', '[':
    -				currentDepth++
    -				// Check against the limit directly.
    -				if currentDepth > limits.MaxDepth {
    -					return 0, fmt.Errorf("JSON input exceeds allowed nesting depth")
    +		// If the last token was a comma, the next token cannot be a closing delimiter.
    +		if lastTokenWasComma && (b[0] == '}' || b[0] == ']') {
    +			if b[0] == '}' {
    +				return 0, fmt.Errorf("invalid character '}' after object key-value pair")
    +			}
    +			return 0, fmt.Errorf("invalid character ']' after array element")
    +		}
    +
    +		// After a top-level value, any other character is an error.
    +		if len(containerInfoStack) == 0 && maxDepth > 0 {
    +			return 0, fmt.Errorf("invalid character '%c' after top-level value", b[0])
    +		}
    +
    +		// Increment and check the total token count limit.
    +		if limits.MaxTokens > 0 {
    +			tokenCount++
    +			if tokenCount > limits.MaxTokens {
    +				return 0, fmt.Errorf("JSON payload exceeds allowed token count")
    +			}
    +		}
    +
    +		var currentContainer *containerState
    +		if len(containerInfoStack) > 0 {
    +			currentContainer = &containerInfoStack[len(containerInfoStack)-1]
    +		}
    +
    +		// Before processing the token, reset the comma flag. It will be set
    +		// again below if the current token is a comma.
    +		lastTokenWasComma = false
    +
    +		switch b[0] {
    +		case '{', '[':
    +			delim, _ := bufReader.ReadByte()
    +			if currentContainer != nil {
    +				if currentContainer.Type == '[' {
    +					currentContainer.Count++
    +					if currentContainer.Count > limits.MaxArrayElementCount {
    +						return 0, fmt.Errorf("JSON array exceeds allowed element count")
    +					}
    +				} else {
    +					currentContainer.isKey = true
     				}
    -				if currentDepth > maxDepth {
    -					maxDepth = currentDepth
    +			}
    +
    +			// Handle depth checks and tracking together for clarity.
    +			currentDepth++
    +			if currentDepth > maxDepth {
    +				maxDepth = currentDepth
    +			}
    +			// Check depth limit immediately after incrementing.
    +			if limits.MaxDepth > 0 && currentDepth > limits.MaxDepth {
    +				return 0, fmt.Errorf("JSON input exceeds allowed nesting depth")
    +			}
    +
    +			// For objects, initialize a map to track keys and prevent duplicates.
    +			var keys map[string]struct{}
    +			if delim == '{' {
    +				keys = make(map[string]struct{})
    +			}
    +
    +			containerInfoStack = append(containerInfoStack, containerState{Type: json.Delim(delim), isKey: delim == '{', keys: keys})
    +
    +		case '}', ']':
    +			// A closing brace cannot follow a colon without a value.
    +			if currentContainer != nil && currentContainer.Type == '{' && !currentContainer.isKey {
    +				return 0, fmt.Errorf("invalid character '}' after object key")
    +			}
    +			delim, _ := bufReader.ReadByte()
    +			if currentContainer == nil {
    +				return 0, fmt.Errorf("malformed JSON: unmatched closing delimiter '%c'", delim)
    +			}
    +			if (delim == '}' && currentContainer.Type != '{') || (delim == ']' && currentContainer.Type != '[') {
    +				return 0, fmt.Errorf("malformed JSON: mismatched closing delimiter '%c'", delim)
    +			}
    +			containerInfoStack = containerInfoStack[:len(containerInfoStack)-1]
    +			currentDepth--
    +			if len(containerInfoStack) > 0 && containerInfoStack[len(containerInfoStack)-1].Type == '{' {
    +				containerInfoStack[len(containerInfoStack)-1].isKey = true
    +			}
    +
    +		case '"':
    +			// Manually scan the string to count its raw byte length and get the value.
    +			val, err := scanString(bufReader, limits.MaxStringValueLength)
    +			if err != nil {
    +				return 0, err
    +			}
    +
    +			if currentContainer == nil {
    +				if maxDepth == 0 {
    +					maxDepth = 1
     				}
    +				break
    +			}
    +
    +			if currentContainer.Type == '{' {
    +				if currentContainer.isKey {
    +					// Check for duplicate keys.
    +					if _, ok := currentContainer.keys[val]; ok {
    +						return 0, fmt.Errorf("duplicate key '%s' in object", val)
    +					}
    +					currentContainer.keys[val] = struct{}{}
     
    -				containerInfoStack = append(containerInfoStack, containerState{Type: v, Count: 0})
    -				if v == '{' {
    -					isKeyExpected = true
    +					currentContainer.Count++
    +					if currentContainer.Count > limits.MaxObjectEntryCount {
    +						return 0, fmt.Errorf("JSON object exceeds allowed entry count")
    +					}
    +					currentContainer.isKey = false
    +				} else {
    +					currentContainer.isKey = true
     				}
    -			case '}', ']':
    -				if len(containerInfoStack) == 0 {
    -					return 0, fmt.Errorf("malformed JSON: unmatched closing delimiter '%c'", v)
    +			} else {
    +				currentContainer.Count++
    +				if currentContainer.Count > limits.MaxArrayElementCount {
    +					return 0, fmt.Errorf("JSON array exceeds allowed element count")
     				}
    -				top := containerInfoStack[len(containerInfoStack)-1]
    -				containerInfoStack = containerInfoStack[:len(containerInfoStack)-1]
    -				currentDepth--
    -				if (v == '}' && top.Type != '{') || (v == ']' && top.Type != '[') {
    -					return 0, fmt.Errorf("malformed JSON: mismatched closing delimiter '%c' for opening '%c'", v, top.Type)
    +			}
    +
    +		case 't', 'f', 'n': // true, false, null
    +			if err := scanLiteral(bufReader); err != nil {
    +				return 0, err
    +			}
    +			if currentContainer == nil {
    +				if maxDepth == 0 {
    +					maxDepth = 1
     				}
    -				if len(containerInfoStack) > 0 && containerInfoStack[len(containerInfoStack)-1].Type == '{' {
    -					isKeyExpected = false
    +				break
    +			}
    +			if currentContainer.Type == '[' {
    +				currentContainer.Count++
    +				if currentContainer.Count > limits.MaxArrayElementCount {
    +					return 0, fmt.Errorf("JSON array exceeds allowed element count")
     				}
    +			} else {
    +				currentContainer.isKey = true
     			}
    -		case string:
    -			if len(v) > limits.MaxStringValueLength {
    -				return 0, fmt.Errorf("JSON string value exceeds allowed length")
    -			}
    -			if len(containerInfoStack) > 0 {
    -				top := &containerInfoStack[len(containerInfoStack)-1]
    -				if top.Type == '{' {
    -					if isKeyExpected {
    -						top.Count++
    -						if top.Count > limits.MaxObjectEntryCount {
    -							return 0, fmt.Errorf("JSON object exceeds allowed entry count")
    -						}
    -						isKeyExpected = false
    -					}
    -				} else if top.Type == '[' {
    -					top.Count++
    -					if top.Count > limits.MaxArrayElementCount {
    -						return 0, fmt.Errorf("JSON array exceeds allowed element count")
    -					}
    +		case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
    +			if err := scanNumber(bufReader, limits.MaxStringValueLength); err != nil {
    +				return 0, err
    +			}
    +			if currentContainer == nil {
    +				if maxDepth == 0 {
    +					maxDepth = 1
     				}
    +				break
     			}
    -		default: // Handles numbers, booleans, and nulls
    -			if len(containerInfoStack) > 0 {
    -				top := &containerInfoStack[len(containerInfoStack)-1]
    -				if top.Type == '[' {
    -					top.Count++
    -					if top.Count > limits.MaxArrayElementCount {
    -						return 0, fmt.Errorf("JSON array exceeds allowed element count")
    -					}
    -				} else if top.Type == '{' {
    -					isKeyExpected = true
    +			if currentContainer.Type == '[' {
    +				currentContainer.Count++
    +				if currentContainer.Count > limits.MaxArrayElementCount {
    +					return 0, fmt.Errorf("JSON array exceeds allowed element count")
     				}
    +			} else {
    +				currentContainer.isKey = true
    +			}
    +
    +		case ',':
    +			_, _ = bufReader.ReadByte()
    +			lastTokenWasComma = true
    +			if currentContainer != nil && currentContainer.Type == '{' {
    +				currentContainer.isKey = true
    +			}
    +
    +		case ':':
    +			_, _ = bufReader.ReadByte()
    +
    +		default:
    +			return 0, fmt.Errorf("invalid character '%c' looking for beginning of value", b[0])
    +		}
    +
    +		if err := skipWhitespace(bufReader); err != nil {
    +			if err == io.EOF {
    +				break
     			}
    +			return 0, err
     		}
     	}
     
    @@ -245,3 +381,177 @@ func VerifyMaxDepthStreaming(jsonReader io.Reader, limits JSONLimits) (int, erro
     
     	return maxDepth, nil
     }
    +
    +func skipWhitespace(r *bufio.Reader) error {
    +	for {
    +		b, err := r.Peek(1)
    +		if err != nil {
    +			return err
    +		}
    +		if !isWhitespace(b[0]) {
    +			return nil
    +		}
    +		_, _ = r.ReadByte()
    +	}
    +}
    +
    +// scanString consumes a JSON string from the reader, ensuring the raw byte
    +// length of its content does not exceed the limit. It returns the unescaped
    +// string value.
    +func scanString(r *bufio.Reader, limit int) (string, error) {
    +	if b, _ := r.ReadByte(); b != '"' {
    +		return "", fmt.Errorf("expected string")
    +	}
    +
    +	var builder strings.Builder
    +	contentByteCount := 0
    +	var lastRune rune = -1 // Track the last rune for surrogate pair validation.
    +
    +	for {
    +		b, err := r.ReadByte()
    +		if err != nil {
    +			if err == io.EOF {
    +				return "", fmt.Errorf("malformed JSON, unclosed string")
    +			}
    +			return "", err
    +		}
    +
    +		if b == '"' {
    +			// Before successfully returning, ensure we didn't end on an unpaired high surrogate.
    +			if lastRune >= 0xD800 && lastRune <= 0xDBFF {
    +				return "", fmt.Errorf("malformed JSON, unterminated surrogate pair in string")
    +			}
    +			return builder.String(), nil
    +		}
    +
    +		contentByteCount++
    +		lastRune = -1 // Reset unless we parse a new rune.
    +
    +		if b == '\\' {
    +			escaped, err := r.ReadByte()
    +			if err != nil {
    +				return "", fmt.Errorf("malformed JSON, unterminated escape sequence in string")
    +			}
    +			contentByteCount++
    +
    +			switch escaped {
    +			case '"', '\\', '/':
    +				builder.WriteByte(escaped)
    +			case 'b':
    +				builder.WriteByte('\b')
    +			case 'f':
    +				builder.WriteByte('\f')
    +			case 'n':
    +				builder.WriteByte('\n')
    +			case 'r':
    +				builder.WriteByte('\r')
    +			case 't':
    +				builder.WriteByte('\t')
    +			case 'u':
    +				hexChars := make([]byte, 4)
    +				if _, err := io.ReadFull(r, hexChars); err != nil {
    +					return "", fmt.Errorf("malformed JSON, unterminated unicode escape in string")
    +				}
    +				contentByteCount += 4
    +
    +				hexStr := string(hexChars)
    +				for _, char := range hexStr {
    +					if !((char >= '0' && char <= '9') || (char >= 'a' && char <= 'f') || (char >= 'A' && char <= 'F')) {
    +						return "", fmt.Errorf("invalid character '%c' in string escape code", char)
    +					}
    +				}
    +
    +				code, _ := strconv.ParseUint(hexStr, 16, 32)
    +				if code > unicode.MaxRune {
    +					return "", fmt.Errorf("invalid unicode escape sequence: value out of range")
    +				}
    +
    +				r := rune(code)
    +				builder.WriteRune(r)
    +				lastRune = r
    +			default:
    +				return "", fmt.Errorf("invalid character '%c' in string escape code", escaped)
    +			}
    +		} else {
    +			builder.WriteByte(b)
    +		}
    +
    +		if limit > 0 && contentByteCount > limit {
    +			return "", fmt.Errorf("JSON string value exceeds allowed length")
    +		}
    +	}
    +}
    +
    +func scanLiteral(r *bufio.Reader) error {
    +	for {
    +		b, err := r.Peek(1)
    +		if err != nil {
    +			// If we hit EOF after reading part of a literal, that's a clean end.
    +			if err == io.EOF {
    +				break
    +			}
    +			return err
    +		}
    +		if isWhitespace(b[0]) || b[0] == ',' || b[0] == '}' || b[0] == ']' {
    +			return nil
    +		}
    +		_, _ = r.ReadByte()
    +	}
    +	return nil
    +}
    +
    +func scanNumber(r *bufio.Reader, limit int) error {
    +	var builder strings.Builder
    +	byteCount := 0
    +
    +	// Peek at the first char to check for leading zero issues.
    +	peeked, err := r.Peek(2)
    +	if err == nil && len(peeked) > 1 {
    +		if peeked[0] == '0' && peeked[1] >= '0' && peeked[1] <= '9' {
    +			_, _ = r.ReadByte()
    +			return fmt.Errorf("invalid character '%c' after top-level value", peeked[1])
    +		}
    +	}
    +
    +	for {
    +		b, err := r.Peek(1)
    +		if err != nil {
    +			if err == io.EOF {
    +				break
    +			}
    +			return err
    +		}
    +
    +		char := b[0]
    +		if isWhitespace(char) || char == ',' || char == '}' || char == ']' {
    +			break
    +		}
    +
    +		// A number token can contain only these characters.
    +		isNumPart := (char >= '0' && char <= '9') || char == '.' || char == 'e' || char == 'E' || char == '+' || char == '-'
    +		if !isNumPart {
    +			// If it's not a valid number character, we stop scanning.
    +			break
    +		}
    +
    +		_, _ = r.ReadByte()
    +		builder.WriteByte(char)
    +		byteCount++
    +		if limit > 0 && byteCount > limit {
    +			return fmt.Errorf("JSON number value exceeds allowed length")
    +		}
    +	}
    +
    +	if byteCount == 0 {
    +		return fmt.Errorf("malformed JSON, empty number")
    +	}
    +
    +	// Use the standard library for a final, strict validation of the number's syntax.
    +	// This correctly rejects malformed inputs like "-" or "123.".
    +	numStr := builder.String()
    +	if _, err := strconv.ParseFloat(numStr, 64); err != nil {
    +		return fmt.Errorf("malformed JSON, invalid number syntax for '%s'", numStr)
    +	}
    +
    +	return nil
    +}
    
  • sdk/helper/jsonutil/json_test.go · +460 −2 · modified
    @@ -183,7 +183,7 @@ func TestJSONUtil_DecodeJSONFromReader(t *testing.T) {
     	}
     }
     
    -func TestJSONUtil_Limits(t *testing.T) {
    +func TestJSONUtil_Limits_DefaultLimits(t *testing.T) {
     	tests := []struct {
     		name        string
     		jsonInput   string
    @@ -213,7 +213,7 @@ func TestJSONUtil_Limits(t *testing.T) {
     			name:        "Malformed - Unmatched closing brace",
     			jsonInput:   `{}}`,
     			expectError: true,
    -			errorMsg:    "error reading JSON token: invalid character '}' looking for beginning of value",
    +			errorMsg:    "invalid character '}' after top-level value",
     		},
     		// String Length Limits
     		{
    @@ -274,6 +274,464 @@ func TestJSONUtil_Limits(t *testing.T) {
     	}
     }
     
    +func TestJSONUtil_Limits_ConfiguredLimits(t *testing.T) {
    +	limits := JSONLimits{
    +		MaxDepth:             64,
    +		MaxStringValueLength: 1024,
    +		MaxObjectEntryCount:  3,
    +		MaxArrayElementCount: 3,
    +		MaxTokens:            20,
    +	}
    +
    +	bom := []byte{0xEF, 0xBB, 0xBF}
    +
    +	tests := []struct {
    +		name     string
    +		payload  []byte
    +		errorMsg string
    +	}{
    +		{
    +			name:     "object entries with string values",
    +			payload:  []byte(`{"k0":"v0","k1":"v1","k2":"v2","k3":"v3"}`),
    +			errorMsg: "JSON object exceeds allowed entry count",
    +		},
    +		{
    +			name:     "object entries with array values",
    +			payload:  []byte(`{"k0":[],"k1":[],"k2":[],"k3":[]}`),
    +			errorMsg: "JSON object exceeds allowed entry count",
    +		},
    +		{
    +			name:     "object entries with object values",
    +			payload:  []byte(`{"k0":{},"k1":{},"k2":{},"k3":{}}`),
    +			errorMsg: "JSON object exceeds allowed entry count",
    +		},
    +		{
    +			name:     "array elements as objects",
    +			payload:  []byte(`[{}, {}, {}, {}]`),
    +			errorMsg: "JSON array exceeds allowed element count",
    +		},
    +		{
    +			name:     "BOM-prefixed over-limit object",
    +			payload:  append(bom, []byte(`{"k0":"v0","k1":"v1","k2":"v2","k3":"v3"}`)...),
    +			errorMsg: "JSON object exceeds allowed entry count",
    +		},
    +		{
    +			name:     "object key exceeds string length limit",
    +			payload:  []byte(fmt.Sprintf(`{"%s": 0}`, strings.Repeat("a", limits.MaxStringValueLength+1))),
    +			errorMsg: "JSON string value exceeds allowed length",
    +		},
    +		{
    +			name:     "trailing data after valid JSON",
    +			payload:  []byte(`{"k0":"v0"} "invalid"`),
    +			errorMsg: "invalid character '\"' after top-level value",
    +		},
    +		{
    +			name:     "object with embedded null byte in key",
    +			payload:  []byte(`{"k0\u0000":0, "k1":1, "k2":2, "k3":3}`),
    +			errorMsg: "JSON object exceeds allowed entry count",
    +		},
    +		{
    +			name:     "incomplete JSON stream",
    +			payload:  []byte(`{"k0":"v0",`),
    +			errorMsg: "malformed JSON, unclosed containers",
    +		},
    +		{
    +			name:     "deeply nested object exceeds depth limit",
    +			payload:  []byte(strings.Repeat(`{"a":`, limits.MaxDepth+1) + "null" + strings.Repeat(`}`, limits.MaxDepth+1)),
    +			errorMsg: "JSON payload exceeds allowed token count",
    +		},
    +		{
    +			name:     "payload exceeds token limit",
    +			payload:  []byte(`{"a":1,"b":2,"c":3,"d":4,"e":5,"f":6,"g":7,"h":8,"i":9,"j":10}`),
    +			errorMsg: "JSON object exceeds allowed entry count",
    +		},
    +		{
    +			name:     "string with many escapes exceeds length limit",
    +			payload:  []byte(fmt.Sprintf(`{"k":"%s"}`, strings.Repeat(`\"`, limits.MaxStringValueLength/2+1))),
    +			errorMsg: "JSON string value exceeds allowed length",
    +		},
    +		{
    +			name: "deeply nested string exceeds length limit",
    +			payload: []byte(fmt.Sprintf(`%s{"key":"%s"}%s`,
    +				strings.Repeat(`{"a":`, 60),
    +				strings.Repeat("b", limits.MaxStringValueLength+1),
    +				strings.Repeat(`}`, 60))),
    +			errorMsg: "JSON payload exceeds allowed token count",
    +		},
    +		{
    +			name:     "very long number exceeds length limit",
    +			payload:  []byte(fmt.Sprintf(`{"key":%s}`, strings.Repeat("1", limits.MaxStringValueLength+1))),
    +			errorMsg: "JSON number value exceeds allowed length",
    +		},
    +		{
    +			name: "string with invalid unicode escape",
    +			// 'X' is not a valid hex digit
    +			payload:  []byte(`{"key":"\u123X"}`),
    +			errorMsg: "invalid character 'X' in string escape code",
    +		},
    +		{
    +			name:     "object with trailing comma",
    +			payload:  []byte(`{"k0":"v0",}`),
    +			errorMsg: "invalid character '}' after object key-value pair",
    +		},
    +	}
    +
    +	for _, tt := range tests {
    +		t.Run(tt.name, func(t *testing.T) {
    +			_, err := VerifyMaxDepthStreaming(bytes.NewReader(tt.payload), limits)
    +			require.Error(t, err)
    +			require.Contains(t, err.Error(), tt.errorMsg)
    +		})
    +	}
    +}
    +
    +func TestVerifyMaxDepthStreaming_MaxTokens(t *testing.T) {
    +	t.Run("payload exceeds limit", func(t *testing.T) {
    +		limits := JSONLimits{
    +			MaxTokens:            5, // Set a small, specific limit.
    +			MaxDepth:             CustomMaxJSONDepth,
    +			MaxStringValueLength: CustomMaxJSONStringValueLength,
    +			MaxObjectEntryCount:  CustomMaxJSONObjectEntryCount,
    +			MaxArrayElementCount: CustomMaxJSONArrayElementCount,
    +		}
    +
    +		// This payload contains 6 tokens: {, "k0", "v0", "k1", "v1", }
    +		payload := []byte(`{"k0":"v0","k1":"v1"}`)
    +		expectedErrorMsg := "JSON payload exceeds allowed token count"
    +
    +		_, err := VerifyMaxDepthStreaming(bytes.NewReader(payload), limits)
    +
    +		// We expect an error because the token count (6) is greater than the limit (5).
    +		require.Error(t, err)
    +		require.Contains(t, err.Error(), expectedErrorMsg)
    +	})
    +
    +	t.Run("payload within limit", func(t *testing.T) {
    +		limits := JSONLimits{
    +			MaxTokens:            5,
    +			MaxDepth:             CustomMaxJSONDepth,
    +			MaxStringValueLength: CustomMaxJSONStringValueLength,
    +			MaxObjectEntryCount:  CustomMaxJSONObjectEntryCount,
    +			MaxArrayElementCount: CustomMaxJSONArrayElementCount,
    +		}
    +
     +		// This payload contains 4 tokens: {, "key", null, }
    +		payload := []byte(`{"key":null}`)
    +
    +		_, err := VerifyMaxDepthStreaming(bytes.NewReader(payload), limits)
    +
     +		// We expect no error because the token count (4) is less than the limit (5).
    +		require.NoError(t, err)
    +	})
    +}
    +
    +// TestJSONUtil_Limits_Strictness adds tests for cases that a lenient parser
    +// might accept but a security-focused one should reject.
    +func TestJSONUtil_Limits_Strictness(t *testing.T) {
    +	limits := JSONLimits{
    +		MaxDepth:             64,
    +		MaxStringValueLength: 1024,
    +		MaxObjectEntryCount:  3,
    +		MaxArrayElementCount: 3,
    +		MaxTokens:            100,
    +	}
    +
    +	tests := []struct {
    +		name     string
    +		payload  []byte
    +		errorMsg string
    +	}{
    +		// RFC 8259 states that object key names SHOULD be unique, but doesn't
    +		// require it. A strict parser should reject duplicates to prevent ambiguity.
    +		{
    +			name:     "object with duplicate keys",
    +			payload:  []byte(`{"key":"v1", "key":"v2"}`),
    +			errorMsg: "duplicate key 'key' in object",
    +		},
    +		{
    +			name:     "array with trailing comma",
    +			payload:  []byte(`[1, 2, 3,]`),
    +			errorMsg: "invalid character ']' after array element",
    +		},
    +		// A robust parser should reject any invalid escape sequence, not just unicode.
    +		{
    +			name:     "string with invalid escape sequence",
    +			payload:  []byte(`{"key":"\q"}`),
    +			errorMsg: "invalid character 'q' in string escape code",
    +		},
    +		// A key must be followed by a colon and a value.
    +		{
    +			name:     "object with missing value after key",
    +			payload:  []byte(`{"key":}`),
    +			errorMsg: "invalid character '}' after object key",
    +		},
    +		// Numbers starting with zero (unless they are just "0") are not standard.
    +		{
    +			name:     "number with leading zero",
    +			payload:  []byte(`[0123]`),
    +			errorMsg: "invalid character '1' after top-level value",
    +		},
    +	}
    +	for _, tt := range tests {
    +		t.Run(tt.name, func(t *testing.T) {
    +			_, err := VerifyMaxDepthStreaming(bytes.NewReader(tt.payload), limits)
    +			require.Error(t, err)
    +			require.Contains(t, err.Error(), tt.errorMsg)
    +		})
    +	}
    +}
    +
    +// TestVerifyMaxDepthStreaming_NonContainerBypass ensures that top-level values
    +// that are not objects or arrays are correctly ignored, as the limits are not
    +// intended to apply to them.
    +func TestVerifyMaxDepthStreaming_NonContainerBypass(t *testing.T) {
    +	limits := JSONLimits{MaxDepth: 1, MaxTokens: 1}
    +
    +	tests := map[string][]byte{
    +		"top-level string": []byte(`"this is a string"`),
    +		"top-level number": []byte(`12345`),
    +		"top-level bool":   []byte(`true`),
    +		"top-level null":   []byte(`null`),
    +	}
    +
    +	for name, payload := range tests {
    +		t.Run(name, func(t *testing.T) {
    +			_, err := VerifyMaxDepthStreaming(bytes.NewReader(payload), limits)
    +			require.NoError(t, err, "expected no error for non-container top-level value")
    +		})
    +	}
    +}
    +
    +// TestVerifyMaxDepthStreaming_ValidVaultPayloads ensures the parser
    +// correctly accepts legitimate, common JSON payloads from the Vault ecosystem.
    +func TestVerifyMaxDepthStreaming_ValidVaultPayloads(t *testing.T) {
    +	// Use reasonable limits that are well above what these payloads require,
    +	// ensuring that the parser doesn't fail on valid structures.
    +	limits := JSONLimits{
    +		MaxDepth:             10,
    +		MaxStringValueLength: 4096,
    +		MaxObjectEntryCount:  100,
    +		MaxArrayElementCount: 100,
    +		MaxTokens:            500,
    +	}
    +
    +	tests := map[string]string{
    +		"KVv2 secret read response": `{
    +			"request_id": "a5a1c058-305f-3576-2f1d-f8f9a46a742c",
    +			"lease_id": "",
    +			"lease_duration": 0,
    +			"renewable": false,
    +			"data": {
    +				"data": {
    +					"foo": "bar"
    +				},
    +				"metadata": {
    +					"created_time": "2025-09-19T08:10:00.123456789Z",
    +					"custom_metadata": null,
    +					"deletion_time": "",
    +					"destroyed": false,
    +					"version": 1
    +				}
    +			},
    +			"warnings": null,
    +			"wrap_info": null
    +		}`,
    +		"Auth token lookup response": `{
    +			"request_id": "6d1f2b3e-7c3a-4e2b-8c6a-1b7d5f0e3a1b",
    +			"data": {
    +				"accessor": "St8oY1x3x6z5y9p6q3r8s7t2",
    +				"creation_time": 1663242230,
    +				"display_name": "userpass-user",
    +				"entity_id": "e-12345-67890-abcdef",
    +				"expire_time": "2025-10-19T10:10:00.000Z",
    +				"explicit_max_ttl": 0,
    +				"id": "h.123abcde456fghij789klmno",
    +				"identity_policies": ["default", "dev-policy"],
    +				"issue_time": "2025-09-19T10:10:00.000Z",
    +				"meta": {
    +					"username": "test-user"
    +				},
    +				"num_uses": 0,
    +				"orphan": true,
    +				"path": "auth/userpass/login/test-user",
    +				"policies": ["default", "dev-policy"],
    +				"renewable": true,
    +				"ttl": 2764799,
    +				"type": "service"
    +			}
    +		}`,
    +		"LIST operation response": `{
    +			"request_id": "c1a2b3d4-e5f6-a7b8-c9d0-e1f2a3b4c5d6",
    +			"data": {
    +				"keys": [
    +					"secret1",
    +					"secret2/",
    +					"another-secret"
    +				]
    +			}
    +		}`,
    +		"Policy write request": `{
    +			"policy": "path \"secret/data/foo\" {\n  capabilities = [\"read\", \"list\"]\n}\n\npath \"secret/data/bar\" {\n  capabilities = [\"create\", \"update\"]\n}"
    +		}`,
    +		"Transit batch encryption request": `{
    +			"batch_input": [
    +				{
    +					"plaintext": "aGVsbG8gd29ybGQ=",
    +					"context": "Y29udGV4dDE="
    +				},
    +				{
    +					"plaintext": "dGhpcyBpcyBhIHRlc3Q="
    +				}
    +			]
    +		}`,
    +	}
    +
    +	for name, payload := range tests {
    +		t.Run(name, func(t *testing.T) {
    +			_, err := VerifyMaxDepthStreaming(bytes.NewReader([]byte(payload)), limits)
    +			require.NoError(t, err, "expected valid Vault payload to parse without error")
    +		})
    +	}
    +}
    +
    +// TestVerifyMaxDepthStreaming_InvalidVaultPayloads ensures the parser correctly
    +// rejects real-world Vault payloads that have been crafted to violate specific
    +// security limits.
    +func TestVerifyMaxDepthStreaming_InvalidVaultPayloads(t *testing.T) {
    +	tests := []struct {
    +		name     string
    +		payload  []byte
    +		errorMsg string
    +		limits   JSONLimits
    +	}{
     +		// This KVv2 secret nests to depth 3 ({ -> data -> metadata), so with a
     +		// MaxDepth limit of 2 it is rejected by the nesting-depth check first,
     +		// before the MaxObjectEntryCount limit of 4 could apply to its 5 metadata keys.
    +		{
    +			name: "KVv2 secret with too many metadata entries",
    +			payload: []byte(`{
    +				"data": {
    +					"data": {"foo": "bar"},
    +					"metadata": {
    +						"created_time": "2025-09-19T08:10:00.123456789Z",
    +						"custom_metadata": null,
    +						"deletion_time": "",
    +						"destroyed": false,
    +						"version": 1
    +					}
    +				}
    +			}`),
    +			errorMsg: "JSON input exceeds allowed nesting depth",
    +			limits: JSONLimits{
    +				MaxDepth:             2,
    +				MaxStringValueLength: 100,
    +				MaxObjectEntryCount:  4,
    +				MaxArrayElementCount: 2,
    +				MaxTokens:            40,
    +			},
    +		},
    +		// This auth token response is flat but contains many key-value pairs,
    +		// resulting in over 50 tokens. It should be rejected by the MaxTokens limit of 40.
    +		{
    +			name: "Auth token response with too many tokens",
    +			payload: []byte(`{
    +				"data": {
    +					"accessor": "St8oY1x3x6z5y9p6q3r8s7t2",
    +					"creation_time": 1663242230,
    +					"display_name": "userpass-user",
    +					"entity_id": "e-12345-67890-abcdef",
    +					"expire_time": "2025-10-19T10:10:00.000Z",
    +					"explicit_max_ttl": 0,
    +					"id": "h.123abcde456fghij789klmno",
    +					"identity_policies": ["default", "dev-policy"],
    +					"issue_time": "2025-09-19T10:10:00.000Z",
    +					"meta": {"username": "test-user"},
    +					"num_uses": 0,
    +					"orphan": true,
    +					"path": "auth/userpass/login/test-user",
    +					"policies": ["default", "dev-policy"],
    +					"renewable": true,
    +					"ttl": 2764799,
    +					"type": "service"
    +				}
    +			}`),
    +			errorMsg: "JSON payload exceeds allowed token count",
    +			limits: JSONLimits{
    +				MaxDepth:             100,
    +				MaxStringValueLength: 100,
    +				MaxObjectEntryCount:  50,
    +				MaxArrayElementCount: 2,
    +				MaxTokens:            40,
    +			},
    +		},
     +		// This LIST response nests to depth 3 ({ -> data -> keys array), so with a
     +		// MaxDepth limit of 2 it is rejected by the nesting-depth check first,
     +		// before the MaxArrayElementCount limit of 2 could apply to its 3 elements.
    +		{
    +			name: "LIST response with too many keys in array",
    +			payload: []byte(`{
    +				"data": {
    +					"keys": [
    +						"secret1",
    +						"secret2/",
    +						"another-secret"
    +					]
    +				}
    +			}`),
    +			errorMsg: "JSON input exceeds allowed nesting depth",
    +			limits: JSONLimits{
    +				MaxDepth:             2,
    +				MaxStringValueLength: 100,
    +				MaxObjectEntryCount:  4,
    +				MaxArrayElementCount: 2,
    +				MaxTokens:            40,
    +			},
    +		},
    +		// The policy string in this payload is over 100 bytes long.
    +		// It should be rejected by the MaxStringValueLength limit of 100.
    +		{
    +			name: "Policy write request with oversized policy string",
    +			payload: []byte(`{
    +				"policy": "path \"secret/data/foo\" {\n  capabilities = [\"read\", \"list\"]\n}\n\npath \"secret/data/bar\" {\n  capabilities = [\"create\", \"update\"]\n}"
    +			}`),
    +			errorMsg: "JSON string value exceeds allowed length",
    +			limits: JSONLimits{
    +				MaxDepth:             2,
    +				MaxStringValueLength: 100,
    +				MaxObjectEntryCount:  4,
    +				MaxArrayElementCount: 2,
    +				MaxTokens:            40,
    +			},
    +		},
    +		// This transit request has a nesting depth of 3 ({ -> [ -> {).
    +		// It should be rejected by the MaxDepth limit of 2.
    +		{
    +			name: "Transit batch request with excessive depth",
    +			payload: []byte(`{
    +				"batch_input": [
    +					{
    +						"plaintext": "aGVsbG8gd29ybGQ="
    +					}
    +				]
    +			}`),
    +			errorMsg: "JSON input exceeds allowed nesting depth",
    +			limits: JSONLimits{
    +				MaxDepth:             2,
    +				MaxStringValueLength: 100,
    +				MaxObjectEntryCount:  4,
    +				MaxArrayElementCount: 2,
    +				MaxTokens:            40,
    +			},
    +		},
    +	}
    +
    +	for _, tt := range tests {
    +		t.Run(tt.name, func(t *testing.T) {
    +			_, err := VerifyMaxDepthStreaming(bytes.NewReader(tt.payload), tt.limits)
    +			require.Error(t, err)
    +			require.Contains(t, err.Error(), tt.errorMsg)
    +		})
    +	}
    +}
    +
     // generateComplexJSON generates a valid JSON string with a specified nesting depth.
     func generateComplexJSON(depth int) string {
     	if depth <= 0 {
    
eedc2b7426f3

Add limit to JSON nesting depth (#31069)

https://github.com/hashicorp/vaultBiancaAug 6, 2025via ghsa
11 files changed · +587 53
  • changelog/31069.txt+3 0 added
    @@ -0,0 +1,3 @@
    +```release-note:change
    +http: Add JSON configurable limits to HTTP handling for JSON payloads: `max_json_depth`, `max_json_string_value_length`, `max_json_object_entry_count`, `max_json_array_element_count`.
    +```
    \ No newline at end of file
    
  • command/server.go+20 0 modified
    @@ -899,6 +899,26 @@ func (c *ServerCommand) InitListeners(config *server.Config, disableClustering b
     		}
     		props["max_request_size"] = fmt.Sprintf("%d", lnConfig.MaxRequestSize)
     
    +		if lnConfig.CustomMaxJSONDepth == 0 {
    +			lnConfig.CustomMaxJSONDepth = vaulthttp.CustomMaxJSONDepth
    +		}
    +		props["max_json_depth"] = fmt.Sprintf("%d", lnConfig.CustomMaxJSONDepth)
    +
    +		if lnConfig.CustomMaxJSONStringValueLength == 0 {
    +			lnConfig.CustomMaxJSONStringValueLength = vaulthttp.CustomMaxJSONStringValueLength
    +		}
    +		props["max_json_string_value_length"] = fmt.Sprintf("%d", lnConfig.CustomMaxJSONStringValueLength)
    +
    +		if lnConfig.CustomMaxJSONObjectEntryCount == 0 {
    +			lnConfig.CustomMaxJSONObjectEntryCount = vaulthttp.CustomMaxJSONObjectEntryCount
    +		}
    +		props["max_json_object_entry_count"] = fmt.Sprintf("%d", lnConfig.CustomMaxJSONObjectEntryCount)
    +
    +		if lnConfig.CustomMaxJSONArrayElementCount == 0 {
    +			lnConfig.CustomMaxJSONArrayElementCount = vaulthttp.CustomMaxJSONArrayElementCount
    +		}
    +		props["max_json_array_element_count"] = fmt.Sprintf("%d", lnConfig.CustomMaxJSONArrayElementCount)
    +
     		if lnConfig.MaxRequestDuration == 0 {
     			lnConfig.MaxRequestDuration = vault.DefaultMaxRequestDuration
     		}
    
  • http/handler.go+37 0 modified
    @@ -85,6 +85,43 @@ const (
     	// VaultSnapshotRecoverParam is the query parameter sent when Vault should
     	// recover the data from a loaded snapshot
     	VaultSnapshotRecoverParam = "recover_snapshot_id"
    +
    +	// CustomMaxJSONDepth specifies the maximum nesting depth of a JSON object.
    +	// This limit is designed to prevent stack exhaustion attacks from deeply
    +	// nested JSON payloads, which could otherwise lead to a denial-of-service
    +	// (DoS) vulnerability. The default value of 300 is intentionally generous
    +	// to support complex but legitimate configurations, while still providing
    +	// a safeguard against malicious or malformed input. This value is
    +	// configurable to accommodate unique environmental requirements.
    +	CustomMaxJSONDepth = 300
    +
    +	// CustomMaxJSONStringValueLength defines the maximum allowed length for a single
    +	// string value within a JSON payload, in bytes. This is a critical defense
    +	// against excessive memory allocation attacks where a client might send a
    +	// very large string value to exhaust server memory. The default of 1MB
    +	// (1024 * 1024 bytes) is chosen to comfortably accommodate large secrets
    +	// such as private keys, certificate chains, or detailed configuration data,
    +	// without permitting unbounded allocation. This value is configurable.
    +	CustomMaxJSONStringValueLength = 1024 * 1024 // 1MB
    +
    +	// CustomMaxJSONObjectEntryCount sets the maximum number of key-value pairs
    +	// allowed in a single JSON object. This limit helps mitigate the risk of
    +	// hash-collision denial-of-service (HashDoS) attacks and prevents general
    +	// resource exhaustion from parsing objects with an excessive number of
    +	// entries. A default of 10,000 entries is well beyond the scope of typical
    +	// Vault secrets or configurations, providing a high ceiling for normal
    +	// operations while ensuring stability. This value is configurable.
    +	CustomMaxJSONObjectEntryCount = 10000
    +
    +	// CustomMaxJSONArrayElementCount determines the maximum number of elements
    +	// permitted in a single JSON array. This is particularly relevant for API
    +	// endpoints that can return large lists, such as the result of a `LIST`
    +	// operation on a secrets engine path. The default limit of 10,000 elements
    +	// prevents a single request from causing excessive memory consumption. While
    +	// most environments will fall well below this limit, it is configurable for
    +	// systems that require handling larger datasets, though pagination is the
    +	// recommended practice for such cases.
    +	CustomMaxJSONArrayElementCount = 10000
     )
     
     var (
    
  • http/handler_test.go+1 1 modified
    @@ -938,7 +938,7 @@ func TestHandler_MaxRequestSize(t *testing.T) {
     		"bar": strings.Repeat("a", 1025),
     	})
     
    -	require.ErrorContains(t, err, "error parsing JSON")
    +	require.ErrorContains(t, err, "http: request body too large")
     }
     
     // TestHandler_MaxRequestSize_Memory sets the max request size to 1024 bytes,
    
  • http/logical.go+1 1 modified
    @@ -147,7 +147,7 @@ func buildLogicalRequestNoAuth(perfStandby bool, ra *vault.RouterAccess, w http.
     				if err != nil {
     					status := http.StatusBadRequest
     					logical.AdjustErrorStatusCode(&status, err)
    -					return nil, nil, status, fmt.Errorf("error parsing JSON")
    +					return nil, nil, status, fmt.Errorf("error parsing JSON: %w", err)
     				}
     			}
     		}
    
  • http/logical_test.go+8 1 modified
    @@ -310,8 +310,15 @@ func TestLogical_RequestSizeDisableLimit(t *testing.T) {
     
     	// Write a very large object, should pass as MaxRequestSize set to -1/Negative value
     
    +	// Test change: Previously used DefaultMaxRequestSize to create a large payload.
    +	// However, after introducing JSON limits, the test successfully disables the first layer (MaxRequestSize),
    +	// but its large 32MB payload is then correctly caught by the second layer—specifically,
    +	// the CustomMaxStringValueLength limit, which defaults to 1MB.
    +	// Create a payload that is larger than a typical small limit (e.g., > 1KB),
    +	// but is well within the default JSON string length limit (1MB).
    +	// This isolates the test to *only* the MaxRequestSize behavior.
     	resp := testHttpPut(t, token, addr+"/v1/secret/foo", map[string]interface{}{
    -		"data": make([]byte, DefaultMaxRequestSize),
    +		"data": make([]byte, 2048),
     	})
     	testResponseStatus(t, resp, http.StatusNoContent)
     }
    
  • http/util.go+60 4 modified
    @@ -15,6 +15,7 @@ import (
     	"github.com/hashicorp/go-multierror"
     	"github.com/hashicorp/vault/helper/namespace"
     	"github.com/hashicorp/vault/limits"
    +	"github.com/hashicorp/vault/sdk/helper/jsonutil"
     	"github.com/hashicorp/vault/sdk/logical"
     	"github.com/hashicorp/vault/vault"
     	"github.com/hashicorp/vault/vault/quotas"
    @@ -24,25 +25,80 @@ var nonVotersAllowed = false
     
     func wrapMaxRequestSizeHandler(handler http.Handler, props *vault.HandlerProperties) http.Handler {
     	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
    -		var maxRequestSize int64
    +		var maxRequestSize, maxJSONDepth, maxStringValueLength, maxObjectEntryCount, maxArrayElementCount int64
    +
     		if props.ListenerConfig != nil {
     			maxRequestSize = props.ListenerConfig.MaxRequestSize
    +			maxJSONDepth = props.ListenerConfig.CustomMaxJSONDepth
    +			maxStringValueLength = props.ListenerConfig.CustomMaxJSONStringValueLength
    +			maxObjectEntryCount = props.ListenerConfig.CustomMaxJSONObjectEntryCount
    +			maxArrayElementCount = props.ListenerConfig.CustomMaxJSONArrayElementCount
     		}
    +
     		if maxRequestSize == 0 {
     			maxRequestSize = DefaultMaxRequestSize
     		}
    -		ctx := r.Context()
    -		originalBody := r.Body
    +		if maxJSONDepth == 0 {
    +			maxJSONDepth = CustomMaxJSONDepth
    +		}
    +		if maxStringValueLength == 0 {
    +			maxStringValueLength = CustomMaxJSONStringValueLength
    +		}
    +		if maxObjectEntryCount == 0 {
    +			maxObjectEntryCount = CustomMaxJSONObjectEntryCount
    +		}
    +		if maxArrayElementCount == 0 {
    +			maxArrayElementCount = CustomMaxJSONArrayElementCount
    +		}
    +
    +		jsonLimits := jsonutil.JSONLimits{
    +			MaxDepth:             int(maxJSONDepth),
    +			MaxStringValueLength: int(maxStringValueLength),
    +			MaxObjectEntryCount:  int(maxObjectEntryCount),
    +			MaxArrayElementCount: int(maxArrayElementCount),
    +		}
    +
    +		// If the payload is JSON, the VerifyMaxDepthStreaming function will perform validations.
    +		buf, err := jsonLimitsValidation(w, r, maxRequestSize, jsonLimits)
    +		if err != nil {
    +			respondError(w, http.StatusInternalServerError, err)
    +			return
    +		}
    +
    +		// Replace the body and update the context.
    +		// This ensures the request object is in a consistent state for all downstream handlers.
    +		// Because the original request body stream has been fully consumed by io.ReadAll,
    +		// we must replace it so that subsequent handlers can read the content.
    +		r.Body = newMultiReaderCloser(buf, r.Body)
    +		contextBody := r.Body
    +		ctx := logical.CreateContextOriginalBody(r.Context(), contextBody)
    +
     		if maxRequestSize > 0 {
     			r.Body = http.MaxBytesReader(w, r.Body, maxRequestSize)
     		}
    -		ctx = logical.CreateContextOriginalBody(ctx, originalBody)
     		r = r.WithContext(ctx)
     
     		handler.ServeHTTP(w, r)
     	})
     }
     
    +func jsonLimitsValidation(w http.ResponseWriter, r *http.Request, maxRequestSize int64, jsonLimits jsonutil.JSONLimits) (*bytes.Buffer, error) {
    +	// The TeeReader reads from the original body and writes a copy to our buffer.
    +	// We wrap the original body with a MaxBytesReader first to enforce the hard size limit.
    +	var limitedTeeReader io.Reader
    +	buf := &bytes.Buffer{}
    +	bodyReader := r.Body
    +	if maxRequestSize > 0 {
    +		bodyReader = http.MaxBytesReader(w, r.Body, maxRequestSize)
    +	}
    +	limitedTeeReader = io.TeeReader(bodyReader, buf)
    +	_, err := jsonutil.VerifyMaxDepthStreaming(limitedTeeReader, jsonLimits)
    +	if err != nil {
    +		return nil, err
    +	}
    +	return buf, nil
    +}
    +
     func wrapRequestLimiterHandler(handler http.Handler, props *vault.HandlerProperties) http.Handler {
     	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
     		request := r.WithContext(
    
  • internalshared/configutil/listener.go+54 0 modified
    @@ -149,6 +149,24 @@ type Listener struct {
     	// DisableRequestLimiter allows per-listener disabling of the Request Limiter.
     	DisableRequestLimiterRaw any  `hcl:"disable_request_limiter"`
     	DisableRequestLimiter    bool `hcl:"-"`
    +
    +	// JSON-specific limits
    +
    +	// CustomMaxJSONDepth specifies the maximum nesting depth of a JSON object.
    +	CustomMaxJSONDepthRaw interface{} `hcl:"max_json_depth"`
    +	CustomMaxJSONDepth    int64       `hcl:"-"`
    +
    +	// CustomMaxJSONStringValueLength defines the maximum allowed length for a string in a JSON payload.
    +	CustomMaxJSONStringValueLengthRaw interface{} `hcl:"max_json_string_value_length"`
    +	CustomMaxJSONStringValueLength    int64       `hcl:"-"`
    +
    +	// CustomMaxJSONObjectEntryCount sets the maximum number of key-value pairs in a JSON object.
    +	CustomMaxJSONObjectEntryCountRaw interface{} `hcl:"max_json_object_entry_count"`
    +	CustomMaxJSONObjectEntryCount    int64       `hcl:"-"`
    +
    +	// CustomMaxJSONArrayElementCount determines the maximum number of elements in a JSON array.
    +	CustomMaxJSONArrayElementCountRaw interface{} `hcl:"max_json_array_element_count"`
    +	CustomMaxJSONArrayElementCount    int64       `hcl:"-"`
     }
     
     // AgentAPI allows users to select which parts of the Agent API they want enabled.
    @@ -468,6 +486,10 @@ func (l *Listener) parseRequestSettings() error {
     		return fmt.Errorf("invalid value for disable_request_limiter: %w", err)
     	}
     
    +	if err := l.parseJSONLimitsSettings(); err != nil {
    +		return err
    +	}
    +
     	return nil
     }
     
    @@ -710,3 +732,35 @@ func (l *Listener) parseRedactionSettings() error {
     
     	return nil
     }
    +
    +func (l *Listener) parseJSONLimitsSettings() error {
    +	if err := parseAndClearInt(&l.CustomMaxJSONDepthRaw, &l.CustomMaxJSONDepth); err != nil {
    +		return fmt.Errorf("error parsing max_json_depth: %w", err)
    +	}
    +	if l.CustomMaxJSONDepth < 0 {
    +		return fmt.Errorf("max_json_depth cannot be negative")
    +	}
    +
    +	if err := parseAndClearInt(&l.CustomMaxJSONStringValueLengthRaw, &l.CustomMaxJSONStringValueLength); err != nil {
    +		return fmt.Errorf("error parsing max_json_string_value_length: %w", err)
    +	}
    +	if l.CustomMaxJSONStringValueLength < 0 {
    +		return fmt.Errorf("max_json_string_value_length cannot be negative")
    +	}
    +
    +	if err := parseAndClearInt(&l.CustomMaxJSONObjectEntryCountRaw, &l.CustomMaxJSONObjectEntryCount); err != nil {
    +		return fmt.Errorf("error parsing max_json_object_entry_count: %w", err)
    +	}
    +	if l.CustomMaxJSONObjectEntryCount < 0 {
    +		return fmt.Errorf("max_json_object_entry_count cannot be negative")
    +	}
    +
    +	if err := parseAndClearInt(&l.CustomMaxJSONArrayElementCountRaw, &l.CustomMaxJSONArrayElementCount); err != nil {
    +		return fmt.Errorf("error parsing max_json_array_element_count: %w", err)
    +	}
    +	if l.CustomMaxJSONArrayElementCount < 0 {
    +		return fmt.Errorf("max_json_array_element_count cannot be negative")
    +	}
    +
    +	return nil
    +}
    
  • internalshared/configutil/listener_test.go+88 43 modified
    @@ -214,16 +214,24 @@ func TestListener_parseRequestSettings(t *testing.T) {
     	t.Parallel()
     
     	tests := map[string]struct {
    -		rawMaxRequestSize             any
    -		expectedMaxRequestSize        int64
    -		rawMaxRequestDuration         any
    -		expectedDuration              time.Duration
    -		rawRequireRequestHeader       any
    -		expectedRequireRequestHeader  bool
    -		rawDisableRequestLimiter      any
    -		expectedDisableRequestLimiter bool
    -		isErrorExpected               bool
    -		errorMessage                  string
    +		rawMaxRequestSize                      any
    +		expectedMaxRequestSize                 int64
    +		rawMaxRequestDuration                  any
    +		expectedDuration                       time.Duration
    +		rawRequireRequestHeader                any
    +		expectedRequireRequestHeader           bool
    +		rawDisableRequestLimiter               any
    +		expectedDisableRequestLimiter          bool
    +		rawCustomMaxJSONDepth                  any
    +		expectedCustomMaxJSONDepth             int64
    +		rawCustomMaxJSONStringValueLength      any
    +		expectedCustomMaxJSONStringValueLength int64
    +		rawCustomMaxJSONObjectEntryCount       any
    +		expectedCustomMaxJSONObjectEntryCount  int64
    +		rawCustomMaxJSONArrayElementCount      any
    +		expectedCustomMaxJSONArrayElementCount int64
    +		isErrorExpected                        bool
    +		errorMessage                           string
     	}{
     		"nil": {
     			isErrorExpected: false,
    @@ -238,37 +246,65 @@ func TestListener_parseRequestSettings(t *testing.T) {
     			expectedMaxRequestSize: 5,
     			isErrorExpected:        false,
     		},
    -		"max-request-duration-bad": {
    -			rawMaxRequestDuration: "juan",
    +		"max-json-depth-bad": {
    +			rawCustomMaxJSONDepth: "badvalue",
     			isErrorExpected:       true,
    -			errorMessage:          "error parsing max_request_duration",
    +			errorMessage:          "error parsing max_json_depth",
     		},
    -		"max-request-duration-good": {
    -			rawMaxRequestDuration: "30s",
    -			expectedDuration:      30 * time.Second,
    -			isErrorExpected:       false,
    +		"max-json-depth-negative": {
    +			rawCustomMaxJSONDepth: "-1",
    +			isErrorExpected:       true,
    +			errorMessage:          "max_json_depth cannot be negative",
    +		},
    +		"max-json-depth-good": {
    +			rawCustomMaxJSONDepth:      "100",
    +			expectedCustomMaxJSONDepth: 100,
    +			isErrorExpected:            false,
    +		},
    +		"max-json-string-value-length-bad": {
    +			rawCustomMaxJSONStringValueLength: "badvalue",
    +			isErrorExpected:                   true,
    +			errorMessage:                      "error parsing max_json_string_value_length",
    +		},
    +		"max-json-string-value-length-negative": {
    +			rawCustomMaxJSONStringValueLength: "-1",
    +			isErrorExpected:                   true,
    +			errorMessage:                      "max_json_string_value_length cannot be negative",
    +		},
    +		"max-json-string-value-length-good": {
    +			rawCustomMaxJSONStringValueLength:      "2048",
    +			expectedCustomMaxJSONStringValueLength: 2048,
    +			isErrorExpected:                        false,
    +		},
    +		"custom-max-json-object-entry-count-bad": {
    +			rawCustomMaxJSONObjectEntryCount: "badvalue",
    +			isErrorExpected:                  true,
    +			errorMessage:                     "error parsing max_json_object_entry_count",
     		},
    -		"require-request-header-bad": {
    -			rawRequireRequestHeader:      "juan",
    -			expectedRequireRequestHeader: false,
    -			isErrorExpected:              true,
    -			errorMessage:                 "invalid value for require_request_header",
    +		"max-json-object-entry-count-negative": {
    +			rawCustomMaxJSONObjectEntryCount: "-1",
    +			isErrorExpected:                  true,
    +			errorMessage:                     "max_json_object_entry_count cannot be negative",
     		},
    -		"require-request-header-good": {
    -			rawRequireRequestHeader:      "true",
    -			expectedRequireRequestHeader: true,
    -			isErrorExpected:              false,
    +		"max-json-object-entry-count-good": {
    +			rawCustomMaxJSONObjectEntryCount:      "500",
    +			expectedCustomMaxJSONObjectEntryCount: 500,
    +			isErrorExpected:                       false,
     		},
    -		"disable-request-limiter-bad": {
    -			rawDisableRequestLimiter:      "badvalue",
    -			expectedDisableRequestLimiter: false,
    -			isErrorExpected:               true,
    -			errorMessage:                  "invalid value for disable_request_limiter",
    +		"max-json-array-element-count-bad": {
    +			rawCustomMaxJSONArrayElementCount: "badvalue",
    +			isErrorExpected:                   true,
    +			errorMessage:                      "error parsing max_json_array_element_count",
     		},
    -		"disable-request-limiter-good": {
    -			rawDisableRequestLimiter:      "true",
    -			expectedDisableRequestLimiter: true,
    -			isErrorExpected:               false,
    +		"max-json-array-element-count-negative": {
    +			rawCustomMaxJSONArrayElementCount: "-1",
    +			isErrorExpected:                   true,
    +			errorMessage:                      "max_json_array_element_count cannot be negative",
    +		},
    +		"max-json-array-element-count-good": {
    +			rawCustomMaxJSONArrayElementCount:      "500",
    +			expectedCustomMaxJSONArrayElementCount: 500,
    +			isErrorExpected:                        false,
     		},
     	}
     
    @@ -278,12 +314,15 @@ func TestListener_parseRequestSettings(t *testing.T) {
     		t.Run(name, func(t *testing.T) {
     			t.Parallel()
     
    -			// Configure listener with raw values
     			l := &Listener{
    -				MaxRequestSizeRaw:        tc.rawMaxRequestSize,
    -				MaxRequestDurationRaw:    tc.rawMaxRequestDuration,
    -				RequireRequestHeaderRaw:  tc.rawRequireRequestHeader,
    -				DisableRequestLimiterRaw: tc.rawDisableRequestLimiter,
    +				MaxRequestSizeRaw:                 tc.rawMaxRequestSize,
    +				MaxRequestDurationRaw:             tc.rawMaxRequestDuration,
    +				RequireRequestHeaderRaw:           tc.rawRequireRequestHeader,
    +				DisableRequestLimiterRaw:          tc.rawDisableRequestLimiter,
    +				CustomMaxJSONDepthRaw:             tc.rawCustomMaxJSONDepth,
    +				CustomMaxJSONStringValueLengthRaw: tc.rawCustomMaxJSONStringValueLength,
    +				CustomMaxJSONObjectEntryCountRaw:  tc.rawCustomMaxJSONObjectEntryCount,
    +				CustomMaxJSONArrayElementCountRaw: tc.rawCustomMaxJSONArrayElementCount,
     			}
     
     			err := l.parseRequestSettings()
    @@ -293,15 +332,21 @@ func TestListener_parseRequestSettings(t *testing.T) {
     				require.Error(t, err)
     				require.ErrorContains(t, err, tc.errorMessage)
     			default:
    -				// Assert we got the relevant values.
     				require.NoError(t, err)
     				require.Equal(t, tc.expectedMaxRequestSize, l.MaxRequestSize)
    -				require.Equal(t, tc.expectedDuration, l.MaxRequestDuration)
    +				require.Equal(t, tc.expectedCustomMaxJSONDepth, l.CustomMaxJSONDepth)
    +				require.Equal(t, tc.expectedCustomMaxJSONStringValueLength, l.CustomMaxJSONStringValueLength)
    +				require.Equal(t, tc.expectedCustomMaxJSONObjectEntryCount, l.CustomMaxJSONObjectEntryCount)
    +				require.Equal(t, tc.expectedCustomMaxJSONArrayElementCount, l.CustomMaxJSONArrayElementCount)
     				require.Equal(t, tc.expectedRequireRequestHeader, l.RequireRequestHeader)
     				require.Equal(t, tc.expectedDisableRequestLimiter, l.DisableRequestLimiter)
    +				require.Equal(t, tc.expectedDuration, l.MaxRequestDuration)
     
    -				// Ensure the state was modified for the raw values.
     				require.Nil(t, l.MaxRequestSizeRaw)
    +				require.Nil(t, l.CustomMaxJSONDepthRaw)
    +				require.Nil(t, l.CustomMaxJSONStringValueLengthRaw)
    +				require.Nil(t, l.CustomMaxJSONObjectEntryCountRaw)
    +				require.Nil(t, l.CustomMaxJSONArrayElementCountRaw)
     				require.Nil(t, l.MaxRequestDurationRaw)
     				require.Nil(t, l.RequireRequestHeaderRaw)
     				require.Nil(t, l.DisableRequestLimiterRaw)
    
  • sdk/helper/jsonutil/json.go+146 2 modified
    @@ -4,6 +4,7 @@
     package jsonutil
     
     import (
    +	"bufio"
     	"bytes"
     	"compress/gzip"
     	"encoding/json"
    @@ -14,7 +15,7 @@ import (
     	"github.com/hashicorp/vault/sdk/helper/compressutil"
     )
     
    -// Encodes/Marshals the given object into JSON
    +// EncodeJSON encodes/marshals the given object into JSON
     func EncodeJSON(in interface{}) ([]byte, error) {
     	if in == nil {
     		return nil, fmt.Errorf("input for encoding is nil")
    @@ -84,7 +85,7 @@ func DecodeJSON(data []byte, out interface{}) error {
     	return DecodeJSONFromReader(bytes.NewReader(data), out)
     }
     
    -// Decodes/Unmarshals the given io.Reader pointing to a JSON, into a desired object
    +// DecodeJSONFromReader decodes/unmarshals the JSON read from the given io.Reader into the desired object
     func DecodeJSONFromReader(r io.Reader, out interface{}) error {
     	if r == nil {
     		return fmt.Errorf("'io.Reader' being decoded is nil")
    @@ -101,3 +102,146 @@ func DecodeJSONFromReader(r io.Reader, out interface{}) error {
     	// Since 'out' is an interface representing a pointer, pass it to the decoder without an '&'
     	return dec.Decode(out)
     }
    +
    +// containerState holds information about an open JSON container (object or array).
    +type containerState struct {
    +	Type  json.Delim // '{' or '['
    +	Count int        // Number of entries (for objects) or elements (for arrays)
    +}
    +
    +// JSONLimits defines the configurable limits for JSON validation.
    +type JSONLimits struct {
    +	MaxDepth             int
    +	MaxStringValueLength int
    +	MaxObjectEntryCount  int
    +	MaxArrayElementCount int
    +}
    +
    +// isWhitespace checks if a byte is a JSON whitespace character.
    +func isWhitespace(b byte) bool {
    +	return b == ' ' || b == '\t' || b == '\n' || b == '\r'
    +}
    +
    +// VerifyMaxDepthStreaming scans the JSON stream to determine its maximum nesting depth
    +// and enforce various limits. It first checks if the stream is likely JSON before proceeding.
    +func VerifyMaxDepthStreaming(jsonReader io.Reader, limits JSONLimits) (int, error) {
    +	// Use a buffered reader to peek at the stream without consuming it from the original reader.
    +	bufReader := bufio.NewReader(jsonReader)
    +
    +	// Find the first non-whitespace character.
    +	var firstByte byte
    +	var err error
    +	for {
    +		firstByte, err = bufReader.ReadByte()
    +		if err != nil {
    +			// If we hit EOF before finding a real character, it's an empty or whitespace-only payload.
    +			if err == io.EOF {
    +				return 0, nil
    +			}
    +			return 0, err // A different I/O error occurred.
    +		}
    +		if !isWhitespace(firstByte) {
    +			break // Found the first significant character.
    +		}
    +	}
    +
    +	// If the payload doesn't start with '{' or '[', assume it's not a JSON object or array
    +	// and that our limits do not apply.
    +	if firstByte != '{' && firstByte != '[' {
    +		return 0, nil
    +	}
    +
    +	fullStreamReader := io.MultiReader(bytes.NewReader([]byte{firstByte}), bufReader)
    +	decoder := json.NewDecoder(fullStreamReader)
    +	decoder.UseNumber()
    +
    +	var (
    +		maxDepth      = 0
    +		currentDepth  = 0
    +		isKeyExpected bool
    +	)
    +	containerInfoStack := make([]containerState, 0, limits.MaxDepth)
    +
    +	for {
    +		t, err := decoder.Token()
    +		if err == io.EOF {
    +			break
    +		}
    +		if err != nil {
    +			// Any error from the decoder is now considered a real error.
    +			return 0, fmt.Errorf("error reading JSON token: %w", err)
    +		}
    +
    +		switch v := t.(type) {
    +		case json.Delim:
    +			switch v {
    +			case '{', '[':
    +				currentDepth++
    +				// Check against the limit directly.
    +				if currentDepth > limits.MaxDepth {
    +					return 0, fmt.Errorf("JSON input exceeds allowed nesting depth")
    +				}
    +				if currentDepth > maxDepth {
    +					maxDepth = currentDepth
    +				}
    +
    +				containerInfoStack = append(containerInfoStack, containerState{Type: v, Count: 0})
    +				if v == '{' {
    +					isKeyExpected = true
    +				}
    +			case '}', ']':
    +				if len(containerInfoStack) == 0 {
    +					return 0, fmt.Errorf("malformed JSON: unmatched closing delimiter '%c'", v)
    +				}
    +				top := containerInfoStack[len(containerInfoStack)-1]
    +				containerInfoStack = containerInfoStack[:len(containerInfoStack)-1]
    +				currentDepth--
    +				if (v == '}' && top.Type != '{') || (v == ']' && top.Type != '[') {
    +					return 0, fmt.Errorf("malformed JSON: mismatched closing delimiter '%c' for opening '%c'", v, top.Type)
    +				}
    +				if len(containerInfoStack) > 0 && containerInfoStack[len(containerInfoStack)-1].Type == '{' {
    +					isKeyExpected = false
    +				}
    +			}
    +		case string:
    +			if len(v) > limits.MaxStringValueLength {
    +				return 0, fmt.Errorf("JSON string value exceeds allowed length")
    +			}
    +			if len(containerInfoStack) > 0 {
    +				top := &containerInfoStack[len(containerInfoStack)-1]
    +				if top.Type == '{' {
    +					if isKeyExpected {
    +						top.Count++
    +						if top.Count > limits.MaxObjectEntryCount {
    +							return 0, fmt.Errorf("JSON object exceeds allowed entry count")
    +						}
    +						isKeyExpected = false
    +					}
    +				} else if top.Type == '[' {
    +					top.Count++
    +					if top.Count > limits.MaxArrayElementCount {
    +						return 0, fmt.Errorf("JSON array exceeds allowed element count")
    +					}
    +				}
    +			}
    +		default: // Handles numbers, booleans, and nulls
    +			if len(containerInfoStack) > 0 {
    +				top := &containerInfoStack[len(containerInfoStack)-1]
    +				if top.Type == '[' {
    +					top.Count++
    +					if top.Count > limits.MaxArrayElementCount {
    +						return 0, fmt.Errorf("JSON array exceeds allowed element count")
    +					}
    +				} else if top.Type == '{' {
    +					isKeyExpected = true
    +				}
    +			}
    +		}
    +	}
    +
    +	if len(containerInfoStack) != 0 {
    +		return 0, fmt.Errorf("malformed JSON, unclosed containers")
    +	}
    +
    +	return maxDepth, nil
    +}
    
  • sdk/helper/jsonutil/json_test.go+169 1 modified
    @@ -12,6 +12,46 @@ import (
     	"testing"
     
     	"github.com/hashicorp/vault/sdk/helper/compressutil"
    +	"github.com/stretchr/testify/require"
    +)
    +
    +const (
    +	// CustomMaxJSONDepth specifies the maximum nesting depth of a JSON object.
    +	// This limit is designed to prevent stack exhaustion attacks from deeply
    +	// nested JSON payloads, which could otherwise lead to a denial-of-service
    +	// (DoS) vulnerability. The default value of 500 is intentionally generous
    +	// to support complex but legitimate configurations, while still providing
    +	// a safeguard against malicious or malformed input. This value is
    +	// configurable to accommodate unique environmental requirements.
    +	CustomMaxJSONDepth = 500
    +
    +	// CustomMaxJSONStringValueLength defines the maximum allowed length for a single
    +	// string value within a JSON payload, in bytes. This is a critical defense
    +	// against excessive memory allocation attacks where a client might send a
    +	// very large string value to exhaust server memory. The default of 1MB
    +	// (1024 * 1024 bytes) is chosen to comfortably accommodate large secrets
    +	// such as private keys, certificate chains, or detailed configuration data,
    +	// without permitting unbounded allocation. This value is configurable.
    +	CustomMaxJSONStringValueLength = 1024 * 1024 // 1MB
    +
    +	// CustomMaxJSONObjectEntryCount sets the maximum number of key-value pairs
    +	// allowed in a single JSON object. This limit helps mitigate the risk of
    +	// hash-collision denial-of-service (HashDoS) attacks and prevents general
    +	// resource exhaustion from parsing objects with an excessive number of
    +	// entries. A default of 10,000 entries is well beyond the scope of typical
    +	// Vault secrets or configurations, providing a high ceiling for normal
    +	// operations while ensuring stability. This value is configurable.
    +	CustomMaxJSONObjectEntryCount = 10000
    +
    +	// CustomMaxJSONArrayElementCount determines the maximum number of elements
    +	// permitted in a single JSON array. This is particularly relevant for API
    +	// endpoints that can return large lists, such as the result of a `LIST`
    +	// operation on a secrets engine path. The default limit of 10,000 elements
    +	// prevents a single request from causing excessive memory consumption. While
    +	// most environments will fall well below this limit, it is configurable for
    +	// systems that require handling larger datasets, though pagination is the
    +	// recommended practice for such cases.
    +	CustomMaxJSONArrayElementCount = 10000
     )
     
     func TestJSONUtil_CompressDecompressJSON(t *testing.T) {
    @@ -59,7 +99,7 @@ func TestJSONUtil_CompressDecompressJSON(t *testing.T) {
     		t.Fatalf("expected a failure")
     	}
     
    -	// Compress an object
    +	// Compress an object with BestSpeed
     	compressedBytes, err = EncodeJSONAndCompress(expected, &compressutil.CompressionConfig{
     		Type:                 compressutil.CompressionTypeGzip,
     		GzipCompressionLevel: gzip.BestSpeed,
    @@ -142,3 +182,131 @@ func TestJSONUtil_DecodeJSONFromReader(t *testing.T) {
     		t.Fatalf("bad: expected:%#v\nactual:%#v", expected, actual)
     	}
     }
    +
    +func TestJSONUtil_Limits(t *testing.T) {
    +	tests := []struct {
    +		name        string
    +		jsonInput   string
    +		expectError bool
    +		errorMsg    string
    +	}{
    +		// Depth Limits
    +		{
    +			name:        "JSON exceeding max depth",
    +			jsonInput:   generateComplexJSON(CustomMaxJSONDepth + 1),
    +			expectError: true,
    +			errorMsg:    "JSON input exceeds allowed nesting depth",
    +		},
    +		{
    +			name:        "JSON at max allowed depth",
    +			jsonInput:   generateComplexJSON(CustomMaxJSONDepth),
    +			expectError: false,
    +		},
    +		// Malformed JSON
    +		{
    +			name:        "Malformed - Unmatched opening brace",
    +			jsonInput:   `{"a": {`,
    +			expectError: true,
    +			errorMsg:    "malformed JSON, unclosed containers",
    +		},
    +		{
    +			name:        "Malformed - Unmatched closing brace",
    +			jsonInput:   `{}}`,
    +			expectError: true,
    +			errorMsg:    "error reading JSON token: invalid character '}' looking for beginning of value",
    +		},
    +		// String Length Limits
    +		{
    +			name:        "String value exceeding max length",
    +			jsonInput:   fmt.Sprintf(`{"key": "%s"}`, strings.Repeat("a", CustomMaxJSONStringValueLength+1)),
    +			expectError: true,
    +			errorMsg:    "JSON string value exceeds allowed length",
    +		},
    +		{
    +			name:        "String at max length",
    +			jsonInput:   fmt.Sprintf(`{"key": "%s"}`, strings.Repeat("a", CustomMaxJSONStringValueLength)),
    +			expectError: false,
    +		},
    +		// Object Entry Count Limits
    +		{
    +			name:        "Object exceeding max entry count",
    +			jsonInput:   fmt.Sprintf(`{%s}`, generateObjectEntries(CustomMaxJSONObjectEntryCount+1)),
    +			expectError: true,
    +			errorMsg:    "JSON object exceeds allowed entry count",
    +		},
    +		{
    +			name:        "Object at max entry count",
    +			jsonInput:   fmt.Sprintf(`{%s}`, generateObjectEntries(CustomMaxJSONObjectEntryCount)),
    +			expectError: false,
    +		},
    +		// Array Element Count Limits
    +		{
    +			name:        "Array exceeding max element count",
    +			jsonInput:   fmt.Sprintf(`[%s]`, generateArrayElements(CustomMaxJSONArrayElementCount+1)),
    +			expectError: true,
    +			errorMsg:    "JSON array exceeds allowed element count",
    +		},
    +		{
    +			name:        "Array at max element count",
    +			jsonInput:   fmt.Sprintf(`[%s]`, generateArrayElements(CustomMaxJSONArrayElementCount)),
    +			expectError: false,
    +		},
    +	}
    +
    +	for _, tt := range tests {
    +		t.Run(tt.name, func(t *testing.T) {
    +			limits := JSONLimits{
    +				MaxDepth:             CustomMaxJSONDepth,
    +				MaxStringValueLength: CustomMaxJSONStringValueLength,
    +				MaxObjectEntryCount:  CustomMaxJSONObjectEntryCount,
    +				MaxArrayElementCount: CustomMaxJSONArrayElementCount,
    +			}
    +
    +			_, err := VerifyMaxDepthStreaming(bytes.NewReader([]byte(tt.jsonInput)), limits)
    +
    +			if tt.expectError {
    +				require.Error(t, err, "expected an error but got nil")
    +				require.Contains(t, err.Error(), tt.errorMsg, "error message mismatch")
    +			} else {
    +				require.NoError(t, err, "did not expect an error but got one")
    +			}
    +		})
    +	}
    +}
    +
    +// generateComplexJSON generates a valid JSON string with a specified nesting depth.
    +func generateComplexJSON(depth int) string {
    +	if depth <= 0 {
    +		return "{}"
    +	}
    +	// Build the nested structure from the inside out.
    +	json := "1"
    +	for i := 0; i < depth; i++ {
    +		json = fmt.Sprintf(`{"a":%s}`, json)
    +	}
    +	return json
    +}
    +
    +// generateObjectEntries creates a string of object entries for testing.
    +func generateObjectEntries(count int) string {
    +	var sb strings.Builder
    +	for i := 0; i < count; i++ {
    +		sb.WriteString(fmt.Sprintf(`"key%d":%d`, i, i))
    +		if i < count-1 {
    +			sb.WriteString(",")
    +		}
    +	}
    +	return sb.String()
    +}
    +
    +// generateArrayElements creates a string of array elements for testing.
    +func generateArrayElements(count int) string {
    +	var sb strings.Builder
    +	for i := 0; i < count; i++ {
    +		sb.WriteString(fmt.Sprintf("%d", i))
    +		if i < count-1 {
    +			sb.WriteString(",")
    +		}
    +	}
    +	return sb.String()
    +}
    

Vulnerability mechanics

Generated by null/stub on May 9, 2026. Inputs: CWE entries + fix-commit diffs from this CVE's patches. Citations validated against bundle.

References

5

News mentions

0

No linked articles in our index yet.