VYPR
High severity · 7.5 · NVD Advisory · Published Mar 31, 2026 · Updated Apr 2, 2026

CVE-2026-34573

CVE-2026-34573

Description

Parse Server is an open source backend that can be deployed to any infrastructure that can run Node.js. Prior to versions 8.6.68 and 9.7.0-alpha.12, the GraphQL query complexity validator can be exploited to cause a denial-of-service by sending a crafted query with binary fan-out fragment spreads. A single unauthenticated request can block the Node.js event loop for seconds, denying service to all concurrent users. This only affects deployments that have enabled the requestComplexity.graphQLDepth or requestComplexity.graphQLFields configuration options. This issue has been patched in versions 8.6.68 and 9.7.0-alpha.12.

Affected packages

Versions sourced from the GitHub Security Advisory.

Package | Affected versions | Patched versions
parse-server (npm)
>= 9.0.0, < 9.7.0-alpha.12 | 9.7.0-alpha.12
parse-server (npm)
< 8.6.68 | 8.6.68

Affected products

12
  • cpe:2.3:a:parseplatform:parse-server:9.7.0:alpha10:*:*:*:node.js:*:* (+ 11 more)
    • cpe:2.3:a:parseplatform:parse-server:9.7.0:alpha10:*:*:*:node.js:*:*
    • cpe:2.3:a:parseplatform:parse-server:9.7.0:alpha11:*:*:*:node.js:*:*
    • cpe:2.3:a:parseplatform:parse-server:9.7.0:alpha1:*:*:*:node.js:*:*
    • cpe:2.3:a:parseplatform:parse-server:9.7.0:alpha2:*:*:*:node.js:*:*
    • cpe:2.3:a:parseplatform:parse-server:9.7.0:alpha3:*:*:*:node.js:*:*
    • cpe:2.3:a:parseplatform:parse-server:9.7.0:alpha4:*:*:*:node.js:*:*
    • cpe:2.3:a:parseplatform:parse-server:9.7.0:alpha5:*:*:*:node.js:*:*
    • cpe:2.3:a:parseplatform:parse-server:9.7.0:alpha6:*:*:*:node.js:*:*
    • cpe:2.3:a:parseplatform:parse-server:9.7.0:alpha7:*:*:*:node.js:*:*
    • cpe:2.3:a:parseplatform:parse-server:9.7.0:alpha8:*:*:*:node.js:*:*
    • cpe:2.3:a:parseplatform:parse-server:9.7.0:alpha9:*:*:*:node.js:*:*
    • cpe:2.3:a:parseplatform:parse-server:*:*:*:*:*:node.js:*:* (range: < 8.6.68)

Patches

2
ea15412795f3

fix: GraphQL complexity validator exponential fragment traversal DoS ([GHSA-mfj6-6p54-m98c](https://github.com/parse-community/parse-server/security/advisories/GHSA-mfj6-6p54-m98c)) (#10345)

2 files changed · +64 −5
  • spec/GraphQLQueryComplexity.spec.js (+28 −0, modified)
    @@ -178,4 +178,32 @@ describe('graphql query complexity', () => {
           expect(result.errors).toBeUndefined();
         });
       });
    +
    +  describe('fragment fan-out', () => {
    +    it('should reject query with exponential fragment fan-out efficiently', async () => {
    +      await setupGraphQL({
    +        requestComplexity: { graphQLFields: 100 },
    +      });
    +      // Binary fan-out: each fragment spreads the next one twice.
    +      // Without fix: 2^(levels-1) field visits = 2^25 ≈ 33M (hangs event loop).
    +      // With fix (memoization): O(levels) traversal, same field count, instant rejection.
    +      const levels = 26;
    +      let query = 'query Q { ...F0 }\n';
    +      for (let i = 0; i < levels; i++) {
    +        if (i === levels - 1) {
    +          query += `fragment F${i} on Query { __typename }\n`;
    +        } else {
    +          query += `fragment F${i} on Query { ...F${i + 1} ...F${i + 1} }\n`;
    +        }
    +      }
    +      const start = Date.now();
    +      const result = await graphqlRequest(query);
    +      const elapsed = Date.now() - start;
    +      // Must complete in under 5 seconds (without fix it would take seconds or hang)
    +      expect(elapsed).toBeLessThan(5000);
    +      // Field count is 2^(levels-1) = 16777216, which exceeds the limit of 100
    +      expect(result.errors).toBeDefined();
    +      expect(result.errors[0].message).toMatch(/Number of GraphQL fields .* exceeds maximum allowed/);
    +    });
    +  });
     });
    
  • src/GraphQL/helpers/queryComplexity.js (+36 −5, modified)
    @@ -1,14 +1,22 @@
     import { GraphQLError } from 'graphql';
     import logger from '../../logger';
     
    -function calculateQueryComplexity(operation, fragments) {
    +function calculateQueryComplexity(operation, fragments, limits = {}) {
       let maxDepth = 0;
       let totalFields = 0;
    +  const fragmentCache = new Map();
    +  const { maxDepth: allowedMaxDepth, maxFields: allowedMaxFields } = limits;
     
       function visitSelectionSet(selectionSet, depth, visitedFragments) {
         if (!selectionSet) {
           return;
         }
    +    if (
    +      (allowedMaxFields !== undefined && allowedMaxFields !== -1 && totalFields > allowedMaxFields) ||
    +      (allowedMaxDepth !== undefined && allowedMaxDepth !== -1 && maxDepth > allowedMaxDepth)
    +    ) {
    +      return;
    +    }
         for (const selection of selectionSet.selections) {
           if (selection.kind === 'Field') {
             totalFields++;
    @@ -23,14 +31,36 @@ function calculateQueryComplexity(operation, fragments) {
             visitSelectionSet(selection.selectionSet, depth, visitedFragments);
           } else if (selection.kind === 'FragmentSpread') {
             const name = selection.name.value;
    +        if (fragmentCache.has(name)) {
    +          const cached = fragmentCache.get(name);
    +          totalFields += cached.fields;
    +          const adjustedDepth = depth + cached.maxDepthDelta;
    +          if (adjustedDepth > maxDepth) {
    +            maxDepth = adjustedDepth;
    +          }
    +          continue;
    +        }
             if (visitedFragments.has(name)) {
               continue;
             }
             const fragment = fragments[name];
             if (fragment) {
    -          const branchVisited = new Set(visitedFragments);
    -          branchVisited.add(name);
    -          visitSelectionSet(fragment.selectionSet, depth, branchVisited);
    +          if (
    +            (allowedMaxFields !== undefined && allowedMaxFields !== -1 && totalFields > allowedMaxFields) ||
    +            (allowedMaxDepth !== undefined && allowedMaxDepth !== -1 && maxDepth > allowedMaxDepth)
    +          ) {
    +            continue;
    +          }
    +          visitedFragments.add(name);
    +          const savedFields = totalFields;
    +          const savedMaxDepth = maxDepth;
    +          maxDepth = depth;
    +          visitSelectionSet(fragment.selectionSet, depth, visitedFragments);
    +          const fieldsContribution = totalFields - savedFields;
    +          const maxDepthDelta = maxDepth - depth;
    +          fragmentCache.set(name, { fields: fieldsContribution, maxDepthDelta });
    +          maxDepth = Math.max(savedMaxDepth, maxDepth);
    +          visitedFragments.delete(name);
             }
           }
         }
    @@ -69,7 +99,8 @@ function createComplexityValidationPlugin(getConfig) {
     
             const { depth, fields } = calculateQueryComplexity(
               requestContext.operation,
    -          fragments
    +          fragments,
    +          { maxDepth: graphQLDepth, maxFields: graphQLFields }
             );
     
             if (graphQLDepth !== -1 && depth > graphQLDepth) {
    
f759bda07529

fix: GraphQL complexity validator exponential fragment traversal DoS ([GHSA-mfj6-6p54-m98c](https://github.com/parse-community/parse-server/security/advisories/GHSA-mfj6-6p54-m98c)) (#10344)

2 files changed · +64 −5
  • spec/GraphQLQueryComplexity.spec.js (+28 −0, modified)
    @@ -179,6 +179,34 @@ describe('graphql query complexity', () => {
         });
       });
     
    +  describe('fragment fan-out', () => {
    +    it('should reject query with exponential fragment fan-out efficiently', async () => {
    +      await setupGraphQL({
    +        requestComplexity: { graphQLFields: 100 },
    +      });
    +      // Binary fan-out: each fragment spreads the next one twice.
    +      // Without fix: 2^(levels-1) field visits = 2^25 ≈ 33M (hangs event loop).
    +      // With fix (memoization): O(levels) traversal, same field count, instant rejection.
    +      const levels = 26;
    +      let query = 'query Q { ...F0 }\n';
    +      for (let i = 0; i < levels; i++) {
    +        if (i === levels - 1) {
    +          query += `fragment F${i} on Query { __typename }\n`;
    +        } else {
    +          query += `fragment F${i} on Query { ...F${i + 1} ...F${i + 1} }\n`;
    +        }
    +      }
    +      const start = Date.now();
    +      const result = await graphqlRequest(query);
    +      const elapsed = Date.now() - start;
    +      // Must complete in under 5 seconds (without fix it would take seconds or hang)
    +      expect(elapsed).toBeLessThan(5000);
    +      // Field count is 2^(levels-1) = 16777216, which exceeds the limit of 100
    +      expect(result.errors).toBeDefined();
    +      expect(result.errors[0].message).toMatch(/Number of GraphQL fields .* exceeds maximum allowed/);
    +    });
    +  });
    +
       describe('where argument breadth', () => {
         it('should enforce depth and field limits regardless of where argument breadth', async () => {
           await setupGraphQL({
    
  • src/GraphQL/helpers/queryComplexity.js (+36 −5, modified)
    @@ -1,14 +1,22 @@
     import { GraphQLError } from 'graphql';
     import logger from '../../logger';
     
    -function calculateQueryComplexity(operation, fragments) {
    +function calculateQueryComplexity(operation, fragments, limits = {}) {
       let maxDepth = 0;
       let totalFields = 0;
    +  const fragmentCache = new Map();
    +  const { maxDepth: allowedMaxDepth, maxFields: allowedMaxFields } = limits;
     
       function visitSelectionSet(selectionSet, depth, visitedFragments) {
         if (!selectionSet) {
           return;
         }
    +    if (
    +      (allowedMaxFields !== undefined && allowedMaxFields !== -1 && totalFields > allowedMaxFields) ||
    +      (allowedMaxDepth !== undefined && allowedMaxDepth !== -1 && maxDepth > allowedMaxDepth)
    +    ) {
    +      return;
    +    }
         for (const selection of selectionSet.selections) {
           if (selection.kind === 'Field') {
             totalFields++;
    @@ -23,14 +31,36 @@ function calculateQueryComplexity(operation, fragments) {
             visitSelectionSet(selection.selectionSet, depth, visitedFragments);
           } else if (selection.kind === 'FragmentSpread') {
             const name = selection.name.value;
    +        if (fragmentCache.has(name)) {
    +          const cached = fragmentCache.get(name);
    +          totalFields += cached.fields;
    +          const adjustedDepth = depth + cached.maxDepthDelta;
    +          if (adjustedDepth > maxDepth) {
    +            maxDepth = adjustedDepth;
    +          }
    +          continue;
    +        }
             if (visitedFragments.has(name)) {
               continue;
             }
             const fragment = fragments[name];
             if (fragment) {
    -          const branchVisited = new Set(visitedFragments);
    -          branchVisited.add(name);
    -          visitSelectionSet(fragment.selectionSet, depth, branchVisited);
    +          if (
    +            (allowedMaxFields !== undefined && allowedMaxFields !== -1 && totalFields > allowedMaxFields) ||
    +            (allowedMaxDepth !== undefined && allowedMaxDepth !== -1 && maxDepth > allowedMaxDepth)
    +          ) {
    +            continue;
    +          }
    +          visitedFragments.add(name);
    +          const savedFields = totalFields;
    +          const savedMaxDepth = maxDepth;
    +          maxDepth = depth;
    +          visitSelectionSet(fragment.selectionSet, depth, visitedFragments);
    +          const fieldsContribution = totalFields - savedFields;
    +          const maxDepthDelta = maxDepth - depth;
    +          fragmentCache.set(name, { fields: fieldsContribution, maxDepthDelta });
    +          maxDepth = Math.max(savedMaxDepth, maxDepth);
    +          visitedFragments.delete(name);
             }
           }
         }
    @@ -69,7 +99,8 @@ function createComplexityValidationPlugin(getConfig) {
     
             const { depth, fields } = calculateQueryComplexity(
               requestContext.operation,
    -          fragments
    +          fragments,
    +          { maxDepth: graphQLDepth, maxFields: graphQLFields }
             );
     
             if (graphQLDepth !== -1 && depth > graphQLDepth) {
    

Vulnerability mechanics

Generated automatically on May 9, 2026. Inputs: CWE entries + fix-commit diffs from this CVE's patches. Citations validated against bundle.

References

7

News mentions

0

No linked articles in our index yet.