VYPR
Moderate severity · NVD Advisory · Published Mar 21, 2024 · Updated Feb 13, 2025

node-tar vulnerable to denial of service while parsing a tar file due to lack of folders count validation

CVE-2024-28863

Description

node-tar is a Tar for Node.js. node-tar prior to version 6.2.1 has no limit on the number of sub-folders created in the folder creation process. An attacker who generates a large number of sub-folders can consume memory on the system running node-tar and even crash the Node.js client within a few seconds of running it, using a path with too many sub-folders inside. Version 6.2.1 fixes this issue by preventing extraction in excessively deep sub-folders.

Affected packages

Versions sourced from the GitHub Security Advisory.

| Package  | Ecosystem | Affected versions | Patched versions |
| -------- | --------- | ----------------- | ---------------- |
| node-tar | npm       | < 6.2.1           | 6.2.1            |
| tar      | npm       | < 6.2.1           | 6.2.1            |

Affected products

1

Patches

1
fe8cd57da568

prevent extraction in excessively deep subfolders

https://github.com/isaacs/node-tar · isaacs · Mar 16, 2024 · via GHSA
5 files changed · +94 −6
  • lib/unpack.js · +22 −5 · modified
    @@ -48,6 +48,7 @@ const crypto = require('crypto')
     const getFlag = require('./get-write-flag.js')
     const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
     const isWindows = platform === 'win32'
    +const DEFAULT_MAX_DEPTH = 1024
     
     // Unlinks on Windows are not atomic.
     //
    @@ -181,6 +182,12 @@ class Unpack extends Parser {
         this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
           process.getgid() : null
     
    +    // prevent excessively deep nesting of subfolders
    +    // set to `Infinity` to remove this restriction
    +    this.maxDepth = typeof opt.maxDepth === 'number'
    +      ? opt.maxDepth
    +      : DEFAULT_MAX_DEPTH
    +
         // mostly just for testing, but useful in some cases.
         // Forcibly trigger a chown on every entry, no matter what
         this.forceChown = opt.forceChown === true
    @@ -238,13 +245,13 @@ class Unpack extends Parser {
       }
     
       [CHECKPATH] (entry) {
    +    const p = normPath(entry.path)
    +    const parts = p.split('/')
    +
         if (this.strip) {
    -      const parts = normPath(entry.path).split('/')
           if (parts.length < this.strip) {
             return false
           }
    -      entry.path = parts.slice(this.strip).join('/')
    -
           if (entry.type === 'Link') {
             const linkparts = normPath(entry.linkpath).split('/')
             if (linkparts.length >= this.strip) {
    @@ -253,11 +260,21 @@ class Unpack extends Parser {
               return false
             }
           }
    +      parts.splice(0, this.strip)
    +      entry.path = parts.join('/')
    +    }
    +
    +    if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
    +      this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
    +        entry,
    +        path: p,
    +        depth: parts.length,
    +        maxDepth: this.maxDepth,
    +      })
    +      return false
         }
     
         if (!this.preservePaths) {
    -      const p = normPath(entry.path)
    -      const parts = p.split('/')
           if (parts.includes('..') || isWindows && /^[a-z]:\.\.$/i.test(parts[0])) {
             this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
               entry,
    
  • README.md · +10 −0 · modified
    @@ -115,6 +115,8 @@ Handlers receive 3 arguments:
       encountered an error which prevented it from being unpacked.  This occurs
       when:
       - an unrecoverable fs error happens during unpacking,
    +  - an entry is trying to extract into an excessively deep
    +    location (by default, limited to 1024 subfolders),
       - an entry has `..` in the path and `preservePaths` is not set, or
       - an entry is extracting through a symbolic link, when `preservePaths` is
         not set.
    @@ -427,6 +429,10 @@ The following options are supported:
       `process.umask()` to determine the default umask value, since tar will
       extract with whatever mode is provided, and let the process `umask` apply
       normally.
    +- `maxDepth` The maximum depth of subfolders to extract into. This
    +  defaults to 1024. Anything deeper than the limit will raise a
    +  warning and skip the entry. Set to `Infinity` to remove the
    +  limitation.
     
     The following options are mostly internal, but can be modified in some
     advanced use cases, such as re-using caches between runs.
    @@ -749,6 +755,10 @@ Most unpack errors will cause a `warn` event to be emitted.  If the
       `process.umask()` to determine the default umask value, since tar will
       extract with whatever mode is provided, and let the process `umask` apply
       normally.
    +- `maxDepth` The maximum depth of subfolders to extract into. This
    +  defaults to 1024. Anything deeper than the limit will raise a
    +  warning and skip the entry. Set to `Infinity` to remove the
    +  limitation.
     
     ### class tar.Unpack.Sync
     
    
  • test/fixtures/excessively-deep.tar · +0 −0 · added
  • test/parse.js · +1 −1 · modified
    @@ -646,7 +646,7 @@ t.test('truncated gzip input', t => {
         p.write(tgz.slice(split))
         p.end()
         t.equal(aborted, true, 'aborted writing')
    -    t.same(warnings, ['zlib: incorrect data check'])
    +    t.match(warnings, [/^zlib: /])
         t.end()
       })
     
    
  • test/unpack.js · +61 −0 · modified
    @@ -22,6 +22,7 @@ const mkdirp = require('mkdirp')
     const mutateFS = require('mutate-fs')
     const eos = require('end-of-stream')
     const normPath = require('../lib/normalize-windows-path.js')
    +const ReadEntry = require('../lib/read-entry.js')
     
     // On Windows in particular, the "really deep folder path" file
     // often tends to cause problems, which don't indicate a failure
    @@ -3235,3 +3236,63 @@ t.test('recognize C:.. as a dot path part', t => {
     
       t.end()
     })
    +
    +t.test('excessively deep subfolder nesting', async t => {
    +  const tf = path.resolve(fixtures, 'excessively-deep.tar')
    +  const data = fs.readFileSync(tf)
    +  const warnings = []
    +  const onwarn = (c, w, { entry, path, depth, maxDepth }) =>
    +    warnings.push([c, w, { entry, path, depth, maxDepth }])
    +
    +  const check = (t, maxDepth = 1024) => {
    +    t.match(warnings, [
    +      ['TAR_ENTRY_ERROR',
    +        'path excessively deep',
    +        {
    +          entry: ReadEntry,
    +          path: /^\.(\/a){1024,}\/foo.txt$/,
    +          depth: 222372,
    +          maxDepth,
    +        }
    +      ]
    +    ])
    +    warnings.length = 0
    +    t.end()
    +  }
    +
    +  t.test('async', t => {
    +    const cwd = t.testdir()
    +    new Unpack({
    +      cwd,
    +      onwarn
    +    }).on('end', () => check(t)).end(data)
    +  })
    +
    +  t.test('sync', t => {
    +    const cwd = t.testdir()
    +    new UnpackSync({
    +      cwd,
    +      onwarn
    +    }).end(data)
    +    check(t)
    +  })
    +
    +  t.test('async set md', t => {
    +    const cwd = t.testdir()
    +    new Unpack({
    +      cwd,
    +      onwarn,
    +      maxDepth: 64,
    +    }).on('end', () => check(t, 64)).end(data)
    +  })
    +
    +  t.test('sync set md', t => {
    +    const cwd = t.testdir()
    +    new UnpackSync({
    +      cwd,
    +      onwarn,
    +      maxDepth: 64,
    +    }).end(data)
    +    check(t, 64)
    +  })
    +})
    

Vulnerability mechanics

Generated by null/stub on May 9, 2026. Inputs: CWE entries + fix-commit diffs from this CVE's patches. Citations validated against bundle.

References

6

News mentions

0

No linked articles in our index yet.