VYPR
High severity · NVD Advisory · Published Mar 6, 2026 · Updated Mar 9, 2026

changedetection.io: Zip Slip vulnerability in the backup restore functionality

CVE-2026-29065

Description

changedetection.io is a free open source web page change detection tool. Prior to version 0.54.4, a Zip Slip vulnerability in the backup restore functionality allows arbitrary file overwrite via path traversal in uploaded ZIP archives. This issue has been patched in version 0.54.4.

Affected packages

Versions sourced from the GitHub Security Advisory.

Package | Affected versions | Patched versions
changedetection.io (PyPI) | < 0.54.4 | 0.54.4

Affected products

1

Patches

1
1d7d812eb0fa

CVE-2026-29065 - fix(backups): patch zip slip advisory, zip bomb, upload size limit, UUID validation, secret.txt leakage, and download edge cases

4 files changed · +121 −17
  • changedetectionio/blueprint/backups/__init__.py · +8 −10 · modified
    @@ -40,11 +40,6 @@ def create_backup(datastore_path, watches: dict, tags: dict = None):
                 zipObj.write(url_watches_json, arcname="url-watches.json")
                 logger.debug("Added url-watches.json to backup")
     
    -        # Add the flask app secret (if it exists)
    -        secret_file = os.path.join(datastore_path, "secret.txt")
    -        if os.path.isfile(secret_file):
    -            zipObj.write(secret_file, arcname="secret.txt")
    -
             # Add tag data directories (each tag has its own {uuid}/tag.json)
             for uuid, tag in (tags or {}).items():
                 for f in Path(tag.data_dir).glob('*'):
    @@ -151,19 +146,22 @@ def find_backups():
         def download_backup(filename):
             import re
             filename = filename.strip()
    -        backup_filename_regex = BACKUP_FILENAME_FORMAT.format("\d+")
    -
    -        full_path = os.path.join(os.path.abspath(datastore.datastore_path), filename)
    -        if not full_path.startswith(os.path.abspath(datastore.datastore_path)):
    -            abort(404)
    +        backup_filename_regex = BACKUP_FILENAME_FORMAT.format(r"\d+")
     
    +        # Resolve 'latest' before any validation so checks run against the real filename.
             if filename == 'latest':
                 backups = find_backups()
    +            if not backups:
    +                abort(404)
                 filename = backups[0]['filename']
     
             if not re.match(r"^" + backup_filename_regex + "$", filename):
                 abort(400)  # Bad Request if the filename doesn't match the pattern
     
    +        full_path = os.path.join(os.path.abspath(datastore.datastore_path), filename)
    +        if not full_path.startswith(os.path.abspath(datastore.datastore_path) + os.sep):
    +            abort(404)
    +
             logger.debug(f"Backup download request for '{full_path}'")
             return send_from_directory(os.path.abspath(datastore.datastore_path), filename, as_attachment=True)
     
    
  • changedetectionio/blueprint/backups/restore.py · +45 −5 · modified
    @@ -1,6 +1,7 @@
     import io
     import json
     import os
    +import re
     import shutil
     import tempfile
     import threading
    @@ -14,6 +15,16 @@
     
     from changedetectionio.flask_app import login_optionally_required
     
    +# Maximum size of the uploaded zip file. Override via env var MAX_RESTORE_UPLOAD_MB.
    +_MAX_UPLOAD_BYTES = int(os.getenv("MAX_RESTORE_UPLOAD_MB", 256)) * 1024 * 1024
    +# Maximum total uncompressed size of all entries (zip-bomb guard). Override via MAX_RESTORE_DECOMPRESSED_MB.
    +_MAX_DECOMPRESSED_BYTES = int(os.getenv("MAX_RESTORE_DECOMPRESSED_MB", 1024)) * 1024 * 1024
    +# Only top-level directories whose name is a valid UUID are treated as watch/tag entries.
    +_UUID_RE = re.compile(
    +    r'^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$',
    +    re.IGNORECASE,
    +)
    +
     
     class RestoreForm(Form):
         zip_file = FileField(_l('Backup zip file'), validators=[
    @@ -50,14 +61,28 @@ def import_from_zip(zip_stream, datastore, include_groups, include_groups_replac
         with tempfile.TemporaryDirectory() as tmpdir:
             logger.debug(f"Restore: extracting zip to {tmpdir}")
             with zipfile.ZipFile(zip_stream, 'r') as zf:
    -            zf.extractall(tmpdir)
    +            total_uncompressed = sum(m.file_size for m in zf.infolist())
    +            if total_uncompressed > _MAX_DECOMPRESSED_BYTES:
    +                raise ValueError(
    +                    f"Backup archive decompressed size ({total_uncompressed // (1024 * 1024)} MB) "
    +                    f"exceeds the {_MAX_DECOMPRESSED_BYTES // (1024 * 1024)} MB limit"
    +                )
    +            resolved_dest = os.path.realpath(tmpdir)
    +            for member in zf.infolist():
    +                member_dest = os.path.realpath(os.path.join(resolved_dest, member.filename))
    +                if not member_dest.startswith(resolved_dest + os.sep) and member_dest != resolved_dest:
    +                    raise ValueError(f"Zip Slip path traversal detected in backup archive: {member.filename!r}")
    +                zf.extract(member, tmpdir)
             logger.debug("Restore: zip extracted, scanning UUID directories")
     
             for entry in os.scandir(tmpdir):
                 if not entry.is_dir():
                     continue
     
                 uuid = entry.name
    +            if not _UUID_RE.match(uuid):
    +                logger.warning(f"Restore: skipping non-UUID directory {uuid!r}")
    +                continue
                 tag_json_path = os.path.join(entry.path, 'tag.json')
                 watch_json_path = os.path.join(entry.path, 'watch.json')
     
    @@ -155,7 +180,9 @@ def restore():
             form = RestoreForm()
             return render_template("backup_restore.html",
                                    form=form,
    -                               restore_running=any(t.is_alive() for t in restore_threads))
    +                               restore_running=any(t.is_alive() for t in restore_threads),
    +                               max_upload_mb=_MAX_UPLOAD_BYTES // (1024 * 1024),
    +                               max_decompressed_mb=_MAX_DECOMPRESSED_BYTES // (1024 * 1024))
     
         @login_optionally_required
         @restore_blueprint.route("/restore/start", methods=['POST'])
    @@ -173,10 +200,22 @@ def backups_restore_start():
                 flash(gettext("File must be a .zip backup file"), "error")
                 return redirect(url_for('backups.restore.restore'))
     
    -        # Read into memory now — the request stream is gone once we return
    +        # Reject oversized uploads before reading the stream into memory.
    +        content_length = request.content_length
    +        if content_length and content_length > _MAX_UPLOAD_BYTES:
    +            flash(gettext("Backup file is too large (max %(mb)s MB)", mb=_MAX_UPLOAD_BYTES // (1024 * 1024)), "error")
    +            return redirect(url_for('backups.restore.restore'))
    +
    +        # Read into memory now — the request stream is gone once we return.
    +        # Read one byte beyond the limit so we can detect truncated-but-still-oversized streams.
             try:
    -            zip_bytes = io.BytesIO(zip_file.read())
    -            zipfile.ZipFile(zip_bytes)  # quick validity check before spawning
    +            raw = zip_file.read(_MAX_UPLOAD_BYTES + 1)
    +            if len(raw) > _MAX_UPLOAD_BYTES:
    +                flash(gettext("Backup file is too large (max %(mb)s MB)", mb=_MAX_UPLOAD_BYTES // (1024 * 1024)), "error")
    +                return redirect(url_for('backups.restore.restore'))
    +            zip_bytes = io.BytesIO(raw)
    +            with zipfile.ZipFile(zip_bytes):  # quick validity check before spawning
    +                pass
                 zip_bytes.seek(0)
             except zipfile.BadZipFile:
                 flash(gettext("Invalid or corrupted zip file"), "error")
    @@ -201,6 +240,7 @@ def backups_restore_start():
                 name="BackupRestore"
             )
             restore_thread.start()
    +        restore_threads[:] = [t for t in restore_threads if t.is_alive()]
             restore_threads.append(restore_thread)
             flash(gettext("Restore started in background, check back in a few minutes."))
             return redirect(url_for('backups.restore.restore'))
    
  • changedetectionio/blueprint/backups/templates/backup_restore.html · +4 −0 · modified
    @@ -19,6 +19,10 @@
     
                     <p>{{ _('Restore a backup. Must be a .zip backup file created on/after v0.53.1 (new database layout).') }}</p>
                     <p>{{ _('Note: This does not override the main application settings, only watches and groups.') }}</p>
    +                <p class="pure-form-message">
    +                    {{ _('Max upload size: %(upload)s MB &nbsp;·&nbsp; Max decompressed size: %(decomp)s MB',
    +                         upload=max_upload_mb, decomp=max_decompressed_mb) }}
    +                </p>
     
                     <form class="pure-form pure-form-stacked settings"
                           action="{{ url_for('backups.restore.backups_restore_start') }}"
    
  • changedetectionio/tests/test_backup.py · +64 −2 · modified
    @@ -3,7 +3,7 @@
     from .util import set_original_response, live_server_setup, wait_for_all_checks
     from flask import url_for
     import io
    -from zipfile import ZipFile
    +from zipfile import ZipFile, ZIP_DEFLATED
     import re
     import time
     from changedetectionio.model import Watch, Tag
    @@ -68,6 +68,9 @@ def test_backup(client, live_server, measure_memory_usage, datastore_path):
         # Check for changedetection.json (settings file)
         assert 'changedetection.json' in l, "changedetection.json should be in backup"
     
    +    # secret.txt must never be included — it contains the Flask session key
    +    assert 'secret.txt' not in l, "secret.txt (Flask session key) must not be included in backup"
    +
         # Get the latest one
         res = client.get(
             url_for("backups.remove_backups"),
    @@ -196,4 +199,63 @@ def test_backup_restore(client, live_server, measure_memory_usage, datastore_pat
         assert restored_tag2 is not None, f"Tag {tag_uuid2} not found after restore"
         assert restored_tag2['title'] == "Tasty backup tag number two", "Restored tag 2 title does not match"
         assert isinstance(restored_tag2, Tag.model), \
    -        f"Tag 2 not properly rehydrated, got {type(restored_tag2)}"
    \ No newline at end of file
    +        f"Tag 2 not properly rehydrated, got {type(restored_tag2)}"
    +
    +
    +def test_backup_restore_zip_slip_rejected(client, live_server, measure_memory_usage, datastore_path):
    +    """Zip Slip path traversal entries in a restore zip must be rejected."""
    +    import pytest
    +    from changedetectionio.blueprint.backups.restore import import_from_zip
    +
    +    # Build a zip with a path traversal entry that would escape the extraction dir
    +    malicious_zip = io.BytesIO()
    +    with ZipFile(malicious_zip, 'w') as zf:
    +        zf.writestr("../escaped.txt", "ATTACKER-CONTROLLED")
    +    malicious_zip.seek(0)
    +
    +    datastore = live_server.app.config['DATASTORE']
    +
    +    with pytest.raises(ValueError, match="Zip Slip"):
    +        import_from_zip(
    +            zip_stream=malicious_zip,
    +            datastore=datastore,
    +            include_groups=True,
    +            include_groups_replace=True,
    +            include_watches=True,
    +            include_watches_replace=True,
    +        )
    +
    +
    +def test_backup_restore_zip_bomb_rejected(client, live_server, measure_memory_usage, datastore_path):
    +    """A zip whose total uncompressed size exceeds the limit must be rejected.
    +
    +    The guard reads file_size from the zip central-directory metadata — no
    +    actual decompression happens, so this test is fast and uses minimal RAM.
    +    100 KB of zeros compresses to ~100 bytes; monkeypatching the limit to
    +    50 KB is enough to trigger the check without creating any large files.
    +    """
    +    import pytest
    +    import changedetectionio.blueprint.backups.restore as restore_mod
    +    from changedetectionio.blueprint.backups.restore import import_from_zip
    +
    +    # ~100 KB of zeros → deflate compresses to ~100 bytes, but file_size metadata = 100 KB
    +    bomb_zip = io.BytesIO()
    +    with ZipFile(bomb_zip, 'w', compression=ZIP_DEFLATED) as zf:
    +        zf.writestr("data.txt", b"\x00" * (100 * 1024))
    +    bomb_zip.seek(0)
    +
    +    datastore = live_server.app.config['DATASTORE']
    +    original_limit = restore_mod._MAX_DECOMPRESSED_BYTES
    +    try:
    +        restore_mod._MAX_DECOMPRESSED_BYTES = 50 * 1024  # 50 KB limit for this test
    +        with pytest.raises(ValueError, match="decompressed size"):
    +            import_from_zip(
    +                zip_stream=bomb_zip,
    +                datastore=datastore,
    +                include_groups=True,
    +                include_groups_replace=True,
    +                include_watches=True,
    +                include_watches_replace=True,
    +            )
    +    finally:
    +        restore_mod._MAX_DECOMPRESSED_BYTES = original_limit
    \ No newline at end of file
    

Vulnerability mechanics

Generated by null/stub on May 9, 2026. Inputs: CWE entries + fix-commit diffs from this CVE's patches. Citations validated against bundle.

References

5

News mentions

0

No linked articles in our index yet.