Sentry vulnerable to improper authorization on debug and artifact file downloads
Description
Sentry is an error tracking and performance monitoring platform. Starting in version 8.21.0 and prior to version 23.5.2, an authenticated user can download a debug or artifact bundle from arbitrary organizations and projects with a known bundle ID. The user does not need to be a member of the organization or have permissions on the project. A patch was issued in version 23.5.2 to ensure authorization checks are properly scoped on requests to retrieve debug or artifact bundles. Authenticated users who do not have the necessary permissions on the particular project are no longer able to download them. Sentry SaaS users do not need to take any action. Self-Hosted Sentry users should upgrade to version 23.5.2 or higher.
Affected packages
Versions sourced from the GitHub Security Advisory.
| Package | Affected versions | Patched versions |
|---|---|---|
| sentry (PyPI) | >= 8.21.0, < 23.5.2 | 23.5.2 |
Affected products
Patches (1)
e932b15435bf — Restrict file downloads to Project (#49680)
3 files changed · +91 −53
src/sentry/api/endpoints/artifact_lookup.py+47 −18 modified@@ -20,7 +20,6 @@ ArtifactBundle, DebugIdArtifactBundle, Distribution, - File, Project, ProjectArtifactBundle, Release, @@ -45,23 +44,43 @@ class ProjectArtifactLookupEndpoint(ProjectEndpoint): permission_classes = (ProjectReleasePermission,) - def download_file(self, file_id, project: Project): + def download_file(self, download_id, project: Project): + ty, ty_id = download_id.split("/") + rate_limited = ratelimits.is_limited( project=project, - key=f"rl:ArtifactLookupEndpoint:download:{file_id}:{project.id}", + key=f"rl:ArtifactLookupEndpoint:download:{download_id}:{project.id}", limit=10, ) if rate_limited: logger.info( "notification.rate_limited", - extra={"project_id": project.id, "file_id": file_id}, + extra={"project_id": project.id, "file_id": download_id}, ) return HttpResponse({"Too many download requests"}, status=429) - file = File.objects.filter(id=file_id).first() + file = None + if ty == "artifact_bundle": + file = ( + ArtifactBundle.objects.filter( + id=ty_id, + projectartifactbundle__project_id=project.id, + ) + .select_related("file") + .first() + ) + elif ty == "release_file": + # NOTE: `ReleaseFile` does have a `project_id`, but that seems to + # be always empty, so using the `organization_id` instead. 
+ file = ( + ReleaseFile.objects.filter(id=ty_id, organization_id=project.organization.id) + .select_related("file") + .first() + ) if file is None: raise Http404 + file = file.file try: fp = file.getfile() @@ -93,9 +112,9 @@ def get(self, request: Request, project: Project) -> Response: :auth: required """ - if request.GET.get("download") is not None: + if (download_id := request.GET.get("download")) is not None: if has_download_permission(request, project): - return self.download_file(request.GET.get("download"), project) + return self.download_file(download_id, project) else: return Response(status=403) @@ -114,7 +133,7 @@ def get(self, request: Request, project: Project) -> Response: def update_bundles(inner_bundles: Set[Tuple[int, datetime, int]]): for (bundle_id, date_added, file_id) in inner_bundles: used_artifact_bundles[bundle_id] = date_added - bundle_file_ids.add(file_id) + bundle_file_ids.add(("artifact_bundle", bundle_id, file_id)) if debug_id: bundles = get_artifact_bundles_containing_debug_id(debug_id, project) @@ -132,7 +151,8 @@ def update_bundles(inner_bundles: Set[Tuple[int, datetime, int]]): release, dist = try_resolve_release_dist(project, release_name, dist_name) if release: - bundle_file_ids |= get_legacy_release_bundles(release, dist) + for (releasefile_id, file_id) in get_legacy_release_bundles(release, dist): + bundle_file_ids.add(("release_file", releasefile_id, file_id)) individual_files = get_legacy_releasefile_by_file_url(release, dist, url) if options.get("sourcemaps.artifact-bundles.enable-renewal") == 1.0: @@ -144,12 +164,16 @@ def update_bundles(inner_bundles: Set[Tuple[int, datetime, int]]): url_constructor = UrlConstructor(request, project) found_artifacts = [] - for file_id in bundle_file_ids: + # NOTE: the reason we use the `file_id` as the `id` we return is because + # downstream symbolicator relies on that for its internal caching. 
+ # We do not want to hard-refresh those caches quite yet, and the `id` + # should also be as unique as possible, which the `file_id` is. + for (ty, ty_id, file_id) in bundle_file_ids: found_artifacts.append( { "id": str(file_id), "type": "bundle", - "url": url_constructor.url_for_file_id(file_id), + "url": url_constructor.url_for_file_id(ty, ty_id), } ) @@ -158,7 +182,7 @@ def update_bundles(inner_bundles: Set[Tuple[int, datetime, int]]): { "id": str(release_file.file.id), "type": "file", - "url": url_constructor.url_for_file_id(release_file.file.id), + "url": url_constructor.url_for_file_id("release_file", release_file.id), # The `name` is the url/abs_path of the file, # as in: `"~/path/to/file.min.js"`. "abs_path": release_file.name, @@ -167,6 +191,9 @@ def update_bundles(inner_bundles: Set[Tuple[int, datetime, int]]): } ) + # make sure we have a stable sort order for tests + found_artifacts.sort(key=lambda x: int(x["id"])) + # NOTE: We do not paginate this response, as we have very tight limits on all the individual queries. return Response(serialize(found_artifacts, request.user)) @@ -259,10 +286,11 @@ def try_resolve_release_dist( return release, dist -def get_legacy_release_bundles(release: Release, dist: Optional[Distribution]): +def get_legacy_release_bundles( + release: Release, dist: Optional[Distribution] +) -> Set[Tuple[int, int]]: return set( - ReleaseFile.objects.select_related("file") - .filter( + ReleaseFile.objects.filter( release_id=release.id, dist_id=dist.id if dist else None, # a `ReleaseFile` with `0` artifacts represents a release archive, @@ -271,7 +299,8 @@ def get_legacy_release_bundles(release: Release, dist: Optional[Distribution]): # similarly the special `type` is also used for release archives. file__type=RELEASE_BUNDLE_TYPE, ) - .values_list("file_id", flat=True) + .select_related("file") + .values_list("id", "file_id") # TODO: this `order_by` might be incredibly slow # we want to have a hard limit on the returned bundles here. 
and we would # want to pick the most recently uploaded ones. that should mostly be @@ -304,11 +333,11 @@ def __init__(self, request: Request, project: Project): else: self.base_url = request.build_absolute_uri(request.path) - def url_for_file_id(self, file_id: int) -> str: + def url_for_file_id(self, ty: str, file_id: int) -> str: # NOTE: Returning a self-route that requires authentication (via Bearer token) # is not really forward compatible with a pre-signed URL that does not # require any authentication or headers whatsoever. # This also requires a workaround in Symbolicator, as its generic http # downloader blocks "internal" IPs, whereas the internal Sentry downloader # is explicitly exempt. - return f"{self.base_url}?download={file_id}" + return f"{self.base_url}?download={ty}/{file_id}"
src/sentry/api/endpoints/debug_files.py+4 −2 modified@@ -95,9 +95,11 @@ def download(self, debug_file_id, project): "notification.rate_limited", extra={"project_id": project.id, "project_debug_file_id": debug_file_id}, ) - return HttpResponse({"Too many download requests"}, status=403) + return HttpResponse({"Too many download requests"}, status=429) - debug_file = ProjectDebugFile.objects.filter(id=debug_file_id).first() + debug_file = ProjectDebugFile.objects.filter( + id=debug_file_id, project_id=project.id + ).first() if debug_file is None: raise Http404
tests/sentry/api/endpoints/test_project_artifact_lookup.py+40 −33 modified@@ -92,15 +92,15 @@ def test_query_by_debug_ids(self): "path/in/zip/a": { "url": "~/path/to/app.js", "type": "source_map", - "content": b"foo", + "content": b"foo_id", "headers": { "debug-id": debug_id_a, }, }, "path/in/zip/b": { "url": "~/path/to/app.js", "type": "source_map", - "content": b"bar", + "content": b"bar_id", "headers": { "debug-id": debug_id_b, }, @@ -115,7 +115,11 @@ def test_query_by_debug_ids(self): file=file_ab, artifact_count=2, ) - + ProjectArtifactBundle.objects.create( + organization_id=self.organization.id, + project_id=self.project.id, + artifact_bundle=artifact_bundle_ab, + ) DebugIdArtifactBundle.objects.create( organization_id=self.organization.id, debug_id=debug_id_a, @@ -136,7 +140,7 @@ def test_query_by_debug_ids(self): "path/in/zip/c": { "url": "~/path/to/app.js", "type": "source_map", - "content": b"baz", + "content": b"baz_id", "headers": { "debug-id": debug_id_c, }, @@ -151,7 +155,11 @@ def test_query_by_debug_ids(self): file=file_c, artifact_count=1, ) - + ProjectArtifactBundle.objects.create( + organization_id=self.organization.id, + project_id=self.project.id, + artifact_bundle=artifact_bundle_c, + ) DebugIdArtifactBundle.objects.create( organization_id=self.organization.id, debug_id=debug_id_c, @@ -198,7 +206,7 @@ def test_query_by_url(self): "path/in/zip": { "url": "~/path/to/app.js", "type": "source_map", - "content": b"foo", + "content": b"foo_url", "headers": { "debug-id": debug_id_a, }, @@ -211,20 +219,19 @@ def test_query_by_url(self): "path/in/zip_a": { "url": "~/path/to/app.js", "type": "source_map", - "content": b"foo", + "content": b"foo_url", }, "path/in/zip_b": { "url": "~/path/to/other/app.js", "type": "source_map", - "content": b"bar", + "content": b"bar_url", }, }, ) artifact_bundle_a = ArtifactBundle.objects.create( organization_id=self.organization.id, bundle_id=uuid4(), file=file_a, artifact_count=1 ) - 
DebugIdArtifactBundle.objects.create( organization_id=self.organization.id, debug_id=debug_id_a, @@ -337,8 +344,8 @@ def test_query_by_url_from_legacy_bundle(self): archive1, archive1_file = self.create_archive( fields={}, files={ - "foo": "foo", - "bar": "bar", + "foo": "foo1", + "bar": "bar1", }, ) @@ -348,15 +355,15 @@ def test_query_by_url_from_legacy_bundle(self): "archive_ident": archive1.ident, "date_created": "2021-06-11T09:13:01.317902Z", "filename": "foo", - "sha1": "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33", - "size": 3, + "sha1": "18a16d4530763ef43321d306c9f6c59ffed33072", + "size": 4, }, "fake://bar": { "archive_ident": archive1.ident, "date_created": "2021-06-11T09:13:01.317902Z", "filename": "bar", - "sha1": "62cdb7020ff920e5aa642c3d4066950dd1f01f4d", - "size": 3, + "sha1": "763675d6a1d8d0a3a28deca62bb68abd8baf86f3", + "size": 4, }, }, } @@ -372,7 +379,7 @@ def test_query_by_url_from_legacy_bundle(self): archive2, archive2_file = self.create_archive( fields={}, files={ - "bar": "BAR", + "bar": "BAR1", }, ) @@ -382,15 +389,15 @@ def test_query_by_url_from_legacy_bundle(self): "archive_ident": archive1.ident, "date_created": "2021-06-11T09:13:01.317902Z", "filename": "foo", - "sha1": "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33", - "size": 3, + "sha1": "18a16d4530763ef43321d306c9f6c59ffed33072", + "size": 4, }, "fake://bar": { "archive_ident": archive2.ident, "date_created": "2021-06-11T09:13:01.317902Z", "filename": "bar", - "sha1": "a5d5c1bba91fdb6c669e1ae0413820885bbfc455", - "size": 3, + "sha1": "7f9353c7b307875542883ba558a1692706fcad33", + "size": 4, }, }, } @@ -427,8 +434,8 @@ def test_query_by_url_and_dist_from_legacy_bundle(self): archive1, archive1_file = self.create_archive( fields={}, files={ - "foo": "foo", - "bar": "bar", + "foo": "foo2", + "bar": "bar2", }, dist=dist, ) @@ -442,15 +449,15 @@ def test_query_by_url_and_dist_from_legacy_bundle(self): "archive_ident": archive1.ident, "date_created": "2021-06-11T09:13:01.317902Z", "filename": 
"foo", - "sha1": "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33", - "size": 3, + "sha1": "aaadd94977b8fbf3f6fb09fc3bbbc9edbdfa8427", + "size": 4, }, "fake://bar": { "archive_ident": archive1.ident, "date_created": "2021-06-11T09:13:01.317902Z", "filename": "bar", - "sha1": "62cdb7020ff920e5aa642c3d4066950dd1f01f4d", - "size": 3, + "sha1": "033c4846b506a4a48e32cdf54515c91d3499adb3", + "size": 4, }, }, } @@ -468,7 +475,7 @@ def test_query_by_url_and_dist_from_legacy_bundle(self): archive2, archive2_file = self.create_archive( fields={}, files={ - "bar": "BAR", + "bar": "BAR2", }, dist=dist, ) @@ -479,15 +486,15 @@ def test_query_by_url_and_dist_from_legacy_bundle(self): "archive_ident": archive1.ident, "date_created": "2021-06-11T09:13:01.317902Z", "filename": "foo", - "sha1": "0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33", - "size": 3, + "sha1": "aaadd94977b8fbf3f6fb09fc3bbbc9edbdfa8427", + "size": 4, }, "fake://bar": { "archive_ident": archive2.ident, "date_created": "2021-06-11T09:13:01.317902Z", "filename": "bar", - "sha1": "a5d5c1bba91fdb6c669e1ae0413820885bbfc455", - "size": 3, + "sha1": "528c5563f06a1e98954d17d365a219b68dd93baf", + "size": 4, }, }, } @@ -530,7 +537,7 @@ def test_renewal_with_debug_id(self): "path/in/zip/c": { "url": "~/path/to/app.js", "type": "source_map", - "content": b"baz", + "content": b"baz_renew", "headers": { "debug-id": debug_id, }, @@ -600,7 +607,7 @@ def test_renewal_with_url(self): "path/in/zip/c": { "url": "~/path/to/app.js", "type": "source_map", - "content": b"baz", + "content": b"baz_renew", }, }, )
Vulnerability mechanics
Generated by null/stub on May 9, 2026. Inputs: CWE entries + fix-commit diffs from this CVE's patches. Citations validated against bundle.
References
References (6)
- github.com/advisories/GHSA-m4hc-m2v6-hfw8 (GitHub advisory)
- nvd.nist.gov/vuln/detail/CVE-2023-36826 (NVD entry)
- github.com/getsentry/sentry/commit/e932b15435bf36239431eaa3790a6bcfa47046a9 (fix commit)
- github.com/getsentry/sentry/pull/49680 (fix pull request)
- github.com/getsentry/sentry/security/advisories/GHSA-m4hc-m2v6-hfw8 (vendor advisory)
- github.com/pypa/advisory-database/tree/main/vulns/sentry/PYSEC-2023-130.yaml (PyPA advisory database)
News mentions
No linked articles in our index yet.