From 036e12943f0123601604defe8170027b76959220 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 14 May 2025 10:11:23 +0200 Subject: [PATCH 01/22] register option --- src/sentry/options/defaults.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index b918557cda0745..23f9d0352c5594 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -99,6 +99,8 @@ default=False, flags=FLAG_AUTOMATOR_MODIFIABLE, ) +# Use Alpha version of Sentry Python SDK for dogfooding +register("system.use-python-sdk-alpha", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE) # Redis register( "redis.clusters", From 889341ff05503bb5188a5078d6c7c1b747be8cd6 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 14 May 2025 13:01:46 +0200 Subject: [PATCH 02/22] First try to add option --- src/sentry/options/defaults.py | 4 ++-- src/sentry/utils/sdk.py | 32 +++++++++++++++++++++----------- 2 files changed, 23 insertions(+), 13 deletions(-) diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index 23f9d0352c5594..a393d7b21e3581 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -99,8 +99,6 @@ default=False, flags=FLAG_AUTOMATOR_MODIFIABLE, ) -# Use Alpha version of Sentry Python SDK for dogfooding -register("system.use-python-sdk-alpha", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE) # Redis register( "redis.clusters", @@ -2364,6 +2362,8 @@ default=False, flags=FLAG_AUTOMATOR_MODIFIABLE, ) +# Gradually roll out Python SDK alpha version for dogfooding +register("sentry-sdk.use-python-sdk-alpha", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE) register( # Lists the shared resource ids we want to account usage for. 
diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 278cffd88af1f9..b8eb4abca67e85 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -7,20 +7,9 @@ from types import FrameType from typing import TYPE_CHECKING, Any, NamedTuple -import sentry_sdk from django.conf import settings from rest_framework.request import Request -# Reexport sentry_sdk just in case we ever have to write another shim like we -# did for raven -from sentry_sdk import Scope, capture_exception, capture_message, isolation_scope -from sentry_sdk._types import AnnotatedValue -from sentry_sdk.client import get_options -from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER -from sentry_sdk.transport import make_transport -from sentry_sdk.types import Event, Hint, Log -from sentry_sdk.utils import logger as sdk_logger - from sentry import options from sentry.conf.types.sdk_config import SdkConfig from sentry.options.rollout import in_random_rollout @@ -28,6 +17,27 @@ from sentry.utils.db import DjangoAtomicIntegration from sentry.utils.rust import RustInfoIntegration +# Reexport sentry_sdk just in case we ever have to write another shim like we +# did for raven +if in_random_rollout("sentry-sdk.use-python-sdk-alpha"): + import sentry_sdk_alpha as sentry_sdk + from sentry_sdk_alpha import Scope, capture_exception, capture_message, isolation_scope + from sentry_sdk_alpha._types import AnnotatedValue + from sentry_sdk_alpha.client import get_options + from sentry_sdk_alpha.integrations.django.transactions import LEGACY_RESOLVER + from sentry_sdk_alpha.transport import make_transport + from sentry_sdk_alpha.types import Event, Hint, Log + from sentry_sdk_alpha.utils import logger as sdk_logger +else: + import sentry_sdk + from sentry_sdk import Scope, capture_exception, capture_message, isolation_scope + from sentry_sdk._types import AnnotatedValue + from sentry_sdk.client import get_options + from sentry_sdk.integrations.django.transactions import 
LEGACY_RESOLVER + from sentry_sdk.transport import make_transport + from sentry_sdk.types import Event, Hint, Log + from sentry_sdk.utils import logger as sdk_logger + # Can't import models in utils because utils should be the bottom of the food chain if TYPE_CHECKING: from sentry.models.organization import Organization From 05d5af6cdfa45c84e1da18907a7df727d790ac9b Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 14 May 2025 13:09:59 +0200 Subject: [PATCH 03/22] Added type --- src/sentry/options/defaults.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index a393d7b21e3581..97576d5bb260b0 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -2363,7 +2363,9 @@ flags=FLAG_AUTOMATOR_MODIFIABLE, ) # Gradually roll out Python SDK alpha version for dogfooding -register("sentry-sdk.use-python-sdk-alpha", default=0.0, flags=FLAG_AUTOMATOR_MODIFIABLE) +register( + "sentry-sdk.use-python-sdk-alpha", default=0.0, type=Float, flags=FLAG_AUTOMATOR_MODIFIABLE +) register( # Lists the shared resource ids we want to account usage for. 
From 26567dbe22f7487dc70efe60936f0db5f0773fc6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Vjeran=20Grozdani=C4=87?= Date: Wed, 14 May 2025 14:07:56 +0200 Subject: [PATCH 04/22] monkeypatching sentry sdk (#91615) PoC of monkey patching sentry_sdk in runtime --- src/sentry/utils/sdk.py | 66 ++++++++++++++++++++++++++++------------- 1 file changed, 45 insertions(+), 21 deletions(-) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index b8eb4abca67e85..abef993edc6a75 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -1,14 +1,25 @@ from __future__ import annotations import copy +import importlib.machinery import logging import sys from collections.abc import Generator, Mapping, Sequence, Sized from types import FrameType from typing import TYPE_CHECKING, Any, NamedTuple +# Reexport sentry_sdk just in case we ever have to write another shim like we +# did for raven +import sentry_sdk from django.conf import settings from rest_framework.request import Request +from sentry_sdk import Scope, capture_exception, capture_message, isolation_scope +from sentry_sdk._types import AnnotatedValue +from sentry_sdk.client import get_options +from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER +from sentry_sdk.transport import make_transport +from sentry_sdk.types import Event, Hint, Log +from sentry_sdk.utils import logger as sdk_logger from sentry import options from sentry.conf.types.sdk_config import SdkConfig @@ -17,27 +28,6 @@ from sentry.utils.db import DjangoAtomicIntegration from sentry.utils.rust import RustInfoIntegration -# Reexport sentry_sdk just in case we ever have to write another shim like we -# did for raven -if in_random_rollout("sentry-sdk.use-python-sdk-alpha"): - import sentry_sdk_alpha as sentry_sdk - from sentry_sdk_alpha import Scope, capture_exception, capture_message, isolation_scope - from sentry_sdk_alpha._types import AnnotatedValue - from sentry_sdk_alpha.client import get_options - from 
sentry_sdk_alpha.integrations.django.transactions import LEGACY_RESOLVER - from sentry_sdk_alpha.transport import make_transport - from sentry_sdk_alpha.types import Event, Hint, Log - from sentry_sdk_alpha.utils import logger as sdk_logger -else: - import sentry_sdk - from sentry_sdk import Scope, capture_exception, capture_message, isolation_scope - from sentry_sdk._types import AnnotatedValue - from sentry_sdk.client import get_options - from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER - from sentry_sdk.transport import make_transport - from sentry_sdk.types import Event, Hint, Log - from sentry_sdk.utils import logger as sdk_logger - # Can't import models in utils because utils should be the bottom of the food chain if TYPE_CHECKING: from sentry.models.organization import Organization @@ -501,6 +491,40 @@ def flush( **sdk_options, ) + # monkey patch sentry + class ImportRedirector(importlib.abc.MetaPathFinder, importlib.abc.Loader): + def __init__(self, original_module, target_module): + self.original_module = original_module + self.target_module = target_module + + def find_spec(self, fullname, path, target=None): + if fullname == self.original_module: + # Create a spec for the target module + spec = importlib.machinery.ModuleSpec( + fullname, + self, + origin=f"redirected from {self.original_module} to {self.target_module}", + ) + return spec + return None + + def create_module(self, spec): + return importlib.import_module(self.target_module) + + def exec_module(self, module): + pass + + def redirect_import(original_module, target_module): + redirector = ImportRedirector(original_module, target_module) + sys.meta_path.insert(0, redirector) + if original_module in sys.modules: + # cleaning up cache if the module is already imported + del sys.modules[original_module] + + # monkey patch to anything but sentry_sdk + if in_random_rollout("sentry-sdk.use-python-sdk-alpha") or True: + redirect_import("sentry_sdk", "requests") + def 
check_tag_for_scope_bleed( tag_key: str, expected_value: str | int, add_to_scope: bool = True From f15c5ed25329136ea08fe9d76ee53f5c2200573a Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 14 May 2025 13:36:08 +0200 Subject: [PATCH 05/22] patch at runtime --- src/sentry/options/defaults.py | 2 +- src/sentry/utils/sdk.py | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py index 97576d5bb260b0..d53c3515953bfe 100644 --- a/src/sentry/options/defaults.py +++ b/src/sentry/options/defaults.py @@ -2364,7 +2364,7 @@ ) # Gradually roll out Python SDK alpha version for dogfooding register( - "sentry-sdk.use-python-sdk-alpha", default=0.0, type=Float, flags=FLAG_AUTOMATOR_MODIFIABLE + "sentry-sdk.use-python-sdk-alpha", default=1.0, type=Float, flags=FLAG_AUTOMATOR_MODIFIABLE ) register( diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index abef993edc6a75..24006c16a2fbf7 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -311,6 +311,16 @@ def configure_sdk(): """ Setup and initialize the Sentry SDK. 
""" + if in_random_rollout("sentry-sdk.use-python-sdk-alpha"): + import sentry_sdk_alpha + + global sentry_sdk + sentry_sdk = sentry_sdk_alpha + + # trying to call non existing function in sentry_sdk_alpha + # never reached because sentry_sdk_alpha does not exist yet :) + sentry_sdk.start_transaction() + sdk_options, dsns = _get_sdk_options() if settings.SPOTLIGHT: sdk_options["spotlight"] = ( From 92a10008531463a91fbbeaf007cda003430bd593 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 15 May 2025 10:18:24 +0200 Subject: [PATCH 06/22] debug output --- src/sentry/api/endpoints/warmup.py | 22 ++++++++++++---------- src/sentry/utils/sdk.py | 13 ++----------- 2 files changed, 14 insertions(+), 21 deletions(-) diff --git a/src/sentry/api/endpoints/warmup.py b/src/sentry/api/endpoints/warmup.py index d33192376f9238..36719dce4882c9 100644 --- a/src/sentry/api/endpoints/warmup.py +++ b/src/sentry/api/endpoints/warmup.py @@ -1,25 +1,21 @@ -import django.contrib.messages.storage.fallback -import django.contrib.sessions.serializers -import django.db.models.sql.compiler # NOQA +import sentry_sdk from django.conf import settings from django.urls import reverse from django.utils import translation from rest_framework.request import Request from rest_framework.response import Response +from sentry_sdk.consts import VERSION as SDK_VERSION -import sentry.identity.services.identity.impl # NOQA -import sentry.integrations.services.integration.impl # NOQA -import sentry.middleware.integrations.parsers.plugin # NOQA -import sentry.notifications.services.impl # NOQA -import sentry.sentry_apps.services.app.impl # NOQA -import sentry.users.services.user.impl # NOQA -import sentry.users.services.user_option.impl # NOQA from sentry.api.api_owners import ApiOwner from sentry.api.api_publish_status import ApiPublishStatus from sentry.api.base import Endpoint, all_silo_endpoint from sentry.ratelimits.config import RateLimitConfig +import logging +logger = logging.getLogger(__name__) 
+import sentry_sdk +from sentry_sdk.consts import VERSION as SDK_VERSION @all_silo_endpoint class WarmupEndpoint(Endpoint): publish_status = { @@ -30,6 +26,12 @@ class WarmupEndpoint(Endpoint): rate_limits = RateLimitConfig(group="INTERNAL") def get(self, request: Request) -> Response: + logger.warning("xxxxxxxxxxxxxxxxxx") + logger.warning(sentry_sdk) + logger.warning("yyyyyyyyyyyyyyyyyy") + logger.warning(SDK_VERSION) + logger.warning("zzzzzzzzzzzzzzzz") + languages = [lang for lang, _ in settings.LANGUAGES] languages.append(settings.LANGUAGE_CODE) diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 24006c16a2fbf7..a5f26733682fa6 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -311,16 +311,6 @@ def configure_sdk(): """ Setup and initialize the Sentry SDK. """ - if in_random_rollout("sentry-sdk.use-python-sdk-alpha"): - import sentry_sdk_alpha - - global sentry_sdk - sentry_sdk = sentry_sdk_alpha - - # trying to call non existing function in sentry_sdk_alpha - # never reached because sentry_sdk_alpha does not exist yet :) - sentry_sdk.start_transaction() - sdk_options, dsns = _get_sdk_options() if settings.SPOTLIGHT: sdk_options["spotlight"] = ( @@ -527,13 +517,14 @@ def exec_module(self, module): def redirect_import(original_module, target_module): redirector = ImportRedirector(original_module, target_module) sys.meta_path.insert(0, redirector) + # TODO: Not sure the original module should be deleted.... 
if original_module in sys.modules: # cleaning up cache if the module is already imported del sys.modules[original_module] # monkey patch to anything but sentry_sdk if in_random_rollout("sentry-sdk.use-python-sdk-alpha") or True: - redirect_import("sentry_sdk", "requests") + redirect_import("sentry_sdk", "sentry_sdk_alpha") def check_tag_for_scope_bleed( From 8c4b551ba20eda1904abbc49db9ee46d14ee944d Mon Sep 17 00:00:00 2001 From: "getsantry[bot]" <66042841+getsantry[bot]@users.noreply.github.com> Date: Thu, 15 May 2025 08:19:37 +0000 Subject: [PATCH 07/22] :hammer_and_wrench: apply pre-commit fixes --- src/sentry/api/endpoints/warmup.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/sentry/api/endpoints/warmup.py b/src/sentry/api/endpoints/warmup.py index 36719dce4882c9..4f417cabdf9390 100644 --- a/src/sentry/api/endpoints/warmup.py +++ b/src/sentry/api/endpoints/warmup.py @@ -1,3 +1,5 @@ +import logging + import sentry_sdk from django.conf import settings from django.urls import reverse @@ -11,11 +13,12 @@ from sentry.api.base import Endpoint, all_silo_endpoint from sentry.ratelimits.config import RateLimitConfig -import logging logger = logging.getLogger(__name__) import sentry_sdk from sentry_sdk.consts import VERSION as SDK_VERSION + + @all_silo_endpoint class WarmupEndpoint(Endpoint): publish_status = { From 6e0213fd767547acb13aae4ef0cdb93da966e77c Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 15 May 2025 10:19:45 +0200 Subject: [PATCH 08/22] adding alpha of sdk --- src/sentry_sdk_alpha/__init__.py | 54 + src/sentry_sdk_alpha/_compat.py | 97 + src/sentry_sdk_alpha/_init_implementation.py | 46 + src/sentry_sdk_alpha/_log_batcher.py | 161 ++ src/sentry_sdk_alpha/_lru_cache.py | 47 + src/sentry_sdk_alpha/_queue.py | 289 +++ src/sentry_sdk_alpha/_types.py | 271 +++ src/sentry_sdk_alpha/_werkzeug.py | 98 + src/sentry_sdk_alpha/ai/__init__.py | 0 src/sentry_sdk_alpha/ai/monitoring.py | 120 + src/sentry_sdk_alpha/ai/utils.py | 32 + 
src/sentry_sdk_alpha/api.py | 339 +++ src/sentry_sdk_alpha/attachments.py | 75 + src/sentry_sdk_alpha/client.py | 1007 +++++++++ src/sentry_sdk_alpha/consts.py | 1065 +++++++++ src/sentry_sdk_alpha/crons/__init__.py | 10 + src/sentry_sdk_alpha/crons/api.py | 57 + src/sentry_sdk_alpha/crons/consts.py | 4 + src/sentry_sdk_alpha/crons/decorator.py | 135 ++ src/sentry_sdk_alpha/debug.py | 31 + src/sentry_sdk_alpha/envelope.py | 353 +++ src/sentry_sdk_alpha/feature_flags.py | 72 + src/sentry_sdk_alpha/integrations/__init__.py | 296 +++ .../integrations/_asgi_common.py | 108 + .../integrations/_wsgi_common.py | 242 ++ src/sentry_sdk_alpha/integrations/aiohttp.py | 404 ++++ .../integrations/anthropic.py | 289 +++ src/sentry_sdk_alpha/integrations/argv.py | 31 + src/sentry_sdk_alpha/integrations/ariadne.py | 161 ++ src/sentry_sdk_alpha/integrations/arq.py | 257 +++ src/sentry_sdk_alpha/integrations/asgi.py | 357 +++ src/sentry_sdk_alpha/integrations/asyncio.py | 128 ++ src/sentry_sdk_alpha/integrations/asyncpg.py | 223 ++ src/sentry_sdk_alpha/integrations/atexit.py | 57 + .../integrations/aws_lambda.py | 516 +++++ src/sentry_sdk_alpha/integrations/beam.py | 176 ++ src/sentry_sdk_alpha/integrations/boto3.py | 166 ++ src/sentry_sdk_alpha/integrations/bottle.py | 221 ++ .../integrations/celery/__init__.py | 538 +++++ .../integrations/celery/beat.py | 293 +++ .../integrations/celery/utils.py | 43 + src/sentry_sdk_alpha/integrations/chalice.py | 134 ++ .../integrations/clickhouse_driver.py | 188 ++ .../integrations/cloud_resource_context.py | 280 +++ src/sentry_sdk_alpha/integrations/cohere.py | 272 +++ src/sentry_sdk_alpha/integrations/dedupe.py | 51 + .../integrations/django/__init__.py | 726 ++++++ .../integrations/django/asgi.py | 249 +++ .../integrations/django/caching.py | 182 ++ .../integrations/django/middleware.py | 188 ++ .../integrations/django/signals_handlers.py | 93 + .../integrations/django/templates.py | 184 ++ .../integrations/django/transactions.py | 154 ++ 
.../integrations/django/views.py | 99 + src/sentry_sdk_alpha/integrations/dramatiq.py | 168 ++ .../integrations/excepthook.py | 83 + .../integrations/executing.py | 67 + src/sentry_sdk_alpha/integrations/falcon.py | 254 +++ src/sentry_sdk_alpha/integrations/fastapi.py | 147 ++ src/sentry_sdk_alpha/integrations/flask.py | 275 +++ src/sentry_sdk_alpha/integrations/gcp.py | 259 +++ .../integrations/gnu_backtrace.py | 107 + src/sentry_sdk_alpha/integrations/gql.py | 145 ++ src/sentry_sdk_alpha/integrations/graphene.py | 144 ++ .../integrations/grpc/__init__.py | 151 ++ .../integrations/grpc/aio/__init__.py | 7 + .../integrations/grpc/aio/client.py | 100 + .../integrations/grpc/aio/server.py | 98 + .../integrations/grpc/client.py | 94 + .../integrations/grpc/consts.py | 1 + .../integrations/grpc/server.py | 64 + src/sentry_sdk_alpha/integrations/httpx.py | 202 ++ src/sentry_sdk_alpha/integrations/huey.py | 178 ++ .../integrations/huggingface_hub.py | 176 ++ .../integrations/langchain.py | 472 ++++ .../integrations/launchdarkly.py | 62 + src/sentry_sdk_alpha/integrations/litestar.py | 308 +++ src/sentry_sdk_alpha/integrations/logging.py | 413 ++++ src/sentry_sdk_alpha/integrations/loguru.py | 130 ++ src/sentry_sdk_alpha/integrations/modules.py | 29 + src/sentry_sdk_alpha/integrations/openai.py | 431 ++++ .../integrations/openfeature.py | 37 + .../integrations/pure_eval.py | 139 ++ src/sentry_sdk_alpha/integrations/pymongo.py | 207 ++ src/sentry_sdk_alpha/integrations/pyramid.py | 229 ++ src/sentry_sdk_alpha/integrations/quart.py | 237 ++ src/sentry_sdk_alpha/integrations/ray.py | 147 ++ .../integrations/redis/__init__.py | 38 + .../integrations/redis/_async_common.py | 120 + .../integrations/redis/_sync_common.py | 123 ++ .../integrations/redis/consts.py | 19 + .../integrations/redis/modules/__init__.py | 0 .../integrations/redis/modules/caches.py | 124 ++ .../integrations/redis/modules/queries.py | 71 + src/sentry_sdk_alpha/integrations/redis/rb.py | 32 + 
.../integrations/redis/redis.py | 69 + .../integrations/redis/redis_cluster.py | 101 + .../redis/redis_py_cluster_legacy.py | 50 + .../integrations/redis/utils.py | 188 ++ src/sentry_sdk_alpha/integrations/rq.py | 206 ++ .../integrations/rust_tracing.py | 269 +++ src/sentry_sdk_alpha/integrations/sanic.py | 378 ++++ .../integrations/serverless.py | 76 + src/sentry_sdk_alpha/integrations/socket.py | 100 + .../integrations/spark/__init__.py | 4 + .../integrations/spark/spark_driver.py | 315 +++ .../integrations/spark/spark_worker.py | 116 + .../integrations/sqlalchemy.py | 146 ++ .../integrations/starlette.py | 723 ++++++ src/sentry_sdk_alpha/integrations/starlite.py | 294 +++ src/sentry_sdk_alpha/integrations/statsig.py | 37 + src/sentry_sdk_alpha/integrations/stdlib.py | 312 +++ .../integrations/strawberry.py | 374 ++++ src/sentry_sdk_alpha/integrations/sys_exit.py | 70 + .../integrations/threading.py | 134 ++ src/sentry_sdk_alpha/integrations/tornado.py | 259 +++ src/sentry_sdk_alpha/integrations/trytond.py | 53 + src/sentry_sdk_alpha/integrations/typer.py | 60 + src/sentry_sdk_alpha/integrations/unleash.py | 33 + src/sentry_sdk_alpha/integrations/wsgi.py | 356 +++ src/sentry_sdk_alpha/logger.py | 56 + src/sentry_sdk_alpha/monitor.py | 124 ++ .../opentelemetry/__init__.py | 9 + src/sentry_sdk_alpha/opentelemetry/consts.py | 33 + .../opentelemetry/contextvars_context.py | 73 + .../opentelemetry/propagator.py | 108 + src/sentry_sdk_alpha/opentelemetry/sampler.py | 326 +++ src/sentry_sdk_alpha/opentelemetry/scope.py | 218 ++ .../opentelemetry/span_processor.py | 329 +++ src/sentry_sdk_alpha/opentelemetry/tracing.py | 35 + src/sentry_sdk_alpha/opentelemetry/utils.py | 476 ++++ src/sentry_sdk_alpha/profiler/__init__.py | 9 + .../profiler/continuous_profiler.py | 675 ++++++ .../profiler/transaction_profiler.py | 786 +++++++ src/sentry_sdk_alpha/profiler/utils.py | 199 ++ src/sentry_sdk_alpha/py.typed | 0 src/sentry_sdk_alpha/scope.py | 1563 +++++++++++++ 
src/sentry_sdk_alpha/scrubber.py | 177 ++ src/sentry_sdk_alpha/serializer.py | 388 ++++ src/sentry_sdk_alpha/session.py | 175 ++ src/sentry_sdk_alpha/sessions.py | 191 ++ src/sentry_sdk_alpha/spotlight.py | 242 ++ src/sentry_sdk_alpha/tracing.py | 642 ++++++ src/sentry_sdk_alpha/tracing_utils.py | 869 ++++++++ src/sentry_sdk_alpha/transport.py | 823 +++++++ src/sentry_sdk_alpha/types.py | 49 + src/sentry_sdk_alpha/utils.py | 1952 +++++++++++++++++ src/sentry_sdk_alpha/worker.py | 141 ++ 148 files changed, 33148 insertions(+) create mode 100644 src/sentry_sdk_alpha/__init__.py create mode 100644 src/sentry_sdk_alpha/_compat.py create mode 100644 src/sentry_sdk_alpha/_init_implementation.py create mode 100644 src/sentry_sdk_alpha/_log_batcher.py create mode 100644 src/sentry_sdk_alpha/_lru_cache.py create mode 100644 src/sentry_sdk_alpha/_queue.py create mode 100644 src/sentry_sdk_alpha/_types.py create mode 100644 src/sentry_sdk_alpha/_werkzeug.py create mode 100644 src/sentry_sdk_alpha/ai/__init__.py create mode 100644 src/sentry_sdk_alpha/ai/monitoring.py create mode 100644 src/sentry_sdk_alpha/ai/utils.py create mode 100644 src/sentry_sdk_alpha/api.py create mode 100644 src/sentry_sdk_alpha/attachments.py create mode 100644 src/sentry_sdk_alpha/client.py create mode 100644 src/sentry_sdk_alpha/consts.py create mode 100644 src/sentry_sdk_alpha/crons/__init__.py create mode 100644 src/sentry_sdk_alpha/crons/api.py create mode 100644 src/sentry_sdk_alpha/crons/consts.py create mode 100644 src/sentry_sdk_alpha/crons/decorator.py create mode 100644 src/sentry_sdk_alpha/debug.py create mode 100644 src/sentry_sdk_alpha/envelope.py create mode 100644 src/sentry_sdk_alpha/feature_flags.py create mode 100644 src/sentry_sdk_alpha/integrations/__init__.py create mode 100644 src/sentry_sdk_alpha/integrations/_asgi_common.py create mode 100644 src/sentry_sdk_alpha/integrations/_wsgi_common.py create mode 100644 src/sentry_sdk_alpha/integrations/aiohttp.py create mode 100644 
src/sentry_sdk_alpha/integrations/anthropic.py create mode 100644 src/sentry_sdk_alpha/integrations/argv.py create mode 100644 src/sentry_sdk_alpha/integrations/ariadne.py create mode 100644 src/sentry_sdk_alpha/integrations/arq.py create mode 100644 src/sentry_sdk_alpha/integrations/asgi.py create mode 100644 src/sentry_sdk_alpha/integrations/asyncio.py create mode 100644 src/sentry_sdk_alpha/integrations/asyncpg.py create mode 100644 src/sentry_sdk_alpha/integrations/atexit.py create mode 100644 src/sentry_sdk_alpha/integrations/aws_lambda.py create mode 100644 src/sentry_sdk_alpha/integrations/beam.py create mode 100644 src/sentry_sdk_alpha/integrations/boto3.py create mode 100644 src/sentry_sdk_alpha/integrations/bottle.py create mode 100644 src/sentry_sdk_alpha/integrations/celery/__init__.py create mode 100644 src/sentry_sdk_alpha/integrations/celery/beat.py create mode 100644 src/sentry_sdk_alpha/integrations/celery/utils.py create mode 100644 src/sentry_sdk_alpha/integrations/chalice.py create mode 100644 src/sentry_sdk_alpha/integrations/clickhouse_driver.py create mode 100644 src/sentry_sdk_alpha/integrations/cloud_resource_context.py create mode 100644 src/sentry_sdk_alpha/integrations/cohere.py create mode 100644 src/sentry_sdk_alpha/integrations/dedupe.py create mode 100644 src/sentry_sdk_alpha/integrations/django/__init__.py create mode 100644 src/sentry_sdk_alpha/integrations/django/asgi.py create mode 100644 src/sentry_sdk_alpha/integrations/django/caching.py create mode 100644 src/sentry_sdk_alpha/integrations/django/middleware.py create mode 100644 src/sentry_sdk_alpha/integrations/django/signals_handlers.py create mode 100644 src/sentry_sdk_alpha/integrations/django/templates.py create mode 100644 src/sentry_sdk_alpha/integrations/django/transactions.py create mode 100644 src/sentry_sdk_alpha/integrations/django/views.py create mode 100644 src/sentry_sdk_alpha/integrations/dramatiq.py create mode 100644 
src/sentry_sdk_alpha/integrations/excepthook.py create mode 100644 src/sentry_sdk_alpha/integrations/executing.py create mode 100644 src/sentry_sdk_alpha/integrations/falcon.py create mode 100644 src/sentry_sdk_alpha/integrations/fastapi.py create mode 100644 src/sentry_sdk_alpha/integrations/flask.py create mode 100644 src/sentry_sdk_alpha/integrations/gcp.py create mode 100644 src/sentry_sdk_alpha/integrations/gnu_backtrace.py create mode 100644 src/sentry_sdk_alpha/integrations/gql.py create mode 100644 src/sentry_sdk_alpha/integrations/graphene.py create mode 100644 src/sentry_sdk_alpha/integrations/grpc/__init__.py create mode 100644 src/sentry_sdk_alpha/integrations/grpc/aio/__init__.py create mode 100644 src/sentry_sdk_alpha/integrations/grpc/aio/client.py create mode 100644 src/sentry_sdk_alpha/integrations/grpc/aio/server.py create mode 100644 src/sentry_sdk_alpha/integrations/grpc/client.py create mode 100644 src/sentry_sdk_alpha/integrations/grpc/consts.py create mode 100644 src/sentry_sdk_alpha/integrations/grpc/server.py create mode 100644 src/sentry_sdk_alpha/integrations/httpx.py create mode 100644 src/sentry_sdk_alpha/integrations/huey.py create mode 100644 src/sentry_sdk_alpha/integrations/huggingface_hub.py create mode 100644 src/sentry_sdk_alpha/integrations/langchain.py create mode 100644 src/sentry_sdk_alpha/integrations/launchdarkly.py create mode 100644 src/sentry_sdk_alpha/integrations/litestar.py create mode 100644 src/sentry_sdk_alpha/integrations/logging.py create mode 100644 src/sentry_sdk_alpha/integrations/loguru.py create mode 100644 src/sentry_sdk_alpha/integrations/modules.py create mode 100644 src/sentry_sdk_alpha/integrations/openai.py create mode 100644 src/sentry_sdk_alpha/integrations/openfeature.py create mode 100644 src/sentry_sdk_alpha/integrations/pure_eval.py create mode 100644 src/sentry_sdk_alpha/integrations/pymongo.py create mode 100644 src/sentry_sdk_alpha/integrations/pyramid.py create mode 100644 
src/sentry_sdk_alpha/integrations/quart.py create mode 100644 src/sentry_sdk_alpha/integrations/ray.py create mode 100644 src/sentry_sdk_alpha/integrations/redis/__init__.py create mode 100644 src/sentry_sdk_alpha/integrations/redis/_async_common.py create mode 100644 src/sentry_sdk_alpha/integrations/redis/_sync_common.py create mode 100644 src/sentry_sdk_alpha/integrations/redis/consts.py create mode 100644 src/sentry_sdk_alpha/integrations/redis/modules/__init__.py create mode 100644 src/sentry_sdk_alpha/integrations/redis/modules/caches.py create mode 100644 src/sentry_sdk_alpha/integrations/redis/modules/queries.py create mode 100644 src/sentry_sdk_alpha/integrations/redis/rb.py create mode 100644 src/sentry_sdk_alpha/integrations/redis/redis.py create mode 100644 src/sentry_sdk_alpha/integrations/redis/redis_cluster.py create mode 100644 src/sentry_sdk_alpha/integrations/redis/redis_py_cluster_legacy.py create mode 100644 src/sentry_sdk_alpha/integrations/redis/utils.py create mode 100644 src/sentry_sdk_alpha/integrations/rq.py create mode 100644 src/sentry_sdk_alpha/integrations/rust_tracing.py create mode 100644 src/sentry_sdk_alpha/integrations/sanic.py create mode 100644 src/sentry_sdk_alpha/integrations/serverless.py create mode 100644 src/sentry_sdk_alpha/integrations/socket.py create mode 100644 src/sentry_sdk_alpha/integrations/spark/__init__.py create mode 100644 src/sentry_sdk_alpha/integrations/spark/spark_driver.py create mode 100644 src/sentry_sdk_alpha/integrations/spark/spark_worker.py create mode 100644 src/sentry_sdk_alpha/integrations/sqlalchemy.py create mode 100644 src/sentry_sdk_alpha/integrations/starlette.py create mode 100644 src/sentry_sdk_alpha/integrations/starlite.py create mode 100644 src/sentry_sdk_alpha/integrations/statsig.py create mode 100644 src/sentry_sdk_alpha/integrations/stdlib.py create mode 100644 src/sentry_sdk_alpha/integrations/strawberry.py create mode 100644 src/sentry_sdk_alpha/integrations/sys_exit.py create 
mode 100644 src/sentry_sdk_alpha/integrations/threading.py create mode 100644 src/sentry_sdk_alpha/integrations/tornado.py create mode 100644 src/sentry_sdk_alpha/integrations/trytond.py create mode 100644 src/sentry_sdk_alpha/integrations/typer.py create mode 100644 src/sentry_sdk_alpha/integrations/unleash.py create mode 100644 src/sentry_sdk_alpha/integrations/wsgi.py create mode 100644 src/sentry_sdk_alpha/logger.py create mode 100644 src/sentry_sdk_alpha/monitor.py create mode 100644 src/sentry_sdk_alpha/opentelemetry/__init__.py create mode 100644 src/sentry_sdk_alpha/opentelemetry/consts.py create mode 100644 src/sentry_sdk_alpha/opentelemetry/contextvars_context.py create mode 100644 src/sentry_sdk_alpha/opentelemetry/propagator.py create mode 100644 src/sentry_sdk_alpha/opentelemetry/sampler.py create mode 100644 src/sentry_sdk_alpha/opentelemetry/scope.py create mode 100644 src/sentry_sdk_alpha/opentelemetry/span_processor.py create mode 100644 src/sentry_sdk_alpha/opentelemetry/tracing.py create mode 100644 src/sentry_sdk_alpha/opentelemetry/utils.py create mode 100644 src/sentry_sdk_alpha/profiler/__init__.py create mode 100644 src/sentry_sdk_alpha/profiler/continuous_profiler.py create mode 100644 src/sentry_sdk_alpha/profiler/transaction_profiler.py create mode 100644 src/sentry_sdk_alpha/profiler/utils.py create mode 100644 src/sentry_sdk_alpha/py.typed create mode 100644 src/sentry_sdk_alpha/scope.py create mode 100644 src/sentry_sdk_alpha/scrubber.py create mode 100644 src/sentry_sdk_alpha/serializer.py create mode 100644 src/sentry_sdk_alpha/session.py create mode 100644 src/sentry_sdk_alpha/sessions.py create mode 100644 src/sentry_sdk_alpha/spotlight.py create mode 100644 src/sentry_sdk_alpha/tracing.py create mode 100644 src/sentry_sdk_alpha/tracing_utils.py create mode 100644 src/sentry_sdk_alpha/transport.py create mode 100644 src/sentry_sdk_alpha/types.py create mode 100644 src/sentry_sdk_alpha/utils.py create mode 100644 
src/sentry_sdk_alpha/worker.py diff --git a/src/sentry_sdk_alpha/__init__.py b/src/sentry_sdk_alpha/__init__.py new file mode 100644 index 00000000000000..3862499cc9ceea --- /dev/null +++ b/src/sentry_sdk_alpha/__init__.py @@ -0,0 +1,54 @@ +# TODO-neel scope switch +# TODO-neel avoid duplication between api and __init__ +from sentry_sdk_alpha.opentelemetry.scope import PotelScope as Scope +from sentry_sdk_alpha.transport import Transport, HttpTransport +from sentry_sdk_alpha.client import Client + +from sentry_sdk_alpha.api import * # noqa + +from sentry_sdk_alpha.consts import VERSION # noqa + +__all__ = [ # noqa + "Scope", + "Client", + "Transport", + "HttpTransport", + "integrations", + # From sentry_sdk.api + "init", + "add_attachment", + "add_breadcrumb", + "capture_event", + "capture_exception", + "capture_message", + "continue_trace", + "flush", + "get_baggage", + "get_client", + "get_global_scope", + "get_isolation_scope", + "get_current_scope", + "get_current_span", + "get_traceparent", + "is_initialized", + "isolation_scope", + "last_event_id", + "new_scope", + "set_context", + "set_extra", + "set_level", + "set_tag", + "set_tags", + "set_user", + "start_span", + "start_transaction", + "trace", + "monitor", + "logger", +] + +# Initialize the debug support after everything is loaded +from sentry_sdk_alpha.debug import init_debug_support + +init_debug_support() +del init_debug_support diff --git a/src/sentry_sdk_alpha/_compat.py b/src/sentry_sdk_alpha/_compat.py new file mode 100644 index 00000000000000..87e7844cda31a2 --- /dev/null +++ b/src/sentry_sdk_alpha/_compat.py @@ -0,0 +1,97 @@ +import sys + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import TypeVar + + T = TypeVar("T") + + +PY38 = sys.version_info[0] == 3 and sys.version_info[1] >= 8 +PY310 = sys.version_info[0] == 3 and sys.version_info[1] >= 10 +PY311 = sys.version_info[0] == 3 and sys.version_info[1] >= 11 + + +def with_metaclass(meta, *bases): 
+ # type: (Any, *Any) -> Any + class MetaClass(type): + def __new__(metacls, name, this_bases, d): + # type: (Any, Any, Any, Any) -> Any + return meta(name, bases, d) + + return type.__new__(MetaClass, "temporary_class", (), {}) + + +def check_uwsgi_thread_support(): + # type: () -> bool + # We check two things here: + # + # 1. uWSGI doesn't run in threaded mode by default -- issue a warning if + # that's the case. + # + # 2. Additionally, if uWSGI is running in preforking mode (default), it needs + # the --py-call-uwsgi-fork-hooks option for the SDK to work properly. This + # is because any background threads spawned before the main process is + # forked are NOT CLEANED UP IN THE CHILDREN BY DEFAULT even if + # --enable-threads is on. One has to explicitly provide + # --py-call-uwsgi-fork-hooks to force uWSGI to run regular cpython + # after-fork hooks that take care of cleaning up stale thread data. + try: + from uwsgi import opt # type: ignore + except ImportError: + return True + + from sentry_sdk_alpha.consts import FALSE_VALUES + + def enabled(option): + # type: (str) -> bool + value = opt.get(option, False) + if isinstance(value, bool): + return value + + if isinstance(value, bytes): + try: + value = value.decode() + except Exception: + pass + + return value and str(value).lower() not in FALSE_VALUES + + # When `threads` is passed in as a uwsgi option, + # `enable-threads` is implied on. + threads_enabled = "threads" in opt or enabled("enable-threads") + fork_hooks_on = enabled("py-call-uwsgi-fork-hooks") + lazy_mode = enabled("lazy-apps") or enabled("lazy") + + if lazy_mode and not threads_enabled: + from warnings import warn + + warn( + Warning( + "IMPORTANT: " + "We detected the use of uWSGI without thread support. " + "This might lead to unexpected issues. " + 'Please run uWSGI with "--enable-threads" for full support.' 
+ ) + ) + + return False + + elif not lazy_mode and (not threads_enabled or not fork_hooks_on): + from warnings import warn + + warn( + Warning( + "IMPORTANT: " + "We detected the use of uWSGI in preforking mode without " + "thread support. This might lead to crashing workers. " + 'Please run uWSGI with both "--enable-threads" and ' + '"--py-call-uwsgi-fork-hooks" for full support.' + ) + ) + + return False + + return True diff --git a/src/sentry_sdk_alpha/_init_implementation.py b/src/sentry_sdk_alpha/_init_implementation.py new file mode 100644 index 00000000000000..2799e179d765e4 --- /dev/null +++ b/src/sentry_sdk_alpha/_init_implementation.py @@ -0,0 +1,46 @@ +from typing import TYPE_CHECKING + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import ClientConstructor +from sentry_sdk_alpha.opentelemetry.scope import setup_scope_context_management + +if TYPE_CHECKING: + from typing import Any, Optional + + +def _check_python_deprecations(): + # type: () -> None + # Since we're likely to deprecate Python versions in the future, I'm keeping + # this handy function around. Use this to detect the Python version used and + # to output logger.warning()s if it's deprecated. + pass + + +def _init(*args, **kwargs): + # type: (*Optional[str], **Any) -> None + """Initializes the SDK and optionally integrations. + + This takes the same arguments as the client constructor. + """ + setup_scope_context_management() + client = sentry_sdk_alpha.Client(*args, **kwargs) + sentry_sdk_alpha.get_global_scope().set_client(client) + _check_python_deprecations() + + +if TYPE_CHECKING: + # Make mypy, PyCharm and other static analyzers think `init` is a type to + # have nicer autocompletion for params. + # + # Use `ClientConstructor` to define the argument types of `init` and + # `ContextManager[Any]` to tell static analyzers about the return type. + + class init(ClientConstructor): # noqa: N801 + pass + +else: + # Alias `init` for actual usage. 
Go through the lambda indirection to throw + # PyCharm off of the weakly typed signature (it would otherwise discover + # both the weakly typed signature of `_init` and our faked `init` type). + + init = (lambda: _init)() diff --git a/src/sentry_sdk_alpha/_log_batcher.py b/src/sentry_sdk_alpha/_log_batcher.py new file mode 100644 index 00000000000000..aa121e22f16325 --- /dev/null +++ b/src/sentry_sdk_alpha/_log_batcher.py @@ -0,0 +1,161 @@ +import os +import random +import threading +from datetime import datetime, timezone +from typing import Optional, List, Callable, TYPE_CHECKING, Any + +from sentry_sdk_alpha.utils import format_timestamp, safe_repr +from sentry_sdk_alpha.envelope import Envelope, Item, PayloadRef + +if TYPE_CHECKING: + from sentry_sdk_alpha._types import Log + + +class LogBatcher: + MAX_LOGS_BEFORE_FLUSH = 100 + FLUSH_WAIT_TIME = 5.0 + + def __init__( + self, + capture_func, # type: Callable[[Envelope], None] + ): + # type: (...) -> None + self._log_buffer = [] # type: List[Log] + self._capture_func = capture_func + self._running = True + self._lock = threading.Lock() + + self._flush_event = threading.Event() # type: threading.Event + + self._flusher = None # type: Optional[threading.Thread] + self._flusher_pid = None # type: Optional[int] + + def _ensure_thread(self): + # type: (...) -> bool + """For forking processes we might need to restart this thread. + This ensures that our process actually has that thread running. 
+ """ + if not self._running: + return False + + pid = os.getpid() + if self._flusher_pid == pid: + return True + + with self._lock: + # Recheck to make sure another thread didn't get here and start the + # the flusher in the meantime + if self._flusher_pid == pid: + return True + + self._flusher_pid = pid + + self._flusher = threading.Thread(target=self._flush_loop) + self._flusher.daemon = True + + try: + self._flusher.start() + except RuntimeError: + # Unfortunately at this point the interpreter is in a state that no + # longer allows us to spawn a thread and we have to bail. + self._running = False + return False + + return True + + def _flush_loop(self): + # type: (...) -> None + while self._running: + self._flush_event.wait(self.FLUSH_WAIT_TIME + random.random()) + self._flush_event.clear() + self._flush() + + def add( + self, + log, # type: Log + ): + # type: (...) -> None + if not self._ensure_thread() or self._flusher is None: + return None + + with self._lock: + self._log_buffer.append(log) + if len(self._log_buffer) >= self.MAX_LOGS_BEFORE_FLUSH: + self._flush_event.set() + + def kill(self): + # type: (...) -> None + if self._flusher is None: + return + + self._running = False + self._flush_event.set() + self._flusher = None + + def flush(self): + # type: (...) 
-> None + self._flush() + + @staticmethod + def _log_to_transport_format(log): + # type: (Log) -> Any + def format_attribute(val): + # type: (int | float | str | bool) -> Any + if isinstance(val, bool): + return {"value": val, "type": "boolean"} + if isinstance(val, int): + return {"value": val, "type": "integer"} + if isinstance(val, float): + return {"value": val, "type": "double"} + if isinstance(val, str): + return {"value": val, "type": "string"} + return {"value": safe_repr(val), "type": "string"} + + if "sentry.severity_number" not in log["attributes"]: + log["attributes"]["sentry.severity_number"] = log["severity_number"] + if "sentry.severity_text" not in log["attributes"]: + log["attributes"]["sentry.severity_text"] = log["severity_text"] + + res = { + "timestamp": int(log["time_unix_nano"]) / 1.0e9, + "trace_id": log.get("trace_id", "00000000-0000-0000-0000-000000000000"), + "level": str(log["severity_text"]), + "body": str(log["body"]), + "attributes": { + k: format_attribute(v) for (k, v) in log["attributes"].items() + }, + } + + return res + + def _flush(self): + # type: (...) 
-> Optional[Envelope] + + envelope = Envelope( + headers={"sent_at": format_timestamp(datetime.now(timezone.utc))} + ) + with self._lock: + if len(self._log_buffer) == 0: + return None + + envelope.add_item( + Item( + type="log", + content_type="application/vnd.sentry.items.log+json", + headers={ + "item_count": len(self._log_buffer), + }, + payload=PayloadRef( + json={ + "items": [ + self._log_to_transport_format(log) + for log in self._log_buffer + ] + } + ), + ) + ) + self._log_buffer.clear() + + self._capture_func(envelope) + return envelope diff --git a/src/sentry_sdk_alpha/_lru_cache.py b/src/sentry_sdk_alpha/_lru_cache.py new file mode 100644 index 00000000000000..cbadd9723b6fc5 --- /dev/null +++ b/src/sentry_sdk_alpha/_lru_cache.py @@ -0,0 +1,47 @@ +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + + +_SENTINEL = object() + + +class LRUCache: + def __init__(self, max_size): + # type: (int) -> None + if max_size <= 0: + raise AssertionError(f"invalid max_size: {max_size}") + self.max_size = max_size + self._data = {} # type: dict[Any, Any] + self.hits = self.misses = 0 + self.full = False + + def set(self, key, value): + # type: (Any, Any) -> None + current = self._data.pop(key, _SENTINEL) + if current is not _SENTINEL: + self._data[key] = value + elif self.full: + self._data.pop(next(iter(self._data))) + self._data[key] = value + else: + self._data[key] = value + self.full = len(self._data) >= self.max_size + + def get(self, key, default=None): + # type: (Any, Any) -> Any + try: + ret = self._data.pop(key) + except KeyError: + self.misses += 1 + ret = default + else: + self.hits += 1 + self._data[key] = ret + + return ret + + def get_all(self): + # type: () -> list[tuple[Any, Any]] + return list(self._data.items()) diff --git a/src/sentry_sdk_alpha/_queue.py b/src/sentry_sdk_alpha/_queue.py new file mode 100644 index 00000000000000..a21c86ec0aeb7f --- /dev/null +++ b/src/sentry_sdk_alpha/_queue.py @@ -0,0 +1,289 @@ +""" +A 
fork of Python 3.6's stdlib queue (found in Pythons 'cpython/Lib/queue.py') +with Lock swapped out for RLock to avoid a deadlock while garbage collecting. + +https://github.com/python/cpython/blob/v3.6.12/Lib/queue.py + + +See also +https://codewithoutrules.com/2017/08/16/concurrency-python/ +https://bugs.python.org/issue14976 +https://github.com/sqlalchemy/sqlalchemy/blob/4eb747b61f0c1b1c25bdee3856d7195d10a0c227/lib/sqlalchemy/queue.py#L1 + +We also vendor the code to evade eventlet's broken monkeypatching, see +https://github.com/getsentry/sentry-python/pull/484 + + +Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation; + +All Rights Reserved + + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. 
In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + +""" + +import threading + +from collections import deque +from time import time + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + +__all__ = ["EmptyError", "FullError", "Queue"] + + +class EmptyError(Exception): + "Exception raised by Queue.get(block=0)/get_nowait()." + + pass + + +class FullError(Exception): + "Exception raised by Queue.put(block=0)/put_nowait()." 
+ + pass + + +class Queue: + """Create a queue object with a given maximum size. + + If maxsize is <= 0, the queue size is infinite. + """ + + def __init__(self, maxsize=0): + self.maxsize = maxsize + self._init(maxsize) + + # mutex must be held whenever the queue is mutating. All methods + # that acquire mutex must release it before returning. mutex + # is shared between the three conditions, so acquiring and + # releasing the conditions also acquires and releases mutex. + self.mutex = threading.RLock() + + # Notify not_empty whenever an item is added to the queue; a + # thread waiting to get is notified then. + self.not_empty = threading.Condition(self.mutex) + + # Notify not_full whenever an item is removed from the queue; + # a thread waiting to put is notified then. + self.not_full = threading.Condition(self.mutex) + + # Notify all_tasks_done whenever the number of unfinished tasks + # drops to zero; thread waiting to join() is notified to resume + self.all_tasks_done = threading.Condition(self.mutex) + self.unfinished_tasks = 0 + + def task_done(self): + """Indicate that a formerly enqueued task is complete. + + Used by Queue consumer threads. For each get() used to fetch a task, + a subsequent call to task_done() tells the queue that the processing + on the task is complete. + + If a join() is currently blocking, it will resume when all items + have been processed (meaning that a task_done() call was received + for every item that had been put() into the queue). + + Raises a ValueError if called more times than there were items + placed in the queue. + """ + with self.all_tasks_done: + unfinished = self.unfinished_tasks - 1 + if unfinished <= 0: + if unfinished < 0: + raise ValueError("task_done() called too many times") + self.all_tasks_done.notify_all() + self.unfinished_tasks = unfinished + + def join(self): + """Blocks until all items in the Queue have been gotten and processed. 
+ + The count of unfinished tasks goes up whenever an item is added to the + queue. The count goes down whenever a consumer thread calls task_done() + to indicate the item was retrieved and all work on it is complete. + + When the count of unfinished tasks drops to zero, join() unblocks. + """ + with self.all_tasks_done: + while self.unfinished_tasks: + self.all_tasks_done.wait() + + def qsize(self): + """Return the approximate size of the queue (not reliable!).""" + with self.mutex: + return self._qsize() + + def empty(self): + """Return True if the queue is empty, False otherwise (not reliable!). + + This method is likely to be removed at some point. Use qsize() == 0 + as a direct substitute, but be aware that either approach risks a race + condition where a queue can grow before the result of empty() or + qsize() can be used. + + To create code that needs to wait for all queued tasks to be + completed, the preferred technique is to use the join() method. + """ + with self.mutex: + return not self._qsize() + + def full(self): + """Return True if the queue is full, False otherwise (not reliable!). + + This method is likely to be removed at some point. Use qsize() >= n + as a direct substitute, but be aware that either approach risks a race + condition where a queue can shrink before the result of full() or + qsize() can be used. + """ + with self.mutex: + return 0 < self.maxsize <= self._qsize() + + def put(self, item, block=True, timeout=None): + """Put an item into the queue. + + If optional args 'block' is true and 'timeout' is None (the default), + block if necessary until a free slot is available. If 'timeout' is + a non-negative number, it blocks at most 'timeout' seconds and raises + the FullError exception if no free slot was available within that time. + Otherwise ('block' is false), put an item on the queue if a free slot + is immediately available, else raise the FullError exception ('timeout' + is ignored in that case). 
+ """ + with self.not_full: + if self.maxsize > 0: + if not block: + if self._qsize() >= self.maxsize: + raise FullError() + elif timeout is None: + while self._qsize() >= self.maxsize: + self.not_full.wait() + elif timeout < 0: + raise ValueError("'timeout' must be a non-negative number") + else: + endtime = time() + timeout + while self._qsize() >= self.maxsize: + remaining = endtime - time() + if remaining <= 0.0: + raise FullError() + self.not_full.wait(remaining) + self._put(item) + self.unfinished_tasks += 1 + self.not_empty.notify() + + def get(self, block=True, timeout=None): + """Remove and return an item from the queue. + + If optional args 'block' is true and 'timeout' is None (the default), + block if necessary until an item is available. If 'timeout' is + a non-negative number, it blocks at most 'timeout' seconds and raises + the EmptyError exception if no item was available within that time. + Otherwise ('block' is false), return an item if one is immediately + available, else raise the EmptyError exception ('timeout' is ignored + in that case). + """ + with self.not_empty: + if not block: + if not self._qsize(): + raise EmptyError() + elif timeout is None: + while not self._qsize(): + self.not_empty.wait() + elif timeout < 0: + raise ValueError("'timeout' must be a non-negative number") + else: + endtime = time() + timeout + while not self._qsize(): + remaining = endtime - time() + if remaining <= 0.0: + raise EmptyError() + self.not_empty.wait(remaining) + item = self._get() + self.not_full.notify() + return item + + def put_nowait(self, item): + """Put an item into the queue without blocking. + + Only enqueue the item if a free slot is immediately available. + Otherwise raise the FullError exception. + """ + return self.put(item, block=False) + + def get_nowait(self): + """Remove and return an item from the queue without blocking. + + Only get an item if one is immediately available. Otherwise + raise the EmptyError exception. 
+ """ + return self.get(block=False) + + # Override these methods to implement other queue organizations + # (e.g. stack or priority queue). + # These will only be called with appropriate locks held + + # Initialize the queue representation + def _init(self, maxsize): + self.queue = deque() # type: Any + + def _qsize(self): + return len(self.queue) + + # Put a new item in the queue + def _put(self, item): + self.queue.append(item) + + # Get an item from the queue + def _get(self): + return self.queue.popleft() diff --git a/src/sentry_sdk_alpha/_types.py b/src/sentry_sdk_alpha/_types.py new file mode 100644 index 00000000000000..79260e3431f3cf --- /dev/null +++ b/src/sentry_sdk_alpha/_types.py @@ -0,0 +1,271 @@ +from typing import TYPE_CHECKING, TypeVar, Union + + +# Re-exported for compat, since code out there in the wild might use this variable. +MYPY = TYPE_CHECKING + + +SENSITIVE_DATA_SUBSTITUTE = "[Filtered]" + + +class AnnotatedValue: + """ + Meta information for a data field in the event payload. + This is to tell Relay that we have tampered with the fields value. + See: + https://github.com/getsentry/relay/blob/be12cd49a0f06ea932ed9b9f93a655de5d6ad6d1/relay-general/src/types/meta.rs#L407-L423 + """ + + __slots__ = ("value", "metadata") + + def __init__(self, value, metadata): + # type: (Optional[Any], Dict[str, Any]) -> None + self.value = value + self.metadata = metadata + + def __eq__(self, other): + # type: (Any) -> bool + if not isinstance(other, AnnotatedValue): + return False + + return self.value == other.value and self.metadata == other.metadata + + def __str__(self): + # type: (AnnotatedValue) -> str + return str({"value": str(self.value), "metadata": str(self.metadata)}) + + def __len__(self): + # type: (AnnotatedValue) -> int + if self.value is not None: + return len(self.value) + else: + return 0 + + @classmethod + def removed_because_raw_data(cls): + # type: () -> AnnotatedValue + """The value was removed because it could not be parsed. 
This is done for request body values that are not json nor a form.""" + return AnnotatedValue( + value="", + metadata={ + "rem": [ # Remark + [ + "!raw", # Unparsable raw data + "x", # The fields original value was removed + ] + ] + }, + ) + + @classmethod + def removed_because_over_size_limit(cls, value=""): + # type: (Any) -> AnnotatedValue + """ + The actual value was removed because the size of the field exceeded the configured maximum size, + for example specified with the max_request_body_size sdk option. + """ + return AnnotatedValue( + value=value, + metadata={ + "rem": [ # Remark + [ + "!config", # Because of configured maximum size + "x", # The fields original value was removed + ] + ] + }, + ) + + @classmethod + def substituted_because_contains_sensitive_data(cls): + # type: () -> AnnotatedValue + """The actual value was removed because it contained sensitive information.""" + return AnnotatedValue( + value=SENSITIVE_DATA_SUBSTITUTE, + metadata={ + "rem": [ # Remark + [ + "!config", # Because of SDK configuration (in this case the config is the hard coded removal of certain django cookies) + "s", # The fields original value was substituted + ] + ] + }, + ) + + +T = TypeVar("T") +Annotated = Union[AnnotatedValue, T] + + +if TYPE_CHECKING: + from collections.abc import Container, MutableMapping, Sequence + + from datetime import datetime + + from types import TracebackType + from typing import Any + from typing import Callable + from typing import Dict + from typing import Mapping + from typing import Optional + from typing import Type + from typing_extensions import Literal, TypedDict + + class SDKInfo(TypedDict): + name: str + version: str + packages: Sequence[Mapping[str, str]] + + # "critical" is an alias of "fatal" recognized by Relay + LogLevelStr = Literal["fatal", "critical", "error", "warning", "info", "debug"] + + Event = TypedDict( + "Event", + { + "breadcrumbs": Annotated[ + dict[Literal["values"], list[dict[str, Any]]] + ], # TODO: We can 
expand on this type + "check_in_id": str, + "contexts": dict[str, dict[str, object]], + "dist": str, + "duration": Optional[float], + "environment": str, + "errors": list[dict[str, Any]], # TODO: We can expand on this type + "event_id": str, + "exception": dict[ + Literal["values"], list[dict[str, Any]] + ], # TODO: We can expand on this type + "extra": MutableMapping[str, object], + "fingerprint": list[str], + "level": LogLevelStr, + "logentry": Mapping[str, object], + "logger": str, + "message": str, + "modules": dict[str, str], + "monitor_config": Mapping[str, object], + "monitor_slug": Optional[str], + "platform": Literal["python"], + "profile": object, # Should be sentry_sdk.profiler.Profile, but we can't import that here due to circular imports + "release": str, + "request": dict[str, object], + "sdk": Mapping[str, object], + "server_name": str, + "spans": Annotated[list[dict[str, object]]], + "stacktrace": dict[ + str, object + ], # We access this key in the code, but I am unsure whether we ever set it + "start_timestamp": datetime, + "status": Optional[str], + "tags": MutableMapping[ + str, str + ], # Tags must be less than 200 characters each + "threads": dict[ + Literal["values"], list[dict[str, Any]] + ], # TODO: We can expand on this type + "timestamp": Optional[datetime], # Must be set before sending the event + "transaction": str, + "transaction_info": Mapping[str, Any], # TODO: We can expand on this type + "type": Literal["check_in", "transaction"], + "user": dict[str, object], + "_dropped_spans": int, + }, + total=False, + ) + + ExcInfo = Union[ + tuple[Type[BaseException], BaseException, Optional[TracebackType]], + tuple[None, None, None], + ] + + # TODO: Make a proper type definition for this (PRs welcome!) 
+ Hint = Dict[str, Any] + + Log = TypedDict( + "Log", + { + "severity_text": str, + "severity_number": int, + "body": str, + "attributes": dict[str, str | bool | float | int], + "time_unix_nano": int, + "trace_id": Optional[str], + }, + ) + + # TODO: Make a proper type definition for this (PRs welcome!) + Breadcrumb = Dict[str, Any] + + # TODO: Make a proper type definition for this (PRs welcome!) + BreadcrumbHint = Dict[str, Any] + + # TODO: Make a proper type definition for this (PRs welcome!) + SamplingContext = Dict[str, Any] + + EventProcessor = Callable[[Event, Hint], Optional[Event]] + ErrorProcessor = Callable[[Event, ExcInfo], Optional[Event]] + BreadcrumbProcessor = Callable[[Breadcrumb, BreadcrumbHint], Optional[Breadcrumb]] + TransactionProcessor = Callable[[Event, Hint], Optional[Event]] + LogProcessor = Callable[[Log, Hint], Optional[Log]] + + TracesSampler = Callable[[SamplingContext], Union[float, int, bool]] + + # https://github.com/python/mypy/issues/5710 + NotImplementedType = Any + + EventDataCategory = Literal[ + "default", + "error", + "crash", + "transaction", + "security", + "attachment", + "session", + "internal", + "profile", + "profile_chunk", + "monitor", + "span", + "log", + ] + SessionStatus = Literal["ok", "exited", "crashed", "abnormal"] + + ContinuousProfilerMode = Literal["thread", "gevent", "unknown"] + ProfilerMode = Union[ContinuousProfilerMode, Literal["sleep"]] + + MonitorConfigScheduleType = Literal["crontab", "interval"] + MonitorConfigScheduleUnit = Literal[ + "year", + "month", + "week", + "day", + "hour", + "minute", + "second", # not supported in Sentry and will result in a warning + ] + + MonitorConfigSchedule = TypedDict( + "MonitorConfigSchedule", + { + "type": MonitorConfigScheduleType, + "value": Union[int, str], + "unit": MonitorConfigScheduleUnit, + }, + total=False, + ) + + MonitorConfig = TypedDict( + "MonitorConfig", + { + "schedule": MonitorConfigSchedule, + "timezone": str, + "checkin_margin": int, + 
"max_runtime": int, + "failure_issue_threshold": int, + "recovery_threshold": int, + }, + total=False, + ) + + HttpStatusCodeRange = Union[int, Container[int]] + + OtelExtractedSpanData = tuple[str, str, Optional[str], Optional[int], Optional[str]] diff --git a/src/sentry_sdk_alpha/_werkzeug.py b/src/sentry_sdk_alpha/_werkzeug.py new file mode 100644 index 00000000000000..0fa3d611f154b4 --- /dev/null +++ b/src/sentry_sdk_alpha/_werkzeug.py @@ -0,0 +1,98 @@ +""" +Copyright (c) 2007 by the Pallets team. + +Some rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +* Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND +CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, +BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF +USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. +""" + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Dict + from typing import Iterator + from typing import Tuple + + +# +# `get_headers` comes from `werkzeug.datastructures.EnvironHeaders` +# https://github.com/pallets/werkzeug/blob/0.14.1/werkzeug/datastructures.py#L1361 +# +# We need this function because Django does not give us a "pure" http header +# dict. So we might as well use it for all WSGI integrations. +# +def _get_headers(environ): + # type: (Dict[str, str]) -> Iterator[Tuple[str, str]] + """ + Returns only proper HTTP headers. + """ + for key, value in environ.items(): + key = str(key) + if key.startswith("HTTP_") and key not in ( + "HTTP_CONTENT_TYPE", + "HTTP_CONTENT_LENGTH", + ): + yield key[5:].replace("_", "-").title(), value + elif key in ("CONTENT_TYPE", "CONTENT_LENGTH"): + yield key.replace("_", "-").title(), value + + +# +# `get_host` comes from `werkzeug.wsgi.get_host` +# https://github.com/pallets/werkzeug/blob/1.0.1/src/werkzeug/wsgi.py#L145 +# +def get_host(environ, use_x_forwarded_for=False): + # type: (Dict[str, str], bool) -> str + """ + Return the host for the given WSGI environment. 
+ """ + if use_x_forwarded_for and "HTTP_X_FORWARDED_HOST" in environ: + rv = environ["HTTP_X_FORWARDED_HOST"] + if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"): + rv = rv[:-3] + elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"): + rv = rv[:-4] + elif environ.get("HTTP_HOST"): + rv = environ["HTTP_HOST"] + if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"): + rv = rv[:-3] + elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"): + rv = rv[:-4] + elif environ.get("SERVER_NAME"): + rv = environ["SERVER_NAME"] + if (environ["wsgi.url_scheme"], environ["SERVER_PORT"]) not in ( + ("https", "443"), + ("http", "80"), + ): + rv += ":" + environ["SERVER_PORT"] + else: + # In spite of the WSGI spec, SERVER_NAME might not be present. + rv = "unknown" + + return rv diff --git a/src/sentry_sdk_alpha/ai/__init__.py b/src/sentry_sdk_alpha/ai/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/src/sentry_sdk_alpha/ai/monitoring.py b/src/sentry_sdk_alpha/ai/monitoring.py new file mode 100644 index 00000000000000..7122b45d998129 --- /dev/null +++ b/src/sentry_sdk_alpha/ai/monitoring.py @@ -0,0 +1,120 @@ +import inspect +from functools import wraps + +from sentry_sdk_alpha.consts import SPANDATA +import sentry_sdk_alpha.utils +from sentry_sdk_alpha import start_span +from sentry_sdk_alpha.tracing import Span +from sentry_sdk_alpha.utils import ContextVar + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Optional, Callable, Any + +_ai_pipeline_name = ContextVar("ai_pipeline_name", default=None) + + +def set_ai_pipeline_name(name): + # type: (Optional[str]) -> None + _ai_pipeline_name.set(name) + + +def get_ai_pipeline_name(): + # type: () -> Optional[str] + return _ai_pipeline_name.get() + + +def ai_track(description, **span_kwargs): + # type: (str, Any) -> Callable[..., Any] + def decorator(f): + # type: (Callable[..., Any]) -> Callable[..., Any] + def 
sync_wrapped(*args, **kwargs): + # type: (Any, Any) -> Any + curr_pipeline = _ai_pipeline_name.get() + op = span_kwargs.get("op", "ai.run" if curr_pipeline else "ai.pipeline") + + with start_span( + name=description, op=op, only_if_parent=True, **span_kwargs + ) as span: + for k, v in kwargs.pop("sentry_tags", {}).items(): + span.set_tag(k, v) + for k, v in kwargs.pop("sentry_data", {}).items(): + span.set_attribute(k, v) + if curr_pipeline: + span.set_attribute(SPANDATA.AI_PIPELINE_NAME, curr_pipeline) + return f(*args, **kwargs) + else: + _ai_pipeline_name.set(description) + try: + res = f(*args, **kwargs) + except Exception as e: + event, hint = sentry_sdk_alpha.utils.event_from_exception( + e, + client_options=sentry_sdk_alpha.get_client().options, + mechanism={"type": "ai_monitoring", "handled": False}, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) + raise e from None + finally: + _ai_pipeline_name.set(None) + return res + + async def async_wrapped(*args, **kwargs): + # type: (Any, Any) -> Any + curr_pipeline = _ai_pipeline_name.get() + op = span_kwargs.get("op", "ai.run" if curr_pipeline else "ai.pipeline") + + with start_span( + name=description, op=op, only_if_parent=True, **span_kwargs + ) as span: + for k, v in kwargs.pop("sentry_tags", {}).items(): + span.set_tag(k, v) + for k, v in kwargs.pop("sentry_data", {}).items(): + span.set_attribute(k, v) + if curr_pipeline: + span.set_attribute(SPANDATA.AI_PIPELINE_NAME, curr_pipeline) + return await f(*args, **kwargs) + else: + _ai_pipeline_name.set(description) + try: + res = await f(*args, **kwargs) + except Exception as e: + event, hint = sentry_sdk_alpha.utils.event_from_exception( + e, + client_options=sentry_sdk_alpha.get_client().options, + mechanism={"type": "ai_monitoring", "handled": False}, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) + raise e from None + finally: + _ai_pipeline_name.set(None) + return res + + if inspect.iscoroutinefunction(f): + return wraps(f)(async_wrapped) + 
else: + return wraps(f)(sync_wrapped) + + return decorator + + +def record_token_usage( + span, prompt_tokens=None, completion_tokens=None, total_tokens=None +): + # type: (Span, Optional[int], Optional[int], Optional[int]) -> None + ai_pipeline_name = get_ai_pipeline_name() + if ai_pipeline_name: + span.set_attribute(SPANDATA.AI_PIPELINE_NAME, ai_pipeline_name) + if prompt_tokens is not None: + span.set_attribute(SPANDATA.AI_PROMPT_TOKENS_USED, prompt_tokens) + if completion_tokens is not None: + span.set_attribute(SPANDATA.AI_COMPLETION_TOKENS_USED, completion_tokens) + if ( + total_tokens is None + and prompt_tokens is not None + and completion_tokens is not None + ): + total_tokens = prompt_tokens + completion_tokens + if total_tokens is not None: + span.set_attribute(SPANDATA.AI_TOTAL_TOKENS_USED, total_tokens) diff --git a/src/sentry_sdk_alpha/ai/utils.py b/src/sentry_sdk_alpha/ai/utils.py new file mode 100644 index 00000000000000..b1789aa9582915 --- /dev/null +++ b/src/sentry_sdk_alpha/ai/utils.py @@ -0,0 +1,32 @@ +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + +from sentry_sdk_alpha.tracing import Span +from sentry_sdk_alpha.utils import logger + + +def _normalize_data(data): + # type: (Any) -> Any + + # convert pydantic data (e.g. 
OpenAI v1+) to json compatible format + if hasattr(data, "model_dump"): + try: + return data.model_dump() + except Exception as e: + logger.warning("Could not convert pydantic data to JSON: %s", e) + return data + if isinstance(data, list): + if len(data) == 1: + return _normalize_data(data[0]) # remove empty dimensions + return list(_normalize_data(x) for x in data) + if isinstance(data, dict): + return {k: _normalize_data(v) for (k, v) in data.items()} + return data + + +def set_data_normalized(span, key, value): + # type: (Span, str, Any) -> None + normalized = _normalize_data(value) + span.set_attribute(key, normalized) diff --git a/src/sentry_sdk_alpha/api.py b/src/sentry_sdk_alpha/api.py new file mode 100644 index 00000000000000..deb4649ab5a8b7 --- /dev/null +++ b/src/sentry_sdk_alpha/api.py @@ -0,0 +1,339 @@ +import inspect +from contextlib import contextmanager + +from sentry_sdk_alpha import tracing_utils, Client +from sentry_sdk_alpha._init_implementation import init +from sentry_sdk_alpha.tracing import trace +from sentry_sdk_alpha.crons import monitor + +# TODO-neel-potel make 2 scope strategies/impls and switch +from sentry_sdk_alpha.scope import Scope as BaseScope +from sentry_sdk_alpha.opentelemetry.scope import ( + PotelScope as Scope, + new_scope, + isolation_scope, + use_scope, + use_isolation_scope, +) + + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from collections.abc import Mapping + + from typing import Any + from typing import Dict + from typing import Optional + from typing import Callable + from typing import TypeVar + from typing import Union + from typing import Generator + + import sentry_sdk_alpha + + T = TypeVar("T") + F = TypeVar("F", bound=Callable[..., Any]) + + +# When changing this, update __all__ in __init__.py too +__all__ = [ + "init", + "add_attachment", + "add_breadcrumb", + "capture_event", + "capture_exception", + "capture_message", + "continue_trace", + "flush", + "get_baggage", + "get_client", + 
"get_global_scope", + "get_isolation_scope", + "get_current_scope", + "get_current_span", + "get_traceparent", + "is_initialized", + "isolation_scope", + "last_event_id", + "new_scope", + "set_context", + "set_extra", + "set_level", + "set_tag", + "set_tags", + "set_user", + "start_span", + "start_transaction", + "trace", + "monitor", + "use_scope", + "use_isolation_scope", +] + + +def scopemethod(f): + # type: (F) -> F + f.__doc__ = "%s\n\n%s" % ( + "Alias for :py:meth:`sentry_sdk.Scope.%s`" % f.__name__, + inspect.getdoc(getattr(Scope, f.__name__)), + ) + return f + + +def clientmethod(f): + # type: (F) -> F + f.__doc__ = "%s\n\n%s" % ( + "Alias for :py:meth:`sentry_sdk.Client.%s`" % f.__name__, + inspect.getdoc(getattr(Client, f.__name__)), + ) + return f + + +@scopemethod +def get_client(): + # type: () -> sentry_sdk.client.BaseClient + return Scope.get_client() + + +def is_initialized(): + # type: () -> bool + """ + .. versionadded:: 2.0.0 + + Returns whether Sentry has been initialized or not. + + If a client is available and the client is active + (meaning it is configured to send data) then + Sentry is initialized. + """ + return get_client().is_active() + + +@scopemethod +def get_global_scope(): + # type: () -> BaseScope + return Scope.get_global_scope() + + +@scopemethod +def get_isolation_scope(): + # type: () -> Scope + return Scope.get_isolation_scope() + + +@scopemethod +def get_current_scope(): + # type: () -> Scope + return Scope.get_current_scope() + + +@scopemethod +def last_event_id(): + # type: () -> Optional[str] + """ + See :py:meth:`sentry_sdk.Scope.last_event_id` documentation regarding + this method's limitations. + """ + return Scope.last_event_id() + + +@scopemethod +def capture_event( + event, # type: sentry_sdk._types.Event + hint=None, # type: Optional[sentry_sdk._types.Hint] + scope=None, # type: Optional[Any] + **scope_kwargs, # type: Any +): + # type: (...) 
-> Optional[str] + return get_current_scope().capture_event(event, hint, scope=scope, **scope_kwargs) + + +@scopemethod +def capture_message( + message, # type: str + level=None, # type: Optional[sentry_sdk._types.LogLevelStr] + scope=None, # type: Optional[Any] + **scope_kwargs, # type: Any +): + # type: (...) -> Optional[str] + return get_current_scope().capture_message( + message, level, scope=scope, **scope_kwargs + ) + + +@scopemethod +def capture_exception( + error=None, # type: Optional[Union[BaseException, sentry_sdk._types.ExcInfo]] + scope=None, # type: Optional[Any] + **scope_kwargs, # type: Any +): + # type: (...) -> Optional[str] + return get_current_scope().capture_exception(error, scope=scope, **scope_kwargs) + + +@scopemethod +def add_attachment( + bytes=None, # type: Union[None, bytes, Callable[[], bytes]] + filename=None, # type: Optional[str] + path=None, # type: Optional[str] + content_type=None, # type: Optional[str] + add_to_transactions=False, # type: bool +): + # type: (...) -> None + return get_isolation_scope().add_attachment( + bytes, filename, path, content_type, add_to_transactions + ) + + +@scopemethod +def add_breadcrumb( + crumb=None, # type: Optional[sentry_sdk._types.Breadcrumb] + hint=None, # type: Optional[sentry_sdk._types.BreadcrumbHint] + **kwargs, # type: Any +): + # type: (...) 
-> None + return get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs) + + +@scopemethod +def set_tag(key, value): + # type: (str, Any) -> None + return get_isolation_scope().set_tag(key, value) + + +@scopemethod +def set_tags(tags): + # type: (Mapping[str, object]) -> None + return get_isolation_scope().set_tags(tags) + + +@scopemethod +def set_context(key, value): + # type: (str, Dict[str, Any]) -> None + return get_isolation_scope().set_context(key, value) + + +@scopemethod +def set_extra(key, value): + # type: (str, Any) -> None + return get_isolation_scope().set_extra(key, value) + + +@scopemethod +def set_user(value): + # type: (Optional[Dict[str, Any]]) -> None + return get_isolation_scope().set_user(value) + + +@scopemethod +def set_level(value): + # type: (sentry_sdk._types.LogLevelStr) -> None + return get_isolation_scope().set_level(value) + + +@clientmethod +def flush( + timeout=None, # type: Optional[float] + callback=None, # type: Optional[Callable[[int, float], None]] +): + # type: (...) -> None + return get_client().flush(timeout=timeout, callback=callback) + + +def start_span(**kwargs): + # type: (Any) -> sentry_sdk.tracing.Span + """ + Start and return a span. + + This is the entry point to manual tracing instrumentation. + + A tree structure can be built by adding child spans to the span. + To start a new child span within the span, call the `start_child()` method. + + When used as a context manager, spans are automatically finished at the end + of the `with` block. If not using context managers, call the `finish()` + method. + """ + return get_current_scope().start_span(**kwargs) + + +def start_transaction( + transaction=None, # type: Optional[sentry_sdk.tracing.Span] + **kwargs, # type: Any +): + # type: (...) -> sentry_sdk.tracing.Span + """ + .. deprecated:: 3.0.0 + This function is deprecated and will be removed in a future release. + Use :py:meth:`sentry_sdk.start_span` instead. + + Start and return a transaction on the current scope. 
+ + Start an existing transaction if given, otherwise create and start a new + transaction with kwargs. + + This is the entry point to manual tracing instrumentation. + + A tree structure can be built by adding child spans to the transaction, + and child spans to other spans. To start a new child span within the + transaction or any span, call the respective `.start_child()` method. + + Every child span must be finished before the transaction is finished, + otherwise the unfinished spans are discarded. + + When used as context managers, spans and transactions are automatically + finished at the end of the `with` block. If not using context managers, + call the `.finish()` method. + + When the transaction is finished, it will be sent to Sentry with all its + finished child spans. + + :param transaction: The transaction to start. If omitted, we create and + start a new transaction. + :param kwargs: Optional keyword arguments to be passed to the Transaction + constructor. See :py:class:`sentry_sdk.tracing.Transaction` for + available arguments. + """ + return start_span( + span=transaction, + **kwargs, + ) + + +def get_current_span(scope=None): + # type: (Optional[Scope]) -> Optional[sentry_sdk.tracing.Span] + """ + Returns the currently active span if there is one running, otherwise `None` + """ + return tracing_utils.get_current_span(scope) + + +def get_traceparent(): + # type: () -> Optional[str] + """ + Returns the traceparent either from the active span or from the scope. + """ + return get_current_scope().get_traceparent() + + +def get_baggage(): + # type: () -> Optional[str] + """ + Returns Baggage either from the active span or from the scope. 
+ """ + baggage = get_current_scope().get_baggage() + if baggage is not None: + return baggage.serialize() + + return None + + +@contextmanager +def continue_trace(environ_or_headers): + # type: (Dict[str, Any]) -> Generator[None, None, None] + """ + Sets the propagation context from environment or headers to continue an incoming trace. + """ + with get_isolation_scope().continue_trace(environ_or_headers): + yield diff --git a/src/sentry_sdk_alpha/attachments.py b/src/sentry_sdk_alpha/attachments.py new file mode 100644 index 00000000000000..2e0b2ee89432d4 --- /dev/null +++ b/src/sentry_sdk_alpha/attachments.py @@ -0,0 +1,75 @@ +import os +import mimetypes + +from sentry_sdk_alpha.envelope import Item, PayloadRef + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Optional, Union, Callable + + +class Attachment: + """Additional files/data to send along with an event. + + This class stores attachments that can be sent along with an event. Attachments are files or other data, e.g. + config or log files, that are relevant to an event. Attachments are set on the ``Scope``, and are sent along with + all non-transaction events (or all events including transactions if ``add_to_transactions`` is ``True``) that are + captured within the ``Scope``. + + To add an attachment to a ``Scope``, use :py:meth:`sentry_sdk.Scope.add_attachment`. The parameters for + ``add_attachment`` are the same as the parameters for this class's constructor. + + :param bytes: Raw bytes of the attachment, or a function that returns the raw bytes. Must be provided unless + ``path`` is provided. + :param filename: The filename of the attachment. Must be provided unless ``path`` is provided. + :param path: Path to a file to attach. Must be provided unless ``bytes`` is provided. + :param content_type: The content type of the attachment. If not provided, it will be guessed from the ``filename`` + parameter, if available, or the ``path`` parameter if ``filename`` is ``None``. 
+    :param add_to_transactions: Whether to add this attachment to transactions. Defaults to ``False``.
+    """
+
+    def __init__(
+        self,
+        bytes=None,  # type: Union[None, bytes, Callable[[], bytes]]
+        filename=None,  # type: Optional[str]
+        path=None,  # type: Optional[str]
+        content_type=None,  # type: Optional[str]
+        add_to_transactions=False,  # type: bool
+    ):
+        # type: (...) -> None
+        if bytes is None and path is None:
+            raise TypeError("path or raw bytes required for attachment")
+        if filename is None and path is not None:
+            filename = os.path.basename(path)
+        if filename is None:
+            raise TypeError("filename is required for attachment")
+        if content_type is None:
+            content_type = mimetypes.guess_type(filename)[0]
+        self.bytes = bytes
+        self.filename = filename
+        self.path = path
+        self.content_type = content_type
+        self.add_to_transactions = add_to_transactions
+
+    def to_envelope_item(self):
+        # type: () -> Item
+        """Returns an envelope item for this attachment."""
+        payload = None  # type: Union[None, PayloadRef, bytes]
+        if self.bytes is not None:
+            if callable(self.bytes):
+                payload = self.bytes()
+            else:
+                payload = self.bytes
+        else:
+            payload = PayloadRef(path=self.path)
+        return Item(
+            payload=payload,
+            type="attachment",
+            content_type=self.content_type,
+            filename=self.filename,
+        )
+
+    def __repr__(self):
+        # type: () -> str
+        return "<Attachment %r>" % (self.filename,)
diff --git a/src/sentry_sdk_alpha/client.py b/src/sentry_sdk_alpha/client.py
new file mode 100644
index 00000000000000..a261a374df26cb
--- /dev/null
+++ b/src/sentry_sdk_alpha/client.py
@@ -0,0 +1,1007 @@
+import os
+import uuid
+import random
+import socket
+from collections.abc import Mapping
+from datetime import datetime, timezone
+from importlib import import_module
+from typing import TYPE_CHECKING, List, Dict, cast, overload
+
+from sentry_sdk_alpha._compat import check_uwsgi_thread_support
+from sentry_sdk_alpha.utils import (
+    AnnotatedValue,
+    ContextVar,
+    capture_internal_exceptions,
+
current_stacktrace, + env_to_bool, + format_timestamp, + get_sdk_name, + get_type_name, + get_default_release, + handle_in_app, + logger, +) +from sentry_sdk_alpha.serializer import serialize +from sentry_sdk_alpha.tracing import trace +from sentry_sdk_alpha.transport import BaseHttpTransport, make_transport +from sentry_sdk_alpha.consts import ( + SPANDATA, + DEFAULT_MAX_VALUE_LENGTH, + DEFAULT_OPTIONS, + VERSION, + ClientConstructor, +) +from sentry_sdk_alpha.integrations import setup_integrations +from sentry_sdk_alpha.integrations.dedupe import DedupeIntegration +from sentry_sdk_alpha.sessions import SessionFlusher +from sentry_sdk_alpha.envelope import Envelope + +from sentry_sdk_alpha.profiler.continuous_profiler import setup_continuous_profiler +from sentry_sdk_alpha.profiler.transaction_profiler import ( + has_profiling_enabled, + Profile, + setup_profiler, +) +from sentry_sdk_alpha.scrubber import EventScrubber +from sentry_sdk_alpha.monitor import Monitor +from sentry_sdk_alpha.spotlight import setup_spotlight + +if TYPE_CHECKING: + from typing import Any + from typing import Callable + from typing import Optional + from typing import Sequence + from typing import Type + from typing import Union + from typing import TypeVar + + from sentry_sdk_alpha._types import Event, Hint, SDKInfo, Log + from sentry_sdk_alpha.integrations import Integration + from sentry_sdk_alpha.scope import Scope + from sentry_sdk_alpha.session import Session + from sentry_sdk_alpha.spotlight import SpotlightClient + from sentry_sdk_alpha.transport import Transport + from sentry_sdk_alpha._log_batcher import LogBatcher + + I = TypeVar("I", bound=Integration) # noqa: E741 + +_client_init_debug = ContextVar("client_init_debug") + + +SDK_INFO = { + "name": "sentry.python", # SDK name will be overridden after integrations have been loaded with sentry_sdk.integrations.setup_integrations() + "version": VERSION, + "packages": [{"name": "pypi:sentry-sdk", "version": VERSION}], +} # type: 
SDKInfo + + +def _get_options(*args, **kwargs): + # type: (*Optional[str], **Any) -> Dict[str, Any] + if args and (isinstance(args[0], (bytes, str)) or args[0] is None): + dsn = args[0] # type: Optional[str] + args = args[1:] + else: + dsn = None + + if len(args) > 1: + raise TypeError("Only single positional argument is expected") + + rv = dict(DEFAULT_OPTIONS) + options = dict(*args, **kwargs) + if dsn is not None and options.get("dsn") is None: + options["dsn"] = dsn + + for key, value in options.items(): + if key not in rv: + raise TypeError("Unknown option %r" % (key,)) + + rv[key] = value + + if rv["dsn"] is None: + rv["dsn"] = os.environ.get("SENTRY_DSN") + + if rv["release"] is None: + rv["release"] = get_default_release() + + if rv["environment"] is None: + rv["environment"] = os.environ.get("SENTRY_ENVIRONMENT") or "production" + + if rv["debug"] is None: + rv["debug"] = env_to_bool(os.environ.get("SENTRY_DEBUG", "False"), strict=True) + + if rv["server_name"] is None and hasattr(socket, "gethostname"): + rv["server_name"] = socket.gethostname() + + if rv["project_root"] is None: + try: + project_root = os.getcwd() + except Exception: + project_root = None + + rv["project_root"] = project_root + + if rv["event_scrubber"] is None: + rv["event_scrubber"] = EventScrubber( + send_default_pii=( + False if rv["send_default_pii"] is None else rv["send_default_pii"] + ) + ) + + if rv["socket_options"] and not isinstance(rv["socket_options"], list): + logger.warning( + "Ignoring socket_options because of unexpected format. See urllib3.HTTPConnection.socket_options for the expected format." + ) + rv["socket_options"] = None + + return rv + + +class BaseClient: + """ + .. versionadded:: 2.0.0 + + The basic definition of a client that is used for sending data to Sentry. 
+ """ + + spotlight = None # type: Optional[SpotlightClient] + + def __init__(self, options=None): + # type: (Optional[Dict[str, Any]]) -> None + self.options = ( + options if options is not None else DEFAULT_OPTIONS + ) # type: Dict[str, Any] + + self.transport = None # type: Optional[Transport] + self.monitor = None # type: Optional[Monitor] + self.log_batcher = None # type: Optional[LogBatcher] + + def __getstate__(self, *args, **kwargs): + # type: (*Any, **Any) -> Any + return {"options": {}} + + def __setstate__(self, *args, **kwargs): + # type: (*Any, **Any) -> None + pass + + @property + def dsn(self): + # type: () -> Optional[str] + return None + + def should_send_default_pii(self): + # type: () -> bool + return False + + def is_active(self): + # type: () -> bool + """ + .. versionadded:: 2.0.0 + + Returns whether the client is active (able to send data to Sentry) + """ + return False + + def capture_event(self, *args, **kwargs): + # type: (*Any, **Any) -> Optional[str] + return None + + def _capture_experimental_log(self, scope, log): + # type: (Scope, Log) -> None + pass + + def capture_session(self, *args, **kwargs): + # type: (*Any, **Any) -> None + return None + + if TYPE_CHECKING: + + @overload + def get_integration(self, name_or_class): + # type: (str) -> Optional[Integration] + ... + + @overload + def get_integration(self, name_or_class): + # type: (type[I]) -> Optional[I] + ... + + def get_integration(self, name_or_class): + # type: (Union[str, type[Integration]]) -> Optional[Integration] + return None + + def close(self, *args, **kwargs): + # type: (*Any, **Any) -> None + return None + + def flush(self, *args, **kwargs): + # type: (*Any, **Any) -> None + return None + + def __enter__(self): + # type: () -> BaseClient + return self + + def __exit__(self, exc_type, exc_value, tb): + # type: (Any, Any, Any) -> None + return None + + +class NonRecordingClient(BaseClient): + """ + .. 
versionadded:: 2.0.0 + + A client that does not send any events to Sentry. This is used as a fallback when the Sentry SDK is not yet initialized. + """ + + pass + + +class _Client(BaseClient): + """ + The client is internally responsible for capturing the events and + forwarding them to sentry through the configured transport. It takes + the client options as keyword arguments and optionally the DSN as first + argument. + + Alias of :py:class:`sentry_sdk.Client`. (Was created for better intelisense support) + """ + + def __init__(self, *args, **kwargs): + # type: (*Any, **Any) -> None + super(_Client, self).__init__(options=get_options(*args, **kwargs)) + self._init_impl() + + def __getstate__(self): + # type: () -> Any + return {"options": self.options} + + def __setstate__(self, state): + # type: (Any) -> None + self.options = state["options"] + self._init_impl() + + def _setup_instrumentation(self, functions_to_trace): + # type: (Sequence[Dict[str, str]]) -> None + """ + Instruments the functions given in the list `functions_to_trace` with the `@sentry_sdk.tracing.trace` decorator. 
+ """ + for function in functions_to_trace: + class_name = None + function_qualname = function["qualified_name"] + module_name, function_name = function_qualname.rsplit(".", 1) + + try: + # Try to import module and function + # ex: "mymodule.submodule.funcname" + + module_obj = import_module(module_name) + function_obj = getattr(module_obj, function_name) + setattr(module_obj, function_name, trace(function_obj)) + logger.debug("Enabled tracing for %s", function_qualname) + except ModuleNotFoundError: + try: + # Try to import a class + # ex: "mymodule.submodule.MyClassName.member_function" + + module_name, class_name = module_name.rsplit(".", 1) + module_obj = import_module(module_name) + class_obj = getattr(module_obj, class_name) + function_obj = getattr(class_obj, function_name) + function_type = type(class_obj.__dict__[function_name]) + traced_function = trace(function_obj) + + if function_type in (staticmethod, classmethod): + traced_function = staticmethod(traced_function) + + setattr(class_obj, function_name, traced_function) + setattr(module_obj, class_name, class_obj) + logger.debug("Enabled tracing for %s", function_qualname) + + except Exception as e: + logger.warning( + "Can not enable tracing for '%s'. (%s) Please check your `functions_to_trace` parameter.", + function_qualname, + e, + ) + + except Exception as e: + logger.warning( + "Can not enable tracing for '%s'. 
(%s) Please check your `functions_to_trace` parameter.", + function_qualname, + e, + ) + + def _init_impl(self): + # type: () -> None + old_debug = _client_init_debug.get(False) + + def _capture_envelope(envelope): + # type: (Envelope) -> None + if self.transport is not None: + self.transport.capture_envelope(envelope) + + try: + _client_init_debug.set(self.options["debug"]) + self.transport = make_transport(self.options) + + self.monitor = None + if self.transport: + if self.options["enable_backpressure_handling"]: + self.monitor = Monitor(self.transport) + + self.session_flusher = SessionFlusher(capture_func=_capture_envelope) + + experiments = self.options.get("_experiments", {}) + self.log_batcher = None + if experiments.get("enable_logs", False): + from sentry_sdk_alpha._log_batcher import LogBatcher + + self.log_batcher = LogBatcher(capture_func=_capture_envelope) + + max_request_body_size = ("always", "never", "small", "medium") + if self.options["max_request_body_size"] not in max_request_body_size: + raise ValueError( + "Invalid value for max_request_body_size. 
Must be one of {}".format( + max_request_body_size + ) + ) + + self.integrations = setup_integrations( + self.options["integrations"], + with_defaults=self.options["default_integrations"], + with_auto_enabling_integrations=self.options[ + "auto_enabling_integrations" + ], + disabled_integrations=self.options["disabled_integrations"], + ) + + spotlight_config = self.options.get("spotlight") + if spotlight_config is None and "SENTRY_SPOTLIGHT" in os.environ: + spotlight_env_value = os.environ["SENTRY_SPOTLIGHT"] + spotlight_config = env_to_bool(spotlight_env_value, strict=True) + self.options["spotlight"] = ( + spotlight_config + if spotlight_config is not None + else spotlight_env_value + ) + + if self.options.get("spotlight"): + self.spotlight = setup_spotlight(self.options) + if not self.options["dsn"]: + sample_all = lambda *_args, **_kwargs: 1.0 + self.options["send_default_pii"] = True + self.options["error_sampler"] = sample_all + self.options["traces_sampler"] = sample_all + self.options["profiles_sampler"] = sample_all + + sdk_name = get_sdk_name(list(self.integrations.keys())) + SDK_INFO["name"] = sdk_name + logger.debug("Setting SDK name to '%s'", sdk_name) + + if has_profiling_enabled(self.options): + try: + setup_profiler(self.options) + except Exception as e: + logger.debug("Can not set up profiler. (%s)", e) + else: + try: + setup_continuous_profiler( + self.options, + sdk_info=SDK_INFO, + capture_func=_capture_envelope, + ) + except Exception as e: + logger.debug("Can not set up continuous profiler. 
(%s)", e) + + from sentry_sdk_alpha.opentelemetry.tracing import ( + patch_readable_span, + setup_sentry_tracing, + ) + + patch_readable_span() + setup_sentry_tracing() + finally: + _client_init_debug.set(old_debug) + + self._setup_instrumentation(self.options.get("functions_to_trace", [])) + + if ( + self.monitor + or self.log_batcher + or has_profiling_enabled(self.options) + or isinstance(self.transport, BaseHttpTransport) + ): + # If we have anything on that could spawn a background thread, we + # need to check if it's safe to use them. + check_uwsgi_thread_support() + + def is_active(self): + # type: () -> bool + """ + .. versionadded:: 2.0.0 + + Returns whether the client is active (able to send data to Sentry) + """ + return True + + def should_send_default_pii(self): + # type: () -> bool + """ + .. versionadded:: 2.0.0 + + Returns whether the client should send default PII (Personally Identifiable Information) data to Sentry. + """ + return self.options.get("send_default_pii") or False + + @property + def dsn(self): + # type: () -> Optional[str] + """Returns the configured DSN as string.""" + return self.options["dsn"] + + def _prepare_event( + self, + event, # type: Event + hint, # type: Hint + scope, # type: Optional[Scope] + ): + # type: (...) 
-> Optional[Event] + + previous_total_spans = None # type: Optional[int] + previous_total_breadcrumbs = None # type: Optional[int] + + if event.get("timestamp") is None: + event["timestamp"] = datetime.now(timezone.utc) + + if scope is not None: + is_transaction = event.get("type") == "transaction" + spans_before = len(cast(List[Dict[str, object]], event.get("spans", []))) + event_ = scope.apply_to_event(event, hint, self.options) + + # one of the event/error processors returned None + if event_ is None: + if self.transport: + self.transport.record_lost_event( + "event_processor", + data_category=("transaction" if is_transaction else "error"), + ) + if is_transaction: + self.transport.record_lost_event( + "event_processor", + data_category="span", + quantity=spans_before + 1, # +1 for the transaction itself + ) + return None + + event = event_ # type: Optional[Event] # type: ignore[no-redef] + spans_delta = spans_before - len( + cast(List[Dict[str, object]], event.get("spans", [])) + ) + if is_transaction and spans_delta > 0 and self.transport is not None: + self.transport.record_lost_event( + "event_processor", data_category="span", quantity=spans_delta + ) + + dropped_spans = event.pop("_dropped_spans", 0) + spans_delta # type: int + if dropped_spans > 0: + previous_total_spans = spans_before + dropped_spans + if scope._n_breadcrumbs_truncated > 0: + breadcrumbs = event.get("breadcrumbs", {}) + values = ( + breadcrumbs.get("values", []) + if not isinstance(breadcrumbs, AnnotatedValue) + else [] + ) + previous_total_breadcrumbs = ( + len(values) + scope._n_breadcrumbs_truncated + ) + + if ( + self.options["attach_stacktrace"] + and "exception" not in event + and "stacktrace" not in event + and "threads" not in event + ): + with capture_internal_exceptions(): + event["threads"] = { + "values": [ + { + "stacktrace": current_stacktrace( + include_local_variables=self.options.get( + "include_local_variables", True + ), + max_value_length=self.options.get( + 
"max_value_length", DEFAULT_MAX_VALUE_LENGTH + ), + ), + "crashed": False, + "current": True, + } + ] + } + + for key in "release", "environment", "server_name", "dist": + if event.get(key) is None and self.options[key] is not None: + event[key] = str(self.options[key]).strip() + if event.get("sdk") is None: + sdk_info = dict(SDK_INFO) + sdk_info["integrations"] = sorted(self.integrations.keys()) + event["sdk"] = sdk_info + + if event.get("platform") is None: + event["platform"] = "python" + + event = handle_in_app( + event, + self.options["in_app_exclude"], + self.options["in_app_include"], + self.options["project_root"], + ) + + if event is not None: + event_scrubber = self.options["event_scrubber"] + if event_scrubber: + event_scrubber.scrub_event(event) + + if previous_total_spans is not None: + event["spans"] = AnnotatedValue( + event.get("spans", []), {"len": previous_total_spans} + ) + if previous_total_breadcrumbs is not None: + event["breadcrumbs"] = AnnotatedValue( + event.get("breadcrumbs", []), {"len": previous_total_breadcrumbs} + ) + # Postprocess the event here so that annotated types do + # generally not surface in before_send + if event is not None: + event = cast( + "Event", + serialize( + cast("Dict[str, Any]", event), + max_request_body_size=self.options.get("max_request_body_size"), + max_value_length=self.options.get("max_value_length"), + custom_repr=self.options.get("custom_repr"), + ), + ) + + before_send = self.options["before_send"] + if ( + before_send is not None + and event is not None + and event.get("type") != "transaction" + ): + new_event = None # type: Optional[Event] + with capture_internal_exceptions(): + new_event = before_send(event, hint or {}) + if new_event is None: + logger.info("before send dropped event") + if self.transport: + self.transport.record_lost_event( + "before_send", data_category="error" + ) + + # If this is an exception, reset the DedupeIntegration. 
It still + # remembers the dropped exception as the last exception, meaning + # that if the same exception happens again and is not dropped + # in before_send, it'd get dropped by DedupeIntegration. + if event.get("exception"): + DedupeIntegration.reset_last_seen() + + event = new_event # type: Optional[Event] # type: ignore[no-redef] + + before_send_transaction = self.options["before_send_transaction"] + if ( + before_send_transaction is not None + and event is not None + and event.get("type") == "transaction" + ): + new_event = None + spans_before = len(cast(List[Dict[str, object]], event.get("spans", []))) + with capture_internal_exceptions(): + new_event = before_send_transaction(event, hint or {}) + if new_event is None: + logger.info("before send transaction dropped event") + if self.transport: + self.transport.record_lost_event( + reason="before_send", data_category="transaction" + ) + self.transport.record_lost_event( + reason="before_send", + data_category="span", + quantity=spans_before + 1, # +1 for the transaction itself + ) + else: + spans_delta = spans_before - len( + cast(List[Dict[str, object]], new_event.get("spans", [])) + ) + if spans_delta > 0 and self.transport is not None: + self.transport.record_lost_event( + reason="before_send", data_category="span", quantity=spans_delta + ) + + event = new_event # type: Optional[Event] # type: ignore[no-redef] + + return event + + def _is_ignored_error(self, event, hint): + # type: (Event, Hint) -> bool + exc_info = hint.get("exc_info") + if exc_info is None: + return False + + error = exc_info[0] + error_type_name = get_type_name(exc_info[0]) + error_full_name = "%s.%s" % (exc_info[0].__module__, error_type_name) + + for ignored_error in self.options["ignore_errors"]: + # String types are matched against the type name in the + # exception only + if isinstance(ignored_error, str): + if ignored_error == error_full_name or ignored_error == error_type_name: + return True + else: + if issubclass(error, 
ignored_error): + return True + + return False + + def _should_capture( + self, + event, # type: Event + hint, # type: Hint + scope=None, # type: Optional[Scope] + ): + # type: (...) -> bool + # Transactions are sampled independent of error events. + is_transaction = event.get("type") == "transaction" + if is_transaction: + return True + + ignoring_prevents_recursion = scope is not None and not scope._should_capture + if ignoring_prevents_recursion: + return False + + ignored_by_config_option = self._is_ignored_error(event, hint) + if ignored_by_config_option: + return False + + return True + + def _should_sample_error( + self, + event, # type: Event + hint, # type: Hint + ): + # type: (...) -> bool + error_sampler = self.options.get("error_sampler", None) + + if callable(error_sampler): + with capture_internal_exceptions(): + sample_rate = error_sampler(event, hint) + else: + sample_rate = self.options["sample_rate"] + + try: + not_in_sample_rate = sample_rate < 1.0 and random.random() >= sample_rate + except NameError: + logger.warning( + "The provided error_sampler raised an error. Defaulting to sampling the event." + ) + + # If the error_sampler raised an error, we should sample the event, since the default behavior + # (when no sample_rate or error_sampler is provided) is to sample all events. + not_in_sample_rate = False + except TypeError: + parameter, verb = ( + ("error_sampler", "returned") + if callable(error_sampler) + else ("sample_rate", "contains") + ) + logger.warning( + "The provided %s %s an invalid value of %s. The value should be a float or a bool. Defaulting to sampling the event." + % (parameter, verb, repr(sample_rate)) + ) + + # If the sample_rate has an invalid value, we should sample the event, since the default behavior + # (when no sample_rate or error_sampler is provided) is to sample all events. + not_in_sample_rate = False + + if not_in_sample_rate: + # because we will not sample this event, record a "lost event". 
+ if self.transport: + self.transport.record_lost_event("sample_rate", data_category="error") + + return False + + return True + + def _update_session_from_event( + self, + session, # type: Session + event, # type: Event + ): + # type: (...) -> None + + crashed = False + errored = False + user_agent = None + + exceptions = (event.get("exception") or {}).get("values") + if exceptions: + errored = True + for error in exceptions: + if isinstance(error, AnnotatedValue): + error = error.value or {} + mechanism = error.get("mechanism") + if isinstance(mechanism, Mapping) and mechanism.get("handled") is False: + crashed = True + break + + user = event.get("user") + + if session.user_agent is None: + headers = (event.get("request") or {}).get("headers") + headers_dict = headers if isinstance(headers, dict) else {} + for k, v in headers_dict.items(): + if k.lower() == "user-agent": + user_agent = v + break + + session.update( + status="crashed" if crashed else None, + user=user, + user_agent=user_agent, + errors=session.errors + (errored or crashed), + ) + + def capture_event( + self, + event, # type: Event + hint=None, # type: Optional[Hint] + scope=None, # type: Optional[Scope] + ): + # type: (...) -> Optional[str] + """Captures an event. + + :param event: A ready-made event that can be directly sent to Sentry. + + :param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or a HTTP request object. + + :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events. + + :returns: An event ID. May be `None` if there is no DSN set or of if the SDK decided to discard the event for other reasons. In such situations setting `debug=True` on `init()` may help. 
+ """ + hint = dict(hint or ()) # type: Hint + + if not self._should_capture(event, hint, scope): + return None + + profile = event.pop("profile", None) + + event_id = event.get("event_id") + if event_id is None: + event["event_id"] = event_id = uuid.uuid4().hex + event_opt = self._prepare_event(event, hint, scope) + if event_opt is None: + return None + + # whenever we capture an event we also check if the session needs + # to be updated based on that information. + session = scope._session if scope else None + if session: + self._update_session_from_event(session, event) + + is_transaction = event_opt.get("type") == "transaction" + is_checkin = event_opt.get("type") == "check_in" + + if ( + not is_transaction + and not is_checkin + and not self._should_sample_error(event, hint) + ): + return None + + attachments = hint.get("attachments") + + trace_context = event_opt.get("contexts", {}).get("trace") or {} + dynamic_sampling_context = trace_context.pop("dynamic_sampling_context", {}) + + headers = { + "event_id": event_opt["event_id"], + "sent_at": format_timestamp(datetime.now(timezone.utc)), + } # type: dict[str, object] + + if dynamic_sampling_context: + headers["trace"] = dynamic_sampling_context + + envelope = Envelope(headers=headers) + + if is_transaction: + if isinstance(profile, Profile): + envelope.add_profile(profile.to_json(event_opt, self.options)) + envelope.add_transaction(event_opt) + elif is_checkin: + envelope.add_checkin(event_opt) + else: + envelope.add_event(event_opt) + + for attachment in attachments or (): + envelope.add_item(attachment.to_envelope_item()) + + return_value = None + if self.spotlight: + self.spotlight.capture_envelope(envelope) + return_value = event_id + + if self.transport is not None: + self.transport.capture_envelope(envelope) + return_value = event_id + + return return_value + + def _capture_experimental_log(self, current_scope, log): + # type: (Scope, Log) -> None + logs_enabled = 
self.options["_experiments"].get("enable_logs", False) + if not logs_enabled: + return + isolation_scope = current_scope.get_isolation_scope() + + log["attributes"]["sentry.sdk.name"] = SDK_INFO["name"] + log["attributes"]["sentry.sdk.version"] = SDK_INFO["version"] + + server_name = self.options.get("server_name") + if server_name is not None and SPANDATA.SERVER_ADDRESS not in log["attributes"]: + log["attributes"][SPANDATA.SERVER_ADDRESS] = server_name + + environment = self.options.get("environment") + if environment is not None and "sentry.environment" not in log["attributes"]: + log["attributes"]["sentry.environment"] = environment + + release = self.options.get("release") + if release is not None and "sentry.release" not in log["attributes"]: + log["attributes"]["sentry.release"] = release + + span = current_scope.span + if span is not None and "sentry.trace.parent_span_id" not in log["attributes"]: + log["attributes"]["sentry.trace.parent_span_id"] = span.span_id + + if log.get("trace_id") is None: + transaction = current_scope.root_span + propagation_context = isolation_scope.get_active_propagation_context() + if transaction is not None: + log["trace_id"] = transaction.trace_id + elif propagation_context is not None: + log["trace_id"] = propagation_context.trace_id + + # If debug is enabled, log the log to the console + debug = self.options.get("debug", False) + if debug: + logger.debug( + f'[Sentry Logs] [{log.get("severity_text")}] {log.get("body")}' + ) + + before_send_log = self.options["_experiments"].get("before_send_log") + if before_send_log is not None: + log = before_send_log(log, {}) + if log is None: + return + + if self.log_batcher: + self.log_batcher.add(log) + + def capture_session( + self, session # type: Session + ): + # type: (...) 
-> None + if not session.release: + logger.info("Discarded session update because of missing release") + else: + self.session_flusher.add_session(session) + + if TYPE_CHECKING: + + @overload + def get_integration(self, name_or_class): + # type: (str) -> Optional[Integration] + ... + + @overload + def get_integration(self, name_or_class): + # type: (type[I]) -> Optional[I] + ... + + def get_integration( + self, name_or_class # type: Union[str, Type[Integration]] + ): + # type: (...) -> Optional[Integration] + """Returns the integration for this client by name or class. + If the client does not have that integration then `None` is returned. + """ + if isinstance(name_or_class, str): + integration_name = name_or_class + elif name_or_class.identifier is not None: + integration_name = name_or_class.identifier + else: + raise ValueError("Integration has no name") + + return self.integrations.get(integration_name) + + def close( + self, + timeout=None, # type: Optional[float] + callback=None, # type: Optional[Callable[[int, float], None]] + ): + # type: (...) -> None + """ + Close the client and shut down the transport. Arguments have the same + semantics as :py:meth:`Client.flush`. + """ + if self.transport is not None: + self.flush(timeout=timeout, callback=callback) + + self.session_flusher.kill() + + if self.log_batcher is not None: + self.log_batcher.kill() + + if self.monitor: + self.monitor.kill() + + self.transport.kill() + self.transport = None + + def flush( + self, + timeout=None, # type: Optional[float] + callback=None, # type: Optional[Callable[[int, float], None]] + ): + # type: (...) -> None + """ + Wait for the current events to be sent. + + :param timeout: Wait for at most `timeout` seconds. If no `timeout` is provided, the `shutdown_timeout` option value is used. + + :param callback: Is invoked with the number of pending events and the configured timeout. 
+ """ + if self.transport is not None: + if timeout is None: + timeout = self.options["shutdown_timeout"] + self.session_flusher.flush() + + if self.log_batcher is not None: + self.log_batcher.flush() + + self.transport.flush(timeout=timeout, callback=callback) + + def __enter__(self): + # type: () -> _Client + return self + + def __exit__(self, exc_type, exc_value, tb): + # type: (Any, Any, Any) -> None + self.close() + + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + # Make mypy, PyCharm and other static analyzers think `get_options` is a + # type to have nicer autocompletion for params. + # + # Use `ClientConstructor` to define the argument types of `init` and + # `Dict[str, Any]` to tell static analyzers about the return type. + + class get_options(ClientConstructor, Dict[str, Any]): # noqa: N801 + pass + + class Client(ClientConstructor, _Client): + pass + +else: + # Alias `get_options` for actual usage. Go through the lambda indirection + # to throw PyCharm off of the weakly typed signature (it would otherwise + # discover both the weakly typed signature of `_init` and our faked `init` + # type). + + get_options = (lambda: _get_options)() + Client = (lambda: _Client)() diff --git a/src/sentry_sdk_alpha/consts.py b/src/sentry_sdk_alpha/consts.py new file mode 100644 index 00000000000000..80b46dd4a796a5 --- /dev/null +++ b/src/sentry_sdk_alpha/consts.py @@ -0,0 +1,1065 @@ +import itertools + +from enum import Enum +from typing import TYPE_CHECKING + +# up top to prevent circular import due to integration import +DEFAULT_MAX_VALUE_LENGTH = 1024 + +DEFAULT_MAX_STACK_FRAMES = 100 +DEFAULT_ADD_FULL_STACK = False + + +# Also needs to be at the top to prevent circular import +class EndpointType(Enum): + """ + The type of an endpoint. This is an enum, rather than a constant, for historical reasons + (the old /store endpoint). The enum also preserve future compatibility, in case we ever + have a new endpoint. 
+ """ + + ENVELOPE = "envelope" + + +class CompressionAlgo(Enum): + GZIP = "gzip" + BROTLI = "br" + + +if TYPE_CHECKING: + import sentry_sdk_alpha + + from typing import Optional + from typing import Callable + from typing import Union + from typing import List + from typing import Type + from typing import Dict + from typing import Any + from typing import Sequence + from typing import Tuple + from typing_extensions import Literal + from typing_extensions import TypedDict + + from sentry_sdk_alpha._types import ( + BreadcrumbProcessor, + ContinuousProfilerMode, + Event, + EventProcessor, + Hint, + ProfilerMode, + TracesSampler, + TransactionProcessor, + ) + + # Experiments are feature flags to enable and disable certain unstable SDK + # functionality. Changing them from the defaults (`None`) in production + # code is highly discouraged. They are not subject to any stability + # guarantees such as the ones from semantic versioning. + Experiments = TypedDict( + "Experiments", + { + "max_spans": Optional[int], + "max_flags": Optional[int], + "record_sql_params": Optional[bool], + "continuous_profiling_auto_start": Optional[bool], + "continuous_profiling_mode": Optional[ContinuousProfilerMode], + "otel_powered_performance": Optional[bool], + "transport_zlib_compression_level": Optional[int], + "transport_compression_level": Optional[int], + "transport_compression_algo": Optional[CompressionAlgo], + "transport_num_pools": Optional[int], + "transport_http2": Optional[bool], + "enable_logs": Optional[bool], + }, + total=False, + ) + +DEFAULT_QUEUE_SIZE = 100 +DEFAULT_MAX_BREADCRUMBS = 100 +MATCH_ALL = r".*" + +FALSE_VALUES = [ + "false", + "no", + "off", + "n", + "0", +] + + +class SPANDATA: + """ + Additional information describing the type of the span. + See: https://develop.sentry.dev/sdk/performance/span-data-conventions/ + """ + + AI_FREQUENCY_PENALTY = "ai.frequency_penalty" + """ + Used to reduce repetitiveness of generated tokens. 
+    Example: 0.5
+    """
+
+    AI_PRESENCE_PENALTY = "ai.presence_penalty"
+    """
+    Used to reduce repetitiveness of generated tokens.
+    Example: 0.5
+    """
+
+    AI_INPUT_MESSAGES = "ai.input_messages"
+    """
+    The input messages to an LLM call.
+    Example: [{"role": "user", "message": "hello"}]
+    """
+
+    AI_MODEL_ID = "ai.model_id"
+    """
+    The unique descriptor of the model being executed
+    Example: gpt-4
+    """
+
+    AI_METADATA = "ai.metadata"
+    """
+    Extra metadata passed to an AI pipeline step.
+    Example: {"executed_function": "add_integers"}
+    """
+
+    AI_TAGS = "ai.tags"
+    """
+    Tags that describe an AI pipeline step.
+    Example: {"executed_function": "add_integers"}
+    """
+
+    AI_STREAMING = "ai.streaming"
+    """
+    Whether or not the AI model call's response was streamed back asynchronously
+    Example: true
+    """
+
+    AI_TEMPERATURE = "ai.temperature"
+    """
+    For an AI model call, the temperature parameter. Temperature essentially means how random the output will be.
+    Example: 0.5
+    """
+
+    AI_TOP_P = "ai.top_p"
+    """
+    For an AI model call, the top_p parameter. Top_p essentially controls how random the output will be.
+    Example: 0.5
+    """
+
+    AI_TOP_K = "ai.top_k"
+    """
+    For an AI model call, the top_k parameter. Top_k essentially controls how random the output will be.
+    Example: 35
+    """
+
+    AI_FUNCTION_CALL = "ai.function_call"
+    """
+    For an AI model call, the function that was called. This is deprecated for OpenAI, and replaced by tool_calls
+    """
+
+    AI_TOOL_CALLS = "ai.tool_calls"
+    """
+    For an AI model call, the function that was called.
+    """
+
+    AI_TOOLS = "ai.tools"
+    """
+    For an AI model call, the functions that are available
+    """
+
+    AI_RESPONSE_FORMAT = "ai.response_format"
+    """
+    For an AI model call, the format of the response
+    """
+
+    AI_LOGIT_BIAS = "ai.logit_bias"
+    """
+    For an AI model call, the logit bias
+    """
+
+    AI_PREAMBLE = "ai.preamble"
+    """
+    For an AI model call, the preamble parameter.
+ Preambles are a part of the prompt used to adjust the model's overall behavior and conversation style. + Example: "You are now a clown." + """ + + AI_RAW_PROMPTING = "ai.raw_prompting" + """ + Minimize pre-processing done to the prompt sent to the LLM. + Example: true + """ + AI_RESPONSES = "ai.responses" + """ + The responses to an AI model call. Always as a list. + Example: ["hello", "world"] + """ + + AI_SEED = "ai.seed" + """ + The seed, ideally models given the same seed and same other parameters will produce the exact same output. + Example: 123.45 + """ + + AI_CITATIONS = "ai.citations" + """ + References or sources cited by the AI model in its response. + Example: ["Smith et al. 2020", "Jones 2019"] + """ + + AI_DOCUMENTS = "ai.documents" + """ + Documents or content chunks used as context for the AI model. + Example: ["doc1.txt", "doc2.pdf"] + """ + + AI_SEARCH_QUERIES = "ai.search_queries" + """ + Queries used to search for relevant context or documents. + Example: ["climate change effects", "renewable energy"] + """ + + AI_SEARCH_RESULTS = "ai.search_results" + """ + Results returned from search queries for context. + Example: ["Result 1", "Result 2"] + """ + + AI_GENERATION_ID = "ai.generation_id" + """ + Unique identifier for the completion. + Example: "gen_123abc" + """ + + AI_SEARCH_REQUIRED = "ai.is_search_required" + """ + Boolean indicating if the model needs to perform a search. + Example: true + """ + + AI_FINISH_REASON = "ai.finish_reason" + """ + The reason why the model stopped generating. + Example: "length" + """ + + AI_PIPELINE_NAME = "ai.pipeline.name" + """ + Name of the AI pipeline or chain being executed. + Example: "qa-pipeline" + """ + + AI_PROMPT_TOKENS_USED = "ai.prompt_tokens.used" + """ + The number of input prompt tokens used by the model. + Example: 10 + """ + + AI_COMPLETION_TOKENS_USED = "ai.completion_tokens.used" + """ + The number of output completion tokens used by the model. 
+ Example: 10 + """ + + AI_TOTAL_TOKENS_USED = "ai.total_tokens.used" + """ + The total number of tokens (input + output) used by the request to the model. + Example: 20 + """ + + AI_TEXTS = "ai.texts" + """ + Raw text inputs provided to the model. + Example: ["What is machine learning?"] + """ + + AI_WARNINGS = "ai.warnings" + """ + Warning messages generated during model execution. + Example: ["Token limit exceeded"] + """ + + DB_NAME = "db.name" + """ + The name of the database being accessed. For commands that switch the database, this should be set to the target database (even if the command fails). + Example: myDatabase + """ + + DB_USER = "db.user" + """ + The name of the database user used for connecting to the database. + See: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/database.md + Example: my_user + """ + + DB_OPERATION = "db.operation" + """ + The name of the operation being executed, e.g. the MongoDB command name such as findAndModify, or the SQL keyword. + See: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/database.md + Example: findAndModify, HMSET, SELECT + """ + + DB_SYSTEM = "db.system" + """ + An identifier for the database management system (DBMS) product being used. + See: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/database.md + Example: postgresql + """ + + DB_MONGODB_COLLECTION = "db.mongodb.collection" + """ + The MongoDB collection being accessed within the database. + See: https://github.com/open-telemetry/semantic-conventions/blob/main/docs/database/mongodb.md#attributes + Example: public.users; customers + """ + + CACHE_HIT = "cache.hit" + """ + A boolean indicating whether the requested data was found in the cache. + Example: true + """ + + CACHE_ITEM_SIZE = "cache.item_size" + """ + The size of the requested data in bytes. 
+ Example: 58 + """ + + CACHE_KEY = "cache.key" + """ + The key of the requested data. + Example: template.cache.some_item.867da7e2af8e6b2f3aa7213a4080edb3 + """ + + NETWORK_PEER_ADDRESS = "network.peer.address" + """ + Peer address of the network connection - IP address or Unix domain socket name. + Example: 10.1.2.80, /tmp/my.sock, localhost + """ + + NETWORK_PEER_PORT = "network.peer.port" + """ + Peer port number of the network connection. + Example: 6379 + """ + + HTTP_QUERY = "http.query" + """ + The Query string present in the URL. + Example: ?foo=bar&bar=baz + """ + + HTTP_FRAGMENT = "http.fragment" + """ + The Fragments present in the URL. + Example: #foo=bar + """ + + HTTP_METHOD = "http.method" + """ + The HTTP method used. + Example: GET + """ + + HTTP_STATUS_CODE = "http.response.status_code" + """ + The HTTP status code as an integer. + Example: 418 + """ + + MESSAGING_DESTINATION_NAME = "messaging.destination.name" + """ + The destination name where the message is being consumed from, + e.g. the queue name or topic. + """ + + MESSAGING_MESSAGE_ID = "messaging.message.id" + """ + The message's identifier. + """ + + MESSAGING_MESSAGE_RETRY_COUNT = "messaging.message.retry.count" + """ + Number of retries/attempts to process a message. + """ + + MESSAGING_MESSAGE_RECEIVE_LATENCY = "messaging.message.receive.latency" + """ + The latency between when the task was enqueued and when it was started to be processed. + """ + + MESSAGING_SYSTEM = "messaging.system" + """ + The messaging system's name, e.g. `kafka`, `aws_sqs` + """ + + SERVER_ADDRESS = "server.address" + """ + Name of the database host. + Example: example.com + """ + + SERVER_PORT = "server.port" + """ + Logical server port number + Example: 80; 8080; 443 + """ + + SERVER_SOCKET_ADDRESS = "server.socket.address" + """ + Physical server IP address or Unix socket address. + Example: 10.5.3.2 + """ + + SERVER_SOCKET_PORT = "server.socket.port" + """ + Physical server port. 
+ Recommended: If different than server.port. + Example: 16456 + """ + + CODE_FILEPATH = "code.filepath" + """ + The source code file name that identifies the code unit as uniquely as possible (preferably an absolute file path). + Example: "/app/myapplication/http/handler/server.py" + """ + + CODE_LINENO = "code.lineno" + """ + The line number in `code.filepath` best representing the operation. It SHOULD point within the code unit named in `code.function`. + Example: 42 + """ + + CODE_FUNCTION = "code.function" + """ + The method or function name, or equivalent (usually rightmost part of the code unit's name). + Example: "server_request" + """ + + CODE_NAMESPACE = "code.namespace" + """ + The "namespace" within which `code.function` is defined. Usually the qualified class or module name, such that `code.namespace` + some separator + `code.function` form a unique identifier for the code unit. + Example: "http.handler" + """ + + THREAD_ID = "thread.id" + """ + Identifier of a thread from where the span originated. This should be a string. + Example: "7972576320" + """ + + THREAD_NAME = "thread.name" + """ + Label identifying a thread from where the span originated. This should be a string. + Example: "MainThread" + """ + + PROFILER_ID = "profiler_id" + """ + Label identifying the profiler id that the span occurred in. This should be a string. + Example: "5249fbada8d5416482c2f6e47e337372" + """ + + +class SPANSTATUS: + """ + The status of a Sentry span. 
+ + See: https://develop.sentry.dev/sdk/event-payloads/contexts/#trace-context + """ + + ABORTED = "aborted" + ALREADY_EXISTS = "already_exists" + CANCELLED = "cancelled" + DATA_LOSS = "data_loss" + DEADLINE_EXCEEDED = "deadline_exceeded" + FAILED_PRECONDITION = "failed_precondition" + INTERNAL_ERROR = "internal_error" + INVALID_ARGUMENT = "invalid_argument" + NOT_FOUND = "not_found" + OK = "ok" + OUT_OF_RANGE = "out_of_range" + PERMISSION_DENIED = "permission_denied" + RESOURCE_EXHAUSTED = "resource_exhausted" + UNAUTHENTICATED = "unauthenticated" + UNAVAILABLE = "unavailable" + UNIMPLEMENTED = "unimplemented" + UNKNOWN_ERROR = "unknown_error" + + +class OP: + ANTHROPIC_MESSAGES_CREATE = "ai.messages.create.anthropic" + CACHE_GET = "cache.get" + CACHE_PUT = "cache.put" + COHERE_CHAT_COMPLETIONS_CREATE = "ai.chat_completions.create.cohere" + COHERE_EMBEDDINGS_CREATE = "ai.embeddings.create.cohere" + DB = "db" + DB_REDIS = "db.redis" + EVENT_DJANGO = "event.django" + FUNCTION = "function" + FUNCTION_AWS = "function.aws" + FUNCTION_GCP = "function.gcp" + GRAPHQL_EXECUTE = "graphql.execute" + GRAPHQL_MUTATION = "graphql.mutation" + GRAPHQL_PARSE = "graphql.parse" + GRAPHQL_RESOLVE = "graphql.resolve" + GRAPHQL_SUBSCRIPTION = "graphql.subscription" + GRAPHQL_QUERY = "graphql.query" + GRAPHQL_VALIDATE = "graphql.validate" + GRPC_CLIENT = "grpc.client" + GRPC_SERVER = "grpc.server" + HTTP_CLIENT = "http.client" + HTTP_CLIENT_STREAM = "http.client.stream" + HTTP_SERVER = "http.server" + MIDDLEWARE_DJANGO = "middleware.django" + MIDDLEWARE_LITESTAR = "middleware.litestar" + MIDDLEWARE_LITESTAR_RECEIVE = "middleware.litestar.receive" + MIDDLEWARE_LITESTAR_SEND = "middleware.litestar.send" + MIDDLEWARE_STARLETTE = "middleware.starlette" + MIDDLEWARE_STARLETTE_RECEIVE = "middleware.starlette.receive" + MIDDLEWARE_STARLETTE_SEND = "middleware.starlette.send" + MIDDLEWARE_STARLITE = "middleware.starlite" + MIDDLEWARE_STARLITE_RECEIVE = "middleware.starlite.receive" + 
MIDDLEWARE_STARLITE_SEND = "middleware.starlite.send" + OPENAI_CHAT_COMPLETIONS_CREATE = "ai.chat_completions.create.openai" + OPENAI_EMBEDDINGS_CREATE = "ai.embeddings.create.openai" + HUGGINGFACE_HUB_CHAT_COMPLETIONS_CREATE = ( + "ai.chat_completions.create.huggingface_hub" + ) + LANGCHAIN_PIPELINE = "ai.pipeline.langchain" + LANGCHAIN_RUN = "ai.run.langchain" + LANGCHAIN_TOOL = "ai.tool.langchain" + LANGCHAIN_AGENT = "ai.agent.langchain" + LANGCHAIN_CHAT_COMPLETIONS_CREATE = "ai.chat_completions.create.langchain" + QUEUE_PROCESS = "queue.process" + QUEUE_PUBLISH = "queue.publish" + QUEUE_SUBMIT_ARQ = "queue.submit.arq" + QUEUE_TASK_ARQ = "queue.task.arq" + QUEUE_SUBMIT_CELERY = "queue.submit.celery" + QUEUE_TASK_CELERY = "queue.task.celery" + QUEUE_TASK_RQ = "queue.task.rq" + QUEUE_SUBMIT_HUEY = "queue.submit.huey" + QUEUE_TASK_HUEY = "queue.task.huey" + QUEUE_SUBMIT_RAY = "queue.submit.ray" + QUEUE_TASK_RAY = "queue.task.ray" + SUBPROCESS = "subprocess" + SUBPROCESS_WAIT = "subprocess.wait" + SUBPROCESS_COMMUNICATE = "subprocess.communicate" + TEMPLATE_RENDER = "template.render" + VIEW_RENDER = "view.render" + VIEW_RESPONSE_RENDER = "view.response.render" + WEBSOCKET_SERVER = "websocket.server" + SOCKET_CONNECTION = "socket.connection" + SOCKET_DNS = "socket.dns" + + +BAGGAGE_HEADER_NAME = "baggage" +SENTRY_TRACE_HEADER_NAME = "sentry-trace" + +DEFAULT_SPAN_ORIGIN = "manual" +DEFAULT_SPAN_NAME = "" + + +# Transaction source +# see https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations +class TransactionSource(str, Enum): + COMPONENT = "component" + CUSTOM = "custom" + ROUTE = "route" + TASK = "task" + URL = "url" + VIEW = "view" + + def __str__(self): + # type: () -> str + return self.value + + +# These are typically high cardinality and the server hates them +LOW_QUALITY_TRANSACTION_SOURCES = [ + TransactionSource.URL, +] + +SOURCE_FOR_STYLE = { + "endpoint": TransactionSource.COMPONENT, + "function_name": 
# This type exists to trick mypy and PyCharm into thinking `init` and `Client`
# take these arguments (even though they take opaque **kwargs)
class ClientConstructor:

    def __init__(
        self,
        dsn=None,  # type: Optional[str]
        *,
        max_breadcrumbs=DEFAULT_MAX_BREADCRUMBS,  # type: int
        release=None,  # type: Optional[str]
        environment=None,  # type: Optional[str]
        server_name=None,  # type: Optional[str]
        shutdown_timeout=2,  # type: float
        integrations=[],  # type: Sequence[sentry_sdk.integrations.Integration]  # noqa: B006
        in_app_include=[],  # type: List[str]  # noqa: B006
        in_app_exclude=[],  # type: List[str]  # noqa: B006
        default_integrations=True,  # type: bool
        dist=None,  # type: Optional[str]
        transport=None,  # type: Optional[Union[sentry_sdk.transport.Transport, Type[sentry_sdk.transport.Transport], Callable[[Event], None]]]
        transport_queue_size=DEFAULT_QUEUE_SIZE,  # type: int
        sample_rate=1.0,  # type: float
        send_default_pii=None,  # type: Optional[bool]
        http_proxy=None,  # type: Optional[str]
        https_proxy=None,  # type: Optional[str]
        ignore_errors=[],  # type: Sequence[Union[type, str]]  # noqa: B006
        max_request_body_size="medium",  # type: str
        socket_options=None,  # type: Optional[List[Tuple[int, int, int | bytes]]]
        keep_alive=False,  # type: bool
        before_send=None,  # type: Optional[EventProcessor]
        before_breadcrumb=None,  # type: Optional[BreadcrumbProcessor]
        debug=None,  # type: Optional[bool]
        attach_stacktrace=False,  # type: bool
        ca_certs=None,  # type: Optional[str]
        traces_sample_rate=None,  # type: Optional[float]
        traces_sampler=None,  # type: Optional[TracesSampler]
        profiles_sample_rate=None,  # type: Optional[float]
        profiles_sampler=None,  # type: Optional[TracesSampler]
        profiler_mode=None,  # type: Optional[ProfilerMode]
        profile_lifecycle="manual",  # type: Literal["manual", "trace"]
        profile_session_sample_rate=None,  # type: Optional[float]
        auto_enabling_integrations=True,  # type: bool
        disabled_integrations=None,  # type: Optional[Sequence[sentry_sdk.integrations.Integration]]
        auto_session_tracking=True,  # type: bool
        send_client_reports=True,  # type: bool
        _experiments={},  # type: Experiments  # noqa: B006
        proxy_headers=None,  # type: Optional[Dict[str, str]]
        before_send_transaction=None,  # type: Optional[TransactionProcessor]
        project_root=None,  # type: Optional[str]
        include_local_variables=True,  # type: Optional[bool]
        include_source_context=True,  # type: Optional[bool]
        trace_propagation_targets=[  # noqa: B006
            MATCH_ALL
        ],  # type: Optional[Sequence[str]]
        functions_to_trace=[],  # type: Sequence[Dict[str, str]]  # noqa: B006
        event_scrubber=None,  # type: Optional[sentry_sdk.scrubber.EventScrubber]
        max_value_length=DEFAULT_MAX_VALUE_LENGTH,  # type: int
        enable_backpressure_handling=True,  # type: bool
        error_sampler=None,  # type: Optional[Callable[[Event, Hint], Union[float, bool]]]
        enable_db_query_source=True,  # type: bool
        db_query_source_threshold_ms=100,  # type: int
        spotlight=None,  # type: Optional[Union[bool, str]]
        cert_file=None,  # type: Optional[str]
        key_file=None,  # type: Optional[str]
        custom_repr=None,  # type: Optional[Callable[..., Optional[str]]]
        add_full_stack=DEFAULT_ADD_FULL_STACK,  # type: bool
        max_stack_frames=DEFAULT_MAX_STACK_FRAMES,  # type: Optional[int]
    ):
        # type: (...) -> None
        # NOTE(review): the type comments reference `sentry_sdk.*` types; in this
        # vendored `sentry_sdk_alpha` copy that module path may need updating —
        # confirm against the rest of the vendoring patch.
        """Initialize the Sentry SDK with the given parameters.

        Every parameter described here can be passed to ``sentry_sdk.init()``;
        this class only documents the signature (the real client accepts
        opaque ``**kwargs``) and is introspected below to build
        ``DEFAULT_OPTIONS``.

        Condensed option reference:

        :param dsn: Tells the SDK where to send events; when unset, nothing is sent.
        :param debug: Print SDK debugging information (keep off in production).
        :param release: Release identifier, used to correlate events with deploys.
        :param environment: Freeform environment name (defaults to ``production``).
        :param dist: Distinguishes build/deployment variants of one release.
        :param sample_rate: Fraction (``0.0``–``1.0``) of error events to send;
            default ``1.0`` (100%).
        :param error_sampler: Callable ``(event, hint) -> bool | float`` deciding
            per-event whether (or with what probability) to send; when set,
            ``sample_rate`` is ignored.
        :param ignore_errors: Exception classes (or names); matching errors and
            their subclasses are filtered out before sending.
        :param max_breadcrumbs: Maximum number of breadcrumbs kept (default 100).
        :param attach_stacktrace: Also attach stack traces to plain messages
            (changes event grouping for those events).
        :param send_default_pii: Allow active integrations to add certain
            personally identifiable information.
        :param event_scrubber: Scrubs sensitive data (cookies, sessions,
            passwords) from the event payload via a denylist.
        :param include_source_context: Include the five source lines above and
            below the failing line.
        :param include_local_variables: Capture a snapshot of local variables.
        :param add_full_stack: Include frames from the start of execution, not
            only from where the error occurred.
        :param max_stack_frames: Frame limit used with ``add_full_stack``.
        :param server_name: Server/host name persisted on events.
        :param project_root: Root path used to classify stack frames as in-app.
        :param in_app_include: Module-name prefixes that belong to the app
            (takes precedence over ``in_app_exclude``).
        :param in_app_exclude: Module-name prefixes treated as third-party and
            hidden from stack traces by default.
        :param max_request_body_size: One of ``"never"``, ``"small"``,
            ``"medium"`` (default), ``"always"`` — how much of HTTP request
            bodies integrations may capture.
        :param max_value_length: Truncation length for text values in the
            event payload.
        :param ca_certs: Path to an alternative CA bundle file (PEM format).
        :param send_client_reports: Send SDK self-status reports (e.g. about
            dropped events).
        :param integrations: Extra integrations, or overrides for
            auto-enabling ones.
        :param disabled_integrations: Integrations to explicitly turn off.
        :param auto_enabling_integrations: Toggle for framework auto-detection.
        :param default_integrations: ``False`` disables default *and*
            auto-enabling integrations unless re-added via ``integrations``.
        :param before_send: Hook to mutate or drop (return ``None``) an error
            event before sending; scope data is already applied.
        :param before_send_transaction: Same, for transaction events.
        :param before_breadcrumb: Hook to mutate or drop a breadcrumb before it
            is added to the scope.
        :param transport: Replacement transport used to send events.
        :param transport_queue_size: Queue length before a forced flush.
        :param http_proxy: Proxy for outbound requests (also HTTPS unless
            ``https_proxy`` is set).
        :param https_proxy: Separate proxy for HTTPS requests.
        :param proxy_headers: Extra proxy headers forwarded to urllib3's
            ``ProxyManager``.
        :param shutdown_timeout: Seconds to let the background queue drain on
            shutdown.
        :param keep_alive: Keep the connection alive between requests.
        :param socket_options: Low-level urllib3 socket options.
        :param cert_file: Client certificate path (supersedes
            ``CLIENT_CERT_FILE``).
        :param key_file: Client key path (supersedes ``CLIENT_KEY_FILE``).
        :param traces_sample_rate: ``0``–``1`` chance a transaction is sent;
            ``None`` disables tracing entirely, ``0`` still continues incoming
            traces.
        :param traces_sampler: Callable computing that chance per transaction
            (also usable for filtering by returning ``0``).
        :param trace_propagation_targets: URL substrings/regexes that receive
            ``sentry-trace``/``baggage`` headers; default: all outgoing requests.
        :param functions_to_trace: Fully qualified function names to wrap in
            spans automatically.
        :param enable_backpressure_handling: Monitor SDK health and halve
            ``traces_sample_rate`` under load until recovery.
        :param enable_db_query_source: Attach source locations to DB queries.
        :param db_query_source_threshold_ms: Only for queries slower than this.
        :param custom_repr: Custom ``repr`` used while serializing objects;
            return a string, or ``None`` to fall back to default serialization.
        :param profiles_sample_rate: ``0``–``1`` chance a *sampled* transaction
            is profiled (relative to the tracing sample rate).
        :param profiles_sampler:
        :param profiler_mode:
        :param profile_lifecycle:
        :param profile_session_sample_rate:
        :param auto_session_tracking:
        :param spotlight:
        :param _experiments: Experimental, unstable options.
        """
        pass


def _get_default_options():
    # type: () -> dict[str, Any]
    """Harvest the default value of every option by introspecting the
    ``ClientConstructor.__init__`` signature."""
    import inspect

    spec = inspect.getfullargspec(ClientConstructor.__init__)
    positional_defaults = spec.defaults or ()
    # `self` carries no default, so the defaults tuple aligns with the tail of
    # the positional-argument list.
    options = dict(
        zip(spec.args[len(spec.args) - len(positional_defaults) :], positional_defaults)
    )
    options.update(spec.kwonlydefaults or {})
    return options


DEFAULT_OPTIONS = _get_default_options()
del _get_default_options


VERSION = "3.3.3a1"
import logging
import sys
import uuid
from functools import wraps
from inspect import iscoroutinefunction
from logging import LogRecord
from typing import TYPE_CHECKING

import sentry_sdk_alpha
from sentry_sdk_alpha import get_client
from sentry_sdk_alpha.client import _client_init_debug
from sentry_sdk_alpha.utils import logger, now

if TYPE_CHECKING:
    from collections.abc import Awaitable, Callable
    from types import TracebackType
    from typing import (
        Any,
        Dict,
        Optional,
        ParamSpec,
        Type,
        TypeVar,
        Union,
        cast,
        overload,
    )

    from sentry_sdk_alpha._types import Event, MonitorConfig

    P = ParamSpec("P")
    R = TypeVar("R")


class MonitorStatus:
    # Check-in lifecycle states understood by Sentry Crons.
    IN_PROGRESS = "in_progress"
    OK = "ok"
    ERROR = "error"


def _create_check_in_event(
    monitor_slug=None,  # type: Optional[str]
    check_in_id=None,  # type: Optional[str]
    status=None,  # type: Optional[str]
    duration_s=None,  # type: Optional[float]
    monitor_config=None,  # type: Optional[MonitorConfig]
):
    # type: (...) -> Event
    """Build a ``check_in`` event payload for the given monitor.

    ``environment`` and ``release`` are copied from the active client's
    options so the check-in is attributed consistently with other events.
    """
    client_options = sentry_sdk_alpha.get_client().options
    event = {
        "type": "check_in",
        "monitor_slug": monitor_slug,
        # Mint a fresh id when the caller did not supply one (first check-in).
        "check_in_id": check_in_id or uuid.uuid4().hex,
        "status": status,
        "duration": duration_s,
        "environment": client_options.get("environment", None),
        "release": client_options.get("release", None),
    }  # type: Event

    if monitor_config:
        event["monitor_config"] = monitor_config

    return event


def capture_checkin(
    monitor_slug=None,  # type: Optional[str]
    check_in_id=None,  # type: Optional[str]
    status=None,  # type: Optional[str]
    duration=None,  # type: Optional[float]
    monitor_config=None,  # type: Optional[MonitorConfig]
):
    # type: (...) -> str
    """Send a monitor check-in event and return its check-in id.

    The returned id can be passed back in to close out the same check-in
    (e.g. ``in_progress`` -> ``ok``/``error``).
    """
    event = _create_check_in_event(
        monitor_slug=monitor_slug,
        check_in_id=check_in_id,
        status=status,
        duration_s=duration,
        monitor_config=monitor_config,
    )

    sentry_sdk_alpha.capture_event(event)

    return event["check_in_id"]


class monitor:  # noqa: N801
    """Capture check-in events around a function or a block of code.

    Decorator usage::

        import sentry_sdk

        app = Celery()

        @app.task
        @sentry_sdk.monitor(monitor_slug='my-fancy-slug')
        def test(arg):
            print(arg)

    This does not have to be used with Celery, but if it is, put the
    ``@sentry_sdk.monitor`` decorator *below* Celery's ``@app.task``.

    Context-manager usage::

        import sentry_sdk

        def test(arg):
            with sentry_sdk.monitor(monitor_slug='my-fancy-slug'):
                print(arg)
    """

    def __init__(self, monitor_slug=None, monitor_config=None):
        # type: (Optional[str], Optional[MonitorConfig]) -> None
        self.monitor_slug = monitor_slug
        self.monitor_config = monitor_config

    def __enter__(self):
        # type: () -> None
        # Record the start time so __exit__ can report a duration, and open
        # the check-in as "in progress".
        self.start_timestamp = now()
        self.check_in_id = capture_checkin(
            monitor_slug=self.monitor_slug,
            status=MonitorStatus.IN_PROGRESS,
            monitor_config=self.monitor_config,
        )

    def __exit__(self, exc_type, exc_value, traceback):
        # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]) -> None
        elapsed_s = now() - self.start_timestamp

        # Any exception information means the monitored block failed.
        succeeded = exc_type is None and exc_value is None and traceback is None

        capture_checkin(
            monitor_slug=self.monitor_slug,
            check_in_id=self.check_in_id,
            status=MonitorStatus.OK if succeeded else MonitorStatus.ERROR,
            duration=elapsed_s,
            monitor_config=self.monitor_config,
        )

    if TYPE_CHECKING:

        @overload
        def __call__(self, fn):
            # type: (Callable[P, Awaitable[Any]]) -> Callable[P, Awaitable[Any]]
            # mypy gives us no reliable way to type-check the awaited return
            # value for this overload: iscoroutinefunction() narrows fn to
            # Callable[P, Awaitable[Any]].
            ...

        @overload
        def __call__(self, fn):
            # type: (Callable[P, R]) -> Callable[P, R]
            ...

    def __call__(
        self,
        fn,  # type: Union[Callable[P, R], Callable[P, Awaitable[Any]]]
    ):
        # type: (...) -> Union[Callable[P, R], Callable[P, Awaitable[Any]]]
        # Dispatch to the matching wrapper so `await` keeps working for
        # coroutine functions.
        if iscoroutinefunction(fn):
            return self._async_wrapper(fn)

        if TYPE_CHECKING:
            fn = cast("Callable[P, R]", fn)
        return self._sync_wrapper(fn)

    def _async_wrapper(self, fn):
        # type: (Callable[P, Awaitable[Any]]) -> Callable[P, Awaitable[Any]]
        @wraps(fn)
        async def inner(*args: "P.args", **kwargs: "P.kwargs"):
            # type: (...) -> R
            with self:
                return await fn(*args, **kwargs)

        return inner

    def _sync_wrapper(self, fn):
        # type: (Callable[P, R]) -> Callable[P, R]
        @wraps(fn)
        def inner(*args: "P.args", **kwargs: "P.kwargs"):
            # type: (...) -> R
            with self:
                return fn(*args, **kwargs)

        return inner


class _DebugFilter(logging.Filter):
    """Pass records through while a client is being initialized, or when the
    active client was configured with ``debug=True``."""

    def filter(self, record):
        # type: (LogRecord) -> bool
        return True if _client_init_debug.get(False) else get_client().options["debug"]


def init_debug_support():
    # type: () -> None
    """Attach the debug handler to the SDK logger once; later calls no-op."""
    if not logger.handlers:
        configure_logger()


def configure_logger():
    # type: () -> None
    """Wire the SDK logger to stderr at DEBUG level, gated by _DebugFilter."""
    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(logging.Formatter(" [sentry] %(levelname)s: %(message)s"))
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
    logger.addFilter(_DebugFilter())
import json_dumps, capture_internal_exceptions + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import Optional + from typing import Union + from typing import Dict + from typing import List + from typing import Iterator + + from sentry_sdk_alpha._types import Event, EventDataCategory + + +def parse_json(data): + # type: (Union[bytes, str]) -> Any + # on some python 3 versions this needs to be bytes + if isinstance(data, bytes): + data = data.decode("utf-8", "replace") + return json.loads(data) + + +class Envelope: + """ + Represents a Sentry Envelope. The calling code is responsible for adhering to the constraints + documented in the Sentry docs: https://develop.sentry.dev/sdk/envelopes/#data-model. In particular, + each envelope may have at most one Item with type "event" or "transaction" (but not both). + """ + + def __init__( + self, + headers=None, # type: Optional[Dict[str, Any]] + items=None, # type: Optional[List[Item]] + ): + # type: (...) -> None + if headers is not None: + headers = dict(headers) + self.headers = headers or {} + if items is None: + items = [] + else: + items = list(items) + self.items = items + + @property + def description(self): + # type: (...) -> str + return "envelope with %s items (%s)" % ( + len(self.items), + ", ".join(x.data_category for x in self.items), + ) + + def add_event( + self, event # type: Event + ): + # type: (...) -> None + self.add_item(Item(payload=PayloadRef(json=event), type="event")) + + def add_transaction( + self, transaction # type: Event + ): + # type: (...) -> None + self.add_item(Item(payload=PayloadRef(json=transaction), type="transaction")) + + def add_profile( + self, profile # type: Any + ): + # type: (...) -> None + self.add_item(Item(payload=PayloadRef(json=profile), type="profile")) + + def add_profile_chunk( + self, profile_chunk # type: Any + ): + # type: (...) 
-> None + self.add_item( + Item( + payload=PayloadRef(json=profile_chunk), + type="profile_chunk", + headers={"platform": profile_chunk.get("platform", "python")}, + ) + ) + + def add_checkin( + self, checkin # type: Any + ): + # type: (...) -> None + self.add_item(Item(payload=PayloadRef(json=checkin), type="check_in")) + + def add_session( + self, session # type: Union[Session, Any] + ): + # type: (...) -> None + if isinstance(session, Session): + session = session.to_json() + self.add_item(Item(payload=PayloadRef(json=session), type="session")) + + def add_sessions( + self, sessions # type: Any + ): + # type: (...) -> None + self.add_item(Item(payload=PayloadRef(json=sessions), type="sessions")) + + def add_item( + self, item # type: Item + ): + # type: (...) -> None + self.items.append(item) + + def get_event(self): + # type: (...) -> Optional[Event] + for items in self.items: + event = items.get_event() + if event is not None: + return event + return None + + def get_transaction_event(self): + # type: (...) -> Optional[Event] + for item in self.items: + event = item.get_transaction_event() + if event is not None: + return event + return None + + def __iter__(self): + # type: (...) -> Iterator[Item] + return iter(self.items) + + def serialize_into( + self, f # type: Any + ): + # type: (...) -> None + f.write(json_dumps(self.headers)) + f.write(b"\n") + for item in self.items: + item.serialize_into(f) + + def serialize(self): + # type: (...) -> bytes + out = io.BytesIO() + self.serialize_into(out) + return out.getvalue() + + @classmethod + def deserialize_from( + cls, f # type: Any + ): + # type: (...) -> Envelope + headers = parse_json(f.readline()) + items = [] + while 1: + item = Item.deserialize_from(f) + if item is None: + break + items.append(item) + return cls(headers=headers, items=items) + + @classmethod + def deserialize( + cls, bytes # type: bytes + ): + # type: (...) 
-> Envelope
+        return cls.deserialize_from(io.BytesIO(bytes))
+
+    def __repr__(self):
+        # type: (...) -> str
+        return "<Envelope headers=%r items=%r>" % (self.headers, self.items)
+
+
+class PayloadRef:
+    def __init__(
+        self,
+        bytes=None,  # type: Optional[bytes]
+        path=None,  # type: Optional[Union[bytes, str]]
+        json=None,  # type: Optional[Any]
+    ):
+        # type: (...) -> None
+        self.json = json
+        self.bytes = bytes
+        self.path = path
+
+    def get_bytes(self):
+        # type: (...) -> bytes
+        if self.bytes is None:
+            if self.path is not None:
+                with capture_internal_exceptions():
+                    with open(self.path, "rb") as f:
+                        self.bytes = f.read()
+            elif self.json is not None:
+                self.bytes = json_dumps(self.json)
+        return self.bytes or b""
+
+    @property
+    def inferred_content_type(self):
+        # type: (...) -> str
+        if self.json is not None:
+            return "application/json"
+        elif self.path is not None:
+            path = self.path
+            if isinstance(path, bytes):
+                path = path.decode("utf-8", "replace")
+            ty = mimetypes.guess_type(path)[0]
+            if ty:
+                return ty
+        return "application/octet-stream"
+
+    def __repr__(self):
+        # type: (...)
-> str
+        return "<Payload %r>" % (self.inferred_content_type,)
+
+
+class Item:
+    def __init__(
+        self,
+        payload,  # type: Union[bytes, str, PayloadRef]
+        headers=None,  # type: Optional[Dict[str, Any]]
+        type=None,  # type: Optional[str]
+        content_type=None,  # type: Optional[str]
+        filename=None,  # type: Optional[str]
+    ):
+        if headers is not None:
+            headers = dict(headers)
+        elif headers is None:
+            headers = {}
+        self.headers = headers
+        if isinstance(payload, bytes):
+            payload = PayloadRef(bytes=payload)
+        elif isinstance(payload, str):
+            payload = PayloadRef(bytes=payload.encode("utf-8"))
+        else:
+            payload = payload
+
+        if filename is not None:
+            headers["filename"] = filename
+        if type is not None:
+            headers["type"] = type
+        if content_type is not None:
+            headers["content_type"] = content_type
+        elif "content_type" not in headers:
+            headers["content_type"] = payload.inferred_content_type
+
+        self.payload = payload
+
+    def __repr__(self):
+        # type: (...) -> str
+        return "<Item headers=%r payload=%r data_category=%r>" % (
+            self.headers,
+            self.payload,
+            self.data_category,
+        )
+
+    @property
+    def type(self):
+        # type: (...) -> Optional[str]
+        return self.headers.get("type")
+
+    @property
+    def data_category(self):
+        # type: (...) -> EventDataCategory
+        ty = self.headers.get("type")
+        if ty == "session" or ty == "sessions":
+            return "session"
+        elif ty == "attachment":
+            return "attachment"
+        elif ty == "transaction":
+            return "transaction"
+        elif ty == "event":
+            return "error"
+        elif ty == "log":
+            return "log"
+        elif ty == "client_report":
+            return "internal"
+        elif ty == "profile":
+            return "profile"
+        elif ty == "profile_chunk":
+            return "profile_chunk"
+        elif ty == "check_in":
+            return "monitor"
+        else:
+            return "default"
+
+    def get_bytes(self):
+        # type: (...) -> bytes
+        return self.payload.get_bytes()
+
+    def get_event(self):
+        # type: (...) -> Optional[Event]
+        """
+        Returns an error event if there is one.
+ """ + if self.type == "event" and self.payload.json is not None: + return self.payload.json + return None + + def get_transaction_event(self): + # type: (...) -> Optional[Event] + if self.type == "transaction" and self.payload.json is not None: + return self.payload.json + return None + + def serialize_into( + self, f # type: Any + ): + # type: (...) -> None + headers = dict(self.headers) + bytes = self.get_bytes() + headers["length"] = len(bytes) + f.write(json_dumps(headers)) + f.write(b"\n") + f.write(bytes) + f.write(b"\n") + + def serialize(self): + # type: (...) -> bytes + out = io.BytesIO() + self.serialize_into(out) + return out.getvalue() + + @classmethod + def deserialize_from( + cls, f # type: Any + ): + # type: (...) -> Optional[Item] + line = f.readline().rstrip() + if not line: + return None + headers = parse_json(line) + length = headers.get("length") + if length is not None: + payload = f.read(length) + f.readline() + else: + # if no length was specified we need to read up to the end of line + # and remove it (if it is present, i.e. not the very last char in an eof terminated envelope) + payload = f.readline().rstrip(b"\n") + if headers.get("type") in ("event", "transaction"): + rv = cls(headers=headers, payload=PayloadRef(json=parse_json(payload))) + else: + rv = cls(headers=headers, payload=payload) + return rv + + @classmethod + def deserialize( + cls, bytes # type: bytes + ): + # type: (...) 
-> Optional[Item] + return cls.deserialize_from(io.BytesIO(bytes)) diff --git a/src/sentry_sdk_alpha/feature_flags.py b/src/sentry_sdk_alpha/feature_flags.py new file mode 100644 index 00000000000000..9ba7d82c53d21a --- /dev/null +++ b/src/sentry_sdk_alpha/feature_flags.py @@ -0,0 +1,72 @@ +import copy +import sentry_sdk_alpha +from sentry_sdk_alpha._lru_cache import LRUCache +from threading import Lock + +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from typing import TypedDict + + FlagData = TypedDict("FlagData", {"flag": str, "result": bool}) + + +DEFAULT_FLAG_CAPACITY = 100 + + +class FlagBuffer: + + def __init__(self, capacity): + # type: (int) -> None + self.capacity = capacity + self.lock = Lock() + + # Buffer is private. The name is mangled to discourage use. If you use this attribute + # directly you're on your own! + self.__buffer = LRUCache(capacity) + + def clear(self): + # type: () -> None + self.__buffer = LRUCache(self.capacity) + + def __deepcopy__(self, memo): + # type: (dict[int, Any]) -> FlagBuffer + with self.lock: + buffer = FlagBuffer(self.capacity) + buffer.__buffer = copy.deepcopy(self.__buffer, memo) + return buffer + + def get(self): + # type: () -> list[FlagData] + with self.lock: + return [ + {"flag": key, "result": value} for key, value in self.__buffer.get_all() + ] + + def set(self, flag, result): + # type: (str, bool) -> None + if isinstance(result, FlagBuffer): + # If someone were to insert `self` into `self` this would create a circular dependency + # on the lock. This is of course a deadlock. However, this is far outside the expected + # usage of this class. We guard against it here for completeness and to document this + # expected failure mode. + raise ValueError( + "FlagBuffer instances can not be inserted into the dictionary." 
+ ) + + with self.lock: + self.__buffer.set(flag, result) + + +def add_feature_flag(flag, result): + # type: (str, bool) -> None + """ + Records a flag and its value to be sent on subsequent error events. + We recommend you do this on flag evaluations. Flags are buffered per Sentry scope. + """ + flags = sentry_sdk_alpha.get_isolation_scope().flags + flags.set(flag, result) + + span = sentry_sdk_alpha.get_current_span() + if span: + span.set_flag(flag, result) diff --git a/src/sentry_sdk_alpha/integrations/__init__.py b/src/sentry_sdk_alpha/integrations/__init__.py new file mode 100644 index 00000000000000..30daec17778668 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/__init__.py @@ -0,0 +1,296 @@ +from abc import ABC, abstractmethod +from threading import Lock + +from sentry_sdk_alpha.utils import logger + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from collections.abc import Sequence + from typing import Callable + from typing import Dict + from typing import Iterator + from typing import List + from typing import Optional + from typing import Set + from typing import Type + from typing import Union + + +_DEFAULT_FAILED_REQUEST_STATUS_CODES = frozenset(range(500, 600)) + + +_installer_lock = Lock() + +# Set of all integration identifiers we have attempted to install +_processed_integrations = set() # type: Set[str] + +# Set of all integration identifiers we have actually installed +_installed_integrations = set() # type: Set[str] + + +def _generate_default_integrations_iterator( + integrations, # type: List[str] + auto_enabling_integrations, # type: List[str] +): + # type: (...) 
-> Callable[[bool], Iterator[Type[Integration]]]
+
+    def iter_default_integrations(with_auto_enabling_integrations):
+        # type: (bool) -> Iterator[Type[Integration]]
+        """Returns an iterator of the default integration classes:"""
+        from importlib import import_module
+
+        if with_auto_enabling_integrations:
+            all_import_strings = integrations + auto_enabling_integrations
+        else:
+            all_import_strings = integrations
+
+        for import_string in all_import_strings:
+            try:
+                module, cls = import_string.rsplit(".", 1)
+                yield getattr(import_module(module), cls)
+            except (DidNotEnable, SyntaxError) as e:
+                logger.debug(
+                    "Did not import default integration %s: %s", import_string, e
+                )
+
+    if isinstance(iter_default_integrations.__doc__, str):
+        for import_string in integrations:
+            iter_default_integrations.__doc__ += "\n- `{}`".format(import_string)
+
+    return iter_default_integrations
+
+
+_DEFAULT_INTEGRATIONS = [
+    # stdlib/base runtime integrations
+    "sentry_sdk_alpha.integrations.argv.ArgvIntegration",
+    "sentry_sdk_alpha.integrations.atexit.AtexitIntegration",
+    "sentry_sdk_alpha.integrations.dedupe.DedupeIntegration",
+    "sentry_sdk_alpha.integrations.excepthook.ExcepthookIntegration",
+    "sentry_sdk_alpha.integrations.logging.LoggingIntegration",
+    "sentry_sdk_alpha.integrations.modules.ModulesIntegration",
+    "sentry_sdk_alpha.integrations.stdlib.StdlibIntegration",
+    "sentry_sdk_alpha.integrations.threading.ThreadingIntegration",
+]
+
+_AUTO_ENABLING_INTEGRATIONS = [
+    "sentry_sdk_alpha.integrations.aiohttp.AioHttpIntegration",
+    "sentry_sdk_alpha.integrations.anthropic.AnthropicIntegration",
+    "sentry_sdk_alpha.integrations.ariadne.AriadneIntegration",
+    "sentry_sdk_alpha.integrations.arq.ArqIntegration",
+    "sentry_sdk_alpha.integrations.asyncpg.AsyncPGIntegration",
+    "sentry_sdk_alpha.integrations.boto3.Boto3Integration",
+    "sentry_sdk_alpha.integrations.bottle.BottleIntegration",
+    "sentry_sdk_alpha.integrations.celery.CeleryIntegration",
+    "sentry_sdk_alpha.integrations.chalice.ChaliceIntegration",
+    "sentry_sdk_alpha.integrations.clickhouse_driver.ClickhouseDriverIntegration",
+    "sentry_sdk_alpha.integrations.cohere.CohereIntegration",
+    "sentry_sdk_alpha.integrations.django.DjangoIntegration",
+    "sentry_sdk_alpha.integrations.falcon.FalconIntegration",
+    "sentry_sdk_alpha.integrations.fastapi.FastApiIntegration",
+    "sentry_sdk_alpha.integrations.flask.FlaskIntegration",
+    "sentry_sdk_alpha.integrations.gql.GQLIntegration",
+    "sentry_sdk_alpha.integrations.graphene.GrapheneIntegration",
+    "sentry_sdk_alpha.integrations.httpx.HttpxIntegration",
+    "sentry_sdk_alpha.integrations.huey.HueyIntegration",
+    "sentry_sdk_alpha.integrations.huggingface_hub.HuggingfaceHubIntegration",
+    "sentry_sdk_alpha.integrations.langchain.LangchainIntegration",
+    "sentry_sdk_alpha.integrations.litestar.LitestarIntegration",
+    "sentry_sdk_alpha.integrations.loguru.LoguruIntegration",
+    "sentry_sdk_alpha.integrations.openai.OpenAIIntegration",
+    "sentry_sdk_alpha.integrations.pymongo.PyMongoIntegration",
+    "sentry_sdk_alpha.integrations.pyramid.PyramidIntegration",
+    "sentry_sdk_alpha.integrations.quart.QuartIntegration",
+    "sentry_sdk_alpha.integrations.redis.RedisIntegration",
+    "sentry_sdk_alpha.integrations.rq.RqIntegration",
+    "sentry_sdk_alpha.integrations.sanic.SanicIntegration",
+    "sentry_sdk_alpha.integrations.sqlalchemy.SqlalchemyIntegration",
+    "sentry_sdk_alpha.integrations.starlette.StarletteIntegration",
+    "sentry_sdk_alpha.integrations.starlite.StarliteIntegration",
+    "sentry_sdk_alpha.integrations.strawberry.StrawberryIntegration",
+    "sentry_sdk_alpha.integrations.tornado.TornadoIntegration",
+]
+
+iter_default_integrations = _generate_default_integrations_iterator(
+    integrations=_DEFAULT_INTEGRATIONS,
+    auto_enabling_integrations=_AUTO_ENABLING_INTEGRATIONS,
+)
+
+del _generate_default_integrations_iterator
+
+
+_MIN_VERSIONS = {
+    "aiohttp": (3, 4),
+    "anthropic": (0, 16),
+    "ariadne": (0, 20),
+    "arq": (0, 23),
+    "asyncpg": (0, 23),
+    "beam": (2, 12),
+    "boto3": (1, 12),  # botocore
+    "bottle": (0, 12),
+    "celery": (4, 4, 7),
+    "chalice": (1, 16, 0),
+    "clickhouse_driver": (0, 2, 0),
+    "common": (1, 4, 0),  #
opentelemetry-sdk + "cohere": (5, 4, 0), + "django": (2, 0), + "dramatiq": (1, 9), + "falcon": (3, 0), + "fastapi": (0, 79, 0), + "flask": (1, 1, 4), + "gql": (3, 4, 1), + "graphene": (3, 3), + "grpc": (1, 32, 0), # grpcio + "huggingface_hub": (0, 22), + "langchain": (0, 0, 210), + "launchdarkly": (9, 8, 0), + "loguru": (0, 7, 0), + "openai": (1, 0, 0), + "openfeature": (0, 7, 1), + "quart": (0, 16, 0), + "ray": (2, 7, 0), + "requests": (2, 0, 0), + "rq": (0, 6), + "sanic": (0, 8), + "sqlalchemy": (1, 2), + "starlette": (0, 16), + "starlite": (1, 48), + "statsig": (0, 55, 3), + "strawberry": (0, 209, 5), + "tornado": (6, 0), + "trytond": (5, 0), + "typer": (0, 15), + "unleash": (6, 0, 1), +} + + +def setup_integrations( + integrations, + with_defaults=True, + with_auto_enabling_integrations=False, + disabled_integrations=None, +): + # type: (Sequence[Integration], bool, bool, Optional[Sequence[Union[type[Integration], Integration]]]) -> Dict[str, Integration] + """ + Given a list of integration instances, this installs them all. + + When `with_defaults` is set to `True` all default integrations are added + unless they were already provided before. + + `disabled_integrations` takes precedence over `with_defaults` and + `with_auto_enabling_integrations`. + """ + integrations = dict( + (integration.identifier, integration) for integration in integrations or () + ) + + logger.debug("Setting up integrations (with default = %s)", with_defaults) + + # Integrations that will not be enabled + disabled_integrations = [ + integration if isinstance(integration, type) else type(integration) + for integration in disabled_integrations or [] + ] + + # Integrations that are not explicitly set up by the user. 
+ used_as_default_integration = set() + + if with_defaults: + for integration_cls in iter_default_integrations( + with_auto_enabling_integrations + ): + if integration_cls.identifier not in integrations: + instance = integration_cls() + integrations[instance.identifier] = instance + used_as_default_integration.add(instance.identifier) + + for identifier, integration in integrations.items(): + with _installer_lock: + if identifier not in _processed_integrations: + if type(integration) in disabled_integrations: + logger.debug("Ignoring integration %s", identifier) + else: + logger.debug( + "Setting up previously not enabled integration %s", identifier + ) + try: + type(integration).setup_once() + except DidNotEnable as e: + if identifier not in used_as_default_integration: + raise + + logger.debug( + "Did not enable default integration %s: %s", identifier, e + ) + else: + _installed_integrations.add(identifier) + + _processed_integrations.add(identifier) + + integrations = { + identifier: integration + for identifier, integration in integrations.items() + if identifier in _installed_integrations + } + + for identifier in integrations: + logger.debug("Enabling integration %s", identifier) + + return integrations + + +def _check_minimum_version(integration, version, package=None): + # type: (type[Integration], Optional[tuple[int, ...]], Optional[str]) -> None + package = package or integration.identifier + + if version is None: + raise DidNotEnable(f"Unparsable {package} version.") + + min_version = _MIN_VERSIONS.get(integration.identifier) + if min_version is None: + return + + if version < min_version: + raise DidNotEnable( + f"Integration only supports {package} {'.'.join(map(str, min_version))} or newer." + ) + + +class DidNotEnable(Exception): # noqa: N818 + """ + The integration could not be enabled due to a trivial user error like + `flask` not being installed for the `FlaskIntegration`. 
+ + This exception is silently swallowed for default integrations, but reraised + for explicitly enabled integrations. + """ + + +class Integration(ABC): + """Baseclass for all integrations. + + To accept options for an integration, implement your own constructor that + saves those options on `self`. + """ + + install = None + """Legacy method, do not implement.""" + + identifier = None # type: str + """String unique ID of integration type""" + + @staticmethod + @abstractmethod + def setup_once(): + # type: () -> None + """ + Initialize the integration. + + This function is only called once, ever. Configuration is not available + at this point, so the only thing to do here is to hook into exception + handlers, and perhaps do monkeypatches. + + Inside those hooks `Integration.current` can be used to access the + instance again. + """ + pass diff --git a/src/sentry_sdk_alpha/integrations/_asgi_common.py b/src/sentry_sdk_alpha/integrations/_asgi_common.py new file mode 100644 index 00000000000000..11dd43b07420fc --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/_asgi_common.py @@ -0,0 +1,108 @@ +import urllib + +from sentry_sdk_alpha.scope import should_send_default_pii +from sentry_sdk_alpha.integrations._wsgi_common import _filter_headers + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import Dict + from typing import Optional + from typing import Union + from typing_extensions import Literal + + from sentry_sdk_alpha.utils import AnnotatedValue + + +def _get_headers(asgi_scope): + # type: (Any) -> Dict[str, str] + """ + Extract headers from the ASGI scope, in the format that the Sentry protocol expects. 
+ """ + headers = {} # type: Dict[str, str] + for raw_key, raw_value in asgi_scope.get("headers", {}): + key = raw_key.decode("latin-1") + value = raw_value.decode("latin-1") + if key in headers: + headers[key] = headers[key] + ", " + value + else: + headers[key] = value + + return headers + + +def _get_url(asgi_scope, default_scheme=None, host=None): + # type: (Dict[str, Any], Optional[Literal["ws", "http"]], Optional[Union[AnnotatedValue, str]]) -> str + """ + Extract URL from the ASGI scope, without also including the querystring. + """ + scheme = asgi_scope.get("scheme", default_scheme) + + server = asgi_scope.get("server", None) + path = asgi_scope.get("root_path", "") + asgi_scope.get("path", "") + + if host: + return "%s://%s%s" % (scheme, host, path) + + if server is not None: + host, port = server + default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}.get(scheme) + if port != default_port: + return "%s://%s:%s%s" % (scheme, host, port, path) + return "%s://%s%s" % (scheme, host, path) + return path + + +def _get_query(asgi_scope): + # type: (Any) -> Any + """ + Extract querystring from the ASGI scope, in the format that the Sentry protocol expects. + """ + qs = asgi_scope.get("query_string") + if not qs: + return None + return urllib.parse.unquote(qs.decode("latin-1")) + + +def _get_ip(asgi_scope): + # type: (Any) -> str + """ + Extract IP Address from the ASGI scope based on request headers with fallback to scope client. + """ + headers = _get_headers(asgi_scope) + try: + return headers["x-forwarded-for"].split(",")[0].strip() + except (KeyError, IndexError): + pass + + try: + return headers["x-real-ip"] + except KeyError: + pass + + return asgi_scope.get("client")[0] + + +def _get_request_data(asgi_scope): + # type: (Any) -> Dict[str, Any] + """ + Returns data related to the HTTP request from the ASGI scope. 
+ """ + request_data = {} # type: Dict[str, Any] + ty = asgi_scope["type"] + if ty in ("http", "websocket"): + request_data["method"] = asgi_scope.get("method") + + request_data["headers"] = headers = _filter_headers(_get_headers(asgi_scope)) + request_data["query_string"] = _get_query(asgi_scope) + + request_data["url"] = _get_url( + asgi_scope, "http" if ty == "http" else "ws", headers.get("host") + ) + + client = asgi_scope.get("client") + if client and should_send_default_pii(): + request_data["env"] = {"REMOTE_ADDR": _get_ip(asgi_scope)} + + return request_data diff --git a/src/sentry_sdk_alpha/integrations/_wsgi_common.py b/src/sentry_sdk_alpha/integrations/_wsgi_common.py new file mode 100644 index 00000000000000..372b23f515b04b --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/_wsgi_common.py @@ -0,0 +1,242 @@ +import json +from copy import deepcopy + +import sentry_sdk_alpha +from sentry_sdk_alpha.scope import should_send_default_pii +from sentry_sdk_alpha.utils import AnnotatedValue, SENSITIVE_DATA_SUBSTITUTE + +try: + from django.http.request import RawPostDataException +except ImportError: + RawPostDataException = None + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import Dict + from typing import Mapping + from typing import MutableMapping + from typing import Optional + from typing import Union + from sentry_sdk_alpha._types import Event + + +SENSITIVE_ENV_KEYS = ( + "REMOTE_ADDR", + "HTTP_X_FORWARDED_FOR", + "HTTP_SET_COOKIE", + "HTTP_COOKIE", + "HTTP_AUTHORIZATION", + "HTTP_X_API_KEY", + "HTTP_X_FORWARDED_FOR", + "HTTP_X_REAL_IP", +) + +SENSITIVE_HEADERS = tuple( + x[len("HTTP_") :] for x in SENSITIVE_ENV_KEYS if x.startswith("HTTP_") +) + +DEFAULT_HTTP_METHODS_TO_CAPTURE = ( + "CONNECT", + "DELETE", + "GET", + # "HEAD", # do not capture HEAD requests by default + # "OPTIONS", # do not capture OPTIONS requests by default + "PATCH", + "POST", + "PUT", + "TRACE", +) + + +def 
request_body_within_bounds(client, content_length): + # type: (Optional[sentry_sdk.client.BaseClient], int) -> bool + if client is None: + return False + + bodies = client.options["max_request_body_size"] + return not ( + bodies == "never" + or (bodies == "small" and content_length > 10**3) + or (bodies == "medium" and content_length > 10**4) + ) + + +class RequestExtractor: + """ + Base class for request extraction. + """ + + # It does not make sense to make this class an ABC because it is not used + # for typing, only so that child classes can inherit common methods from + # it. Only some child classes implement all methods that raise + # NotImplementedError in this class. + + def __init__(self, request): + # type: (Any) -> None + self.request = request + + def extract_into_event(self, event): + # type: (Event) -> None + client = sentry_sdk_alpha.get_client() + if not client.is_active(): + return + + data = None # type: Optional[Union[AnnotatedValue, Dict[str, Any]]] + + content_length = self.content_length() + request_info = event.get("request", {}) + + if should_send_default_pii(): + request_info["cookies"] = dict(self.cookies()) + + if not request_body_within_bounds(client, content_length): + data = AnnotatedValue.removed_because_over_size_limit() + else: + # First read the raw body data + # It is important to read this first because if it is Django + # it will cache the body and then we can read the cached version + # again in parsed_body() (or json() or wherever). + raw_data = None + try: + raw_data = self.raw_data() + except (RawPostDataException, ValueError): + # If DjangoRestFramework is used it already read the body for us + # so reading it here will fail. We can ignore this. 
+ pass + + parsed_body = self.parsed_body() + if parsed_body is not None: + data = parsed_body + elif raw_data: + data = AnnotatedValue.removed_because_raw_data() + else: + data = None + + if data is not None: + request_info["data"] = data + + event["request"] = deepcopy(request_info) + + def content_length(self): + # type: () -> int + try: + return int(self.env().get("CONTENT_LENGTH", 0)) + except ValueError: + return 0 + + def cookies(self): + # type: () -> MutableMapping[str, Any] + raise NotImplementedError() + + def raw_data(self): + # type: () -> Optional[Union[str, bytes]] + raise NotImplementedError() + + def form(self): + # type: () -> Optional[Dict[str, Any]] + raise NotImplementedError() + + def parsed_body(self): + # type: () -> Optional[Dict[str, Any]] + try: + form = self.form() + except Exception: + form = None + try: + files = self.files() + except Exception: + files = None + + if form or files: + data = {} + if form: + data = dict(form.items()) + if files: + for key in files.keys(): + data[key] = AnnotatedValue.removed_because_raw_data() + + return data + + return self.json() + + def is_json(self): + # type: () -> bool + return _is_json_content_type(self.env().get("CONTENT_TYPE")) + + def json(self): + # type: () -> Optional[Any] + try: + if not self.is_json(): + return None + + try: + raw_data = self.raw_data() + except (RawPostDataException, ValueError): + # The body might have already been read, in which case this will + # fail + raw_data = None + + if raw_data is None: + return None + + if isinstance(raw_data, str): + return json.loads(raw_data) + else: + return json.loads(raw_data.decode("utf-8")) + except ValueError: + pass + + return None + + def files(self): + # type: () -> Optional[Dict[str, Any]] + raise NotImplementedError() + + def size_of_file(self, file): + # type: (Any) -> int + raise NotImplementedError() + + def env(self): + # type: () -> Dict[str, Any] + raise NotImplementedError() + + +def _is_json_content_type(ct): + # type: 
(Optional[str]) -> bool + mt = (ct or "").split(";", 1)[0] + return ( + mt == "application/json" + or (mt.startswith("application/")) + and mt.endswith("+json") + ) + + +def _filter_headers(headers): + # type: (Mapping[str, str]) -> Mapping[str, Union[AnnotatedValue, str]] + if should_send_default_pii(): + return headers + + return { + k: ( + v + if k.upper().replace("-", "_") not in SENSITIVE_HEADERS + else AnnotatedValue.removed_because_over_size_limit() + ) + for k, v in headers.items() + } + + +def _request_headers_to_span_attributes(headers): + # type: (dict[str, str]) -> dict[str, str] + attributes = {} + + headers = _filter_headers(headers) + + for header, value in headers.items(): + if isinstance(value, AnnotatedValue): + value = SENSITIVE_DATA_SUBSTITUTE + attributes[f"http.request.header.{header.lower()}"] = value + + return attributes diff --git a/src/sentry_sdk_alpha/integrations/aiohttp.py b/src/sentry_sdk_alpha/integrations/aiohttp.py new file mode 100644 index 00000000000000..e12704d803d385 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/aiohttp.py @@ -0,0 +1,404 @@ +import sys +import weakref +from functools import wraps + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import ( + OP, + SPANSTATUS, + SPANDATA, + BAGGAGE_HEADER_NAME, + SOURCE_FOR_STYLE, + TransactionSource, +) +from sentry_sdk_alpha.integrations import ( + _DEFAULT_FAILED_REQUEST_STATUS_CODES, + _check_minimum_version, + Integration, + DidNotEnable, +) +from sentry_sdk_alpha.integrations.logging import ignore_logger +from sentry_sdk_alpha.sessions import track_session +from sentry_sdk_alpha.integrations._wsgi_common import ( + _filter_headers, + _request_headers_to_span_attributes, + request_body_within_bounds, +) +from sentry_sdk_alpha.tracing_utils import should_propagate_trace +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + ensure_integration_enabled, + event_from_exception, + http_client_status_to_breadcrumb_level, + logger, + parse_url, + 
# ---------------------------------------------------------------------------
# src/sentry_sdk_alpha/integrations/aiohttp.py
# (reconstructed from a whitespace-mangled diff hunk)
#
# NOTE(review): the import header is partially truncated in this view and was
# rebuilt from the names the visible code actually uses — confirm against the
# upstream module before merging.
# ---------------------------------------------------------------------------
import sys
import weakref
from functools import wraps

import sentry_sdk_alpha
from sentry_sdk_alpha.consts import OP, SPANDATA, SPANSTATUS
from sentry_sdk_alpha.integrations import (
    _DEFAULT_FAILED_REQUEST_STATUS_CODES,
    _check_minimum_version,
    DidNotEnable,
    Integration,
)
from sentry_sdk_alpha.integrations.logging import ignore_logger
from sentry_sdk_alpha.integrations._wsgi_common import (
    _filter_headers,
    _request_headers_to_span_attributes,
    request_body_within_bounds,
)
from sentry_sdk_alpha.sessions import track_session
from sentry_sdk_alpha.tracing import (
    BAGGAGE_HEADER_NAME,
    SOURCE_FOR_STYLE,
    TransactionSource,
)
from sentry_sdk_alpha.tracing_utils import should_propagate_trace
from sentry_sdk_alpha.utils import (
    capture_internal_exceptions,
    ensure_integration_enabled,
    event_from_exception,
    http_client_status_to_breadcrumb_level,
    logger,
    parse_url,
    parse_version,
    reraise,
    set_thread_info_from_span,
    transaction_from_function,
    HAS_REAL_CONTEXTVARS,
    CONTEXTVARS_ERROR_MESSAGE,
    SENSITIVE_DATA_SUBSTITUTE,
    AnnotatedValue,
)

try:
    import asyncio

    from aiohttp import __version__ as AIOHTTP_VERSION
    from aiohttp import ClientSession, TraceConfig
    from aiohttp.web import Application, HTTPException, UrlDispatcher
except ImportError:
    # The integration is useless without aiohttp; refuse to enable.
    raise DidNotEnable("AIOHTTP not installed")

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from aiohttp.web_request import Request
    from aiohttp.web_urldispatcher import UrlMappingMatchInfo
    from aiohttp import TraceRequestStartParams, TraceRequestEndParams

    from collections.abc import Set
    from types import SimpleNamespace
    from typing import Any
    from typing import Optional
    from typing import Tuple
    from typing import Union

    from sentry_sdk_alpha.utils import ExcInfo
    from sentry_sdk_alpha._types import Event, EventProcessor


# Accepted values for AioHttpIntegration(transaction_style=...).
TRANSACTION_STYLE_VALUES = ("handler_name", "method_and_path_pattern")

# aiohttp Request properties copied verbatim into span attributes
# (OpenTelemetry semantic-convention names).
REQUEST_PROPERTY_TO_ATTRIBUTE = {
    "query_string": "url.query",
    "method": "http.request.method",
    "scheme": "url.scheme",
    "path": "url.path",
}


class AioHttpIntegration(Integration):
    """Capture errors and trace incoming (server) and outgoing (client)
    HTTP activity for aiohttp applications.

    :param transaction_style: how server transactions are named; one of
        ``TRANSACTION_STYLE_VALUES``.
    :param failed_request_status_codes: HTTP status codes of ``HTTPException``
        responses that should also be reported as errors.
    """

    identifier = "aiohttp"
    origin = f"auto.http.{identifier}"

    def __init__(
        self,
        transaction_style="handler_name",  # type: str
        *,
        failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES,  # type: Set[int]
    ):
        # type: (...) -> None
        if transaction_style not in TRANSACTION_STYLE_VALUES:
            raise ValueError(
                "Invalid value for transaction_style: %s (must be in %s)"
                % (transaction_style, TRANSACTION_STYLE_VALUES)
            )
        self.transaction_style = transaction_style
        self._failed_request_status_codes = failed_request_status_codes

    @staticmethod
    def setup_once():
        # type: () -> None
        # Patch aiohttp's request handler, URL resolver and ClientSession
        # constructor. Runs once per process.

        version = parse_version(AIOHTTP_VERSION)
        _check_minimum_version(AioHttpIntegration, version)

        if not HAS_REAL_CONTEXTVARS:
            # We better have contextvars or we're going to leak state between
            # requests.
            raise DidNotEnable(
                "The aiohttp integration for Sentry requires Python 3.7+ "
                " or aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE
            )

        # aiohttp logs handler exceptions itself; don't double-report them.
        ignore_logger("aiohttp.server")

        old_handle = Application._handle

        async def sentry_app_handle(self, request, *args, **kwargs):
            # type: (Any, Request, *Any, **Any) -> Any
            # Wraps each incoming request in an isolation scope + server span.
            integration = sentry_sdk_alpha.get_client().get_integration(AioHttpIntegration)
            if integration is None:
                return await old_handle(self, request, *args, **kwargs)

            # Weak ref: the event processor must not keep the request alive.
            weak_request = weakref.ref(request)

            with sentry_sdk_alpha.isolation_scope() as scope:
                with track_session(scope, session_mode="request"):
                    # Scope data will not leak between requests because aiohttp
                    # create a task to wrap each request.
                    scope.generate_propagation_context()
                    scope.clear_breadcrumbs()
                    scope.add_event_processor(_make_request_processor(weak_request))

                    headers = dict(request.headers)
                    # Continue an incoming trace from the request headers.
                    with sentry_sdk_alpha.continue_trace(headers):
                        with sentry_sdk_alpha.start_span(
                            op=OP.HTTP_SERVER,
                            # If this transaction name makes it to the UI, AIOHTTP's
                            # URL resolver did not find a route or died trying.
                            name="generic AIOHTTP request",
                            source=TransactionSource.ROUTE,
                            origin=AioHttpIntegration.origin,
                            attributes=_prepopulate_attributes(request),
                        ) as span:
                            try:
                                # NOTE(review): *args/**kwargs received by this
                                # wrapper are not forwarded here, unlike the
                                # early-return path above — confirm intentional.
                                response = await old_handle(self, request)
                            except HTTPException as e:
                                span.set_http_status(e.status_code)

                                if (
                                    e.status_code
                                    in integration._failed_request_status_codes
                                ):
                                    _capture_exception()

                                raise
                            except (asyncio.CancelledError, ConnectionResetError):
                                # Client went away; not an error condition.
                                span.set_status(SPANSTATUS.CANCELLED)
                                raise
                            except Exception:
                                # This will probably map to a 500 but seems like we
                                # have no way to tell. Do not set span status.
                                reraise(*_capture_exception())

                            span.set_http_status(response.status)
                            return response

        Application._handle = sentry_app_handle

        old_urldispatcher_resolve = UrlDispatcher.resolve

        @wraps(old_urldispatcher_resolve)
        async def sentry_urldispatcher_resolve(self, request):
            # type: (UrlDispatcher, Request) -> UrlMappingMatchInfo
            # After aiohttp resolves the route, rename the transaction from the
            # generic placeholder to the handler name / route pattern.
            rv = await old_urldispatcher_resolve(self, request)

            integration = sentry_sdk_alpha.get_client().get_integration(AioHttpIntegration)
            if integration is None:
                return rv

            name = None

            try:
                if integration.transaction_style == "handler_name":
                    name = transaction_from_function(rv.handler)
                elif integration.transaction_style == "method_and_path_pattern":
                    route_info = rv.get_info()
                    pattern = route_info.get("path") or route_info.get("formatter")
                    name = "{} {}".format(request.method, pattern)
            except Exception:
                # Naming is best-effort; never break request handling.
                pass

            if name is not None:
                sentry_sdk_alpha.get_current_scope().set_transaction_name(
                    name,
                    source=SOURCE_FOR_STYLE[integration.transaction_style],
                )

            return rv

        UrlDispatcher.resolve = sentry_urldispatcher_resolve

        old_client_session_init = ClientSession.__init__

        @ensure_integration_enabled(AioHttpIntegration, old_client_session_init)
        def init(*args, **kwargs):
            # type: (Any, Any) -> None
            # Inject our TraceConfig so outgoing client requests get spans,
            # preserving any trace configs the caller already supplied.
            client_trace_configs = list(kwargs.get("trace_configs") or ())
            trace_config = create_trace_config()
            client_trace_configs.append(trace_config)

            kwargs["trace_configs"] = client_trace_configs
            return old_client_session_init(*args, **kwargs)

        ClientSession.__init__ = init


def create_trace_config():
    # type: () -> TraceConfig
    """Build an aiohttp ``TraceConfig`` that opens an HTTP client span on
    request start, propagates trace headers, and closes the span (plus adds a
    breadcrumb) on request end."""

    async def on_request_start(session, trace_config_ctx, params):
        # type: (ClientSession, SimpleNamespace, TraceRequestStartParams) -> None
        if sentry_sdk_alpha.get_client().get_integration(AioHttpIntegration) is None:
            return

        method = params.method.upper()

        parsed_url = None
        with capture_internal_exceptions():
            # sanitize=False: the raw URL is split into query/fragment below.
            parsed_url = parse_url(str(params.url), sanitize=False)

        span = sentry_sdk_alpha.start_span(
            op=OP.HTTP_CLIENT,
            name="%s %s"
            % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE),
            origin=AioHttpIntegration.origin,
            only_if_parent=True,
        )

        data = {
            SPANDATA.HTTP_METHOD: method,
        }
        set_thread_info_from_span(data, span)

        if parsed_url is not None:
            data["url"] = parsed_url.url
            data[SPANDATA.HTTP_QUERY] = parsed_url.query
            data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment

        for key, value in data.items():
            span.set_attribute(key, value)

        client = sentry_sdk_alpha.get_client()

        if should_propagate_trace(client, str(params.url)):
            for (
                key,
                value,
            ) in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers(
                span=span
            ):
                logger.debug(
                    "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
                        key=key, value=value, url=params.url
                    )
                )
                if key == BAGGAGE_HEADER_NAME and params.headers.get(
                    BAGGAGE_HEADER_NAME
                ):
                    # do not overwrite any existing baggage, just append to it
                    params.headers[key] += "," + value
                else:
                    params.headers[key] = value

        # Stash span + data on the per-request context for on_request_end.
        trace_config_ctx.span = span
        trace_config_ctx.span_data = data

    async def on_request_end(session, trace_config_ctx, params):
        # type: (ClientSession, SimpleNamespace, TraceRequestEndParams) -> None
        if trace_config_ctx.span is None:
            return

        span_data = trace_config_ctx.span_data or {}
        status_code = int(params.response.status)
        span_data[SPANDATA.HTTP_STATUS_CODE] = status_code
        span_data["reason"] = params.response.reason

        # Breadcrumb level mirrors the HTTP status (e.g. warning for 4xx).
        sentry_sdk_alpha.add_breadcrumb(
            type="http",
            category="httplib",
            data=span_data,
            level=http_client_status_to_breadcrumb_level(status_code),
        )

        span = trace_config_ctx.span
        span.set_http_status(int(params.response.status))
        span.set_attribute("reason", params.response.reason)
        span.finish()

    trace_config = TraceConfig()

    trace_config.on_request_start.append(on_request_start)
    trace_config.on_request_end.append(on_request_end)

    return trace_config


def _make_request_processor(weak_request):
    # type: (weakref.ReferenceType[Request]) -> EventProcessor
    """Return an event processor that attaches request metadata (URL, method,
    headers, body) to outgoing events, resolving the weak request ref lazily."""

    def aiohttp_processor(
        event,  # type: Event
        hint,  # type: dict[str, Tuple[type, BaseException, Any]]
    ):
        # type: (...) -> Event
        request = weak_request()
        if request is None:
            # Request already garbage-collected; nothing to attach.
            return event

        with capture_internal_exceptions():
            request_info = event.setdefault("request", {})

            request_info["url"] = "%s://%s%s" % (
                request.scheme,
                request.host,
                request.path,
            )

            request_info["query_string"] = request.query_string
            request_info["method"] = request.method
            request_info["env"] = {"REMOTE_ADDR": request.remote}
            request_info["headers"] = _filter_headers(dict(request.headers))

            # Just attach raw data here if it is within bounds, if available.
            # Unfortunately there's no way to get structured data from aiohttp
            # without awaiting on some coroutine.
            request_info["data"] = get_aiohttp_request_data(request)

        return event

    return aiohttp_processor


def _capture_exception():
    # type: () -> ExcInfo
    """Report the currently-handled exception to Sentry and return its
    ``sys.exc_info()`` triple so callers can re-raise it."""
    exc_info = sys.exc_info()
    event, hint = event_from_exception(
        exc_info,
        client_options=sentry_sdk_alpha.get_client().options,
        mechanism={"type": "aiohttp", "handled": False},
    )
    sentry_sdk_alpha.capture_event(event, hint=hint)
    return exc_info


BODY_NOT_READ_MESSAGE = "[Can't show request body due to implementation details.]"


def get_aiohttp_request_data(request):
    # type: (Request) -> Union[Optional[str], AnnotatedValue]
    """Best-effort extraction of the request body without awaiting.

    Reads aiohttp's private ``_read_bytes`` cache; returns the decoded body,
    a size-limit placeholder, a "not read" marker, or ``None`` for bodyless
    requests.
    """
    bytes_body = request._read_bytes

    if bytes_body is not None:
        # we have body to show
        if not request_body_within_bounds(sentry_sdk_alpha.get_client(), len(bytes_body)):
            return AnnotatedValue.removed_because_over_size_limit()

        encoding = request.charset or "utf-8"
        return bytes_body.decode(encoding, "replace")

    if request.can_read_body:
        # body exists but we can't show it
        return BODY_NOT_READ_MESSAGE

    # request has no body
    return None


def _prepopulate_attributes(request):
    # type: (Request) -> dict[str, Any]
    """Construct initial span attributes that can be used in traces sampler."""
    attributes = {}

    for prop, attr in REQUEST_PROPERTY_TO_ATTRIBUTE.items():
        if getattr(request, prop, None) is not None:
            attributes[attr] = getattr(request, prop)

    if getattr(request, "host", None) is not None:
        try:
            host, port = request.host.split(":")
            attributes["server.address"] = host
            attributes["server.port"] = port
        except ValueError:
            # No ":port" part — the host value stands alone.
            attributes["server.address"] = request.host

    with capture_internal_exceptions():
        url = f"{request.scheme}://{request.host}{request.path}"  # noqa: E231
        # NOTE(review): "url.full" is only recorded when a query string is
        # present; query-less URLs get no url.full attribute — confirm intended.
        if request.query_string:
            attributes["url.full"] = f"{url}?{request.query_string}"

    attributes.update(_request_headers_to_span_attributes(dict(request.headers)))

    return attributes
# ---------------------------------------------------------------------------
# src/sentry_sdk_alpha/integrations/anthropic.py
# (reconstructed from a whitespace-mangled diff hunk)
# ---------------------------------------------------------------------------
from functools import wraps
from typing import TYPE_CHECKING

import sentry_sdk_alpha
from sentry_sdk_alpha.ai.monitoring import record_token_usage
from sentry_sdk_alpha.consts import OP, SPANDATA
from sentry_sdk_alpha.integrations import _check_minimum_version, DidNotEnable, Integration
from sentry_sdk_alpha.scope import should_send_default_pii
from sentry_sdk_alpha.utils import (
    capture_internal_exceptions,
    event_from_exception,
    package_version,
)

try:
    from anthropic.resources import AsyncMessages, Messages

    if TYPE_CHECKING:
        from anthropic.types import MessageStreamEvent
except ImportError:
    raise DidNotEnable("Anthropic not installed")

if TYPE_CHECKING:
    from typing import Any, AsyncIterator, Iterator
    from sentry_sdk_alpha.tracing import Span


class AnthropicIntegration(Integration):
    """Trace Anthropic ``messages.create`` calls (sync and async), recording
    model, token usage, and — when PII is allowed — prompts and responses.

    :param include_prompts: attach prompt/response content to spans (only
        honored when ``send_default_pii`` is enabled).
    """

    identifier = "anthropic"
    origin = f"auto.ai.{identifier}"

    def __init__(self, include_prompts=True):
        # type: (AnthropicIntegration, bool) -> None
        self.include_prompts = include_prompts

    @staticmethod
    def setup_once():
        # type: () -> None
        version = package_version("anthropic")
        _check_minimum_version(AnthropicIntegration, version)

        # Patch both sync and async client entry points.
        Messages.create = _wrap_message_create(Messages.create)
        AsyncMessages.create = _wrap_message_create_async(AsyncMessages.create)


def _capture_exception(exc):
    # type: (Any) -> None
    """Report an exception raised by the wrapped Anthropic call."""
    event, hint = event_from_exception(
        exc,
        client_options=sentry_sdk_alpha.get_client().options,
        mechanism={"type": "anthropic", "handled": False},
    )
    sentry_sdk_alpha.capture_event(event, hint=hint)


def _calculate_token_usage(result, span):
    # type: (Messages, Span) -> None
    """Read token counts off a non-streaming result's ``usage`` and record
    them on the span. Missing/odd usage objects degrade to zero counts."""
    input_tokens = 0
    output_tokens = 0
    if hasattr(result, "usage"):
        usage = result.usage
        if hasattr(usage, "input_tokens") and isinstance(usage.input_tokens, int):
            input_tokens = usage.input_tokens
        if hasattr(usage, "output_tokens") and isinstance(usage.output_tokens, int):
            output_tokens = usage.output_tokens

    total_tokens = input_tokens + output_tokens
    record_token_usage(span, input_tokens, output_tokens, total_tokens)


def _get_responses(content):
    # type: (list[Any]) -> list[dict[str, Any]]
    """
    Get JSON of a Anthropic responses.

    Only text-bearing content items are included.
    """
    responses = []
    for item in content:
        if hasattr(item, "text"):
            responses.append(
                {
                    "type": item.type,
                    "text": item.text,
                }
            )
    return responses


def _collect_ai_data(event, input_tokens, output_tokens, content_blocks):
    # type: (MessageStreamEvent, int, int, list[str]) -> tuple[int, int, list[str]]
    """
    Count token usage and collect content blocks from the AI streaming response.
    """
    with capture_internal_exceptions():
        if hasattr(event, "type"):
            if event.type == "message_start":
                usage = event.message.usage
                input_tokens += usage.input_tokens
                output_tokens += usage.output_tokens
            elif event.type == "content_block_start":
                pass
            elif event.type == "content_block_delta":
                if hasattr(event.delta, "text"):
                    content_blocks.append(event.delta.text)
                elif hasattr(event.delta, "partial_json"):
                    content_blocks.append(event.delta.partial_json)
            elif event.type == "content_block_stop":
                pass
            elif event.type == "message_delta":
                output_tokens += event.usage.output_tokens

    return input_tokens, output_tokens, content_blocks


def _add_ai_data_to_span(
    span, integration, input_tokens, output_tokens, content_blocks
):
    # type: (Span, AnthropicIntegration, int, int, list[str]) -> None
    """
    Add token usage and content blocks from the AI streaming response to the span.
    """
    with capture_internal_exceptions():
        if should_send_default_pii() and integration.include_prompts:
            complete_message = "".join(content_blocks)
            span.set_attribute(
                SPANDATA.AI_RESPONSES,
                [{"type": "text", "text": complete_message}],
            )
        total_tokens = input_tokens + output_tokens
        record_token_usage(span, input_tokens, output_tokens, total_tokens)
        span.set_attribute(SPANDATA.AI_STREAMING, True)


def _sentry_patched_create_common(f, *args, **kwargs):
    # type: (Any, *Any, **Any) -> Any
    """Generator shared by the sync and async wrappers.

    Yields ``(f, args, kwargs)`` once so the caller can invoke ``f`` in its
    own sync/async style, then receives the result via ``gen.send(result)``
    and finalizes the span (attributes, token usage, streaming hooks).
    """
    integration = kwargs.pop("integration")
    if integration is None:
        return f(*args, **kwargs)

    if "messages" not in kwargs:
        return f(*args, **kwargs)

    try:
        iter(kwargs["messages"])
    except TypeError:
        return f(*args, **kwargs)

    span = sentry_sdk_alpha.start_span(
        op=OP.ANTHROPIC_MESSAGES_CREATE,
        description="Anthropic messages create",
        origin=AnthropicIntegration.origin,
        only_if_parent=True,
    )
    # Entered manually because the span may outlive this frame (streaming).
    span.__enter__()

    result = yield f, args, kwargs

    # add data to span and finish it
    messages = list(kwargs["messages"])
    model = kwargs.get("model")

    with capture_internal_exceptions():
        span.set_attribute(SPANDATA.AI_MODEL_ID, model)
        span.set_attribute(SPANDATA.AI_STREAMING, False)

        if should_send_default_pii() and integration.include_prompts:
            span.set_attribute(SPANDATA.AI_INPUT_MESSAGES, messages)

        if hasattr(result, "content"):
            # Non-streaming response: record everything and close the span now.
            if should_send_default_pii() and integration.include_prompts:
                span.set_attribute(
                    SPANDATA.AI_RESPONSES, _get_responses(result.content)
                )
            _calculate_token_usage(result, span)
            span.__exit__(None, None, None)

        # Streaming response
        elif hasattr(result, "_iterator"):
            old_iterator = result._iterator

            def new_iterator():
                # type: () -> Iterator[MessageStreamEvent]
                input_tokens = 0
                output_tokens = 0
                content_blocks = []  # type: list[str]

                for event in old_iterator:
                    input_tokens, output_tokens, content_blocks = _collect_ai_data(
                        event, input_tokens, output_tokens, content_blocks
                    )
                    yield event

                _add_ai_data_to_span(
                    span, integration, input_tokens, output_tokens, content_blocks
                )
                span.__exit__(None, None, None)

            async def new_iterator_async():
                # type: () -> AsyncIterator[MessageStreamEvent]
                input_tokens = 0
                output_tokens = 0
                content_blocks = []  # type: list[str]

                async for event in old_iterator:
                    input_tokens, output_tokens, content_blocks = _collect_ai_data(
                        event, input_tokens, output_tokens, content_blocks
                    )
                    yield event

                _add_ai_data_to_span(
                    span, integration, input_tokens, output_tokens, content_blocks
                )
                span.__exit__(None, None, None)

            # BUGFIX: the original compared str(type(result._iterator)) to an
            # empty string (the "<class 'async_generator'>" literal was lost),
            # so the async branch was unreachable and async streams were
            # wrapped with the sync iterator. Detect async iterators
            # structurally via the async-iteration protocol instead.
            if hasattr(result._iterator, "__anext__"):
                result._iterator = new_iterator_async()
            else:
                result._iterator = new_iterator()

        else:
            # Unknown result shape: flag it but still close the span.
            span.set_attribute("unknown_response", True)
            span.__exit__(None, None, None)

    return result


def _wrap_message_create(f):
    # type: (Any) -> Any
    """Wrap the synchronous ``Messages.create`` with Sentry instrumentation."""

    def _execute_sync(f, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        gen = _sentry_patched_create_common(f, *args, **kwargs)

        try:
            f, args, kwargs = next(gen)
        except StopIteration as e:
            # Instrumentation declined (no integration / no messages);
            # the generator already produced the raw result.
            return e.value

        try:
            try:
                result = f(*args, **kwargs)
            except Exception as exc:
                _capture_exception(exc)
                raise exc from None

            return gen.send(result)
        except StopIteration as e:
            return e.value

    @wraps(f)
    def _sentry_patched_create_sync(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        integration = sentry_sdk_alpha.get_client().get_integration(AnthropicIntegration)
        kwargs["integration"] = integration

        return _execute_sync(f, *args, **kwargs)

    return _sentry_patched_create_sync


def _wrap_message_create_async(f):
    # type: (Any) -> Any
    """Wrap the asynchronous ``AsyncMessages.create`` with Sentry instrumentation."""

    async def _execute_async(f, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        gen = _sentry_patched_create_common(f, *args, **kwargs)

        try:
            f, args, kwargs = next(gen)
        except StopIteration as e:
            # Instrumentation declined; e.value is the un-awaited coroutine.
            return await e.value

        try:
            try:
                result = await f(*args, **kwargs)
            except Exception as exc:
                _capture_exception(exc)
                raise exc from None

            return gen.send(result)
        except StopIteration as e:
            return e.value

    @wraps(f)
    async def _sentry_patched_create_async(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        integration = sentry_sdk_alpha.get_client().get_integration(AnthropicIntegration)
        kwargs["integration"] = integration

        return await _execute_async(f, *args, **kwargs)

    return _sentry_patched_create_async


# ---------------------------------------------------------------------------
# src/sentry_sdk_alpha/integrations/argv.py
# ---------------------------------------------------------------------------
import sys

import sentry_sdk_alpha
from sentry_sdk_alpha.integrations import Integration
from sentry_sdk_alpha.scope import add_global_event_processor

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Optional

    from sentry_sdk_alpha._types import Event, Hint


class ArgvIntegration(Integration):
    """Attach ``sys.argv`` to every event under ``extra["sys.argv"]``."""

    identifier = "argv"

    @staticmethod
    def setup_once():
        # type: () -> None
        @add_global_event_processor
        def processor(event, hint):
            # type: (Event, Optional[Hint]) -> Optional[Event]
            if sentry_sdk_alpha.get_client().get_integration(ArgvIntegration) is not None:
                extra = event.setdefault("extra", {})
                # If some event processor decided to set extra to e.g. an
                # `int`, don't crash. Not here.
                if isinstance(extra, dict):
                    extra["sys.argv"] = sys.argv

            return event


# ---------------------------------------------------------------------------
# src/sentry_sdk_alpha/integrations/ariadne.py
# ---------------------------------------------------------------------------
from importlib import import_module

import sentry_sdk_alpha
from sentry_sdk_alpha import get_client, capture_event
from sentry_sdk_alpha.integrations import _check_minimum_version, DidNotEnable, Integration
from sentry_sdk_alpha.integrations.logging import ignore_logger
from sentry_sdk_alpha.integrations._wsgi_common import request_body_within_bounds
from sentry_sdk_alpha.scope import should_send_default_pii
from sentry_sdk_alpha.utils import (
    capture_internal_exceptions,
    ensure_integration_enabled,
    event_from_exception,
    package_version,
)

try:
    # importing like this is necessary due to name shadowing in ariadne
    # (ariadne.graphql is also a function)
    ariadne_graphql = import_module("ariadne.graphql")
except ImportError:
    raise DidNotEnable("ariadne is not installed")

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any, Dict, List, Optional
    from ariadne.types import GraphQLError, GraphQLResult, GraphQLSchema, QueryParser  # type: ignore
    from graphql.language.ast import DocumentNode
    from sentry_sdk_alpha._types import Event, EventProcessor


class AriadneIntegration(Integration):
    """Capture GraphQL errors and request/response context for ariadne."""

    identifier = "ariadne"

    @staticmethod
    def setup_once():
        # type: () -> None
        version = package_version("ariadne")
        _check_minimum_version(AriadneIntegration, version)

        # ariadne logs resolver errors itself; avoid duplicate reports.
        ignore_logger("ariadne")

        _patch_graphql()


def _patch_graphql():
    # type: () -> None
    """Monkey-patch ariadne's parse/error/result hooks to attach event
    processors and report GraphQL errors to Sentry."""
    old_parse_query = ariadne_graphql.parse_query
    old_handle_errors = ariadne_graphql.handle_graphql_errors
    old_handle_query_result = ariadne_graphql.handle_query_result

    @ensure_integration_enabled(AriadneIntegration, old_parse_query)
    def _sentry_patched_parse_query(context_value, query_parser, data):
        # type: (Optional[Any], Optional[QueryParser], Any) -> DocumentNode
        # Register the request processor before parsing so even parse
        # failures carry request data.
        event_processor = _make_request_event_processor(data)
        sentry_sdk_alpha.get_isolation_scope().add_event_processor(event_processor)

        result = old_parse_query(context_value, query_parser, data)
        return result

    @ensure_integration_enabled(AriadneIntegration, old_handle_errors)
    def _sentry_patched_handle_graphql_errors(errors, *args, **kwargs):
        # type: (List[GraphQLError], Any, Any) -> GraphQLResult
        result = old_handle_errors(errors, *args, **kwargs)

        # result[1] is the serialized response payload.
        event_processor = _make_response_event_processor(result[1])
        sentry_sdk_alpha.get_isolation_scope().add_event_processor(event_processor)

        client = get_client()
        if client.is_active():
            with capture_internal_exceptions():
                for error in errors:
                    event, hint = event_from_exception(
                        error,
                        client_options=client.options,
                        mechanism={
                            "type": AriadneIntegration.identifier,
                            "handled": False,
                        },
                    )
                    capture_event(event, hint=hint)

        return result

    @ensure_integration_enabled(AriadneIntegration, old_handle_query_result)
    def _sentry_patched_handle_query_result(result, *args, **kwargs):
        # type: (Any, Any, Any) -> GraphQLResult
        query_result = old_handle_query_result(result, *args, **kwargs)

        event_processor = _make_response_event_processor(query_result[1])
        sentry_sdk_alpha.get_isolation_scope().add_event_processor(event_processor)

        client = get_client()
        if client.is_active():
            with capture_internal_exceptions():
                for error in result.errors or []:
                    event, hint = event_from_exception(
                        error,
                        client_options=client.options,
                        mechanism={
                            "type": AriadneIntegration.identifier,
                            "handled": False,
                        },
                    )
                    capture_event(event, hint=hint)

        return query_result

    ariadne_graphql.parse_query = _sentry_patched_parse_query  # type: ignore
    ariadne_graphql.handle_graphql_errors = _sentry_patched_handle_graphql_errors  # type: ignore
    ariadne_graphql.handle_query_result = _sentry_patched_handle_query_result  # type: ignore


def _make_request_event_processor(data):
    # type: (GraphQLSchema) -> EventProcessor
    """Add request data and api_target to events."""

    def inner(event, hint):
        # type: (Event, dict[str, Any]) -> Event
        if not isinstance(data, dict):
            return event

        with capture_internal_exceptions():
            try:
                content_length = int(
                    (data.get("headers") or {}).get("Content-Length", 0)
                )
            except (TypeError, ValueError):
                return event

            if should_send_default_pii() and request_body_within_bounds(
                get_client(), content_length
            ):
                request_info = event.setdefault("request", {})
                request_info["api_target"] = "graphql"
                request_info["data"] = data

            elif event.get("request", {}).get("data"):
                # PII disabled or body too large: strip any body already set.
                del event["request"]["data"]

        return event

    return inner


def _make_response_event_processor(response):
    # type: (Dict[str, Any]) -> EventProcessor
    """Add response data to the event's response context."""

    def inner(event, hint):
        # type: (Event, dict[str, Any]) -> Event
        with capture_internal_exceptions():
            if should_send_default_pii() and response.get("errors"):
                contexts = event.setdefault("contexts", {})
                contexts["response"] = {
                    "data": response,
                }

        return event

    return inner
# ---------------------------------------------------------------------------
# src/sentry_sdk_alpha/integrations/arq.py
# (reconstructed from a whitespace-mangled diff hunk)
#
# NOTE(review): the opening import lines fall just outside this span in the
# mangled diff and are restated here from the visible hunk — verify upstream.
# ---------------------------------------------------------------------------
import sys

import sentry_sdk_alpha
from sentry_sdk_alpha.consts import OP, SPANSTATUS
from sentry_sdk_alpha.integrations import _check_minimum_version, DidNotEnable, Integration
from sentry_sdk_alpha.integrations.logging import ignore_logger
from sentry_sdk_alpha.scope import should_send_default_pii
from sentry_sdk_alpha.tracing import TransactionSource
from sentry_sdk_alpha.utils import (
    capture_internal_exceptions,
    ensure_integration_enabled,
    event_from_exception,
    SENSITIVE_DATA_SUBSTITUTE,
    parse_version,
    reraise,
)

try:
    import arq.worker
    from arq.version import VERSION as ARQ_VERSION
    from arq.connections import ArqRedis
    from arq.worker import JobExecutionFailed, Retry, RetryJob, Worker
except ImportError:
    raise DidNotEnable("Arq is not installed")

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any, Dict, Optional, Union

    from sentry_sdk_alpha._types import EventProcessor, Event, ExcInfo, Hint

    from arq.cron import CronJob
    from arq.jobs import Job
    from arq.typing import WorkerCoroutine
    from arq.worker import Function

# arq exceptions that are control flow (retry/abort), not crashes.
ARQ_CONTROL_FLOW_EXCEPTIONS = (JobExecutionFailed, Retry, RetryJob)

# Placeholder until the job name is known (see _make_event_processor).
DEFAULT_TRANSACTION_NAME = "unknown arq task"


class ArqIntegration(Integration):
    """Trace arq job enqueueing and execution and report task failures."""

    identifier = "arq"
    origin = f"auto.queue.{identifier}"

    @staticmethod
    def setup_once():
        # type: () -> None

        try:
            # ARQ_VERSION is a string in newer releases, a VersionInfo before.
            if isinstance(ARQ_VERSION, str):
                version = parse_version(ARQ_VERSION)
            else:
                version = ARQ_VERSION.version[:2]

        except (TypeError, ValueError):
            version = None

        _check_minimum_version(ArqIntegration, version)

        patch_enqueue_job()
        patch_run_job()
        patch_create_worker()

        # arq logs worker exceptions itself; don't double-report.
        ignore_logger("arq.worker")


def patch_enqueue_job():
    # type: () -> None
    """Wrap ``ArqRedis.enqueue_job`` in a queue-submit span."""
    old_enqueue_job = ArqRedis.enqueue_job
    # arq introspects __kwdefaults__ of enqueue_job; preserve them on the wrapper.
    original_kwdefaults = old_enqueue_job.__kwdefaults__

    async def _sentry_enqueue_job(self, function, *args, **kwargs):
        # type: (ArqRedis, str, *Any, **Any) -> Optional[Job]
        integration = sentry_sdk_alpha.get_client().get_integration(ArqIntegration)
        if integration is None:
            return await old_enqueue_job(self, function, *args, **kwargs)

        with sentry_sdk_alpha.start_span(
            op=OP.QUEUE_SUBMIT_ARQ,
            name=function,
            origin=ArqIntegration.origin,
            only_if_parent=True,
        ):
            return await old_enqueue_job(self, function, *args, **kwargs)

    _sentry_enqueue_job.__kwdefaults__ = original_kwdefaults
    ArqRedis.enqueue_job = _sentry_enqueue_job


def patch_run_job():
    # type: () -> None
    """Wrap ``Worker.run_job`` in an isolation scope and a task span."""
    old_run_job = Worker.run_job

    async def _sentry_run_job(self, job_id, score):
        # type: (Worker, str, int) -> None
        integration = sentry_sdk_alpha.get_client().get_integration(ArqIntegration)
        if integration is None:
            return await old_run_job(self, job_id, score)

        with sentry_sdk_alpha.isolation_scope() as scope:
            scope._name = "arq"
            # Real job name is filled in later by the event processor.
            scope.set_transaction_name(
                DEFAULT_TRANSACTION_NAME,
                source=TransactionSource.TASK,
            )
            scope.clear_breadcrumbs()

            with sentry_sdk_alpha.start_span(
                op=OP.QUEUE_TASK_ARQ,
                name=DEFAULT_TRANSACTION_NAME,
                source=TransactionSource.TASK,
                origin=ArqIntegration.origin,
            ) as span:
                return_value = await old_run_job(self, job_id, score)

                if span.status is None:
                    span.set_status(SPANSTATUS.OK)

                return return_value

    Worker.run_job = _sentry_run_job


def _capture_exception(exc_info):
    # type: (ExcInfo) -> None
    """Report a task exception; arq control-flow exceptions only mark the
    root span aborted and are NOT sent as errors."""
    scope = sentry_sdk_alpha.get_current_scope()

    if scope.root_span is not None:
        if exc_info[0] in ARQ_CONTROL_FLOW_EXCEPTIONS:
            scope.root_span.set_status(SPANSTATUS.ABORTED)
            return

        scope.root_span.set_status(SPANSTATUS.INTERNAL_ERROR)

    event, hint = event_from_exception(
        exc_info,
        client_options=sentry_sdk_alpha.get_client().options,
        mechanism={"type": ArqIntegration.identifier, "handled": False},
    )
    sentry_sdk_alpha.capture_event(event, hint=hint)


def _make_event_processor(ctx, *args, **kwargs):
    # type: (Dict[Any, Any], *Any, **Any) -> EventProcessor
    """Build an event processor that names the transaction after the arq job
    and attaches job metadata (args/kwargs redacted unless PII is allowed)."""

    def event_processor(event, hint):
        # type: (Event, Hint) -> Optional[Event]

        with capture_internal_exceptions():
            scope = sentry_sdk_alpha.get_current_scope()
            if scope.root_span is not None:
                scope.root_span.name = ctx["job_name"]
            event["transaction"] = ctx["job_name"]

            tags = event.setdefault("tags", {})
            tags["arq_task_id"] = ctx["job_id"]
            tags["arq_task_retry"] = ctx["job_try"] > 1
            extra = event.setdefault("extra", {})
            extra["arq-job"] = {
                "task": ctx["job_name"],
                "args": (
                    args if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE
                ),
                "kwargs": (
                    kwargs if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE
                ),
                "retry": ctx["job_try"],
            }

        return event

    return event_processor


def _wrap_coroutine(name, coroutine):
    # type: (str, WorkerCoroutine) -> WorkerCoroutine
    """Wrap a task coroutine so exceptions are captured and job context is
    attached to events."""

    async def _sentry_coroutine(ctx, *args, **kwargs):
        # type: (Dict[Any, Any], *Any, **Any) -> Any
        integration = sentry_sdk_alpha.get_client().get_integration(ArqIntegration)
        if integration is None:
            return await coroutine(ctx, *args, **kwargs)

        sentry_sdk_alpha.get_isolation_scope().add_event_processor(
            _make_event_processor({**ctx, "job_name": name}, *args, **kwargs)
        )

        try:
            result = await coroutine(ctx, *args, **kwargs)
        except Exception:
            exc_info = sys.exc_info()
            _capture_exception(exc_info)
            reraise(*exc_info)

        return result

    return _sentry_coroutine


def patch_create_worker():
    # type: () -> None
    """Wrap ``arq.worker.create_worker`` so every registered function and
    cron job gets its coroutine wrapped for instrumentation."""
    old_create_worker = arq.worker.create_worker

    @ensure_integration_enabled(ArqIntegration, old_create_worker)
    def _sentry_create_worker(*args, **kwargs):
        # type: (*Any, **Any) -> Worker
        settings_cls = args[0]

        # Settings may arrive as a dict or as a class; handle both shapes.
        if isinstance(settings_cls, dict):
            if "functions" in settings_cls:
                settings_cls["functions"] = [
                    _get_arq_function(func)
                    for func in settings_cls.get("functions", [])
                ]
            if "cron_jobs" in settings_cls:
                settings_cls["cron_jobs"] = [
                    _get_arq_cron_job(cron_job)
                    for cron_job in settings_cls.get("cron_jobs", [])
                ]

        if hasattr(settings_cls, "functions"):
            settings_cls.functions = [
                _get_arq_function(func) for func in settings_cls.functions
            ]
        if hasattr(settings_cls, "cron_jobs"):
            settings_cls.cron_jobs = [
                _get_arq_cron_job(cron_job) for cron_job in settings_cls.cron_jobs
            ]

        # Functions/cron jobs may also be passed as keyword overrides.
        if "functions" in kwargs:
            kwargs["functions"] = [
                _get_arq_function(func) for func in kwargs.get("functions", [])
            ]
        if "cron_jobs" in kwargs:
            kwargs["cron_jobs"] = [
                _get_arq_cron_job(cron_job) for cron_job in kwargs.get("cron_jobs", [])
            ]

        return old_create_worker(*args, **kwargs)

    arq.worker.create_worker = _sentry_create_worker


def _get_arq_function(func):
    # type: (Union[str, Function, WorkerCoroutine]) -> Function
    # Normalize to an arq Function, then wrap its coroutine.
    arq_func = arq.worker.func(func)
    arq_func.coroutine = _wrap_coroutine(arq_func.name, arq_func.coroutine)

    return arq_func


def _get_arq_cron_job(cron_job):
    # type: (CronJob) -> CronJob
    # Cron jobs are already normalized; wrap the coroutine in place.
    cron_job.coroutine = _wrap_coroutine(cron_job.name, cron_job.coroutine)

    return cron_job
ContextVar("sentry_asgi_middleware_applied") + +_DEFAULT_TRANSACTION_NAME = "generic ASGI request" + +TRANSACTION_STYLE_VALUES = ("endpoint", "url") + +ASGI_SCOPE_PROPERTY_TO_ATTRIBUTE = { + "http_version": "network.protocol.version", + "method": "http.request.method", + "path": "url.path", + "scheme": "url.scheme", + "type": "network.protocol.name", +} + + +def _capture_exception(exc, mechanism_type="asgi"): + # type: (Any, str) -> None + + event, hint = event_from_exception( + exc, + client_options=sentry_sdk_alpha.get_client().options, + mechanism={"type": mechanism_type, "handled": False}, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) + + +def _looks_like_asgi3(app): + # type: (Any) -> bool + """ + Try to figure out if an application object supports ASGI3. + + This is how uvicorn figures out the application version as well. + """ + if inspect.isclass(app): + return hasattr(app, "__await__") + elif inspect.isfunction(app): + return asyncio.iscoroutinefunction(app) + else: + call = getattr(app, "__call__", None) # noqa + return asyncio.iscoroutinefunction(call) + + +class SentryAsgiMiddleware: + __slots__ = ( + "app", + "__call__", + "transaction_style", + "mechanism_type", + "span_origin", + "http_methods_to_capture", + ) + + def __init__( + self, + app, # type: Any + unsafe_context_data=False, # type: bool + transaction_style="endpoint", # type: str + mechanism_type="asgi", # type: str + span_origin=None, # type: Optional[str] + http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: Tuple[str, ...] + ): + # type: (...) -> None + """ + Instrument an ASGI application with Sentry. Provides HTTP/websocket + data to sent events and basic handling for exceptions bubbling up + through the middleware. + + :param unsafe_context_data: Disable errors when a proper contextvars installation could not be found. We do not recommend changing this from the default. 
+ """ + if not unsafe_context_data and not HAS_REAL_CONTEXTVARS: + # We better have contextvars or we're going to leak state between + # requests. + raise RuntimeError( + "The ASGI middleware for Sentry requires Python 3.7+ " + "or the aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE + ) + if transaction_style not in TRANSACTION_STYLE_VALUES: + raise ValueError( + "Invalid value for transaction_style: %s (must be in %s)" + % (transaction_style, TRANSACTION_STYLE_VALUES) + ) + + asgi_middleware_while_using_starlette_or_fastapi = ( + mechanism_type == "asgi" and "starlette" in _get_installed_modules() + ) + if asgi_middleware_while_using_starlette_or_fastapi: + logger.warning( + "The Sentry Python SDK can now automatically support ASGI frameworks like Starlette and FastAPI. " + "Please remove 'SentryAsgiMiddleware' from your project. " + "See https://docs.sentry.io/platforms/python/guides/asgi/ for more information." + ) + + self.transaction_style = transaction_style + self.mechanism_type = mechanism_type + self.span_origin = span_origin + self.app = app + self.http_methods_to_capture = http_methods_to_capture + + if _looks_like_asgi3(app): + self.__call__ = self._run_asgi3 # type: Callable[..., Any] + else: + self.__call__ = self._run_asgi2 + + def _run_asgi2(self, scope): + # type: (Any) -> Any + async def inner(receive, send): + # type: (Any, Any) -> Any + return await self._run_app(scope, receive, send, asgi_version=2) + + return inner + + async def _run_asgi3(self, scope, receive, send): + # type: (Any, Any, Any) -> Any + return await self._run_app(scope, receive, send, asgi_version=3) + + async def _run_original_app(self, scope, receive, send, asgi_version): + # type: (Any, Any, Any, Any, int) -> Any + try: + if asgi_version == 2: + return await self.app(scope)(receive, send) + else: + return await self.app(scope, receive, send) + + except Exception as exc: + _capture_exception(exc, mechanism_type=self.mechanism_type) + raise exc from None + + async def 
_run_app(self, scope, receive, send, asgi_version): + # type: (Any, Any, Any, Any, int) -> Any + is_recursive_asgi_middleware = _asgi_middleware_applied.get(False) + is_lifespan = scope["type"] == "lifespan" + if is_recursive_asgi_middleware or is_lifespan: + return await self._run_original_app(scope, receive, send, asgi_version) + + _asgi_middleware_applied.set(True) + try: + with sentry_sdk_alpha.isolation_scope() as sentry_scope: + ( + transaction_name, + transaction_source, + ) = self._get_transaction_name_and_source( + self.transaction_style, + scope, + ) + sentry_scope.set_transaction_name( + transaction_name, + source=transaction_source, + ) + + with track_session(sentry_scope, session_mode="request"): + sentry_scope.clear_breadcrumbs() + sentry_scope._name = "asgi" + processor = partial(self.event_processor, asgi_scope=scope) + sentry_scope.add_event_processor(processor) + + ty = scope["type"] + + method = scope.get("method", "").upper() + should_trace = ty == "websocket" or ( + ty == "http" and method in self.http_methods_to_capture + ) + if not should_trace: + return await self._run_original_app( + scope, receive, send, asgi_version + ) + + with sentry_sdk_alpha.continue_trace(_get_headers(scope)): + with sentry_sdk_alpha.start_span( + op=( + OP.WEBSOCKET_SERVER + if ty == "websocket" + else OP.HTTP_SERVER + ), + name=transaction_name, + source=transaction_source, + origin=self.span_origin, + attributes=_prepopulate_attributes(scope), + ) as span: + if span is not None: + logger.debug("[ASGI] Started transaction: %s", span) + span.set_tag("asgi.type", ty) + + async def _sentry_wrapped_send(event): + # type: (Dict[str, Any]) -> Any + is_http_response = ( + event.get("type") == "http.response.start" + and span is not None + and "status" in event + ) + if is_http_response: + span.set_http_status(event["status"]) + + return await send(event) + + return await self._run_original_app( + scope, receive, _sentry_wrapped_send, asgi_version + ) + finally: + 
_asgi_middleware_applied.set(False) + + def event_processor(self, event, hint, asgi_scope): + # type: (Event, Hint, Any) -> Optional[Event] + request_data = event.get("request", {}) + request_data.update(_get_request_data(asgi_scope)) + event["request"] = deepcopy(request_data) + + # Only set transaction name if not already set by Starlette or FastAPI (or other frameworks) + transaction = event.get("transaction") + transaction_source = (event.get("transaction_info") or {}).get("source") + already_set = ( + transaction is not None + and transaction != _DEFAULT_TRANSACTION_NAME + and transaction_source + in [ + TransactionSource.COMPONENT, + TransactionSource.ROUTE, + TransactionSource.CUSTOM, + ] + ) + if not already_set: + name, source = self._get_transaction_name_and_source( + self.transaction_style, asgi_scope + ) + event["transaction"] = name + event["transaction_info"] = {"source": source} + + logger.debug( + "[ASGI] Set transaction name and source in event_processor: '%s' / '%s'", + event["transaction"], + event["transaction_info"]["source"], + ) + + return event + + # Helper functions. + # + # Note: Those functions are not public API. If you want to mutate request + # data to your liking it's recommended to use the `before_send` callback + # for that. + + def _get_transaction_name_and_source(self, transaction_style, asgi_scope): + # type: (SentryAsgiMiddleware, str, Any) -> Tuple[str, str] + name = None + source = SOURCE_FOR_STYLE[transaction_style] + ty = asgi_scope.get("type") + + if transaction_style == "endpoint": + endpoint = asgi_scope.get("endpoint") + # Webframeworks like Starlette mutate the ASGI env once routing is + # done, which is sometime after the request has started. If we have + # an endpoint, overwrite our generic transaction name. 
+ if endpoint: + name = transaction_from_function(endpoint) or "" + else: + name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None) + source = TransactionSource.URL + + elif transaction_style == "url": + # FastAPI includes the route object in the scope to let Sentry extract the + # path from it for the transaction name + route = asgi_scope.get("route") + if route: + path = getattr(route, "path", None) + if path is not None: + name = path + else: + name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None) + source = TransactionSource.URL + + if name is None: + name = _DEFAULT_TRANSACTION_NAME + source = TransactionSource.ROUTE + return name, source + + return name, source + + +def _prepopulate_attributes(scope): + # type: (Any) -> dict[str, Any] + """Unpack ASGI scope into serializable OTel attributes.""" + scope = scope or {} + + attributes = {} + for attr, key in ASGI_SCOPE_PROPERTY_TO_ATTRIBUTE.items(): + if scope.get(attr): + attributes[key] = scope[attr] + + for attr in ("client", "server"): + if scope.get(attr): + try: + host, port = scope[attr] + attributes[f"{attr}.address"] = host + if port is not None: + attributes[f"{attr}.port"] = port + except Exception: + pass + + with capture_internal_exceptions(): + full_url = _get_url(scope) + query = _get_query(scope) + if query: + attributes["url.query"] = query + full_url = f"{full_url}?{query}" + + attributes["url.full"] = full_url + + attributes.update(_request_headers_to_span_attributes(_get_headers(scope))) + + return attributes diff --git a/src/sentry_sdk_alpha/integrations/asyncio.py b/src/sentry_sdk_alpha/integrations/asyncio.py new file mode 100644 index 00000000000000..58e4ffe0c1cf0e --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/asyncio.py @@ -0,0 +1,128 @@ +import sys + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP +from sentry_sdk_alpha.integrations import Integration, DidNotEnable +from sentry_sdk_alpha.utils import event_from_exception, 
logger, reraise + +try: + import asyncio + from asyncio.tasks import Task +except ImportError: + raise DidNotEnable("asyncio not available") + +from typing import cast, TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from collections.abc import Coroutine + + from sentry_sdk_alpha._types import ExcInfo + + +def get_name(coro): + # type: (Any) -> str + return ( + getattr(coro, "__qualname__", None) + or getattr(coro, "__name__", None) + or "coroutine without __name__" + ) + + +def patch_asyncio(): + # type: () -> None + orig_task_factory = None + try: + loop = asyncio.get_running_loop() + orig_task_factory = loop.get_task_factory() + + def _sentry_task_factory(loop, coro, **kwargs): + # type: (asyncio.AbstractEventLoop, Coroutine[Any, Any, Any], Any) -> asyncio.Future[Any] + + async def _task_with_sentry_span_creation(): + # type: () -> Any + result = None + + with sentry_sdk_alpha.isolation_scope(): + with sentry_sdk_alpha.start_span( + op=OP.FUNCTION, + name=get_name(coro), + origin=AsyncioIntegration.origin, + only_if_parent=True, + ): + try: + result = await coro + except Exception: + reraise(*_capture_exception()) + + return result + + task = None + + # Trying to use user set task factory (if there is one) + if orig_task_factory: + task = orig_task_factory( + loop, _task_with_sentry_span_creation(), **kwargs + ) + + if task is None: + # The default task factory in `asyncio` does not have its own function + # but is just a couple of lines in `asyncio.base_events.create_task()` + # Those lines are copied here. + + # WARNING: + # If the default behavior of the task creation in asyncio changes, + # this will break! 
+ task = Task(_task_with_sentry_span_creation(), loop=loop, **kwargs) + if task._source_traceback: # type: ignore + del task._source_traceback[-1] # type: ignore + + # Set the task name to include the original coroutine's name + try: + cast("asyncio.Task[Any]", task).set_name( + f"{get_name(coro)} (Sentry-wrapped)" + ) + except AttributeError: + # set_name might not be available in all Python versions + pass + + return task + + loop.set_task_factory(_sentry_task_factory) # type: ignore + + except RuntimeError: + # When there is no running loop, we have nothing to patch. + logger.warning( + "There is no running asyncio loop so there is nothing Sentry can patch. " + "Please make sure you call sentry_sdk.init() within a running " + "asyncio loop for the AsyncioIntegration to work. " + "See https://docs.sentry.io/platforms/python/integrations/asyncio/" + ) + + +def _capture_exception(): + # type: () -> ExcInfo + exc_info = sys.exc_info() + + client = sentry_sdk_alpha.get_client() + + integration = client.get_integration(AsyncioIntegration) + if integration is not None: + event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "asyncio", "handled": False}, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) + + return exc_info + + +class AsyncioIntegration(Integration): + identifier = "asyncio" + origin = f"auto.function.{identifier}" + + @staticmethod + def setup_once(): + # type: () -> None + patch_asyncio() diff --git a/src/sentry_sdk_alpha/integrations/asyncpg.py b/src/sentry_sdk_alpha/integrations/asyncpg.py new file mode 100644 index 00000000000000..440e4ca029804f --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/asyncpg.py @@ -0,0 +1,223 @@ +from __future__ import annotations +import contextlib +from typing import Any, TypeVar, Callable, Awaitable, Iterator, Optional + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP, SPANDATA +from sentry_sdk_alpha.integrations import _check_minimum_version, 
Integration, DidNotEnable +from sentry_sdk_alpha.tracing import Span +from sentry_sdk_alpha.tracing_utils import add_query_source, record_sql_queries +from sentry_sdk_alpha.utils import ( + _serialize_span_attribute, + ensure_integration_enabled, + parse_version, + capture_internal_exceptions, +) + +try: + import asyncpg # type: ignore[import-not-found] + from asyncpg.cursor import BaseCursor # type: ignore + +except ImportError: + raise DidNotEnable("asyncpg not installed.") + + +class AsyncPGIntegration(Integration): + identifier = "asyncpg" + origin = f"auto.db.{identifier}" + _record_params = False + + def __init__(self, *, record_params: bool = False): + AsyncPGIntegration._record_params = record_params + + @staticmethod + def setup_once() -> None: + # asyncpg.__version__ is a string containing the semantic version in the form of ".." + asyncpg_version = parse_version(asyncpg.__version__) + _check_minimum_version(AsyncPGIntegration, asyncpg_version) + + asyncpg.Connection.execute = _wrap_execute( + asyncpg.Connection.execute, + ) + asyncpg.Connection._execute = _wrap_connection_method( + asyncpg.Connection._execute + ) + asyncpg.Connection._executemany = _wrap_connection_method( + asyncpg.Connection._executemany, executemany=True + ) + asyncpg.Connection.cursor = _wrap_cursor_creation(asyncpg.Connection.cursor) + asyncpg.Connection.prepare = _wrap_connection_method(asyncpg.Connection.prepare) + asyncpg.connect_utils._connect_addr = _wrap_connect_addr( + asyncpg.connect_utils._connect_addr + ) + + +T = TypeVar("T") + + +def _wrap_execute(f: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]: + async def _inner(*args: Any, **kwargs: Any) -> T: + if sentry_sdk_alpha.get_client().get_integration(AsyncPGIntegration) is None: + return await f(*args, **kwargs) + + # Avoid recording calls to _execute twice. 
+ # Calls to Connection.execute with args also call + # Connection._execute, which is recorded separately + # args[0] = the connection object, args[1] is the query + if len(args) > 2: + return await f(*args, **kwargs) + + query = args[1] + with record_sql_queries( + cursor=None, + query=query, + params_list=None, + paramstyle=None, + executemany=False, + span_origin=AsyncPGIntegration.origin, + ) as span: + res = await f(*args, **kwargs) + + with capture_internal_exceptions(): + add_query_source(span) + + return res + + return _inner + + +SubCursor = TypeVar("SubCursor", bound=BaseCursor) + + +@contextlib.contextmanager +def _record( + cursor: SubCursor | None, + query: str, + params_list: tuple[Any, ...] | None, + *, + executemany: bool = False, +) -> Iterator[Span]: + integration = sentry_sdk_alpha.get_client().get_integration(AsyncPGIntegration) + if integration is not None and not integration._record_params: + params_list = None + + param_style = "pyformat" if params_list else None + + with record_sql_queries( + cursor=cursor, + query=query, + params_list=params_list, + paramstyle=param_style, + executemany=executemany, + record_cursor_repr=cursor is not None, + span_origin=AsyncPGIntegration.origin, + ) as span: + yield span + + +def _wrap_connection_method( + f: Callable[..., Awaitable[T]], *, executemany: bool = False +) -> Callable[..., Awaitable[T]]: + async def _inner(*args: Any, **kwargs: Any) -> T: + if sentry_sdk_alpha.get_client().get_integration(AsyncPGIntegration) is None: + return await f(*args, **kwargs) + + query = args[1] + params_list = args[2] if len(args) > 2 else None + + with _record(None, query, params_list, executemany=executemany) as span: + data = _get_db_data(conn=args[0]) + _set_on_span(span, data) + res = await f(*args, **kwargs) + + return res + + return _inner + + +def _wrap_cursor_creation(f: Callable[..., T]) -> Callable[..., T]: + @ensure_integration_enabled(AsyncPGIntegration, f) + def _inner(*args: Any, **kwargs: Any) -> T: # 
noqa: N807 + query = args[1] + params_list = args[2] if len(args) > 2 else None + + with _record( + None, + query, + params_list, + executemany=False, + ) as span: + data = _get_db_data(conn=args[0]) + _set_on_span(span, data) + res = f(*args, **kwargs) + span.set_attribute("db.cursor", _serialize_span_attribute(res)) + + return res + + return _inner + + +def _wrap_connect_addr(f: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]: + async def _inner(*args: Any, **kwargs: Any) -> T: + if sentry_sdk_alpha.get_client().get_integration(AsyncPGIntegration) is None: + return await f(*args, **kwargs) + + with sentry_sdk_alpha.start_span( + op=OP.DB, + name="connect", + origin=AsyncPGIntegration.origin, + only_if_parent=True, + ) as span: + data = _get_db_data( + addr=kwargs.get("addr"), + database=kwargs["params"].database, + user=kwargs["params"].user, + ) + _set_on_span(span, data) + + with capture_internal_exceptions(): + sentry_sdk_alpha.add_breadcrumb( + message="connect", category="query", data=data + ) + + res = await f(*args, **kwargs) + + return res + + return _inner + + +def _get_db_data( + conn: Any = None, + addr: Optional[tuple[str, ...]] = None, + database: Optional[str] = None, + user: Optional[str] = None, +) -> dict[str, str]: + if conn is not None: + addr = conn._addr + database = conn._params.database + user = conn._params.user + + data = { + SPANDATA.DB_SYSTEM: "postgresql", + } + + if addr: + try: + data[SPANDATA.SERVER_ADDRESS] = addr[0] + data[SPANDATA.SERVER_PORT] = addr[1] + except IndexError: + pass + + if database: + data[SPANDATA.DB_NAME] = database + + if user: + data[SPANDATA.DB_USER] = user + + return data + + +def _set_on_span(span: Span, data: dict[str, Any]) -> None: + for key, value in data.items(): + span.set_attribute(key, value) diff --git a/src/sentry_sdk_alpha/integrations/atexit.py b/src/sentry_sdk_alpha/integrations/atexit.py new file mode 100644 index 00000000000000..20fcc05bc13e4b --- /dev/null +++ 
b/src/sentry_sdk_alpha/integrations/atexit.py @@ -0,0 +1,57 @@ +import os +import sys +import atexit + +import sentry_sdk_alpha +from sentry_sdk_alpha.utils import logger +from sentry_sdk_alpha.integrations import Integration +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import Optional + + +def default_callback(pending, timeout): + # type: (int, int) -> None + """This is the default shutdown callback that is set on the options. + It prints out a message to stderr that informs the user that some events + are still pending and the process is waiting for them to flush out. + """ + + def echo(msg): + # type: (str) -> None + sys.stderr.write(msg + "\n") + + echo("Sentry is attempting to send %i pending events" % pending) + echo("Waiting up to %s seconds" % timeout) + echo("Press Ctrl-%s to quit" % (os.name == "nt" and "Break" or "C")) + sys.stderr.flush() + + +class AtexitIntegration(Integration): + identifier = "atexit" + + def __init__(self, callback=None): + # type: (Optional[Any]) -> None + if callback is None: + callback = default_callback + self.callback = callback + + @staticmethod + def setup_once(): + # type: () -> None + @atexit.register + def _shutdown(): + # type: () -> None + client = sentry_sdk_alpha.get_client() + integration = client.get_integration(AtexitIntegration) + + if integration is None: + return + + logger.debug("atexit: got shutdown signal") + logger.debug("atexit: shutting down client") + sentry_sdk_alpha.get_isolation_scope().end_session() + + client.close(callback=integration.callback) diff --git a/src/sentry_sdk_alpha/integrations/aws_lambda.py b/src/sentry_sdk_alpha/integrations/aws_lambda.py new file mode 100644 index 00000000000000..e77ff907f55297 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/aws_lambda.py @@ -0,0 +1,516 @@ +import functools +import json +import re +import sys +from copy import deepcopy +from datetime import datetime, timedelta, timezone +from os import environ 
+from urllib.parse import urlencode + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP +from sentry_sdk_alpha.scope import should_send_default_pii +from sentry_sdk_alpha.tracing import TransactionSource +from sentry_sdk_alpha.utils import ( + AnnotatedValue, + capture_internal_exceptions, + ensure_integration_enabled, + event_from_exception, + logger, + TimeoutThread, + reraise, +) +from sentry_sdk_alpha.integrations import Integration +from sentry_sdk_alpha.integrations._wsgi_common import ( + _filter_headers, + _request_headers_to_span_attributes, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import TypeVar + from typing import Callable + from typing import Optional + + from sentry_sdk_alpha._types import EventProcessor, Event, Hint + + F = TypeVar("F", bound=Callable[..., Any]) + +# Constants +TIMEOUT_WARNING_BUFFER = 1500 # Buffer time required to send timeout warning to Sentry +MILLIS_TO_SECONDS = 1000.0 + + +EVENT_TO_ATTRIBUTES = { + "httpMethod": "http.request.method", + "queryStringParameters": "url.query", + "path": "url.path", +} + +CONTEXT_TO_ATTRIBUTES = { + "function_name": "faas.name", +} + + +def _wrap_init_error(init_error): + # type: (F) -> F + @ensure_integration_enabled(AwsLambdaIntegration, init_error) + def sentry_init_error(*args, **kwargs): + # type: (*Any, **Any) -> Any + client = sentry_sdk_alpha.get_client() + + with capture_internal_exceptions(): + sentry_sdk_alpha.get_isolation_scope().clear_breadcrumbs() + + exc_info = sys.exc_info() + if exc_info and all(exc_info): + sentry_event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "aws_lambda", "handled": False}, + ) + sentry_sdk_alpha.capture_event(sentry_event, hint=hint) + + else: + # Fall back to AWS lambdas JSON representation of the error + error_info = args[1] + if isinstance(error_info, str): + error_info = json.loads(error_info) + sentry_event = 
_event_from_error_json(error_info) + sentry_sdk_alpha.capture_event(sentry_event) + + return init_error(*args, **kwargs) + + return sentry_init_error # type: ignore + + +def _wrap_handler(handler): + # type: (F) -> F + @functools.wraps(handler) + def sentry_handler(aws_event, aws_context, *args, **kwargs): + # type: (Any, Any, *Any, **Any) -> Any + + # Per https://docs.aws.amazon.com/lambda/latest/dg/python-handler.html, + # `event` here is *likely* a dictionary, but also might be a number of + # other types (str, int, float, None). + # + # In some cases, it is a list (if the user is batch-invoking their + # function, for example), in which case we'll use the first entry as a + # representative from which to try pulling request data. (Presumably it + # will be the same for all events in the list, since they're all hitting + # the lambda in the same request.) + + client = sentry_sdk_alpha.get_client() + integration = client.get_integration(AwsLambdaIntegration) + + if integration is None: + return handler(aws_event, aws_context, *args, **kwargs) + + if isinstance(aws_event, list) and len(aws_event) >= 1: + request_data = aws_event[0] + batch_size = len(aws_event) + else: + request_data = aws_event + batch_size = 1 + + if not isinstance(request_data, dict): + # If we're not dealing with a dictionary, we won't be able to get + # headers, path, http method, etc in any case, so it's fine that + # this is empty + request_data = {} + + configured_time = aws_context.get_remaining_time_in_millis() + + with sentry_sdk_alpha.isolation_scope() as scope: + scope.set_transaction_name( + aws_context.function_name, source=TransactionSource.COMPONENT + ) + timeout_thread = None + with capture_internal_exceptions(): + scope.clear_breadcrumbs() + scope.add_event_processor( + _make_request_event_processor( + request_data, aws_context, configured_time + ) + ) + scope.set_tag( + "aws_region", aws_context.invoked_function_arn.split(":")[3] + ) + if batch_size > 1: + 
scope.set_tag("batch_request", True) + scope.set_tag("batch_size", batch_size) + + # Starting the Timeout thread only if the configured time is greater than Timeout warning + # buffer and timeout_warning parameter is set True. + if ( + integration.timeout_warning + and configured_time > TIMEOUT_WARNING_BUFFER + ): + waiting_time = ( + configured_time - TIMEOUT_WARNING_BUFFER + ) / MILLIS_TO_SECONDS + + timeout_thread = TimeoutThread( + waiting_time, + configured_time / MILLIS_TO_SECONDS, + ) + + # Starting the thread to raise timeout warning exception + timeout_thread.start() + + headers = request_data.get("headers", {}) + # Some AWS Services (ie. EventBridge) set headers as a list + # or None, so we must ensure it is a dict + if not isinstance(headers, dict): + headers = {} + + with sentry_sdk_alpha.continue_trace(headers): + with sentry_sdk_alpha.start_span( + op=OP.FUNCTION_AWS, + name=aws_context.function_name, + source=TransactionSource.COMPONENT, + origin=AwsLambdaIntegration.origin, + attributes=_prepopulate_attributes(request_data, aws_context), + ): + try: + return handler(aws_event, aws_context, *args, **kwargs) + except Exception: + exc_info = sys.exc_info() + sentry_event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "aws_lambda", "handled": False}, + ) + sentry_sdk_alpha.capture_event(sentry_event, hint=hint) + reraise(*exc_info) + finally: + if timeout_thread: + timeout_thread.stop() + + return sentry_handler # type: ignore + + +def _drain_queue(): + # type: () -> None + with capture_internal_exceptions(): + client = sentry_sdk_alpha.get_client() + integration = client.get_integration(AwsLambdaIntegration) + if integration is not None: + # Flush out the event queue before AWS kills the + # process. 
+ client.flush() + + +class AwsLambdaIntegration(Integration): + identifier = "aws_lambda" + origin = f"auto.function.{identifier}" + + def __init__(self, timeout_warning=False): + # type: (bool) -> None + self.timeout_warning = timeout_warning + + @staticmethod + def setup_once(): + # type: () -> None + + lambda_bootstrap = get_lambda_bootstrap() + if not lambda_bootstrap: + logger.warning( + "Not running in AWS Lambda environment, " + "AwsLambdaIntegration disabled (could not find bootstrap module)" + ) + return + + if not hasattr(lambda_bootstrap, "handle_event_request"): + logger.warning( + "Not running in AWS Lambda environment, " + "AwsLambdaIntegration disabled (could not find handle_event_request)" + ) + return + + lambda_bootstrap.LambdaRuntimeClient.post_init_error = _wrap_init_error( + lambda_bootstrap.LambdaRuntimeClient.post_init_error + ) + + old_handle_event_request = lambda_bootstrap.handle_event_request + + def sentry_handle_event_request( # type: ignore + lambda_runtime_client, request_handler, *args, **kwargs + ): + request_handler = _wrap_handler(request_handler) + return old_handle_event_request( + lambda_runtime_client, request_handler, *args, **kwargs + ) + + lambda_bootstrap.handle_event_request = sentry_handle_event_request + + # Patch the runtime client to drain the queue. 
This should work + # even when the SDK is initialized inside of the handler + + def _wrap_post_function(f): + # type: (F) -> F + def inner(*args, **kwargs): + # type: (*Any, **Any) -> Any + _drain_queue() + return f(*args, **kwargs) + + return inner # type: ignore + + lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = ( + _wrap_post_function( + lambda_bootstrap.LambdaRuntimeClient.post_invocation_result + ) + ) + lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = ( + _wrap_post_function( + lambda_bootstrap.LambdaRuntimeClient.post_invocation_error + ) + ) + + +def get_lambda_bootstrap(): + # type: () -> Optional[Any] + + # Python 3.7: If the bootstrap module is *already imported*, it is the + # one we actually want to use (no idea what's in __main__) + # + # Python 3.8: bootstrap is also importable, but will be the same file + # as __main__ imported under a different name: + # + # sys.modules['__main__'].__file__ == sys.modules['bootstrap'].__file__ + # sys.modules['__main__'] is not sys.modules['bootstrap'] + # + # Python 3.9: bootstrap is in __main__.awslambdaricmain + # + # On container builds using the `aws-lambda-python-runtime-interface-client` + # (awslamdaric) module, bootstrap is located in sys.modules['__main__'].bootstrap + # + # Such a setup would then make all monkeypatches useless. 
+ if "bootstrap" in sys.modules: + return sys.modules["bootstrap"] + elif "__main__" in sys.modules: + module = sys.modules["__main__"] + # python3.9 runtime + if hasattr(module, "awslambdaricmain") and hasattr( + module.awslambdaricmain, "bootstrap" + ): + return module.awslambdaricmain.bootstrap + elif hasattr(module, "bootstrap"): + # awslambdaric python module in container builds + return module.bootstrap + + # python3.8 runtime + return module + else: + return None + + +def _make_request_event_processor(aws_event, aws_context, configured_timeout): + # type: (Any, Any, Any) -> EventProcessor + start_time = datetime.now(timezone.utc) + + def event_processor(sentry_event, hint, start_time=start_time): + # type: (Event, Hint, datetime) -> Optional[Event] + remaining_time_in_milis = aws_context.get_remaining_time_in_millis() + exec_duration = configured_timeout - remaining_time_in_milis + + extra = sentry_event.setdefault("extra", {}) + extra["lambda"] = { + "function_name": aws_context.function_name, + "function_version": aws_context.function_version, + "invoked_function_arn": aws_context.invoked_function_arn, + "aws_request_id": aws_context.aws_request_id, + "execution_duration_in_millis": exec_duration, + "remaining_time_in_millis": remaining_time_in_milis, + } + + extra["cloudwatch logs"] = { + "url": _get_cloudwatch_logs_url(aws_context, start_time), + "log_group": aws_context.log_group_name, + "log_stream": aws_context.log_stream_name, + } + + request = sentry_event.get("request", {}) + + if "httpMethod" in aws_event: + request["method"] = aws_event["httpMethod"] + + request["url"] = _get_url(aws_event, aws_context) + + if "queryStringParameters" in aws_event: + request["query_string"] = urlencode(aws_event["queryStringParameters"]) + + if "headers" in aws_event: + request["headers"] = _filter_headers(aws_event["headers"]) + + if should_send_default_pii(): + user_info = sentry_event.setdefault("user", {}) + + identity = aws_event.get("identity") + if identity 
is None: + identity = {} + + id = identity.get("userArn") + if id is not None: + user_info.setdefault("id", id) + + ip = identity.get("sourceIp") + if ip is not None: + user_info.setdefault("ip_address", ip) + + if "body" in aws_event: + request["data"] = aws_event.get("body", "") + else: + if aws_event.get("body", None): + # Unfortunately couldn't find a way to get structured body from AWS + # event. Meaning every body is unstructured to us. + request["data"] = AnnotatedValue.removed_because_raw_data() + + sentry_event["request"] = deepcopy(request) + + return sentry_event + + return event_processor + + +def _get_url(aws_event, aws_context): + # type: (Any, Any) -> str + path = aws_event.get("path", None) + + headers = aws_event.get("headers") + # Some AWS Services (ie. EventBridge) set headers as a list + # or None, so we must ensure it is a dict + if not isinstance(headers, dict): + headers = {} + + host = headers.get("Host", None) + proto = headers.get("X-Forwarded-Proto", None) + if proto and host and path: + return "{}://{}{}".format(proto, host, path) + return "awslambda:///{}".format(aws_context.function_name) + + +def _get_cloudwatch_logs_url(aws_context, start_time): + # type: (Any, datetime) -> str + """ + Generates a CloudWatchLogs console URL based on the context object + + Arguments: + aws_context {Any} -- context from lambda handler + + Returns: + str -- AWS Console URL to logs. 
+ """ + formatstring = "%Y-%m-%dT%H:%M:%SZ" + region = environ.get("AWS_REGION", "") + + url = ( + "https://console.{domain}/cloudwatch/home?region={region}" + "#logEventViewer:group={log_group};stream={log_stream}" + ";start={start_time};end={end_time}" + ).format( + domain="amazonaws.cn" if region.startswith("cn-") else "aws.amazon.com", + region=region, + log_group=aws_context.log_group_name, + log_stream=aws_context.log_stream_name, + start_time=(start_time - timedelta(seconds=1)).strftime(formatstring), + end_time=(datetime.now(timezone.utc) + timedelta(seconds=2)).strftime( + formatstring + ), + ) + + return url + + +def _parse_formatted_traceback(formatted_tb): + # type: (list[str]) -> list[dict[str, Any]] + frames = [] + for frame in formatted_tb: + match = re.match(r'File "(.+)", line (\d+), in (.+)', frame.strip()) + if match: + file_name, line_number, func_name = match.groups() + line_number = int(line_number) + frames.append( + { + "filename": file_name, + "function": func_name, + "lineno": line_number, + "vars": None, + "pre_context": None, + "context_line": None, + "post_context": None, + } + ) + return frames + + +def _event_from_error_json(error_json): + # type: (dict[str, Any]) -> Event + """ + Converts the error JSON from AWS Lambda into a Sentry error event. + This is not a full fletched event, but better than nothing. 
+ + This is an example of where AWS creates the error JSON: + https://github.com/aws/aws-lambda-python-runtime-interface-client/blob/2.2.1/awslambdaric/bootstrap.py#L479 + """ + event = { + "level": "error", + "exception": { + "values": [ + { + "type": error_json.get("errorType"), + "value": error_json.get("errorMessage"), + "stacktrace": { + "frames": _parse_formatted_traceback( + error_json.get("stackTrace", []) + ), + }, + "mechanism": { + "type": "aws_lambda", + "handled": False, + }, + } + ], + }, + } # type: Event + + return event + + +def _prepopulate_attributes(aws_event, aws_context): + # type: (Any, Any) -> dict[str, Any] + attributes = { + "cloud.provider": "aws", + } + + for prop, attr in EVENT_TO_ATTRIBUTES.items(): + if aws_event.get(prop) is not None: + if prop == "queryStringParameters": + attributes[attr] = urlencode(aws_event[prop]) + else: + attributes[attr] = aws_event[prop] + + for prop, attr in CONTEXT_TO_ATTRIBUTES.items(): + if getattr(aws_context, prop, None) is not None: + attributes[attr] = getattr(aws_context, prop) + + url = _get_url(aws_event, aws_context) + if url: + if aws_event.get("queryStringParameters"): + url += f"?{urlencode(aws_event['queryStringParameters'])}" + attributes["url.full"] = url + + headers = {} + if aws_event.get("headers") and isinstance(aws_event["headers"], dict): + headers = aws_event["headers"] + + if headers.get("X-Forwarded-Proto"): + attributes["network.protocol.name"] = headers["X-Forwarded-Proto"] + if headers.get("Host"): + attributes["server.address"] = headers["Host"] + + attributes.update(_request_headers_to_span_attributes(headers)) + + return attributes diff --git a/src/sentry_sdk_alpha/integrations/beam.py b/src/sentry_sdk_alpha/integrations/beam.py new file mode 100644 index 00000000000000..d7ce271d9b5979 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/beam.py @@ -0,0 +1,176 @@ +import sys +import types +from functools import wraps + +import sentry_sdk_alpha +from 
sentry_sdk_alpha.integrations import Integration +from sentry_sdk_alpha.integrations.logging import ignore_logger +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + ensure_integration_enabled, + event_from_exception, + reraise, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import Iterator + from typing import TypeVar + from typing import Callable + + from sentry_sdk_alpha._types import ExcInfo + + T = TypeVar("T") + F = TypeVar("F", bound=Callable[..., Any]) + + +WRAPPED_FUNC = "_wrapped_{}_" +INSPECT_FUNC = "_inspect_{}" # Required format per apache_beam/transforms/core.py +USED_FUNC = "_sentry_used_" + + +class BeamIntegration(Integration): + identifier = "beam" + + @staticmethod + def setup_once(): + # type: () -> None + from apache_beam.transforms.core import DoFn, ParDo # type: ignore + + ignore_logger("root") + ignore_logger("bundle_processor.create") + + function_patches = ["process", "start_bundle", "finish_bundle", "setup"] + for func_name in function_patches: + setattr( + DoFn, + INSPECT_FUNC.format(func_name), + _wrap_inspect_call(DoFn, func_name), + ) + + old_init = ParDo.__init__ + + def sentry_init_pardo(self, fn, *args, **kwargs): + # type: (ParDo, Any, *Any, **Any) -> Any + # Do not monkey patch init twice + if not getattr(self, "_sentry_is_patched", False): + for func_name in function_patches: + if not hasattr(fn, func_name): + continue + wrapped_func = WRAPPED_FUNC.format(func_name) + + # Check to see if inspect is set and process is not + # to avoid monkey patching process twice. + # Check to see if function is part of object for + # backwards compatibility. 
+ process_func = getattr(fn, func_name) + inspect_func = getattr(fn, INSPECT_FUNC.format(func_name)) + if not getattr(inspect_func, USED_FUNC, False) and not getattr( + process_func, USED_FUNC, False + ): + setattr(fn, wrapped_func, process_func) + setattr(fn, func_name, _wrap_task_call(process_func)) + + self._sentry_is_patched = True + old_init(self, fn, *args, **kwargs) + + ParDo.__init__ = sentry_init_pardo + + +def _wrap_inspect_call(cls, func_name): + # type: (Any, Any) -> Any + + if not hasattr(cls, func_name): + return None + + def _inspect(self): + # type: (Any) -> Any + """ + Inspect function overrides the way Beam gets argspec. + """ + wrapped_func = WRAPPED_FUNC.format(func_name) + if hasattr(self, wrapped_func): + process_func = getattr(self, wrapped_func) + else: + process_func = getattr(self, func_name) + setattr(self, func_name, _wrap_task_call(process_func)) + setattr(self, wrapped_func, process_func) + + # getfullargspec is deprecated in more recent beam versions and get_function_args_defaults + # (which uses Signatures internally) should be used instead. + try: + from apache_beam.transforms.core import get_function_args_defaults + + return get_function_args_defaults(process_func) + except ImportError: + from apache_beam.typehints.decorators import getfullargspec # type: ignore + + return getfullargspec(process_func) + + setattr(_inspect, USED_FUNC, True) + return _inspect + + +def _wrap_task_call(func): + # type: (F) -> F + """ + Wrap task call with a try catch to get exceptions. 
+ """ + + @wraps(func) + def _inner(*args, **kwargs): + # type: (*Any, **Any) -> Any + try: + gen = func(*args, **kwargs) + except Exception: + raise_exception() + + if not isinstance(gen, types.GeneratorType): + return gen + return _wrap_generator_call(gen) + + setattr(_inner, USED_FUNC, True) + return _inner # type: ignore + + +@ensure_integration_enabled(BeamIntegration) +def _capture_exception(exc_info): + # type: (ExcInfo) -> None + """ + Send Beam exception to Sentry. + """ + client = sentry_sdk_alpha.get_client() + + event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "beam", "handled": False}, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) + + +def raise_exception(): + # type: () -> None + """ + Raise an exception. + """ + exc_info = sys.exc_info() + with capture_internal_exceptions(): + _capture_exception(exc_info) + reraise(*exc_info) + + +def _wrap_generator_call(gen): + # type: (Iterator[T]) -> Iterator[T] + """ + Wrap the generator to handle any failures. 
+ """ + while True: + try: + yield next(gen) + except StopIteration: + break + except Exception: + raise_exception() diff --git a/src/sentry_sdk_alpha/integrations/boto3.py b/src/sentry_sdk_alpha/integrations/boto3.py new file mode 100644 index 00000000000000..2bc70689af92fa --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/boto3.py @@ -0,0 +1,166 @@ +from functools import partial + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP, SPANDATA +from sentry_sdk_alpha.integrations import _check_minimum_version, Integration, DidNotEnable +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + ensure_integration_enabled, + parse_url, + parse_version, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import Dict + from typing import Optional + from typing import Type + + from sentry_sdk_alpha.tracing import Span + +try: + from botocore import __version__ as BOTOCORE_VERSION # type: ignore + from botocore.client import BaseClient # type: ignore + from botocore.response import StreamingBody # type: ignore + from botocore.awsrequest import AWSRequest # type: ignore +except ImportError: + raise DidNotEnable("botocore is not installed") + + +class Boto3Integration(Integration): + identifier = "boto3" + origin = f"auto.http.{identifier}" + + @staticmethod + def setup_once(): + # type: () -> None + version = parse_version(BOTOCORE_VERSION) + _check_minimum_version(Boto3Integration, version, "botocore") + + orig_init = BaseClient.__init__ + + def sentry_patched_init(self, *args, **kwargs): + # type: (Type[BaseClient], *Any, **Any) -> None + orig_init(self, *args, **kwargs) + meta = self.meta + service_id = meta.service_model.service_id.hyphenize() + meta.events.register( + "request-created", + partial(_sentry_request_created, service_id=service_id), + ) + meta.events.register("after-call", _sentry_after_call) + meta.events.register("after-call-error", _sentry_after_call_error) + + 
BaseClient.__init__ = sentry_patched_init + + +@ensure_integration_enabled(Boto3Integration) +def _sentry_request_created(service_id, request, operation_name, **kwargs): + # type: (str, AWSRequest, str, **Any) -> None + description = "aws.%s.%s" % (service_id, operation_name) + span = sentry_sdk_alpha.start_span( + op=OP.HTTP_CLIENT, + name=description, + origin=Boto3Integration.origin, + only_if_parent=True, + ) + + data = { + SPANDATA.HTTP_METHOD: request.method, + } + with capture_internal_exceptions(): + parsed_url = parse_url(request.url, sanitize=False) + data["aws.request.url"] = parsed_url.url + data[SPANDATA.HTTP_QUERY] = parsed_url.query + data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment + + for key, value in data.items(): + span.set_attribute(key, value) + + span.set_tag("aws.service_id", service_id) + span.set_tag("aws.operation_name", operation_name) + + # We do it in order for subsequent http calls/retries be + # attached to this span. + span.__enter__() + + # request.context is an open-ended data-structure + # where we can add anything useful in request life cycle. + request.context["_sentrysdk_span"] = span + request.context["_sentrysdk_span_data"] = data + + +def _sentry_after_call(context, parsed, **kwargs): + # type: (Dict[str, Any], Dict[str, Any], **Any) -> None + span = context.pop("_sentrysdk_span", None) # type: Optional[Span] + + # Span could be absent if the integration is disabled. 
+ if span is None: + return + + span_data = context.pop("_sentrysdk_span_data", {}) + + sentry_sdk_alpha.add_breadcrumb( + type="http", + category="httplib", + data=span_data, + ) + + body = parsed.get("Body") + if not isinstance(body, StreamingBody): + span.__exit__(None, None, None) + return + + streaming_span = sentry_sdk_alpha.start_span( + op=OP.HTTP_CLIENT_STREAM, + name=span.name, + origin=Boto3Integration.origin, + only_if_parent=True, + ) + + orig_read = body.read + + def sentry_streaming_body_read(*args, **kwargs): + # type: (*Any, **Any) -> bytes + try: + ret = orig_read(*args, **kwargs) + if not ret: + streaming_span.finish() + return ret + except Exception: + streaming_span.finish() + raise + + body.read = sentry_streaming_body_read + + orig_close = body.close + + def sentry_streaming_body_close(*args, **kwargs): + # type: (*Any, **Any) -> None + streaming_span.finish() + orig_close(*args, **kwargs) + + body.close = sentry_streaming_body_close + + span.__exit__(None, None, None) + + +def _sentry_after_call_error(context, exception, **kwargs): + # type: (Dict[str, Any], Type[BaseException], **Any) -> None + span = context.pop("_sentrysdk_span", None) # type: Optional[Span] + + # Span could be absent if the integration is disabled. 
+ if span is None: + return + + span_data = context.pop("_sentrysdk_span_data", {}) + + sentry_sdk_alpha.add_breadcrumb( + type="http", + category="httplib", + data=span_data, + ) + + span.__exit__(type(exception), exception, None) diff --git a/src/sentry_sdk_alpha/integrations/bottle.py b/src/sentry_sdk_alpha/integrations/bottle.py new file mode 100644 index 00000000000000..47619bab09b945 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/bottle.py @@ -0,0 +1,221 @@ +import functools + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import SOURCE_FOR_STYLE +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + ensure_integration_enabled, + event_from_exception, + parse_version, + transaction_from_function, +) +from sentry_sdk_alpha.integrations import ( + Integration, + DidNotEnable, + _DEFAULT_FAILED_REQUEST_STATUS_CODES, + _check_minimum_version, +) +from sentry_sdk_alpha.integrations.wsgi import SentryWsgiMiddleware +from sentry_sdk_alpha.integrations._wsgi_common import RequestExtractor + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from collections.abc import Set + + from sentry_sdk_alpha.integrations.wsgi import _ScopedResponse + from typing import Any + from typing import Dict + from typing import Callable + from typing import Optional + from bottle import FileUpload, FormsDict, LocalRequest # type: ignore + + from sentry_sdk_alpha._types import EventProcessor, Event + +try: + from bottle import ( + Bottle, + HTTPResponse, + Route, + request as bottle_request, + __version__ as BOTTLE_VERSION, + ) +except ImportError: + raise DidNotEnable("Bottle not installed") + + +TRANSACTION_STYLE_VALUES = ("endpoint", "url") + + +class BottleIntegration(Integration): + identifier = "bottle" + origin = f"auto.http.{identifier}" + + transaction_style = "" + + def __init__( + self, + transaction_style="endpoint", # type: str + *, + failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES, # type: Set[int] + ): + # type: (...) 
-> None + + if transaction_style not in TRANSACTION_STYLE_VALUES: + raise ValueError( + "Invalid value for transaction_style: %s (must be in %s)" + % (transaction_style, TRANSACTION_STYLE_VALUES) + ) + self.transaction_style = transaction_style + self.failed_request_status_codes = failed_request_status_codes + + @staticmethod + def setup_once(): + # type: () -> None + version = parse_version(BOTTLE_VERSION) + _check_minimum_version(BottleIntegration, version) + + old_app = Bottle.__call__ + + @ensure_integration_enabled(BottleIntegration, old_app) + def sentry_patched_wsgi_app(self, environ, start_response): + # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse + middleware = SentryWsgiMiddleware( + lambda *a, **kw: old_app(self, *a, **kw), + span_origin=BottleIntegration.origin, + ) + + return middleware(environ, start_response) + + Bottle.__call__ = sentry_patched_wsgi_app + + old_handle = Bottle._handle + + @functools.wraps(old_handle) + def _patched_handle(self, environ): + # type: (Bottle, Dict[str, Any]) -> Any + integration = sentry_sdk_alpha.get_client().get_integration(BottleIntegration) + if integration is None: + return old_handle(self, environ) + + scope = sentry_sdk_alpha.get_isolation_scope() + scope._name = "bottle" + scope.add_event_processor( + _make_request_event_processor(self, bottle_request, integration) + ) + res = old_handle(self, environ) + + return res + + Bottle._handle = _patched_handle + + old_make_callback = Route._make_callback + + @functools.wraps(old_make_callback) + def patched_make_callback(self, *args, **kwargs): + # type: (Route, *object, **object) -> Any + prepared_callback = old_make_callback(self, *args, **kwargs) + + integration = sentry_sdk_alpha.get_client().get_integration(BottleIntegration) + if integration is None: + return prepared_callback + + def wrapped_callback(*args, **kwargs): + # type: (*object, **object) -> Any + try: + res = prepared_callback(*args, **kwargs) + except Exception as exception: + 
_capture_exception(exception, handled=False) + raise exception + + if ( + isinstance(res, HTTPResponse) + and res.status_code in integration.failed_request_status_codes + ): + _capture_exception(res, handled=True) + + return res + + return wrapped_callback + + Route._make_callback = patched_make_callback + + +class BottleRequestExtractor(RequestExtractor): + def env(self): + # type: () -> Dict[str, str] + return self.request.environ + + def cookies(self): + # type: () -> Dict[str, str] + return self.request.cookies + + def raw_data(self): + # type: () -> bytes + return self.request.body.read() + + def form(self): + # type: () -> FormsDict + if self.is_json(): + return None + return self.request.forms.decode() + + def files(self): + # type: () -> Optional[Dict[str, str]] + if self.is_json(): + return None + + return self.request.files + + def size_of_file(self, file): + # type: (FileUpload) -> int + return file.content_length + + +def _set_transaction_name_and_source(event, transaction_style, request): + # type: (Event, str, Any) -> None + name = "" + + if transaction_style == "url": + try: + name = request.route.rule or "" + except RuntimeError: + pass + + elif transaction_style == "endpoint": + try: + name = ( + request.route.name + or transaction_from_function(request.route.callback) + or "" + ) + except RuntimeError: + pass + + event["transaction"] = name + event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]} + + +def _make_request_event_processor(app, request, integration): + # type: (Bottle, LocalRequest, BottleIntegration) -> EventProcessor + + def event_processor(event, hint): + # type: (Event, dict[str, Any]) -> Event + _set_transaction_name_and_source(event, integration.transaction_style, request) + + with capture_internal_exceptions(): + BottleRequestExtractor(request).extract_into_event(event) + + return event + + return event_processor + + +def _capture_exception(exception, handled): + # type: (BaseException, bool) -> None + 
event, hint = event_from_exception( + exception, + client_options=sentry_sdk_alpha.get_client().options, + mechanism={"type": "bottle", "handled": handled}, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) diff --git a/src/sentry_sdk_alpha/integrations/celery/__init__.py b/src/sentry_sdk_alpha/integrations/celery/__init__.py new file mode 100644 index 00000000000000..e1d9a9d93a46e5 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/celery/__init__.py @@ -0,0 +1,538 @@ +import sys +from collections.abc import Mapping +from functools import wraps + +import sentry_sdk_alpha +from sentry_sdk_alpha import isolation_scope +from sentry_sdk_alpha.consts import OP, SPANSTATUS, SPANDATA, BAGGAGE_HEADER_NAME +from sentry_sdk_alpha.integrations import _check_minimum_version, Integration, DidNotEnable +from sentry_sdk_alpha.integrations.celery.beat import ( + _patch_beat_apply_entry, + _patch_redbeat_maybe_due, + _setup_celery_beat_signals, +) +from sentry_sdk_alpha.integrations.celery.utils import _now_seconds_since_epoch +from sentry_sdk_alpha.integrations.logging import ignore_logger +from sentry_sdk_alpha.tracing import TransactionSource +from sentry_sdk_alpha.tracing_utils import Baggage +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + ensure_integration_enabled, + event_from_exception, + reraise, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import Callable + from typing import List + from typing import Optional + from typing import TypeVar + from typing import Union + + from sentry_sdk_alpha._types import EventProcessor, Event, Hint, ExcInfo + from sentry_sdk_alpha.tracing import Span + + F = TypeVar("F", bound=Callable[..., Any]) + + +try: + from celery import VERSION as CELERY_VERSION # type: ignore + from celery.app.task import Task # type: ignore + from celery.app.trace import task_has_custom + from celery.exceptions import ( # type: ignore + Ignore, + Reject, + Retry, + 
SoftTimeLimitExceeded, + ) + from kombu import Producer # type: ignore +except ImportError: + raise DidNotEnable("Celery not installed") + + +CELERY_CONTROL_FLOW_EXCEPTIONS = (Retry, Ignore, Reject) + + +class CeleryIntegration(Integration): + identifier = "celery" + origin = f"auto.queue.{identifier}" + + def __init__( + self, + propagate_traces=True, + monitor_beat_tasks=False, + exclude_beat_tasks=None, + ): + # type: (bool, bool, Optional[List[str]]) -> None + self.propagate_traces = propagate_traces + self.monitor_beat_tasks = monitor_beat_tasks + self.exclude_beat_tasks = exclude_beat_tasks + + _patch_beat_apply_entry() + _patch_redbeat_maybe_due() + _setup_celery_beat_signals(monitor_beat_tasks) + + @staticmethod + def setup_once(): + # type: () -> None + _check_minimum_version(CeleryIntegration, CELERY_VERSION) + + _patch_build_tracer() + _patch_task_apply_async() + _patch_celery_send_task() + _patch_worker_exit() + _patch_producer_publish() + + # This logger logs every status of every task that ran on the worker. + # Meaning that every task's breadcrumbs are full of stuff like "Task + # raised unexpected ". 
+ ignore_logger("celery.worker.job") + ignore_logger("celery.app.trace") + + # This is stdout/err redirected to a logger, can't deal with this + # (need event_level=logging.WARN to reproduce) + ignore_logger("celery.redirected") + + +def _set_status(status): + # type: (str) -> None + with capture_internal_exceptions(): + scope = sentry_sdk_alpha.get_current_scope() + if scope.span is not None: + scope.span.set_status(status) + + +def _capture_exception(task, exc_info): + # type: (Any, ExcInfo) -> None + client = sentry_sdk_alpha.get_client() + if client.get_integration(CeleryIntegration) is None: + return + + if isinstance(exc_info[1], CELERY_CONTROL_FLOW_EXCEPTIONS): + _set_status("aborted") + return + + _set_status("internal_error") + + if hasattr(task, "throws") and isinstance(exc_info[1], task.throws): + return + + event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "celery", "handled": False}, + ) + + sentry_sdk_alpha.capture_event(event, hint=hint) + + +def _make_event_processor(task, uuid, args, kwargs, request=None): + # type: (Any, Any, Any, Any, Optional[Any]) -> EventProcessor + def event_processor(event, hint): + # type: (Event, Hint) -> Optional[Event] + + with capture_internal_exceptions(): + tags = event.setdefault("tags", {}) + tags["celery_task_id"] = uuid + extra = event.setdefault("extra", {}) + extra["celery-job"] = { + "task_name": task.name, + "args": args, + "kwargs": kwargs, + } + + if "exc_info" in hint: + with capture_internal_exceptions(): + if issubclass(hint["exc_info"][0], SoftTimeLimitExceeded): + event["fingerprint"] = [ + "celery", + "SoftTimeLimitExceeded", + getattr(task, "name", task), + ] + + return event + + return event_processor + + +def _update_celery_task_headers(original_headers, span, monitor_beat_tasks): + # type: (dict[str, Any], Optional[Span], bool) -> dict[str, Any] + """ + Updates the headers of the Celery task with the tracing information + and eventually Sentry 
Crons monitoring information for beat tasks. + """ + updated_headers = original_headers.copy() + with capture_internal_exceptions(): + # if span is None (when the task was started by Celery Beat) + # this will return the trace headers from the scope. + headers = dict( + sentry_sdk_alpha.get_isolation_scope().iter_trace_propagation_headers(span=span) + ) + + if monitor_beat_tasks: + headers.update( + { + "sentry-monitor-start-timestamp-s": "%.9f" + % _now_seconds_since_epoch(), + } + ) + + # Add the time the task was enqueued to the headers + # This is used in the consumer to calculate the latency + updated_headers.update( + {"sentry-task-enqueued-time": _now_seconds_since_epoch()} + ) + + if headers: + existing_baggage = updated_headers.get(BAGGAGE_HEADER_NAME) + sentry_baggage = headers.get(BAGGAGE_HEADER_NAME) + + combined_baggage = sentry_baggage or existing_baggage + if sentry_baggage and existing_baggage: + # Merge incoming and sentry baggage, where the sentry trace information + # in the incoming baggage takes precedence and the third-party items + # are concatenated. + incoming = Baggage.from_incoming_header(existing_baggage) + combined = Baggage.from_incoming_header(sentry_baggage) + combined.sentry_items.update(incoming.sentry_items) + combined.third_party_items = ",".join( + [ + x + for x in [ + combined.third_party_items, + incoming.third_party_items, + ] + if x is not None and x != "" + ] + ) + combined_baggage = combined.serialize(include_third_party=True) + + updated_headers.update(headers) + if combined_baggage: + updated_headers[BAGGAGE_HEADER_NAME] = combined_baggage + + # https://github.com/celery/celery/issues/4875 + # + # Need to setdefault the inner headers too since other + # tracing tools (dd-trace-py) also employ this exact + # workaround and we don't want to break them. 
+ updated_headers.setdefault("headers", {}).update(headers) + if combined_baggage: + updated_headers["headers"][BAGGAGE_HEADER_NAME] = combined_baggage + + # Add the Sentry options potentially added in `sentry_apply_entry` + # to the headers (done when auto-instrumenting Celery Beat tasks) + for key, value in updated_headers.items(): + if key.startswith("sentry-"): + updated_headers["headers"][key] = value + + return updated_headers + + +class NoOpMgr: + def __enter__(self): + # type: () -> None + return None + + def __exit__(self, exc_type, exc_value, traceback): + # type: (Any, Any, Any) -> None + return None + + +def _wrap_task_run(f): + # type: (F) -> F + @wraps(f) + def apply_async(*args, **kwargs): + # type: (*Any, **Any) -> Any + # Note: kwargs can contain headers=None, so no setdefault! + # Unsure which backend though. + integration = sentry_sdk_alpha.get_client().get_integration(CeleryIntegration) + if integration is None: + return f(*args, **kwargs) + + kwarg_headers = kwargs.get("headers") or {} + propagate_traces = kwarg_headers.pop( + "sentry-propagate-traces", integration.propagate_traces + ) + + if not propagate_traces: + return f(*args, **kwargs) + + if isinstance(args[0], Task): + task_name = args[0].name # type: str + elif len(args) > 1 and isinstance(args[1], str): + task_name = args[1] + else: + task_name = "" + + task_started_from_beat = sentry_sdk_alpha.get_isolation_scope()._name == "celery-beat" + + span_mgr = ( + sentry_sdk_alpha.start_span( + op=OP.QUEUE_SUBMIT_CELERY, + name=task_name, + origin=CeleryIntegration.origin, + only_if_parent=True, + ) + if not task_started_from_beat + else NoOpMgr() + ) # type: Union[Span, NoOpMgr] + + with span_mgr as span: + kwargs["headers"] = _update_celery_task_headers( + kwarg_headers, span, integration.monitor_beat_tasks + ) + return f(*args, **kwargs) + + return apply_async # type: ignore + + +def _wrap_tracer(task, f): + # type: (Any, F) -> F + + # Need to wrap tracer for pushing the scope before 
prerun is sent, and + # popping it after postrun is sent. + # + # This is the reason we don't use signals for hooking in the first place. + # Also because in Celery 3, signal dispatch returns early if one handler + # crashes. + @wraps(f) + @ensure_integration_enabled(CeleryIntegration, f) + def _inner(*args, **kwargs): + # type: (*Any, **Any) -> Any + with isolation_scope() as scope: + scope._name = "celery" + scope.clear_breadcrumbs() + scope.set_transaction_name(task.name, source=TransactionSource.TASK) + scope.add_event_processor(_make_event_processor(task, *args, **kwargs)) + + # Celery task objects are not a thing to be trusted. Even + # something such as attribute access can fail. + headers = args[3].get("headers") or {} + + with sentry_sdk_alpha.continue_trace(headers): + with sentry_sdk_alpha.start_span( + op=OP.QUEUE_TASK_CELERY, + name=task.name, + source=TransactionSource.TASK, + origin=CeleryIntegration.origin, + # for some reason, args[1] is a list if non-empty but a + # tuple if empty + attributes=_prepopulate_attributes(task, list(args[1]), args[2]), + ) as root_span: + return_value = f(*args, **kwargs) + + if root_span.status is None: + root_span.set_status(SPANSTATUS.OK) + + return return_value + + return _inner # type: ignore + + +def _set_messaging_destination_name(task, span): + # type: (Any, Span) -> None + """Set "messaging.destination.name" tag for span""" + with capture_internal_exceptions(): + delivery_info = task.request.delivery_info + if delivery_info: + routing_key = delivery_info.get("routing_key") + if delivery_info.get("exchange") == "" and routing_key is not None: + # Empty exchange indicates the default exchange, meaning the tasks + # are sent to the queue with the same name as the routing key. + span.set_attribute(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key) + + +def _wrap_task_call(task, f): + # type: (Any, F) -> F + + # Need to wrap task call because the exception is caught before we get to + # see it. 
Also celery's reported stacktrace is untrustworthy. + + # functools.wraps is important here because celery-once looks at this + # method's name. @ensure_integration_enabled internally calls functools.wraps, + # but if we ever remove the @ensure_integration_enabled decorator, we need + # to add @functools.wraps(f) here. + # https://github.com/getsentry/sentry-python/issues/421 + @ensure_integration_enabled(CeleryIntegration, f) + def _inner(*args, **kwargs): + # type: (*Any, **Any) -> Any + try: + with sentry_sdk_alpha.start_span( + op=OP.QUEUE_PROCESS, + name=task.name, + origin=CeleryIntegration.origin, + only_if_parent=True, + ) as span: + _set_messaging_destination_name(task, span) + + latency = None + with capture_internal_exceptions(): + if ( + task.request.headers is not None + and "sentry-task-enqueued-time" in task.request.headers + ): + latency = _now_seconds_since_epoch() - task.request.headers.pop( + "sentry-task-enqueued-time" + ) + + if latency is not None: + span.set_attribute( + SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency + ) + + with capture_internal_exceptions(): + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_ID, task.request.id) + + with capture_internal_exceptions(): + span.set_attribute( + SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, task.request.retries + ) + + with capture_internal_exceptions(): + span.set_attribute( + SPANDATA.MESSAGING_SYSTEM, + task.app.connection().transport.driver_type, + ) + + return f(*args, **kwargs) + + except Exception: + exc_info = sys.exc_info() + with capture_internal_exceptions(): + _capture_exception(task, exc_info) + reraise(*exc_info) + + return _inner # type: ignore + + +def _patch_build_tracer(): + # type: () -> None + import celery.app.trace as trace # type: ignore + + original_build_tracer = trace.build_tracer + + def sentry_build_tracer(name, task, *args, **kwargs): + # type: (Any, Any, *Any, **Any) -> Any + if not getattr(task, "_sentry_is_patched", False): + # determine whether Celery will use 
__call__ or run and patch + # accordingly + if task_has_custom(task, "__call__"): + type(task).__call__ = _wrap_task_call(task, type(task).__call__) + else: + task.run = _wrap_task_call(task, task.run) + + # `build_tracer` is apparently called for every task + # invocation. Can't wrap every celery task for every invocation + # or we will get infinitely nested wrapper functions. + task._sentry_is_patched = True + + return _wrap_tracer(task, original_build_tracer(name, task, *args, **kwargs)) + + trace.build_tracer = sentry_build_tracer + + +def _patch_task_apply_async(): + # type: () -> None + Task.apply_async = _wrap_task_run(Task.apply_async) + + +def _patch_celery_send_task(): + # type: () -> None + from celery import Celery + + Celery.send_task = _wrap_task_run(Celery.send_task) + + +def _patch_worker_exit(): + # type: () -> None + + # Need to flush queue before worker shutdown because a crashing worker will + # call os._exit + from billiard.pool import Worker # type: ignore + + original_workloop = Worker.workloop + + def sentry_workloop(*args, **kwargs): + # type: (*Any, **Any) -> Any + try: + return original_workloop(*args, **kwargs) + finally: + with capture_internal_exceptions(): + if ( + sentry_sdk_alpha.get_client().get_integration(CeleryIntegration) + is not None + ): + sentry_sdk_alpha.flush() + + Worker.workloop = sentry_workloop + + +def _patch_producer_publish(): + # type: () -> None + original_publish = Producer.publish + + @ensure_integration_enabled(CeleryIntegration, original_publish) + def sentry_publish(self, *args, **kwargs): + # type: (Producer, *Any, **Any) -> Any + kwargs_headers = kwargs.get("headers", {}) + if not isinstance(kwargs_headers, Mapping): + # Ensure kwargs_headers is a Mapping, so we can safely call get(). + # We don't expect this to happen, but it's better to be safe. Even + # if it does happen, only our instrumentation breaks. 
This line + # does not overwrite kwargs["headers"], so the original publish + # method will still work. + kwargs_headers = {} + + task_name = kwargs_headers.get("task") + task_id = kwargs_headers.get("id") + retries = kwargs_headers.get("retries") + + routing_key = kwargs.get("routing_key") + exchange = kwargs.get("exchange") + + with sentry_sdk_alpha.start_span( + op=OP.QUEUE_PUBLISH, + name=task_name, + origin=CeleryIntegration.origin, + only_if_parent=True, + ) as span: + if task_id is not None: + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_ID, task_id) + + if exchange == "" and routing_key is not None: + # Empty exchange indicates the default exchange, meaning messages are + # routed to the queue with the same name as the routing key. + span.set_attribute(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key) + + if retries is not None: + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, retries) + + with capture_internal_exceptions(): + span.set_attribute( + SPANDATA.MESSAGING_SYSTEM, self.connection.transport.driver_type + ) + + return original_publish(self, *args, **kwargs) + + Producer.publish = sentry_publish + + +def _prepopulate_attributes(task, args, kwargs): + # type: (Any, *Any, **Any) -> dict[str, str] + attributes = { + "celery.job.task": task.name, + } + + for i, arg in enumerate(args): + with capture_internal_exceptions(): + attributes[f"celery.job.args.{i}"] = str(arg) + + for kwarg, value in kwargs.items(): + with capture_internal_exceptions(): + attributes[f"celery.job.kwargs.{kwarg}"] = str(value) + + return attributes diff --git a/src/sentry_sdk_alpha/integrations/celery/beat.py b/src/sentry_sdk_alpha/integrations/celery/beat.py new file mode 100644 index 00000000000000..8fa67f448df9ae --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/celery/beat.py @@ -0,0 +1,293 @@ +import sentry_sdk_alpha +from sentry_sdk_alpha.crons import capture_checkin, MonitorStatus +from sentry_sdk_alpha.integrations import DidNotEnable +from 
sentry_sdk_alpha.integrations.celery.utils import ( + _get_humanized_interval, + _now_seconds_since_epoch, +) +from sentry_sdk_alpha.utils import ( + logger, + match_regex_list, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from collections.abc import Callable + from typing import Any, Optional, TypeVar, Union + from sentry_sdk_alpha._types import ( + MonitorConfig, + MonitorConfigScheduleType, + MonitorConfigScheduleUnit, + ) + + F = TypeVar("F", bound=Callable[..., Any]) + + +try: + from celery import Task, Celery # type: ignore + from celery.beat import Scheduler # type: ignore + from celery.schedules import crontab, schedule # type: ignore + from celery.signals import ( # type: ignore + task_failure, + task_success, + task_retry, + ) +except ImportError: + raise DidNotEnable("Celery not installed") + +try: + from redbeat.schedulers import RedBeatScheduler # type: ignore +except ImportError: + RedBeatScheduler = None + + +def _get_headers(task): + # type: (Task) -> dict[str, Any] + headers = task.request.get("headers") or {} + + # flatten nested headers + if "headers" in headers: + headers.update(headers["headers"]) + del headers["headers"] + + headers.update(task.request.get("properties") or {}) + + return headers + + +def _get_monitor_config(celery_schedule, app, monitor_name): + # type: (Any, Celery, str) -> MonitorConfig + monitor_config = {} # type: MonitorConfig + schedule_type = None # type: Optional[MonitorConfigScheduleType] + schedule_value = None # type: Optional[Union[str, int]] + schedule_unit = None # type: Optional[MonitorConfigScheduleUnit] + + if isinstance(celery_schedule, crontab): + schedule_type = "crontab" + schedule_value = ( + "{0._orig_minute} " + "{0._orig_hour} " + "{0._orig_day_of_month} " + "{0._orig_month_of_year} " + "{0._orig_day_of_week}".format(celery_schedule) + ) + elif isinstance(celery_schedule, schedule): + schedule_type = "interval" + (schedule_value, schedule_unit) = _get_humanized_interval( + 
celery_schedule.seconds + ) + + if schedule_unit == "second": + logger.warning( + "Intervals shorter than one minute are not supported by Sentry Crons. Monitor '%s' has an interval of %s seconds. Use the `exclude_beat_tasks` option in the celery integration to exclude it.", + monitor_name, + schedule_value, + ) + return {} + + else: + logger.warning( + "Celery schedule type '%s' not supported by Sentry Crons.", + type(celery_schedule), + ) + return {} + + monitor_config["schedule"] = {} + monitor_config["schedule"]["type"] = schedule_type + monitor_config["schedule"]["value"] = schedule_value + + if schedule_unit is not None: + monitor_config["schedule"]["unit"] = schedule_unit + + monitor_config["timezone"] = ( + ( + hasattr(celery_schedule, "tz") + and celery_schedule.tz is not None + and str(celery_schedule.tz) + ) + or app.timezone + or "UTC" + ) + + return monitor_config + + +def _apply_crons_data_to_schedule_entry(scheduler, schedule_entry, integration): + # type: (Any, Any, sentry_sdk.integrations.celery.CeleryIntegration) -> None + """ + Add Sentry Crons information to the schedule_entry headers. 
+ """ + if not integration.monitor_beat_tasks: + return + + monitor_name = schedule_entry.name + + task_should_be_excluded = match_regex_list( + monitor_name, integration.exclude_beat_tasks + ) + if task_should_be_excluded: + return + + celery_schedule = schedule_entry.schedule + app = scheduler.app + + monitor_config = _get_monitor_config(celery_schedule, app, monitor_name) + + is_supported_schedule = bool(monitor_config) + if not is_supported_schedule: + return + + headers = schedule_entry.options.pop("headers", {}) + headers.update( + { + "sentry-monitor-slug": monitor_name, + "sentry-monitor-config": monitor_config, + } + ) + + check_in_id = capture_checkin( + monitor_slug=monitor_name, + monitor_config=monitor_config, + status=MonitorStatus.IN_PROGRESS, + ) + headers.update({"sentry-monitor-check-in-id": check_in_id}) + + # Set the Sentry configuration in the options of the ScheduleEntry. + # Those will be picked up in `apply_async` and added to the headers. + schedule_entry.options["headers"] = headers + + +def _wrap_beat_scheduler(original_function): + # type: (Callable[..., Any]) -> Callable[..., Any] + """ + Makes sure that: + - a new Sentry trace is started for each task started by Celery Beat and + it is propagated to the task. + - the Sentry Crons information is set in the Celery Beat task's + headers so that is is monitored with Sentry Crons. + + After the patched function is called, + Celery Beat will call apply_async to put the task in the queue. 
+ """ + # Patch only once + # Can't use __name__ here, because some of our tests mock original_apply_entry + already_patched = "sentry_patched_scheduler" in str(original_function) + if already_patched: + return original_function + + from sentry_sdk_alpha.integrations.celery import CeleryIntegration + + def sentry_patched_scheduler(*args, **kwargs): + # type: (*Any, **Any) -> None + integration = sentry_sdk_alpha.get_client().get_integration(CeleryIntegration) + if integration is None: + return original_function(*args, **kwargs) + + # Tasks started by Celery Beat start a new Trace + scope = sentry_sdk_alpha.get_isolation_scope() + scope.set_new_propagation_context() + scope._name = "celery-beat" + + scheduler, schedule_entry = args + _apply_crons_data_to_schedule_entry(scheduler, schedule_entry, integration) + + return original_function(*args, **kwargs) + + return sentry_patched_scheduler + + +def _patch_beat_apply_entry(): + # type: () -> None + Scheduler.apply_entry = _wrap_beat_scheduler(Scheduler.apply_entry) + + +def _patch_redbeat_maybe_due(): + # type: () -> None + if RedBeatScheduler is None: + return + + RedBeatScheduler.maybe_due = _wrap_beat_scheduler(RedBeatScheduler.maybe_due) + + +def _setup_celery_beat_signals(monitor_beat_tasks): + # type: (bool) -> None + if monitor_beat_tasks: + task_success.connect(crons_task_success) + task_failure.connect(crons_task_failure) + task_retry.connect(crons_task_retry) + + +def crons_task_success(sender, **kwargs): + # type: (Task, dict[Any, Any]) -> None + logger.debug("celery_task_success %s", sender) + headers = _get_headers(sender) + + if "sentry-monitor-slug" not in headers: + return + + monitor_config = headers.get("sentry-monitor-config", {}) + + start_timestamp_s = headers.get("sentry-monitor-start-timestamp-s") + + capture_checkin( + monitor_slug=headers["sentry-monitor-slug"], + monitor_config=monitor_config, + check_in_id=headers["sentry-monitor-check-in-id"], + duration=( + _now_seconds_since_epoch() - 
float(start_timestamp_s) + if start_timestamp_s + else None + ), + status=MonitorStatus.OK, + ) + + +def crons_task_failure(sender, **kwargs): + # type: (Task, dict[Any, Any]) -> None + logger.debug("celery_task_failure %s", sender) + headers = _get_headers(sender) + + if "sentry-monitor-slug" not in headers: + return + + monitor_config = headers.get("sentry-monitor-config", {}) + + start_timestamp_s = headers.get("sentry-monitor-start-timestamp-s") + + capture_checkin( + monitor_slug=headers["sentry-monitor-slug"], + monitor_config=monitor_config, + check_in_id=headers["sentry-monitor-check-in-id"], + duration=( + _now_seconds_since_epoch() - float(start_timestamp_s) + if start_timestamp_s + else None + ), + status=MonitorStatus.ERROR, + ) + + +def crons_task_retry(sender, **kwargs): + # type: (Task, dict[Any, Any]) -> None + logger.debug("celery_task_retry %s", sender) + headers = _get_headers(sender) + + if "sentry-monitor-slug" not in headers: + return + + monitor_config = headers.get("sentry-monitor-config", {}) + + start_timestamp_s = headers.get("sentry-monitor-start-timestamp-s") + + capture_checkin( + monitor_slug=headers["sentry-monitor-slug"], + monitor_config=monitor_config, + check_in_id=headers["sentry-monitor-check-in-id"], + duration=( + _now_seconds_since_epoch() - float(start_timestamp_s) + if start_timestamp_s + else None + ), + status=MonitorStatus.ERROR, + ) diff --git a/src/sentry_sdk_alpha/integrations/celery/utils.py b/src/sentry_sdk_alpha/integrations/celery/utils.py new file mode 100644 index 00000000000000..9da8d118abd418 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/celery/utils.py @@ -0,0 +1,43 @@ +import time +from typing import TYPE_CHECKING, cast + +if TYPE_CHECKING: + from typing import Any, Tuple + from sentry_sdk_alpha._types import MonitorConfigScheduleUnit + + +def _now_seconds_since_epoch(): + # type: () -> float + # We cannot use `time.perf_counter()` when dealing with the duration + # of a Celery task, because the 
start of a Celery task and + # the end are recorded in different processes. + # Start happens in the Celery Beat process, + # the end in a Celery Worker process. + return time.time() + + +def _get_humanized_interval(seconds): + # type: (float) -> Tuple[int, MonitorConfigScheduleUnit] + TIME_UNITS = ( # noqa: N806 + ("day", 60 * 60 * 24.0), + ("hour", 60 * 60.0), + ("minute", 60.0), + ) + + seconds = float(seconds) + for unit, divider in TIME_UNITS: + if seconds >= divider: + interval = int(seconds / divider) + return (interval, cast("MonitorConfigScheduleUnit", unit)) + + return (int(seconds), "second") + + +class NoOpMgr: + def __enter__(self): + # type: () -> None + return None + + def __exit__(self, exc_type, exc_value, traceback): + # type: (Any, Any, Any) -> None + return None diff --git a/src/sentry_sdk_alpha/integrations/chalice.py b/src/sentry_sdk_alpha/integrations/chalice.py new file mode 100644 index 00000000000000..6f532e20f8c8a4 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/chalice.py @@ -0,0 +1,134 @@ +import sys +from functools import wraps + +import sentry_sdk_alpha +from sentry_sdk_alpha.integrations import Integration, DidNotEnable +from sentry_sdk_alpha.integrations.aws_lambda import _make_request_event_processor +from sentry_sdk_alpha.tracing import TransactionSource +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + event_from_exception, + parse_version, + reraise, +) + +try: + import chalice # type: ignore + from chalice import __version__ as CHALICE_VERSION + from chalice import Chalice, ChaliceViewError + from chalice.app import EventSourceHandler as ChaliceEventSourceHandler # type: ignore +except ImportError: + raise DidNotEnable("Chalice is not installed") + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import Dict + from typing import TypeVar + from typing import Callable + + F = TypeVar("F", bound=Callable[..., Any]) + + +class 
EventSourceHandler(ChaliceEventSourceHandler): # type: ignore + def __call__(self, event, context): + # type: (Any, Any) -> Any + client = sentry_sdk_alpha.get_client() + + with sentry_sdk_alpha.isolation_scope() as scope: + with capture_internal_exceptions(): + configured_time = context.get_remaining_time_in_millis() + scope.add_event_processor( + _make_request_event_processor(event, context, configured_time) + ) + try: + return ChaliceEventSourceHandler.__call__(self, event, context) + except Exception: + exc_info = sys.exc_info() + event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "chalice", "handled": False}, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) + client.flush() + reraise(*exc_info) + + +def _get_view_function_response(app, view_function, function_args): + # type: (Any, F, Any) -> F + @wraps(view_function) + def wrapped_view_function(**function_args): + # type: (**Any) -> Any + client = sentry_sdk_alpha.get_client() + with sentry_sdk_alpha.isolation_scope() as scope: + with capture_internal_exceptions(): + configured_time = app.lambda_context.get_remaining_time_in_millis() + scope.set_transaction_name( + app.lambda_context.function_name, + source=TransactionSource.COMPONENT, + ) + + scope.add_event_processor( + _make_request_event_processor( + app.current_request.to_dict(), + app.lambda_context, + configured_time, + ) + ) + try: + return view_function(**function_args) + except Exception as exc: + if isinstance(exc, ChaliceViewError): + raise + exc_info = sys.exc_info() + event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "chalice", "handled": False}, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) + client.flush() + raise + + return wrapped_view_function # type: ignore + + +class ChaliceIntegration(Integration): + identifier = "chalice" + + @staticmethod + def setup_once(): + # type: () -> None + + version = 
parse_version(CHALICE_VERSION) + + if version is None: + raise DidNotEnable("Unparsable Chalice version: {}".format(CHALICE_VERSION)) + + if version < (1, 20): + old_get_view_function_response = Chalice._get_view_function_response + else: + from chalice.app import RestAPIEventHandler + + old_get_view_function_response = ( + RestAPIEventHandler._get_view_function_response + ) + + def sentry_event_response(app, view_function, function_args): + # type: (Any, F, Dict[str, Any]) -> Any + wrapped_view_function = _get_view_function_response( + app, view_function, function_args + ) + + return old_get_view_function_response( + app, wrapped_view_function, function_args + ) + + if version < (1, 20): + Chalice._get_view_function_response = sentry_event_response + else: + RestAPIEventHandler._get_view_function_response = sentry_event_response + # for everything else (like events) + chalice.app.EventSourceHandler = EventSourceHandler diff --git a/src/sentry_sdk_alpha/integrations/clickhouse_driver.py b/src/sentry_sdk_alpha/integrations/clickhouse_driver.py new file mode 100644 index 00000000000000..76c5f42e2ff056 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/clickhouse_driver.py @@ -0,0 +1,188 @@ +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP, SPANDATA +from sentry_sdk_alpha.integrations import _check_minimum_version, Integration, DidNotEnable +from sentry_sdk_alpha.tracing import Span +from sentry_sdk_alpha.scope import should_send_default_pii +from sentry_sdk_alpha.utils import ( + _serialize_span_attribute, + capture_internal_exceptions, + ensure_integration_enabled, +) + +from typing import TYPE_CHECKING, cast, Any, Dict, TypeVar + +# Hack to get new Python features working in older versions +# without introducing a hard dependency on `typing_extensions` +# from: https://stackoverflow.com/a/71944042/300572 +if TYPE_CHECKING: + from typing import ParamSpec, Callable +else: + # Fake ParamSpec + class ParamSpec: + def __init__(self, _): + self.args = 
None + self.kwargs = None + + # Callable[anything] will return None + class _Callable: + def __getitem__(self, _): + return None + + # Make instances + Callable = _Callable() + + +try: + import clickhouse_driver # type: ignore[import-not-found] + +except ImportError: + raise DidNotEnable("clickhouse-driver not installed.") + + +class ClickhouseDriverIntegration(Integration): + identifier = "clickhouse_driver" + origin = f"auto.db.{identifier}" + + @staticmethod + def setup_once() -> None: + _check_minimum_version(ClickhouseDriverIntegration, clickhouse_driver.VERSION) + + # Every query is done using the Connection's `send_query` function + clickhouse_driver.connection.Connection.send_query = _wrap_start( + clickhouse_driver.connection.Connection.send_query + ) + + # If the query contains parameters then the send_data function is used to send those parameters to clickhouse + clickhouse_driver.client.Client.send_data = _wrap_send_data( + clickhouse_driver.client.Client.send_data + ) + + # Every query ends either with the Client's `receive_end_of_query` (no result expected) + # or its `receive_result` (result expected) + clickhouse_driver.client.Client.receive_end_of_query = _wrap_end( + clickhouse_driver.client.Client.receive_end_of_query + ) + if hasattr(clickhouse_driver.client.Client, "receive_end_of_insert_query"): + # In 0.2.7, insert queries are handled separately via `receive_end_of_insert_query` + clickhouse_driver.client.Client.receive_end_of_insert_query = _wrap_end( + clickhouse_driver.client.Client.receive_end_of_insert_query + ) + clickhouse_driver.client.Client.receive_result = _wrap_end( + clickhouse_driver.client.Client.receive_result + ) + + +P = ParamSpec("P") +T = TypeVar("T") + + +def _wrap_start(f: Callable[P, T]) -> Callable[P, T]: + @ensure_integration_enabled(ClickhouseDriverIntegration, f) + def _inner(*args: P.args, **kwargs: P.kwargs) -> T: + connection = args[0] + query = args[1] + query_id = args[2] if len(args) > 2 else 
kwargs.get("query_id") + params = args[3] if len(args) > 3 else kwargs.get("params") + + span = sentry_sdk_alpha.start_span( + op=OP.DB, + name=query, + origin=ClickhouseDriverIntegration.origin, + only_if_parent=True, + ) + + connection._sentry_span = span # type: ignore[attr-defined] + + data = _get_db_data(connection) + data = cast("dict[str, Any]", data) + data["db.query.text"] = query + + if query_id: + data["db.query_id"] = query_id + + if params and should_send_default_pii(): + data["db.params"] = params + + connection._sentry_db_data = data # type: ignore[attr-defined] + _set_on_span(span, data) + + # run the original code + ret = f(*args, **kwargs) + + return ret + + return _inner + + +def _wrap_end(f: Callable[P, T]) -> Callable[P, T]: + def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T: + res = f(*args, **kwargs) + client = cast("clickhouse_driver.client.Client", args[0]) + connection = client.connection + + span = getattr(connection, "_sentry_span", None) + if span is not None: + data = getattr(connection, "_sentry_db_data", {}) + + if res is not None and should_send_default_pii(): + data["db.result"] = res + span.set_attribute("db.result", _serialize_span_attribute(res)) + + with capture_internal_exceptions(): + query = data.pop("db.query.text", None) + if query: + sentry_sdk_alpha.add_breadcrumb( + message=query, category="query", data=data + ) + + span.finish() + + try: + del connection._sentry_db_data + del connection._sentry_span + except AttributeError: + pass + + return res + + return _inner_end + + +def _wrap_send_data(f: Callable[P, T]) -> Callable[P, T]: + def _inner_send_data(*args: P.args, **kwargs: P.kwargs) -> T: + client = cast("clickhouse_driver.client.Client", args[0]) + connection = client.connection + db_params_data = cast("list[Any]", args[2]) + span = getattr(connection, "_sentry_span", None) + + if span is not None: + data = _get_db_data(connection) + _set_on_span(span, data) + + if should_send_default_pii(): + saved_db_data = 
getattr( + connection, "_sentry_db_data", {} + ) # type: dict[str, Any] + db_params = saved_db_data.get("db.params") or [] # type: list[Any] + db_params.extend(db_params_data) + saved_db_data["db.params"] = db_params + span.set_attribute("db.params", _serialize_span_attribute(db_params)) + + return f(*args, **kwargs) + + return _inner_send_data + + +def _get_db_data(connection: clickhouse_driver.connection.Connection) -> Dict[str, str]: + return { + SPANDATA.DB_SYSTEM: "clickhouse", + SPANDATA.SERVER_ADDRESS: connection.host, + SPANDATA.SERVER_PORT: connection.port, + SPANDATA.DB_NAME: connection.database, + SPANDATA.DB_USER: connection.user, + } + + +def _set_on_span(span: Span, data: Dict[str, Any]) -> None: + for key, value in data.items(): + span.set_attribute(key, _serialize_span_attribute(value)) diff --git a/src/sentry_sdk_alpha/integrations/cloud_resource_context.py b/src/sentry_sdk_alpha/integrations/cloud_resource_context.py new file mode 100644 index 00000000000000..1eb9452eb6fb8c --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/cloud_resource_context.py @@ -0,0 +1,280 @@ +import json +import urllib3 + +from sentry_sdk_alpha.integrations import Integration +from sentry_sdk_alpha.api import set_context +from sentry_sdk_alpha.utils import logger + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Dict + + +CONTEXT_TYPE = "cloud_resource" + +HTTP_TIMEOUT = 2.0 + +AWS_METADATA_HOST = "169.254.169.254" +AWS_TOKEN_URL = "http://{}/latest/api/token".format(AWS_METADATA_HOST) +AWS_METADATA_URL = "http://{}/latest/dynamic/instance-identity/document".format( + AWS_METADATA_HOST +) + +GCP_METADATA_HOST = "metadata.google.internal" +GCP_METADATA_URL = "http://{}/computeMetadata/v1/?recursive=true".format( + GCP_METADATA_HOST +) + + +class CLOUD_PROVIDER: # noqa: N801 + """ + Name of the cloud provider. 
+    see https://opentelemetry.io/docs/reference/specification/resource/semantic_conventions/cloud/
+    """
+
+    ALIBABA = "alibaba_cloud"
+    AWS = "aws"
+    AZURE = "azure"
+    GCP = "gcp"
+    IBM = "ibm_cloud"
+    TENCENT = "tencent_cloud"
+
+
+class CLOUD_PLATFORM:  # noqa: N801
+    """
+    The cloud platform.
+    see https://opentelemetry.io/docs/reference/specification/resource/semantic_conventions/cloud/
+    """
+
+    AWS_EC2 = "aws_ec2"
+    GCP_COMPUTE_ENGINE = "gcp_compute_engine"
+
+
+class CloudResourceContextIntegration(Integration):
+    """
+    Adds cloud resource context to the Sentry scope
+    """
+
+    identifier = "cloudresourcecontext"
+
+    cloud_provider = ""
+
+    aws_token = ""
+    http = urllib3.PoolManager(timeout=HTTP_TIMEOUT)
+
+    gcp_metadata = None
+
+    def __init__(self, cloud_provider=""):
+        # type: (str) -> None
+        CloudResourceContextIntegration.cloud_provider = cloud_provider
+
+    @classmethod
+    def _is_aws(cls):
+        # type: () -> bool
+        try:
+            r = cls.http.request(
+                "PUT",
+                AWS_TOKEN_URL,
+                headers={"X-aws-ec2-metadata-token-ttl-seconds": "60"},
+            )
+
+            if r.status != 200:
+                return False
+
+            cls.aws_token = r.data.decode()
+            return True
+
+        except urllib3.exceptions.TimeoutError:
+            logger.debug(
+                "AWS metadata service timed out after %s seconds", HTTP_TIMEOUT
+            )
+            return False
+        except Exception as e:
+            logger.debug("Error checking AWS metadata service: %s", str(e))
+            return False
+
+    @classmethod
+    def _get_aws_context(cls):
+        # type: () -> Dict[str, str]
+        ctx = {
+            "cloud.provider": CLOUD_PROVIDER.AWS,
+            "cloud.platform": CLOUD_PLATFORM.AWS_EC2,
+        }
+
+        try:
+            r = cls.http.request(
+                "GET",
+                AWS_METADATA_URL,
+                headers={"X-aws-ec2-metadata-token": cls.aws_token},
+            )
+
+            if r.status != 200:
+                return ctx
+
+            data = json.loads(r.data.decode("utf-8"))
+
+            try:
+                ctx["cloud.account.id"] = data["accountId"]
+            except Exception:
+                pass
+
+            try:
+                ctx["cloud.availability_zone"] = data["availabilityZone"]
+            except Exception:
+                pass
+
+            try:
+                ctx["cloud.region"] = data["region"]
+
except Exception: + pass + + try: + ctx["host.id"] = data["instanceId"] + except Exception: + pass + + try: + ctx["host.type"] = data["instanceType"] + except Exception: + pass + + except urllib3.exceptions.TimeoutError: + logger.debug( + "AWS metadata service timed out after %s seconds", HTTP_TIMEOUT + ) + except Exception as e: + logger.debug("Error fetching AWS metadata: %s", str(e)) + + return ctx + + @classmethod + def _is_gcp(cls): + # type: () -> bool + try: + r = cls.http.request( + "GET", + GCP_METADATA_URL, + headers={"Metadata-Flavor": "Google"}, + ) + + if r.status != 200: + return False + + cls.gcp_metadata = json.loads(r.data.decode("utf-8")) + return True + + except urllib3.exceptions.TimeoutError: + logger.debug( + "GCP metadata service timed out after %s seconds", HTTP_TIMEOUT + ) + return False + except Exception as e: + logger.debug("Error checking GCP metadata service: %s", str(e)) + return False + + @classmethod + def _get_gcp_context(cls): + # type: () -> Dict[str, str] + ctx = { + "cloud.provider": CLOUD_PROVIDER.GCP, + "cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE, + } + + try: + if cls.gcp_metadata is None: + r = cls.http.request( + "GET", + GCP_METADATA_URL, + headers={"Metadata-Flavor": "Google"}, + ) + + if r.status != 200: + return ctx + + cls.gcp_metadata = json.loads(r.data.decode("utf-8")) + + try: + ctx["cloud.account.id"] = cls.gcp_metadata["project"]["projectId"] + except Exception: + pass + + try: + ctx["cloud.availability_zone"] = cls.gcp_metadata["instance"][ + "zone" + ].split("/")[-1] + except Exception: + pass + + try: + # only populated in google cloud run + ctx["cloud.region"] = cls.gcp_metadata["instance"]["region"].split("/")[ + -1 + ] + except Exception: + pass + + try: + ctx["host.id"] = cls.gcp_metadata["instance"]["id"] + except Exception: + pass + + except urllib3.exceptions.TimeoutError: + logger.debug( + "GCP metadata service timed out after %s seconds", HTTP_TIMEOUT + ) + except Exception as e: + 
logger.debug("Error fetching GCP metadata: %s", str(e)) + + return ctx + + @classmethod + def _get_cloud_provider(cls): + # type: () -> str + if cls._is_aws(): + return CLOUD_PROVIDER.AWS + + if cls._is_gcp(): + return CLOUD_PROVIDER.GCP + + return "" + + @classmethod + def _get_cloud_resource_context(cls): + # type: () -> Dict[str, str] + cloud_provider = ( + cls.cloud_provider + if cls.cloud_provider != "" + else CloudResourceContextIntegration._get_cloud_provider() + ) + if cloud_provider in context_getters.keys(): + return context_getters[cloud_provider]() + + return {} + + @staticmethod + def setup_once(): + # type: () -> None + cloud_provider = CloudResourceContextIntegration.cloud_provider + unsupported_cloud_provider = ( + cloud_provider != "" and cloud_provider not in context_getters.keys() + ) + + if unsupported_cloud_provider: + logger.warning( + "Invalid value for cloud_provider: %s (must be in %s). Falling back to autodetection...", + CloudResourceContextIntegration.cloud_provider, + list(context_getters.keys()), + ) + + context = CloudResourceContextIntegration._get_cloud_resource_context() + if context != {}: + set_context(CONTEXT_TYPE, context) + + +# Map with the currently supported cloud providers +# mapping to functions extracting the context +context_getters = { + CLOUD_PROVIDER.AWS: CloudResourceContextIntegration._get_aws_context, + CLOUD_PROVIDER.GCP: CloudResourceContextIntegration._get_gcp_context, +} diff --git a/src/sentry_sdk_alpha/integrations/cohere.py b/src/sentry_sdk_alpha/integrations/cohere.py new file mode 100644 index 00000000000000..aab650cb20e55c --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/cohere.py @@ -0,0 +1,272 @@ +from functools import wraps + +from sentry_sdk_alpha import consts +from sentry_sdk_alpha.ai.monitoring import record_token_usage +from sentry_sdk_alpha.consts import SPANDATA +from sentry_sdk_alpha.ai.utils import set_data_normalized + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing 
import Any, Callable, Iterator + from sentry_sdk_alpha.tracing import Span + +import sentry_sdk_alpha +from sentry_sdk_alpha.scope import should_send_default_pii +from sentry_sdk_alpha.integrations import DidNotEnable, Integration +from sentry_sdk_alpha.utils import capture_internal_exceptions, event_from_exception + +try: + from cohere.client import Client + from cohere.base_client import BaseCohere + from cohere import ( + ChatStreamEndEvent, + NonStreamedChatResponse, + ) + + if TYPE_CHECKING: + from cohere import StreamedChatResponse +except ImportError: + raise DidNotEnable("Cohere not installed") + +try: + # cohere 5.9.3+ + from cohere import StreamEndStreamedChatResponse +except ImportError: + from cohere import StreamedChatResponse_StreamEnd as StreamEndStreamedChatResponse + + +COLLECTED_CHAT_PARAMS = { + "model": SPANDATA.AI_MODEL_ID, + "k": SPANDATA.AI_TOP_K, + "p": SPANDATA.AI_TOP_P, + "seed": SPANDATA.AI_SEED, + "frequency_penalty": SPANDATA.AI_FREQUENCY_PENALTY, + "presence_penalty": SPANDATA.AI_PRESENCE_PENALTY, + "raw_prompting": SPANDATA.AI_RAW_PROMPTING, +} + +COLLECTED_PII_CHAT_PARAMS = { + "tools": SPANDATA.AI_TOOLS, + "preamble": SPANDATA.AI_PREAMBLE, +} + +COLLECTED_CHAT_RESP_ATTRS = { + "generation_id": SPANDATA.AI_GENERATION_ID, + "is_search_required": SPANDATA.AI_SEARCH_REQUIRED, + "finish_reason": SPANDATA.AI_FINISH_REASON, +} + +COLLECTED_PII_CHAT_RESP_ATTRS = { + "citations": SPANDATA.AI_CITATIONS, + "documents": SPANDATA.AI_DOCUMENTS, + "search_queries": SPANDATA.AI_SEARCH_QUERIES, + "search_results": SPANDATA.AI_SEARCH_RESULTS, + "tool_calls": SPANDATA.AI_TOOL_CALLS, +} + + +class CohereIntegration(Integration): + identifier = "cohere" + origin = f"auto.ai.{identifier}" + + def __init__(self, include_prompts=True): + # type: (CohereIntegration, bool) -> None + self.include_prompts = include_prompts + + @staticmethod + def setup_once(): + # type: () -> None + BaseCohere.chat = _wrap_chat(BaseCohere.chat, streaming=False) + Client.embed 
def _capture_exception(exc):
    # type: (Any) -> None
    """Report *exc* to Sentry as an unhandled error from the Cohere integration."""
    client = sentry_sdk_alpha.get_client()
    event, hint = event_from_exception(
        exc,
        client_options=client.options,
        mechanism={"type": "cohere", "handled": False},
    )
    sentry_sdk_alpha.capture_event(event, hint=hint)
try: + res = f(*args, **kwargs) + except Exception as e: + _capture_exception(e) + span.__exit__(None, None, None) + raise e from None + + with capture_internal_exceptions(): + if should_send_default_pii() and integration.include_prompts: + set_data_normalized( + span, + SPANDATA.AI_INPUT_MESSAGES, + list( + map( + lambda x: { + "role": getattr(x, "role", "").lower(), + "content": getattr(x, "message", ""), + }, + kwargs.get("chat_history", []), + ) + ) + + [{"role": "user", "content": message}], + ) + for k, v in COLLECTED_PII_CHAT_PARAMS.items(): + if k in kwargs: + set_data_normalized(span, v, kwargs[k]) + + for k, v in COLLECTED_CHAT_PARAMS.items(): + if k in kwargs: + set_data_normalized(span, v, kwargs[k]) + set_data_normalized(span, SPANDATA.AI_STREAMING, False) + + if streaming: + old_iterator = res + + def new_iterator(): + # type: () -> Iterator[StreamedChatResponse] + + with capture_internal_exceptions(): + for x in old_iterator: + if isinstance(x, ChatStreamEndEvent) or isinstance( + x, StreamEndStreamedChatResponse + ): + collect_chat_response_fields( + span, + x.response, + include_pii=should_send_default_pii() + and integration.include_prompts, + ) + yield x + + span.__exit__(None, None, None) + + return new_iterator() + elif isinstance(res, NonStreamedChatResponse): + collect_chat_response_fields( + span, + res, + include_pii=should_send_default_pii() + and integration.include_prompts, + ) + span.__exit__(None, None, None) + else: + set_data_normalized(span, "unknown_response", True) + span.__exit__(None, None, None) + return res + + return new_chat + + +def _wrap_embed(f): + # type: (Callable[..., Any]) -> Callable[..., Any] + + @wraps(f) + def new_embed(*args, **kwargs): + # type: (*Any, **Any) -> Any + integration = sentry_sdk_alpha.get_client().get_integration(CohereIntegration) + if integration is None: + return f(*args, **kwargs) + + with sentry_sdk_alpha.start_span( + op=consts.OP.COHERE_EMBEDDINGS_CREATE, + name="Cohere Embedding Creation", 
+ origin=CohereIntegration.origin, + only_if_parent=True, + ) as span: + if "texts" in kwargs and ( + should_send_default_pii() and integration.include_prompts + ): + if isinstance(kwargs["texts"], str): + set_data_normalized(span, SPANDATA.AI_TEXTS, [kwargs["texts"]]) + elif ( + isinstance(kwargs["texts"], list) + and len(kwargs["texts"]) > 0 + and isinstance(kwargs["texts"][0], str) + ): + set_data_normalized( + span, SPANDATA.AI_INPUT_MESSAGES, kwargs["texts"] + ) + + if "model" in kwargs: + set_data_normalized(span, SPANDATA.AI_MODEL_ID, kwargs["model"]) + try: + res = f(*args, **kwargs) + except Exception as e: + _capture_exception(e) + raise e from None + if ( + hasattr(res, "meta") + and hasattr(res.meta, "billed_units") + and hasattr(res.meta.billed_units, "input_tokens") + ): + record_token_usage( + span, + prompt_tokens=res.meta.billed_units.input_tokens, + total_tokens=res.meta.billed_units.input_tokens, + ) + return res + + return new_embed diff --git a/src/sentry_sdk_alpha/integrations/dedupe.py b/src/sentry_sdk_alpha/integrations/dedupe.py new file mode 100644 index 00000000000000..d47dc411e888c8 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/dedupe.py @@ -0,0 +1,51 @@ +import sentry_sdk_alpha +from sentry_sdk_alpha.utils import ContextVar +from sentry_sdk_alpha.integrations import Integration +from sentry_sdk_alpha.scope import add_global_event_processor + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Optional + + from sentry_sdk_alpha._types import Event, Hint + + +class DedupeIntegration(Integration): + identifier = "dedupe" + + def __init__(self): + # type: () -> None + self._last_seen = ContextVar("last-seen") + + @staticmethod + def setup_once(): + # type: () -> None + @add_global_event_processor + def processor(event, hint): + # type: (Event, Optional[Hint]) -> Optional[Event] + if hint is None: + return event + + integration = sentry_sdk_alpha.get_client().get_integration(DedupeIntegration) + if integration 
is None: + return event + + exc_info = hint.get("exc_info", None) + if exc_info is None: + return event + + exc = exc_info[1] + if integration._last_seen.get(None) is exc: + return None + integration._last_seen.set(exc) + return event + + @staticmethod + def reset_last_seen(): + # type: () -> None + integration = sentry_sdk_alpha.get_client().get_integration(DedupeIntegration) + if integration is None: + return + + integration._last_seen.set(None) diff --git a/src/sentry_sdk_alpha/integrations/django/__init__.py b/src/sentry_sdk_alpha/integrations/django/__init__.py new file mode 100644 index 00000000000000..86658874986a71 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/django/__init__.py @@ -0,0 +1,726 @@ +import functools +import inspect +import sys +import threading +import weakref +from importlib import import_module + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP, SPANDATA, SOURCE_FOR_STYLE, TransactionSource +from sentry_sdk_alpha.scope import add_global_event_processor, should_send_default_pii +from sentry_sdk_alpha.serializer import add_global_repr_processor +from sentry_sdk_alpha.tracing_utils import add_query_source, record_sql_queries +from sentry_sdk_alpha.utils import ( + AnnotatedValue, + HAS_REAL_CONTEXTVARS, + CONTEXTVARS_ERROR_MESSAGE, + SENSITIVE_DATA_SUBSTITUTE, + logger, + capture_internal_exceptions, + ensure_integration_enabled, + event_from_exception, + transaction_from_function, + walk_exception_chain, +) +from sentry_sdk_alpha.integrations import _check_minimum_version, Integration, DidNotEnable +from sentry_sdk_alpha.integrations.logging import ignore_logger +from sentry_sdk_alpha.integrations.wsgi import SentryWsgiMiddleware +from sentry_sdk_alpha.integrations._wsgi_common import ( + DEFAULT_HTTP_METHODS_TO_CAPTURE, + RequestExtractor, +) + +try: + from django import VERSION as DJANGO_VERSION + from django.conf import settings as django_settings + from django.core import signals + from django.conf import settings 
+ + try: + from django.urls import resolve + except ImportError: + from django.core.urlresolvers import resolve + + try: + from django.urls import Resolver404 + except ImportError: + from django.core.urlresolvers import Resolver404 + + # Only available in Django 3.0+ + try: + from django.core.handlers.asgi import ASGIRequest + except Exception: + ASGIRequest = None + +except ImportError: + raise DidNotEnable("Django not installed") + +from sentry_sdk_alpha.integrations.django.caching import patch_caching +from sentry_sdk_alpha.integrations.django.transactions import LEGACY_RESOLVER +from sentry_sdk_alpha.integrations.django.templates import ( + get_template_frame_from_exception, + patch_templates, +) +from sentry_sdk_alpha.integrations.django.middleware import patch_django_middlewares +from sentry_sdk_alpha.integrations.django.signals_handlers import patch_signals +from sentry_sdk_alpha.integrations.django.views import patch_views + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import Callable + from typing import Dict + from typing import Optional + from typing import Union + from typing import List + + from django.core.handlers.wsgi import WSGIRequest + from django.http.response import HttpResponse + from django.http.request import QueryDict + from django.utils.datastructures import MultiValueDict + + from sentry_sdk_alpha.tracing import Span + from sentry_sdk_alpha.integrations.wsgi import _ScopedResponse + from sentry_sdk_alpha._types import Event, Hint, EventProcessor, NotImplementedType + + +TRANSACTION_STYLE_VALUES = ("function_name", "url") + + +class DjangoIntegration(Integration): + """ + Auto instrument a Django application. + + :param transaction_style: How to derive transaction names. Either `"function_name"` or `"url"`. Defaults to `"url"`. + :param middleware_spans: Whether to create spans for middleware. Defaults to `True`. + :param signals_spans: Whether to create spans for signals. 
Defaults to `True`. + :param signals_denylist: A list of signals to ignore when creating spans. + :param cache_spans: Whether to create spans for cache operations. Defaults to `False`. + """ + + identifier = "django" + origin = f"auto.http.{identifier}" + origin_db = f"auto.db.{identifier}" + + transaction_style = "" + middleware_spans = None + signals_spans = None + cache_spans = None + signals_denylist = [] # type: list[signals.Signal] + + def __init__( + self, + transaction_style="url", # type: str + middleware_spans=True, # type: bool + signals_spans=True, # type: bool + cache_spans=True, # type: bool + signals_denylist=None, # type: Optional[list[signals.Signal]] + http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: tuple[str, ...] + ): + # type: (...) -> None + if transaction_style not in TRANSACTION_STYLE_VALUES: + raise ValueError( + "Invalid value for transaction_style: %s (must be in %s)" + % (transaction_style, TRANSACTION_STYLE_VALUES) + ) + self.transaction_style = transaction_style + self.middleware_spans = middleware_spans + + self.signals_spans = signals_spans + self.signals_denylist = signals_denylist or [] + + self.cache_spans = cache_spans + + self.http_methods_to_capture = tuple(map(str.upper, http_methods_to_capture)) + + @staticmethod + def setup_once(): + # type: () -> None + _check_minimum_version(DjangoIntegration, DJANGO_VERSION) + + install_sql_hook() + # Patch in our custom middleware. 
+ + # logs an error for every 500 + ignore_logger("django.server") + ignore_logger("django.request") + + from django.core.handlers.wsgi import WSGIHandler + + old_app = WSGIHandler.__call__ + + @ensure_integration_enabled(DjangoIntegration, old_app) + def sentry_patched_wsgi_handler(self, environ, start_response): + # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse + bound_old_app = old_app.__get__(self, WSGIHandler) + + from django.conf import settings + + use_x_forwarded_for = settings.USE_X_FORWARDED_HOST + + integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration) + + middleware = SentryWsgiMiddleware( + bound_old_app, + use_x_forwarded_for, + span_origin=DjangoIntegration.origin, + http_methods_to_capture=( + integration.http_methods_to_capture + if integration + else DEFAULT_HTTP_METHODS_TO_CAPTURE + ), + ) + return middleware(environ, start_response) + + WSGIHandler.__call__ = sentry_patched_wsgi_handler + + _patch_get_response() + + _patch_django_asgi_handler() + + signals.got_request_exception.connect(_got_request_exception) + + @add_global_event_processor + def process_django_templates(event, hint): + # type: (Event, Optional[Hint]) -> Optional[Event] + if hint is None: + return event + + exc_info = hint.get("exc_info", None) + + if exc_info is None: + return event + + exception = event.get("exception", None) + + if exception is None: + return event + + values = exception.get("values", None) + + if values is None: + return event + + for exception, (_, exc_value, _) in zip( + reversed(values), walk_exception_chain(exc_info) + ): + frame = get_template_frame_from_exception(exc_value) + if frame is not None: + frames = exception.get("stacktrace", {}).get("frames", []) + + for i in reversed(range(len(frames))): + f = frames[i] + if ( + f.get("function") in ("Parser.parse", "parse", "render") + and f.get("module") == "django.template.base" + ): + i += 1 + break + else: + i = len(frames) + + frames.insert(i, frame) + + 
return event + + @add_global_repr_processor + def _django_queryset_repr(value, hint): + # type: (Any, Dict[str, Any]) -> Union[NotImplementedType, str] + try: + # Django 1.6 can fail to import `QuerySet` when Django settings + # have not yet been initialized. + # + # If we fail to import, return `NotImplemented`. It's at least + # unlikely that we have a query set in `value` when importing + # `QuerySet` fails. + from django.db.models.query import QuerySet + except Exception: + return NotImplemented + + if not isinstance(value, QuerySet) or value._result_cache: + return NotImplemented + + return "<%s from %s at 0x%x>" % ( + value.__class__.__name__, + value.__module__, + id(value), + ) + + _patch_channels() + patch_django_middlewares() + patch_views() + patch_templates() + patch_signals() + + if patch_caching is not None: + patch_caching() + + +_DRF_PATCHED = False +_DRF_PATCH_LOCK = threading.Lock() + + +def _patch_drf(): + # type: () -> None + """ + Patch Django Rest Framework for more/better request data. DRF's request + type is a wrapper around Django's request type. The attribute we're + interested in is `request.data`, which is a cached property containing a + parsed request body. Reading a request body from that property is more + reliable than reading from any of Django's own properties, as those don't + hold payloads in memory and therefore can only be accessed once. + + We patch the Django request object to include a weak backreference to the + DRF request object, such that we can later use either in + `DjangoRequestExtractor`. + + This function is not called directly on SDK setup, because importing almost + any part of Django Rest Framework will try to access Django settings (where + `sentry_sdk.init()` might be called from in the first place). Instead we + run this function on every request and do the patching on the first + request. 
+ """ + + global _DRF_PATCHED + + if _DRF_PATCHED: + # Double-checked locking + return + + with _DRF_PATCH_LOCK: + if _DRF_PATCHED: + return + + # We set this regardless of whether the code below succeeds or fails. + # There is no point in trying to patch again on the next request. + _DRF_PATCHED = True + + with capture_internal_exceptions(): + try: + from rest_framework.views import APIView # type: ignore + except ImportError: + pass + else: + old_drf_initial = APIView.initial + + @functools.wraps(old_drf_initial) + def sentry_patched_drf_initial(self, request, *args, **kwargs): + # type: (APIView, Any, *Any, **Any) -> Any + with capture_internal_exceptions(): + request._request._sentry_drf_request_backref = weakref.ref( + request + ) + pass + return old_drf_initial(self, request, *args, **kwargs) + + APIView.initial = sentry_patched_drf_initial + + +def _patch_channels(): + # type: () -> None + try: + from channels.http import AsgiHandler # type: ignore + except ImportError: + return + + if not HAS_REAL_CONTEXTVARS: + # We better have contextvars or we're going to leak state between + # requests. + # + # We cannot hard-raise here because channels may not be used at all in + # the current process. That is the case when running traditional WSGI + # workers in gunicorn+gevent and the websocket stuff in a separate + # process. + logger.warning( + "We detected that you are using Django channels 2.0." + + CONTEXTVARS_ERROR_MESSAGE + ) + + from sentry_sdk_alpha.integrations.django.asgi import patch_channels_asgi_handler_impl + + patch_channels_asgi_handler_impl(AsgiHandler) + + +def _patch_django_asgi_handler(): + # type: () -> None + try: + from django.core.handlers.asgi import ASGIHandler + except ImportError: + return + + if not HAS_REAL_CONTEXTVARS: + # We better have contextvars or we're going to leak state between + # requests. + # + # We cannot hard-raise here because Django's ASGI stuff may not be used + # at all. 
def _set_transaction_name_and_source(scope, transaction_style, request):
    # type: (sentry_sdk.Scope, str, WSGIRequest) -> None
    """Derive the transaction name from *request* according to *transaction_style*
    ("function_name" or "url") and record it on *scope*. Best-effort: any
    unexpected failure is swallowed.
    """
    try:
        transaction_name = None
        if transaction_style == "function_name":
            view = resolve(request.path).func
            transaction_name = transaction_from_function(
                getattr(view, "view_class", view)
            )
        elif transaction_style == "url":
            # Middleware may install a per-request urlconf; honor it if present.
            if hasattr(request, "urlconf"):
                transaction_name = LEGACY_RESOLVER.resolve(
                    request.path_info, urlconf=request.urlconf
                )
            else:
                transaction_name = LEGACY_RESOLVER.resolve(request.path_info)

        if transaction_name is None:
            # Could not resolve a route -- fall back to the raw path, URL-sourced.
            transaction_name = request.path_info
            source = TransactionSource.URL
        else:
            source = SOURCE_FOR_STYLE[transaction_style]

        scope.set_transaction_name(
            transaction_name,
            source=source,
        )
    except Resolver404:
        urlconf = import_module(settings.ROOT_URLCONF)
        # Resolver404 is only raised for transaction_style == "function_name",
        # so no style check is needed; fall back to the configured handler404.
        if hasattr(urlconf, "handler404"):
            handler = urlconf.handler404
            if isinstance(handler, str):
                scope.set_transaction_name(handler)
            else:
                name = transaction_from_function(
                    getattr(handler, "view_class", handler)
                )
                if isinstance(name, str):
                    scope.set_transaction_name(name)
    except Exception:
        pass
def _attempt_resolve_again(request, scope, transaction_style):
    # type: (WSGIRequest, sentry_sdk.Scope, str) -> None
    """Re-resolve the transaction name when middleware has installed a custom
    ``request.urlconf`` (that contract must be respected); no-op otherwise.
    """
    if not hasattr(request, "urlconf"):
        return
    _set_transaction_name_and_source(scope, transaction_style, request)


def _after_get_response(request):
    # type: (WSGIRequest) -> None
    """Post-response hook: refresh URL-style transaction names, since middleware
    may have overridden ``request.urlconf`` while the view ran.
    """
    integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration)
    if integration is None or integration.transaction_style != "url":
        return

    _attempt_resolve_again(
        request, sentry_sdk_alpha.get_current_scope(), integration.transaction_style
    )


def _patch_get_response():
    # type: () -> None
    """Wrap ``BaseHandler.get_response`` (and its async twin, when present), because
    at that point we have access to the Django request object.
    """
    from django.core.handlers.base import BaseHandler

    old_get_response = BaseHandler.get_response

    @functools.wraps(old_get_response)
    def sentry_patched_get_response(self, request):
        # type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException]
        _before_get_response(request)
        response = old_get_response(self, request)
        _after_get_response(request)
        return response

    BaseHandler.get_response = sentry_patched_get_response

    if hasattr(BaseHandler, "get_response_async"):
        from sentry_sdk_alpha.integrations.django.asgi import patch_get_response_async

        patch_get_response_async(BaseHandler, _before_get_response)
def _got_request_exception(request=None, **kwargs):
    # type: (Optional[WSGIRequest], **Any) -> None
    """Receiver for Django's ``got_request_exception`` signal: capture the
    currently active exception and send it to Sentry.
    """
    client = sentry_sdk_alpha.get_client()
    integration = client.get_integration(DjangoIntegration)
    if integration is None:
        return

    if request is not None and integration.transaction_style == "url":
        # Middleware may have swapped request.urlconf; re-resolve the name.
        _attempt_resolve_again(
            request, sentry_sdk_alpha.get_current_scope(), integration.transaction_style
        )

    event, hint = event_from_exception(
        sys.exc_info(),
        client_options=client.options,
        mechanism={"type": "django", "handled": False},
    )
    sentry_sdk_alpha.capture_event(event, hint=hint)
def _set_user_info(request, event):
    # type: (WSGIRequest, Event) -> None
    """Fill ``event["user"]`` (id, email, username) from ``request.user``.

    Existing keys win via ``setdefault``. Anonymous or missing users are
    ignored, and each attribute is read best-effort so a broken user model
    never raises out of event processing.
    """
    user_info = event.setdefault("user", {})

    user = getattr(request, "user", None)
    if user is None or not user.is_authenticated:
        return

    for key, read in (
        ("id", lambda: str(user.pk)),
        ("email", lambda: user.email),
        ("username", lambda: user.get_username()),
    ):
        try:
            user_info.setdefault(key, read())
        except Exception:
            pass
def _set_db_data(span, cursor_or_db):
    # type: (Span, Any) -> None
    """Attach database system/name/address attributes to *span*, extracted from a
    Django cursor wrapper or database wrapper.
    """
    db = cursor_or_db.db if hasattr(cursor_or_db, "db") else cursor_or_db
    span.set_attribute(SPANDATA.DB_SYSTEM, db.vendor)

    # Some custom backends override ``__getattr__`` so ``connection`` and
    # ``get_dsn_parameters`` *look* present but raise once actually called --
    # hence the ``inspect.isroutine`` check before trusting it (psycopg2 path).
    is_psycopg2 = (
        hasattr(cursor_or_db, "connection")
        and hasattr(cursor_or_db.connection, "get_dsn_parameters")
        and inspect.isroutine(cursor_or_db.connection.get_dsn_parameters)
    )

    if is_psycopg2:
        connection_params = cursor_or_db.connection.get_dsn_parameters()
    else:
        try:
            # psycopg3: read only the fields we need -- ``get_parameters`` can
            # be slow due to the extra logic that filters out default values.
            info = cursor_or_db.connection.info
            connection_params = {
                "dbname": info.dbname,
                "port": info.port,
            }
            # ``host`` may be the absolute path of a UNIX socket (starts with
            # "/"); only report it when it actually contains a host.
            pg_host = info.host
            if pg_host and not pg_host.startswith("/"):
                connection_params["host"] = pg_host
        except Exception:
            connection_params = db.get_connection_params()

    db_name = connection_params.get("dbname") or connection_params.get("database")
    if db_name is not None:
        span.set_attribute(SPANDATA.DB_NAME, db_name)

    server_address = connection_params.get("host")
    if server_address is not None:
        span.set_attribute(SPANDATA.SERVER_ADDRESS, server_address)

    server_port = connection_params.get("port")
    if server_port is not None:
        span.set_attribute(SPANDATA.SERVER_PORT, str(server_port))

    server_socket_address = connection_params.get("unix_socket")
    if server_socket_address is not None:
        span.set_attribute(SPANDATA.SERVER_SOCKET_ADDRESS, server_socket_address)
+""" + +import asyncio +import functools +import inspect + +from django.core.handlers.wsgi import WSGIRequest + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP + +from sentry_sdk_alpha.integrations.asgi import SentryAsgiMiddleware +from sentry_sdk_alpha.scope import should_send_default_pii +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + ensure_integration_enabled, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Callable, Union, TypeVar + + from django.core.handlers.asgi import ASGIRequest + from django.http.response import HttpResponse + + from sentry_sdk_alpha._types import Event, EventProcessor + + _F = TypeVar("_F", bound=Callable[..., Any]) + + +# Python 3.12 deprecates asyncio.iscoroutinefunction() as an alias for +# inspect.iscoroutinefunction(), whilst also removing the _is_coroutine marker. +# The latter is replaced with the inspect.markcoroutinefunction decorator. +# Until 3.12 is the minimum supported Python version, provide a shim. +# This was copied from https://github.com/django/asgiref/blob/main/asgiref/sync.py +if hasattr(inspect, "markcoroutinefunction"): + iscoroutinefunction = inspect.iscoroutinefunction + markcoroutinefunction = inspect.markcoroutinefunction +else: + iscoroutinefunction = asyncio.iscoroutinefunction # type: ignore[assignment] + + def markcoroutinefunction(func: "_F") -> "_F": + func._is_coroutine = asyncio.coroutines._is_coroutine # type: ignore + return func + + +def _make_asgi_request_event_processor(request): + # type: (ASGIRequest) -> EventProcessor + def asgi_request_event_processor(event, hint): + # type: (Event, dict[str, Any]) -> Event + # if the request is gone we are fine not logging the data from + # it. This might happen if the processor is pushed away to + # another thread. 
def patch_django_asgi_handler_impl(cls):
    # type: (Any) -> None
    """Patch *cls* (a Django ASGI handler class) so calls run through
    ``SentryAsgiMiddleware`` and, when ``create_request`` exists (Django 3.0+),
    so each created request gets an ASGI event processor on the isolation scope.
    """
    from sentry_sdk_alpha.integrations.django import DjangoIntegration

    old_app = cls.__call__

    @functools.wraps(old_app)
    async def sentry_patched_asgi_handler(self, scope, receive, send):
        # type: (Any, Any, Any, Any) -> Any
        integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration)
        if integration is None:
            # Integration disabled: defer to the unpatched handler untouched.
            return await old_app(self, scope, receive, send)

        middleware = SentryAsgiMiddleware(
            old_app.__get__(self, cls),
            unsafe_context_data=True,
            span_origin=DjangoIntegration.origin,
            http_methods_to_capture=integration.http_methods_to_capture,
        )._run_asgi3
        return await middleware(scope, receive, send)

    cls.__call__ = sentry_patched_asgi_handler

    if hasattr(cls, "create_request"):
        # "Modern" Django ASGI support: hook request creation as well.
        old_create_request = cls.create_request

        @ensure_integration_enabled(DjangoIntegration, old_create_request)
        def sentry_patched_create_request(self, *args, **kwargs):
            # type: (Any, *Any, **Any) -> Any
            request, error_response = old_create_request(self, *args, **kwargs)
            scope = sentry_sdk_alpha.get_isolation_scope()
            scope.add_event_processor(_make_asgi_request_event_processor(request))
            return request, error_response

        cls.create_request = sentry_patched_create_request
@functools.wraps(old_get_response_async) + async def sentry_patched_get_response_async(self, request): + # type: (Any, Any) -> Union[HttpResponse, BaseException] + _before_get_response(request) + return await old_get_response_async(self, request) + + cls.get_response_async = sentry_patched_get_response_async + + +def patch_channels_asgi_handler_impl(cls): + # type: (Any) -> None + import channels # type: ignore + + from sentry_sdk_alpha.integrations.django import DjangoIntegration + + if channels.__version__ < "3.0.0": + old_app = cls.__call__ + + @functools.wraps(old_app) + async def sentry_patched_asgi_handler(self, receive, send): + # type: (Any, Any, Any) -> Any + integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration) + if integration is None: + return await old_app(self, receive, send) + + middleware = SentryAsgiMiddleware( + lambda _scope: old_app.__get__(self, cls), + unsafe_context_data=True, + span_origin=DjangoIntegration.origin, + http_methods_to_capture=integration.http_methods_to_capture, + ) + + return await middleware(self.scope)(receive, send) + + cls.__call__ = sentry_patched_asgi_handler + + else: + # The ASGI handler in Channels >= 3 has the same signature as + # the Django handler. 
+ patch_django_asgi_handler_impl(cls) + + +def wrap_async_view(callback): + # type: (Any) -> Any + from sentry_sdk_alpha.integrations.django import DjangoIntegration + + @functools.wraps(callback) + async def sentry_wrapped_callback(request, *args, **kwargs): + # type: (Any, *Any, **Any) -> Any + current_scope = sentry_sdk_alpha.get_current_scope() + if current_scope.root_span is not None: + current_scope.root_span.update_active_thread() + + sentry_scope = sentry_sdk_alpha.get_isolation_scope() + if sentry_scope.profile is not None: + sentry_scope.profile.update_active_thread_id() + + with sentry_sdk_alpha.start_span( + op=OP.VIEW_RENDER, + name=request.resolver_match.view_name, + origin=DjangoIntegration.origin, + only_if_parent=True, + ): + return await callback(request, *args, **kwargs) + + return sentry_wrapped_callback + + +def _asgi_middleware_mixin_factory(_check_middleware_span): + # type: (Callable[..., Any]) -> Any + """ + Mixin class factory that generates a middleware mixin for handling requests + in async mode. + """ + + class SentryASGIMixin: + if TYPE_CHECKING: + _inner = None + + def __init__(self, get_response): + # type: (Callable[..., Any]) -> None + self.get_response = get_response + self._acall_method = None + self._async_check() + + def _async_check(self): + # type: () -> None + """ + If get_response is a coroutine function, turns us into async mode so + a thread is not consumed during a whole request. 
+ Taken from django.utils.deprecation::MiddlewareMixin._async_check + """ + if iscoroutinefunction(self.get_response): + markcoroutinefunction(self) + + def async_route_check(self): + # type: () -> bool + """ + Function that checks if we are in async mode, + and if we are forwards the handling of requests to __acall__ + """ + return iscoroutinefunction(self.get_response) + + async def __acall__(self, *args, **kwargs): + # type: (*Any, **Any) -> Any + f = self._acall_method + if f is None: + if hasattr(self._inner, "__acall__"): + self._acall_method = f = self._inner.__acall__ # type: ignore + else: + self._acall_method = f = self._inner + + middleware_span = _check_middleware_span(old_method=f) + + if middleware_span is None: + return await f(*args, **kwargs) + + with middleware_span: + return await f(*args, **kwargs) + + return SentryASGIMixin diff --git a/src/sentry_sdk_alpha/integrations/django/caching.py b/src/sentry_sdk_alpha/integrations/django/caching.py new file mode 100644 index 00000000000000..634418e4631d38 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/django/caching.py @@ -0,0 +1,182 @@ +import functools +from typing import TYPE_CHECKING +from sentry_sdk_alpha.integrations.redis.utils import _get_safe_key, _key_as_string +from urllib3.util import parse_url as urlparse + +from django import VERSION as DJANGO_VERSION +from django.core.cache import CacheHandler + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP, SPANDATA +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + ensure_integration_enabled, +) + + +if TYPE_CHECKING: + from typing import Any + from typing import Callable + from typing import Optional + + +METHODS_TO_INSTRUMENT = [ + "set", + "set_many", + "get", + "get_many", +] + + +def _get_span_description(method_name, args, kwargs): + # type: (str, tuple[Any], dict[str, Any]) -> str + return _key_as_string(_get_safe_key(method_name, args, kwargs)) + + +def _patch_cache_method(cache, method_name, 
address, port): + # type: (CacheHandler, str, Optional[str], Optional[int]) -> None + from sentry_sdk_alpha.integrations.django import DjangoIntegration + + original_method = getattr(cache, method_name) + + @ensure_integration_enabled(DjangoIntegration, original_method) + def _instrument_call( + cache, method_name, original_method, args, kwargs, address, port + ): + # type: (CacheHandler, str, Callable[..., Any], tuple[Any, ...], dict[str, Any], Optional[str], Optional[int]) -> Any + is_set_operation = method_name.startswith("set") + is_get_operation = not is_set_operation + + op = OP.CACHE_PUT if is_set_operation else OP.CACHE_GET + description = _get_span_description(method_name, args, kwargs) + + with sentry_sdk_alpha.start_span( + op=op, + name=description, + origin=DjangoIntegration.origin, + only_if_parent=True, + ) as span: + value = original_method(*args, **kwargs) + + with capture_internal_exceptions(): + if address is not None: + span.set_attribute(SPANDATA.NETWORK_PEER_ADDRESS, address) + + if port is not None: + span.set_attribute(SPANDATA.NETWORK_PEER_PORT, port) + + key = _get_safe_key(method_name, args, kwargs) + if key is not None: + span.set_attribute(SPANDATA.CACHE_KEY, key) + + item_size = None + if is_get_operation: + if value: + item_size = len(str(value)) + span.set_attribute(SPANDATA.CACHE_HIT, True) + else: + span.set_attribute(SPANDATA.CACHE_HIT, False) + else: # TODO: We don't handle `get_or_set` which we should + arg_count = len(args) + if arg_count >= 2: + # 'set' command + item_size = len(str(args[1])) + elif arg_count == 1: + # 'set_many' command + item_size = len(str(args[0])) + + if item_size is not None: + span.set_attribute(SPANDATA.CACHE_ITEM_SIZE, item_size) + + return value + + @functools.wraps(original_method) + def sentry_method(*args, **kwargs): + # type: (*Any, **Any) -> Any + return _instrument_call( + cache, method_name, original_method, args, kwargs, address, port + ) + + setattr(cache, method_name, sentry_method) + + 
+def _patch_cache(cache, address=None, port=None): + # type: (CacheHandler, Optional[str], Optional[int]) -> None + if not hasattr(cache, "_sentry_patched"): + for method_name in METHODS_TO_INSTRUMENT: + _patch_cache_method(cache, method_name, address, port) + cache._sentry_patched = True + + +def _get_address_port(settings): + # type: (dict[str, Any]) -> tuple[Optional[str], Optional[int]] + location = settings.get("LOCATION") + + # TODO: location can also be an array of locations + # see: https://docs.djangoproject.com/en/5.0/topics/cache/#redis + # GitHub issue: https://github.com/getsentry/sentry-python/issues/3062 + if not isinstance(location, str): + return None, None + + if "://" in location: + parsed_url = urlparse(location) + # remove the username and password from URL to not leak sensitive data. + address = "{}://{}{}".format( + parsed_url.scheme or "", + parsed_url.hostname or "", + parsed_url.path or "", + ) + port = parsed_url.port + else: + address = location + port = None + + return address, int(port) if port is not None else None + + +def patch_caching(): + # type: () -> None + from sentry_sdk_alpha.integrations.django import DjangoIntegration + + if not hasattr(CacheHandler, "_sentry_patched"): + if DJANGO_VERSION < (3, 2): + original_get_item = CacheHandler.__getitem__ + + @functools.wraps(original_get_item) + def sentry_get_item(self, alias): + # type: (CacheHandler, str) -> Any + cache = original_get_item(self, alias) + + integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration) + if integration is not None and integration.cache_spans: + from django.conf import settings + + address, port = _get_address_port( + settings.CACHES[alias or "default"] + ) + + _patch_cache(cache, address, port) + + return cache + + CacheHandler.__getitem__ = sentry_get_item + CacheHandler._sentry_patched = True + + else: + original_create_connection = CacheHandler.create_connection + + @functools.wraps(original_create_connection) + def 
sentry_create_connection(self, alias): + # type: (CacheHandler, str) -> Any + cache = original_create_connection(self, alias) + + integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration) + if integration is not None and integration.cache_spans: + address, port = _get_address_port(self.settings[alias or "default"]) + + _patch_cache(cache, address, port) + + return cache + + CacheHandler.create_connection = sentry_create_connection + CacheHandler._sentry_patched = True diff --git a/src/sentry_sdk_alpha/integrations/django/middleware.py b/src/sentry_sdk_alpha/integrations/django/middleware.py new file mode 100644 index 00000000000000..22ad0cd9b81a30 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/django/middleware.py @@ -0,0 +1,188 @@ +""" +Create spans from Django middleware invocations +""" + +from functools import wraps + +from django import VERSION as DJANGO_VERSION + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP +from sentry_sdk_alpha.utils import ( + ContextVar, + transaction_from_function, + capture_internal_exceptions, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import Callable + from typing import Optional + from typing import TypeVar + + from sentry_sdk_alpha.tracing import Span + + F = TypeVar("F", bound=Callable[..., Any]) + +_import_string_should_wrap_middleware = ContextVar( + "import_string_should_wrap_middleware" +) + +DJANGO_SUPPORTS_ASYNC_MIDDLEWARE = DJANGO_VERSION >= (3, 1) + +if not DJANGO_SUPPORTS_ASYNC_MIDDLEWARE: + _asgi_middleware_mixin_factory = lambda _: object +else: + from .asgi import _asgi_middleware_mixin_factory + + +def patch_django_middlewares(): + # type: () -> None + from django.core.handlers import base + + old_import_string = base.import_string + + def sentry_patched_import_string(dotted_path): + # type: (str) -> Any + rv = old_import_string(dotted_path) + + if _import_string_should_wrap_middleware.get(None): + rv = 
_wrap_middleware(rv, dotted_path) + + return rv + + base.import_string = sentry_patched_import_string + + old_load_middleware = base.BaseHandler.load_middleware + + def sentry_patched_load_middleware(*args, **kwargs): + # type: (Any, Any) -> Any + _import_string_should_wrap_middleware.set(True) + try: + return old_load_middleware(*args, **kwargs) + finally: + _import_string_should_wrap_middleware.set(False) + + base.BaseHandler.load_middleware = sentry_patched_load_middleware + + +def _wrap_middleware(middleware, middleware_name): + # type: (Any, str) -> Any + from sentry_sdk_alpha.integrations.django import DjangoIntegration + + def _check_middleware_span(old_method): + # type: (Callable[..., Any]) -> Optional[Span] + integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration) + if integration is None or not integration.middleware_spans: + return None + + function_name = transaction_from_function(old_method) + + description = middleware_name + function_basename = getattr(old_method, "__name__", None) + if function_basename: + description = "{}.{}".format(description, function_basename) + + middleware_span = sentry_sdk_alpha.start_span( + op=OP.MIDDLEWARE_DJANGO, + name=description, + origin=DjangoIntegration.origin, + only_if_parent=True, + ) + middleware_span.set_tag("django.function_name", function_name) + middleware_span.set_tag("django.middleware_name", middleware_name) + + return middleware_span + + def _get_wrapped_method(old_method): + # type: (F) -> F + with capture_internal_exceptions(): + + def sentry_wrapped_method(*args, **kwargs): + # type: (*Any, **Any) -> Any + middleware_span = _check_middleware_span(old_method) + + if middleware_span is None: + return old_method(*args, **kwargs) + + with middleware_span: + return old_method(*args, **kwargs) + + try: + # fails for __call__ of function on Python 2 (see py2.7-django-1.11) + sentry_wrapped_method = wraps(old_method)(sentry_wrapped_method) + + # Necessary for Django 3.1 + 
sentry_wrapped_method.__self__ = old_method.__self__ # type: ignore + except Exception: + pass + + return sentry_wrapped_method # type: ignore + + return old_method + + class SentryWrappingMiddleware( + _asgi_middleware_mixin_factory(_check_middleware_span) # type: ignore + ): + sync_capable = getattr(middleware, "sync_capable", True) + async_capable = DJANGO_SUPPORTS_ASYNC_MIDDLEWARE and getattr( + middleware, "async_capable", False + ) + + def __init__(self, get_response=None, *args, **kwargs): + # type: (Optional[Callable[..., Any]], *Any, **Any) -> None + if get_response: + self._inner = middleware(get_response, *args, **kwargs) + else: + self._inner = middleware(*args, **kwargs) + self.get_response = get_response + self._call_method = None + if self.async_capable: + super().__init__(get_response) + + # We need correct behavior for `hasattr()`, which we can only determine + # when we have an instance of the middleware we're wrapping. + def __getattr__(self, method_name): + # type: (str) -> Any + if method_name not in ( + "process_request", + "process_view", + "process_template_response", + "process_response", + "process_exception", + ): + raise AttributeError() + + old_method = getattr(self._inner, method_name) + rv = _get_wrapped_method(old_method) + self.__dict__[method_name] = rv + return rv + + def __call__(self, *args, **kwargs): + # type: (*Any, **Any) -> Any + if hasattr(self, "async_route_check") and self.async_route_check(): + return self.__acall__(*args, **kwargs) + + f = self._call_method + if f is None: + self._call_method = f = self._inner.__call__ + + middleware_span = _check_middleware_span(old_method=f) + + if middleware_span is None: + return f(*args, **kwargs) + + with middleware_span: + return f(*args, **kwargs) + + for attr in ( + "__name__", + "__module__", + "__qualname__", + ): + if hasattr(middleware, attr): + setattr(SentryWrappingMiddleware, attr, getattr(middleware, attr)) + + return SentryWrappingMiddleware diff --git 
a/src/sentry_sdk_alpha/integrations/django/signals_handlers.py b/src/sentry_sdk_alpha/integrations/django/signals_handlers.py
new file mode 100644
index 00000000000000..aeecc673b3088d
--- /dev/null
+++ b/src/sentry_sdk_alpha/integrations/django/signals_handlers.py
@@ -0,0 +1,93 @@
+from functools import wraps
+
+from django.dispatch import Signal
+
+import sentry_sdk_alpha
+from sentry_sdk_alpha.consts import OP
+from sentry_sdk_alpha.integrations.django import DJANGO_VERSION
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+    from typing import Any, Union
+
+
+def _get_receiver_name(receiver):
+    # type: (Callable[..., Any]) -> str
+    name = ""
+
+    if hasattr(receiver, "__qualname__"):
+        name = receiver.__qualname__
+    elif hasattr(receiver, "__name__"):  # Python 2.7 has no __qualname__
+        name = receiver.__name__
+    elif hasattr(
+        receiver, "func"
+    ):  # certain callables (like functools.partial objects) don't have a name
+        if hasattr(receiver, "func") and hasattr(receiver.func, "__name__"):  # NOTE(review): the outer elif already checked "func"
+            name = "partial()"
+
+    if (
+        name == ""
+    ):  # if nothing was found, fall back to the string representation (this is the slowest case)
+        return str(receiver)
+
+    if hasattr(receiver, "__module__"):  # prepend the module name, if there is one
+        name = receiver.__module__ + "." + name
+
+    return name
+
+
+def patch_signals():
+    # type: () -> None
+    """
+    Patch django signal receivers to create a span.
+
+    This only wraps sync receivers. Django>=5.0 introduced async receivers, but
+    since we don't create transactions for ASGI Django, we don't wrap them.
+ """ + from sentry_sdk_alpha.integrations.django import DjangoIntegration + + old_live_receivers = Signal._live_receivers + + @wraps(old_live_receivers) + def _sentry_live_receivers(self, sender): + # type: (Signal, Any) -> Union[tuple[list[Callable[..., Any]], list[Callable[..., Any]]], list[Callable[..., Any]]] + if DJANGO_VERSION >= (5, 0): + sync_receivers, async_receivers = old_live_receivers(self, sender) + else: + sync_receivers = old_live_receivers(self, sender) + async_receivers = [] + + def sentry_sync_receiver_wrapper(receiver): + # type: (Callable[..., Any]) -> Callable[..., Any] + @wraps(receiver) + def wrapper(*args, **kwargs): + # type: (Any, Any) -> Any + signal_name = _get_receiver_name(receiver) + with sentry_sdk_alpha.start_span( + op=OP.EVENT_DJANGO, + name=signal_name, + origin=DjangoIntegration.origin, + only_if_parent=True, + ) as span: + span.set_attribute("signal", signal_name) + return receiver(*args, **kwargs) + + return wrapper + + integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration) + if ( + integration + and integration.signals_spans + and self not in integration.signals_denylist + ): + for idx, receiver in enumerate(sync_receivers): + sync_receivers[idx] = sentry_sync_receiver_wrapper(receiver) + + if DJANGO_VERSION >= (5, 0): + return sync_receivers, async_receivers + else: + return sync_receivers + + Signal._live_receivers = _sentry_live_receivers diff --git a/src/sentry_sdk_alpha/integrations/django/templates.py b/src/sentry_sdk_alpha/integrations/django/templates.py new file mode 100644 index 00000000000000..4bdeffb4facf4c --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/django/templates.py @@ -0,0 +1,184 @@ +import functools + +from django.template import TemplateSyntaxError +from django.template.base import Origin +from django.utils.safestring import mark_safe + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP +from sentry_sdk_alpha.utils import ensure_integration_enabled + +from 
typing import TYPE_CHECKING

+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Dict
+    from typing import Optional
+    from typing import Iterator
+    from typing import Tuple
+
+
+def get_template_frame_from_exception(exc_value):
+    # type: (Optional[BaseException]) -> Optional[Dict[str, Any]]
+
+    # As of Django 1.9 or so the new template debug thing showed up.
+    if hasattr(exc_value, "template_debug"):
+        return _get_template_frame_from_debug(exc_value.template_debug)  # type: ignore
+
+    # As of r16833 (Django) all exceptions may contain a
+    # ``django_template_source`` attribute (rather than the legacy
+    # ``TemplateSyntaxError.source`` check)
+    if hasattr(exc_value, "django_template_source"):
+        return _get_template_frame_from_source(
+            exc_value.django_template_source  # type: ignore
+        )
+
+    if isinstance(exc_value, TemplateSyntaxError) and hasattr(exc_value, "source"):
+        source = exc_value.source
+        if isinstance(source, (tuple, list)) and isinstance(source[0], Origin):
+            return _get_template_frame_from_source(source)  # type: ignore
+
+    return None
+
+
+def _get_template_name_description(template_name):
+    # type: (Any) -> Optional[str]
+    if isinstance(template_name, (list, tuple)):
+        if template_name:  # an empty list/tuple falls through and returns None
+            return "[{}, ...]".format(template_name[0])
+    else:
+        return template_name
+
+
+def patch_templates():
+    # type: () -> None
+    from django.template.response import SimpleTemplateResponse
+    from sentry_sdk_alpha.integrations.django import DjangoIntegration
+
+    real_rendered_content = SimpleTemplateResponse.rendered_content
+
+    @property  # type: ignore
+    @ensure_integration_enabled(DjangoIntegration, real_rendered_content.fget)
+    def rendered_content(self):
+        # type: (SimpleTemplateResponse) -> str
+        with sentry_sdk_alpha.start_span(
+            op=OP.TEMPLATE_RENDER,
+            name=_get_template_name_description(self.template_name),
+            origin=DjangoIntegration.origin,
+            only_if_parent=True,
+        ) as span:
+            if isinstance(self.context_data, dict):
+                for k, v in self.context_data.items():
+ span.set_attribute(f"context.{k}", v) + return real_rendered_content.fget(self) + + SimpleTemplateResponse.rendered_content = rendered_content + + import django.shortcuts + + real_render = django.shortcuts.render + + @functools.wraps(real_render) + @ensure_integration_enabled(DjangoIntegration, real_render) + def render(request, template_name, context=None, *args, **kwargs): + # type: (django.http.HttpRequest, str, Optional[Dict[str, Any]], *Any, **Any) -> django.http.HttpResponse + + # Inject trace meta tags into template context + context = context or {} + if "sentry_trace_meta" not in context: + context["sentry_trace_meta"] = mark_safe( + sentry_sdk_alpha.get_current_scope().trace_propagation_meta() + ) + + with sentry_sdk_alpha.start_span( + op=OP.TEMPLATE_RENDER, + name=_get_template_name_description(template_name), + origin=DjangoIntegration.origin, + only_if_parent=True, + ) as span: + for k, v in context.items(): + span.set_attribute(f"context.{k}", v) + return real_render(request, template_name, context, *args, **kwargs) + + django.shortcuts.render = render + + +def _get_template_frame_from_debug(debug): + # type: (Dict[str, Any]) -> Dict[str, Any] + if debug is None: + return None + + lineno = debug["line"] + filename = debug["name"] + if filename is None: + filename = "" + + pre_context = [] + post_context = [] + context_line = None + + for i, line in debug["source_lines"]: + if i < lineno: + pre_context.append(line) + elif i > lineno: + post_context.append(line) + else: + context_line = line + + return { + "filename": filename, + "lineno": lineno, + "pre_context": pre_context[-5:], + "post_context": post_context[:5], + "context_line": context_line, + "in_app": True, + } + + +def _linebreak_iter(template_source): + # type: (str) -> Iterator[int] + yield 0 + p = template_source.find("\n") + while p >= 0: + yield p + 1 + p = template_source.find("\n", p + 1) + + +def _get_template_frame_from_source(source): + # type: (Tuple[Origin, Tuple[int, int]]) -> 
Optional[Dict[str, Any]] + if not source: + return None + + origin, (start, end) = source + filename = getattr(origin, "loadname", None) + if filename is None: + filename = "" + template_source = origin.reload() + lineno = None + upto = 0 + pre_context = [] + post_context = [] + context_line = None + + for num, next in enumerate(_linebreak_iter(template_source)): + line = template_source[upto:next] + if start >= upto and end <= next: + lineno = num + context_line = line + elif lineno is None: + pre_context.append(line) + else: + post_context.append(line) + + upto = next + + if context_line is None or lineno is None: + return None + + return { + "filename": filename, + "lineno": lineno, + "pre_context": pre_context[-5:], + "post_context": post_context[:5], + "context_line": context_line, + } diff --git a/src/sentry_sdk_alpha/integrations/django/transactions.py b/src/sentry_sdk_alpha/integrations/django/transactions.py new file mode 100644 index 00000000000000..78b972bc375cce --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/django/transactions.py @@ -0,0 +1,154 @@ +""" +Copied from raven-python. + +Despite being called "legacy" in some places this resolver is very much still +in use. 
+""" + +import re + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from django.urls.resolvers import URLResolver + from typing import Dict + from typing import List + from typing import Optional + from django.urls.resolvers import URLPattern + from typing import Tuple + from typing import Union + from re import Pattern + +from django.urls.resolvers import RoutePattern + +try: + from django.urls import get_resolver +except ImportError: + from django.core.urlresolvers import get_resolver + + +def get_regex(resolver_or_pattern): + # type: (Union[URLPattern, URLResolver]) -> Pattern[str] + """Utility method for django's deprecated resolver.regex""" + try: + regex = resolver_or_pattern.regex + except AttributeError: + regex = resolver_or_pattern.pattern.regex + return regex + + +class RavenResolver: + _new_style_group_matcher = re.compile( + r"<(?:([^>:]+):)?([^>]+)>" + ) # https://github.com/django/django/blob/21382e2743d06efbf5623e7c9b6dccf2a325669b/django/urls/resolvers.py#L245-L247 + _optional_group_matcher = re.compile(r"\(\?\:([^\)]+)\)") + _named_group_matcher = re.compile(r"\(\?P<(\w+)>[^\)]+\)+") + _non_named_group_matcher = re.compile(r"\([^\)]+\)") + # [foo|bar|baz] + _either_option_matcher = re.compile(r"\[([^\]]+)\|([^\]]+)\]") + _camel_re = re.compile(r"([A-Z]+)([a-z])") + + _cache = {} # type: Dict[URLPattern, str] + + def _simplify(self, pattern): + # type: (Union[URLPattern, URLResolver]) -> str + r""" + Clean up urlpattern regexes into something readable by humans: + + From: + > "^(?P\w+)/athletes/(?P\w+)/$" + + To: + > "{sport_slug}/athletes/{athlete_slug}/" + """ + # "new-style" path patterns can be parsed directly without turning them + # into regexes first + if ( + RoutePattern is not None + and hasattr(pattern, "pattern") + and isinstance(pattern.pattern, RoutePattern) + ): + return self._new_style_group_matcher.sub( + lambda m: "{%s}" % m.group(2), str(pattern.pattern._route) + ) + + result = get_regex(pattern).pattern + + # remove 
optional params
+        # TODO(dcramer): it'd be nice to change these into [%s] but it currently
+        # conflicts with the other rules because we're doing regexp matches
+        # rather than parsing tokens
+        result = self._optional_group_matcher.sub(lambda m: "%s" % m.group(1), result)
+
+        # handle named groups first
+        result = self._named_group_matcher.sub(lambda m: "{%s}" % m.group(1), result)
+
+        # handle non-named groups
+        result = self._non_named_group_matcher.sub("{var}", result)
+
+        # handle either-or options like [foo|bar|baz], keeping the first option
+        result = self._either_option_matcher.sub(lambda m: m.group(1), result)
+
+        # clean up any outstanding regex-y characters.
+        result = (
+            result.replace("^", "")
+            .replace("$", "")
+            .replace("?", "")
+            .replace("\\A", "")
+            .replace("\\Z", "")
+            .replace("//", "/")
+            .replace("\\", "")
+        )
+
+        return result
+
+    def _resolve(self, resolver, path, parents=None):
+        # type: (URLResolver, str, Optional[List[URLResolver]]) -> Optional[str]
+
+        match = get_regex(resolver).search(path)  # Django < 2.0
+
+        if not match:
+            return None
+
+        if parents is None:
+            parents = [resolver]
+        elif resolver not in parents:
+            parents = parents + [resolver]
+
+        new_path = path[match.end() :]
+        for pattern in resolver.url_patterns:
+            # this is an include()
+            if not pattern.callback:
+                match_ = self._resolve(pattern, new_path, parents)
+                if match_:
+                    return match_
+                continue
+            elif not get_regex(pattern).search(new_path):
+                continue
+
+            try:
+                return self._cache[pattern]
+            except KeyError:
+                pass
+
+            prefix = "".join(self._simplify(p) for p in parents)
+            result = prefix + self._simplify(pattern)
+            if not result.startswith("/"):
+                result = "/" + result
+            self._cache[pattern] = result
+            return result
+
+        return None
+
+    def resolve(
+        self,
+        path,  # type: str
+        urlconf=None,  # type: Union[None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern]]
+    ):
+        # type: (...) -> Optional[str]
+        resolver = get_resolver(urlconf)
+        match = self._resolve(resolver, path)
+        return match
+
+
+LEGACY_RESOLVER = RavenResolver()
diff --git a/src/sentry_sdk_alpha/integrations/django/views.py b/src/sentry_sdk_alpha/integrations/django/views.py
new file mode 100644
index 00000000000000..4023f8eb90de89
--- /dev/null
+++ b/src/sentry_sdk_alpha/integrations/django/views.py
@@ -0,0 +1,99 @@
+import functools
+
+import sentry_sdk_alpha
+from sentry_sdk_alpha.consts import OP
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+
+
+try:
+    from asyncio import iscoroutinefunction
+except ImportError:
+    iscoroutinefunction = None  # type: ignore
+
+
+try:
+    from sentry_sdk_alpha.integrations.django.asgi import wrap_async_view
+except (ImportError, SyntaxError):
+    wrap_async_view = None  # type: ignore
+
+
+def patch_views():
+    # type: () -> None
+
+    from django.core.handlers.base import BaseHandler
+    from django.template.response import SimpleTemplateResponse
+    from sentry_sdk_alpha.integrations.django import DjangoIntegration
+
+    old_make_view_atomic = BaseHandler.make_view_atomic
+    old_render = SimpleTemplateResponse.render
+
+    @functools.wraps(old_render)
+    def sentry_patched_render(self):
+        # type: (SimpleTemplateResponse) -> Any
+        with sentry_sdk_alpha.start_span(
+            op=OP.VIEW_RESPONSE_RENDER,
+            name="serialize response",
+            origin=DjangoIntegration.origin,
+            only_if_parent=True,
+        ):
+            return old_render(self)
+
+    @functools.wraps(old_make_view_atomic)
+    def sentry_patched_make_view_atomic(self, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        callback = old_make_view_atomic(self, *args, **kwargs)
+
+        # XXX: The wrapper function is created for every request. Find more
+        # efficient way to wrap views (or build a cache?)
+ + integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration) + if integration is not None and integration.middleware_spans: + is_async_view = ( + iscoroutinefunction is not None + and wrap_async_view is not None + and iscoroutinefunction(callback) + ) + if is_async_view: + sentry_wrapped_callback = wrap_async_view(callback) + else: + sentry_wrapped_callback = _wrap_sync_view(callback) + + else: + sentry_wrapped_callback = callback + + return sentry_wrapped_callback + + SimpleTemplateResponse.render = sentry_patched_render + BaseHandler.make_view_atomic = sentry_patched_make_view_atomic + + +def _wrap_sync_view(callback): + # type: (Any) -> Any + from sentry_sdk_alpha.integrations.django import DjangoIntegration + + @functools.wraps(callback) + def sentry_wrapped_callback(request, *args, **kwargs): + # type: (Any, *Any, **Any) -> Any + current_scope = sentry_sdk_alpha.get_current_scope() + if current_scope.root_span is not None: + current_scope.root_span.update_active_thread() + + sentry_scope = sentry_sdk_alpha.get_isolation_scope() + # set the active thread id to the handler thread for sync views + # this isn't necessary for async views since that runs on main + if sentry_scope.profile is not None: + sentry_scope.profile.update_active_thread_id() + + with sentry_sdk_alpha.start_span( + op=OP.VIEW_RENDER, + name=request.resolver_match.view_name, + origin=DjangoIntegration.origin, + only_if_parent=True, + ): + return callback(request, *args, **kwargs) + + return sentry_wrapped_callback diff --git a/src/sentry_sdk_alpha/integrations/dramatiq.py b/src/sentry_sdk_alpha/integrations/dramatiq.py new file mode 100644 index 00000000000000..6ee7f32fa537ae --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/dramatiq.py @@ -0,0 +1,168 @@ +import json + +import sentry_sdk_alpha +from sentry_sdk_alpha.integrations import Integration +from sentry_sdk_alpha.integrations._wsgi_common import request_body_within_bounds +from sentry_sdk_alpha.utils import ( + 
AnnotatedValue,
+    capture_internal_exceptions,
+    event_from_exception,
+)
+
+from dramatiq.broker import Broker  # type: ignore
+from dramatiq.message import Message  # type: ignore
+from dramatiq.middleware import Middleware, default_middleware  # type: ignore
+from dramatiq.errors import Retry  # type: ignore
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Callable, Dict, Optional, Union
+    from sentry_sdk_alpha._types import Event, Hint
+
+
+class DramatiqIntegration(Integration):
+    """
+    Dramatiq integration for Sentry
+
+    Please make sure that you call `sentry_sdk.init` *before* initializing
+    your broker, as it monkey patches `Broker.__init__`.
+
+    This integration was originally developed and maintained
+    by https://github.com/jacobsvante and later donated to the Sentry
+    project.
+    """
+
+    identifier = "dramatiq"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        _patch_dramatiq_broker()
+
+
+def _patch_dramatiq_broker():
+    # type: () -> None
+    original_broker__init__ = Broker.__init__
+
+    def sentry_patched_broker__init__(self, *args, **kw):
+        # type: (Broker, *Any, **Any) -> None
+        integration = sentry_sdk_alpha.get_client().get_integration(DramatiqIntegration)
+
+        try:
+            middleware = kw.pop("middleware")
+        except KeyError:
+            # Unfortunately Broker and StubBroker allow middleware to be
+            # passed in as positional arguments, whilst RabbitmqBroker and
+            # RedisBroker do not.
+ if len(args) == 1: + middleware = args[0] + args = [] # type: ignore + else: + middleware = None + + if middleware is None: + middleware = list(m() for m in default_middleware) + else: + middleware = list(middleware) + + if integration is not None: + middleware = [m for m in middleware if not isinstance(m, SentryMiddleware)] + middleware.insert(0, SentryMiddleware()) + + kw["middleware"] = middleware + original_broker__init__(self, *args, **kw) + + Broker.__init__ = sentry_patched_broker__init__ + + +class SentryMiddleware(Middleware): # type: ignore[misc] + """ + A Dramatiq middleware that automatically captures and sends + exceptions to Sentry. + + This is automatically added to every instantiated broker via the + DramatiqIntegration. + """ + + def before_process_message(self, broker, message): + # type: (Broker, Message) -> None + integration = sentry_sdk_alpha.get_client().get_integration(DramatiqIntegration) + if integration is None: + return + + message._scope_manager = sentry_sdk_alpha.new_scope() + message._scope_manager.__enter__() + + scope = sentry_sdk_alpha.get_current_scope() + scope.set_transaction_name(message.actor_name) + scope.set_extra("dramatiq_message_id", message.message_id) + scope.add_event_processor(_make_message_event_processor(message, integration)) + + def after_process_message(self, broker, message, *, result=None, exception=None): + # type: (Broker, Message, Any, Optional[Any], Optional[Exception]) -> None + integration = sentry_sdk_alpha.get_client().get_integration(DramatiqIntegration) + if integration is None: + return + + actor = broker.get_actor(message.actor_name) + throws = message.options.get("throws") or actor.options.get("throws") + + try: + if ( + exception is not None + and not (throws and isinstance(exception, throws)) + and not isinstance(exception, Retry) + ): + event, hint = event_from_exception( + exception, + client_options=sentry_sdk_alpha.get_client().options, + mechanism={ + "type": 
DramatiqIntegration.identifier, + "handled": False, + }, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) + finally: + message._scope_manager.__exit__(None, None, None) + + +def _make_message_event_processor(message, integration): + # type: (Message, DramatiqIntegration) -> Callable[[Event, Hint], Optional[Event]] + + def inner(event, hint): + # type: (Event, Hint) -> Optional[Event] + with capture_internal_exceptions(): + DramatiqMessageExtractor(message).extract_into_event(event) + + return event + + return inner + + +class DramatiqMessageExtractor: + def __init__(self, message): + # type: (Message) -> None + self.message_data = dict(message.asdict()) + + def content_length(self): + # type: () -> int + return len(json.dumps(self.message_data)) + + def extract_into_event(self, event): + # type: (Event) -> None + client = sentry_sdk_alpha.get_client() + if not client.is_active(): + return + + contexts = event.setdefault("contexts", {}) + request_info = contexts.setdefault("dramatiq", {}) + request_info["type"] = "dramatiq" + + data = None # type: Optional[Union[AnnotatedValue, Dict[str, Any]]] + if not request_body_within_bounds(client, self.content_length()): + data = AnnotatedValue.removed_because_over_size_limit() + else: + data = self.message_data + + request_info["data"] = data diff --git a/src/sentry_sdk_alpha/integrations/excepthook.py b/src/sentry_sdk_alpha/integrations/excepthook.py new file mode 100644 index 00000000000000..82f02a4ea0a9cb --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/excepthook.py @@ -0,0 +1,83 @@ +import sys + +import sentry_sdk_alpha +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + event_from_exception, +) +from sentry_sdk_alpha.integrations import Integration + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Callable + from typing import Any + from typing import Type + from typing import Optional + + from types import TracebackType + + Excepthook = Callable[ + 
[Type[BaseException], BaseException, Optional[TracebackType]], + Any, + ] + + +class ExcepthookIntegration(Integration): + identifier = "excepthook" + + always_run = False + + def __init__(self, always_run=False): + # type: (bool) -> None + + if not isinstance(always_run, bool): + raise ValueError( + "Invalid value for always_run: %s (must be type boolean)" + % (always_run,) + ) + self.always_run = always_run + + @staticmethod + def setup_once(): + # type: () -> None + sys.excepthook = _make_excepthook(sys.excepthook) + + +def _make_excepthook(old_excepthook): + # type: (Excepthook) -> Excepthook + def sentry_sdk_excepthook(type_, value, traceback): + # type: (Type[BaseException], BaseException, Optional[TracebackType]) -> None + integration = sentry_sdk_alpha.get_client().get_integration(ExcepthookIntegration) + + # Note: If we replace this with ensure_integration_enabled then + # we break the exceptiongroup backport; + # See: https://github.com/getsentry/sentry-python/issues/3097 + if integration is None: + return old_excepthook(type_, value, traceback) + + if _should_send(integration.always_run): + with capture_internal_exceptions(): + event, hint = event_from_exception( + (type_, value, traceback), + client_options=sentry_sdk_alpha.get_client().options, + mechanism={"type": "excepthook", "handled": False}, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) + + return old_excepthook(type_, value, traceback) + + return sentry_sdk_excepthook + + +def _should_send(always_run=False): + # type: (bool) -> bool + if always_run: + return True + + if hasattr(sys, "ps1"): + # Disable the excepthook for interactive Python shells, otherwise + # every typo gets sent to Sentry. 
+ return False + + return True diff --git a/src/sentry_sdk_alpha/integrations/executing.py b/src/sentry_sdk_alpha/integrations/executing.py new file mode 100644 index 00000000000000..3208ddd03d27b6 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/executing.py @@ -0,0 +1,67 @@ +import sentry_sdk_alpha +from sentry_sdk_alpha.integrations import Integration, DidNotEnable +from sentry_sdk_alpha.scope import add_global_event_processor +from sentry_sdk_alpha.utils import walk_exception_chain, iter_stacks + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Optional + + from sentry_sdk_alpha._types import Event, Hint + +try: + import executing +except ImportError: + raise DidNotEnable("executing is not installed") + + +class ExecutingIntegration(Integration): + identifier = "executing" + + @staticmethod + def setup_once(): + # type: () -> None + + @add_global_event_processor + def add_executing_info(event, hint): + # type: (Event, Optional[Hint]) -> Optional[Event] + if sentry_sdk_alpha.get_client().get_integration(ExecutingIntegration) is None: + return event + + if hint is None: + return event + + exc_info = hint.get("exc_info", None) + + if exc_info is None: + return event + + exception = event.get("exception", None) + + if exception is None: + return event + + values = exception.get("values", None) + + if values is None: + return event + + for exception, (_exc_type, _exc_value, exc_tb) in zip( + reversed(values), walk_exception_chain(exc_info) + ): + sentry_frames = [ + frame + for frame in exception.get("stacktrace", {}).get("frames", []) + if frame.get("function") + ] + tbs = list(iter_stacks(exc_tb)) + if len(sentry_frames) != len(tbs): + continue + + for sentry_frame, tb in zip(sentry_frames, tbs): + frame = tb.tb_frame + source = executing.Source.for_frame(frame) + sentry_frame["function"] = source.code_qualname(frame.f_code) + + return event diff --git a/src/sentry_sdk_alpha/integrations/falcon.py 
b/src/sentry_sdk_alpha/integrations/falcon.py new file mode 100644 index 00000000000000..953b30fc41886e --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/falcon.py @@ -0,0 +1,254 @@ +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import SOURCE_FOR_STYLE +from sentry_sdk_alpha.integrations import _check_minimum_version, Integration, DidNotEnable +from sentry_sdk_alpha.integrations._wsgi_common import RequestExtractor +from sentry_sdk_alpha.integrations.wsgi import SentryWsgiMiddleware +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + ensure_integration_enabled, + event_from_exception, + parse_version, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import Dict + from typing import Optional + + from sentry_sdk_alpha._types import Event, EventProcessor + + +try: + import falcon # type: ignore + + from falcon import __version__ as FALCON_VERSION +except ImportError: + raise DidNotEnable("Falcon not installed") + +import falcon.app_helpers # type: ignore + +falcon_helpers = falcon.app_helpers +falcon_app_class = falcon.App + + +_FALCON_UNSET = None # type: Optional[object] +with capture_internal_exceptions(): + from falcon.request import _UNSET as _FALCON_UNSET # type: ignore[import-not-found, no-redef] + + +class FalconRequestExtractor(RequestExtractor): + def env(self): + # type: () -> Dict[str, Any] + return self.request.env + + def cookies(self): + # type: () -> Dict[str, Any] + return self.request.cookies + + def form(self): + # type: () -> None + return None # No such concept in Falcon + + def files(self): + # type: () -> None + return None # No such concept in Falcon + + def raw_data(self): + # type: () -> Optional[str] + + # As request data can only be read once we won't make this available + # to Sentry. Just send back a dummy string in case there was a + # content length. 
+ # TODO(jmagnusson): Figure out if there's a way to support this + content_length = self.content_length() + if content_length > 0: + return "[REQUEST_CONTAINING_RAW_DATA]" + else: + return None + + def json(self): + # type: () -> Optional[Dict[str, Any]] + # fallback to cached_media = None if self.request._media is not available + cached_media = None + with capture_internal_exceptions(): + # self.request._media is the cached self.request.media + # value. It is only available if self.request.media + # has already been accessed. Therefore, reading + # self.request._media will not exhaust the raw request + # stream (self.request.bounded_stream) because it has + # already been read if self.request._media is set. + cached_media = self.request._media + + if cached_media is not _FALCON_UNSET: + return cached_media + + return None + + +class SentryFalconMiddleware: + """Captures exceptions in Falcon requests and send to Sentry""" + + def process_request(self, req, resp, *args, **kwargs): + # type: (Any, Any, *Any, **Any) -> None + integration = sentry_sdk_alpha.get_client().get_integration(FalconIntegration) + if integration is None: + return + + scope = sentry_sdk_alpha.get_isolation_scope() + scope._name = "falcon" + scope.add_event_processor(_make_request_event_processor(req, integration)) + + +TRANSACTION_STYLE_VALUES = ("uri_template", "path") + + +class FalconIntegration(Integration): + identifier = "falcon" + origin = f"auto.http.{identifier}" + + transaction_style = "" + + def __init__(self, transaction_style="uri_template"): + # type: (str) -> None + if transaction_style not in TRANSACTION_STYLE_VALUES: + raise ValueError( + "Invalid value for transaction_style: %s (must be in %s)" + % (transaction_style, TRANSACTION_STYLE_VALUES) + ) + self.transaction_style = transaction_style + + @staticmethod + def setup_once(): + # type: () -> None + + version = parse_version(FALCON_VERSION) + _check_minimum_version(FalconIntegration, version) + + _patch_wsgi_app() + 
_patch_handle_exception() + _patch_prepare_middleware() + + +def _patch_wsgi_app(): + # type: () -> None + original_wsgi_app = falcon_app_class.__call__ + + def sentry_patched_wsgi_app(self, env, start_response): + # type: (falcon.API, Any, Any) -> Any + integration = sentry_sdk_alpha.get_client().get_integration(FalconIntegration) + if integration is None: + return original_wsgi_app(self, env, start_response) + + sentry_wrapped = SentryWsgiMiddleware( + lambda envi, start_resp: original_wsgi_app(self, envi, start_resp), + span_origin=FalconIntegration.origin, + ) + + return sentry_wrapped(env, start_response) + + falcon_app_class.__call__ = sentry_patched_wsgi_app + + +def _patch_handle_exception(): + # type: () -> None + original_handle_exception = falcon_app_class._handle_exception + + @ensure_integration_enabled(FalconIntegration, original_handle_exception) + def sentry_patched_handle_exception(self, *args): + # type: (falcon.API, *Any) -> Any + # NOTE(jmagnusson): falcon 2.0 changed falcon.API._handle_exception + # method signature from `(ex, req, resp, params)` to + # `(req, resp, ex, params)` + ex = response = None + with capture_internal_exceptions(): + ex = next(argument for argument in args if isinstance(argument, Exception)) + response = next( + argument for argument in args if isinstance(argument, falcon.Response) + ) + + was_handled = original_handle_exception(self, *args) + + if ex is None or response is None: + # Both ex and response should have a non-None value at this point; otherwise, + # there is an error with the SDK that will have been captured in the + # capture_internal_exceptions block above. 
+ return was_handled + + if _exception_leads_to_http_5xx(ex, response): + event, hint = event_from_exception( + ex, + client_options=sentry_sdk_alpha.get_client().options, + mechanism={"type": "falcon", "handled": False}, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) + + return was_handled + + falcon_app_class._handle_exception = sentry_patched_handle_exception + + +def _patch_prepare_middleware(): + # type: () -> None + original_prepare_middleware = falcon_helpers.prepare_middleware + + def sentry_patched_prepare_middleware( + middleware=None, independent_middleware=False, asgi=False + ): + # type: (Any, Any, bool) -> Any + if asgi: + # We don't support ASGI Falcon apps, so we don't patch anything here + return original_prepare_middleware(middleware, independent_middleware, asgi) + + integration = sentry_sdk_alpha.get_client().get_integration(FalconIntegration) + if integration is not None: + middleware = [SentryFalconMiddleware()] + (middleware or []) + + # We intentionally omit the asgi argument here, since the default is False anyways, + # and this way, we remain backwards-compatible with pre-3.0.0 Falcon versions. 
+ return original_prepare_middleware(middleware, independent_middleware) + + falcon_helpers.prepare_middleware = sentry_patched_prepare_middleware + + +def _exception_leads_to_http_5xx(ex, response): + # type: (Exception, falcon.Response) -> bool + is_server_error = isinstance(ex, falcon.HTTPError) and (ex.status or "").startswith( + "5" + ) + is_unhandled_error = not isinstance( + ex, (falcon.HTTPError, falcon.http_status.HTTPStatus) + ) + + return (is_server_error or is_unhandled_error) and _has_http_5xx_status(response) + + +def _has_http_5xx_status(response): + # type: (falcon.Response) -> bool + return response.status.startswith("5") + + +def _set_transaction_name_and_source(event, transaction_style, request): + # type: (Event, str, falcon.Request) -> None + name_for_style = { + "uri_template": request.uri_template, + "path": request.path, + } + event["transaction"] = name_for_style[transaction_style] + event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]} + + +def _make_request_event_processor(req, integration): + # type: (falcon.Request, FalconIntegration) -> EventProcessor + + def event_processor(event, hint): + # type: (Event, dict[str, Any]) -> Event + _set_transaction_name_and_source(event, integration.transaction_style, req) + + with capture_internal_exceptions(): + FalconRequestExtractor(req).extract_into_event(event) + + return event + + return event_processor diff --git a/src/sentry_sdk_alpha/integrations/fastapi.py b/src/sentry_sdk_alpha/integrations/fastapi.py new file mode 100644 index 00000000000000..3dcfe22da725f4 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/fastapi.py @@ -0,0 +1,147 @@ +import asyncio +from copy import deepcopy +from functools import wraps + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import SOURCE_FOR_STYLE, TransactionSource +from sentry_sdk_alpha.integrations import DidNotEnable +from sentry_sdk_alpha.scope import should_send_default_pii +from sentry_sdk_alpha.utils import ( + 
transaction_from_function, + logger, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Callable, Dict + from sentry_sdk_alpha._types import Event + +try: + from sentry_sdk_alpha.integrations.starlette import ( + StarletteIntegration, + StarletteRequestExtractor, + ) +except DidNotEnable: + raise DidNotEnable("Starlette is not installed") + +try: + import fastapi # type: ignore +except ImportError: + raise DidNotEnable("FastAPI is not installed") + + +_DEFAULT_TRANSACTION_NAME = "generic FastAPI request" + + +class FastApiIntegration(StarletteIntegration): + identifier = "fastapi" + + @staticmethod + def setup_once(): + # type: () -> None + patch_get_request_handler() + + +def _set_transaction_name_and_source(scope, transaction_style, request): + # type: (sentry_sdk.Scope, str, Any) -> None + name = "" + + if transaction_style == "endpoint": + endpoint = request.scope.get("endpoint") + if endpoint: + name = transaction_from_function(endpoint) or "" + + elif transaction_style == "url": + route = request.scope.get("route") + if route: + path = getattr(route, "path", None) + if path is not None: + name = path + + if not name: + name = _DEFAULT_TRANSACTION_NAME + source = TransactionSource.ROUTE + else: + source = SOURCE_FOR_STYLE[transaction_style] + + scope.set_transaction_name(name, source=source) + logger.debug( + "[FastAPI] Set transaction name and source on scope: %s / %s", name, source + ) + + +def patch_get_request_handler(): + # type: () -> None + old_get_request_handler = fastapi.routing.get_request_handler + + def _sentry_get_request_handler(*args, **kwargs): + # type: (*Any, **Any) -> Any + dependant = kwargs.get("dependant") + if ( + dependant + and dependant.call is not None + and not asyncio.iscoroutinefunction(dependant.call) + ): + old_call = dependant.call + + @wraps(old_call) + def _sentry_call(*args, **kwargs): + # type: (*Any, **Any) -> Any + current_scope = sentry_sdk_alpha.get_current_scope() + if 
current_scope.root_span is not None: + current_scope.root_span.update_active_thread() + + sentry_scope = sentry_sdk_alpha.get_isolation_scope() + if sentry_scope.profile is not None: + sentry_scope.profile.update_active_thread_id() + + return old_call(*args, **kwargs) + + dependant.call = _sentry_call + + old_app = old_get_request_handler(*args, **kwargs) + + async def _sentry_app(*args, **kwargs): + # type: (*Any, **Any) -> Any + integration = sentry_sdk_alpha.get_client().get_integration(FastApiIntegration) + if integration is None: + return await old_app(*args, **kwargs) + + request = args[0] + + _set_transaction_name_and_source( + sentry_sdk_alpha.get_current_scope(), integration.transaction_style, request + ) + sentry_scope = sentry_sdk_alpha.get_isolation_scope() + extractor = StarletteRequestExtractor(request) + info = await extractor.extract_request_info() + + def _make_request_event_processor(req, integration): + # type: (Any, Any) -> Callable[[Event, Dict[str, Any]], Event] + def event_processor(event, hint): + # type: (Event, Dict[str, Any]) -> Event + + # Extract information from request + request_info = event.get("request", {}) + if info: + if "cookies" in info and should_send_default_pii(): + request_info["cookies"] = info["cookies"] + if "data" in info: + request_info["data"] = info["data"] + event["request"] = deepcopy(request_info) + + return event + + return event_processor + + sentry_scope._name = FastApiIntegration.identifier + sentry_scope.add_event_processor( + _make_request_event_processor(request, integration) + ) + + return await old_app(*args, **kwargs) + + return _sentry_app + + fastapi.routing.get_request_handler = _sentry_get_request_handler diff --git a/src/sentry_sdk_alpha/integrations/flask.py b/src/sentry_sdk_alpha/integrations/flask.py new file mode 100644 index 00000000000000..05112b197bce77 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/flask.py @@ -0,0 +1,275 @@ +import sentry_sdk_alpha +from sentry_sdk_alpha.consts 
import SOURCE_FOR_STYLE +from sentry_sdk_alpha.integrations import _check_minimum_version, DidNotEnable, Integration +from sentry_sdk_alpha.integrations._wsgi_common import ( + DEFAULT_HTTP_METHODS_TO_CAPTURE, + RequestExtractor, +) +from sentry_sdk_alpha.integrations.wsgi import SentryWsgiMiddleware +from sentry_sdk_alpha.scope import should_send_default_pii +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + ensure_integration_enabled, + event_from_exception, + package_version, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Callable, Dict, Union + + from sentry_sdk_alpha._types import Event, EventProcessor + from sentry_sdk_alpha.integrations.wsgi import _ScopedResponse + from werkzeug.datastructures import FileStorage, ImmutableMultiDict + + +try: + import flask_login # type: ignore +except ImportError: + flask_login = None + +try: + from flask import Flask, Request # type: ignore + from flask import request as flask_request + from flask.signals import ( + before_render_template, + got_request_exception, + request_started, + ) + from markupsafe import Markup +except ImportError: + raise DidNotEnable("Flask is not installed") + +try: + import blinker # noqa +except ImportError: + raise DidNotEnable("blinker is not installed") + +TRANSACTION_STYLE_VALUES = ("endpoint", "url") + + +class FlaskIntegration(Integration): + identifier = "flask" + origin = f"auto.http.{identifier}" + + transaction_style = "" + + def __init__( + self, + transaction_style="endpoint", # type: str + http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE, # type: tuple[str, ...] + ): + # type: (...) 
-> None + if transaction_style not in TRANSACTION_STYLE_VALUES: + raise ValueError( + "Invalid value for transaction_style: %s (must be in %s)" + % (transaction_style, TRANSACTION_STYLE_VALUES) + ) + self.transaction_style = transaction_style + self.http_methods_to_capture = tuple(map(str.upper, http_methods_to_capture)) + + @staticmethod + def setup_once(): + # type: () -> None + try: + from quart import Quart # type: ignore + + if Flask == Quart: + # This is Quart masquerading as Flask, don't enable the Flask + # integration. See https://github.com/getsentry/sentry-python/issues/2709 + raise DidNotEnable( + "This is not a Flask app but rather Quart pretending to be Flask" + ) + except ImportError: + pass + + version = package_version("flask") + _check_minimum_version(FlaskIntegration, version) + + before_render_template.connect(_add_sentry_trace) + request_started.connect(_request_started) + got_request_exception.connect(_capture_exception) + + old_app = Flask.__call__ + + def sentry_patched_wsgi_app(self, environ, start_response): + # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse + if sentry_sdk_alpha.get_client().get_integration(FlaskIntegration) is None: + return old_app(self, environ, start_response) + + integration = sentry_sdk_alpha.get_client().get_integration(FlaskIntegration) + + middleware = SentryWsgiMiddleware( + lambda *a, **kw: old_app(self, *a, **kw), + span_origin=FlaskIntegration.origin, + http_methods_to_capture=( + integration.http_methods_to_capture + if integration + else DEFAULT_HTTP_METHODS_TO_CAPTURE + ), + ) + return middleware(environ, start_response) + + Flask.__call__ = sentry_patched_wsgi_app + + +def _add_sentry_trace(sender, template, context, **extra): + # type: (Flask, Any, Dict[str, Any], **Any) -> None + if "sentry_trace" in context: + return + + scope = sentry_sdk_alpha.get_current_scope() + trace_meta = Markup(scope.trace_propagation_meta()) + context["sentry_trace"] = trace_meta # for backwards 
compatibility + context["sentry_trace_meta"] = trace_meta + + +def _set_transaction_name_and_source(scope, transaction_style, request): + # type: (sentry_sdk.Scope, str, Request) -> None + try: + name_for_style = { + "url": request.url_rule.rule, + "endpoint": request.url_rule.endpoint, + } + scope.set_transaction_name( + name_for_style[transaction_style], + source=SOURCE_FOR_STYLE[transaction_style], + ) + except Exception: + pass + + +def _request_started(app, **kwargs): + # type: (Flask, **Any) -> None + integration = sentry_sdk_alpha.get_client().get_integration(FlaskIntegration) + if integration is None: + return + + request = flask_request._get_current_object() + + # Set the transaction name and source here, + # but rely on WSGI middleware to actually start the transaction + _set_transaction_name_and_source( + sentry_sdk_alpha.get_current_scope(), integration.transaction_style, request + ) + + scope = sentry_sdk_alpha.get_isolation_scope() + evt_processor = _make_request_event_processor(app, request, integration) + scope.add_event_processor(evt_processor) + + +class FlaskRequestExtractor(RequestExtractor): + def env(self): + # type: () -> Dict[str, str] + return self.request.environ + + def cookies(self): + # type: () -> Dict[Any, Any] + return { + k: v[0] if isinstance(v, list) and len(v) == 1 else v + for k, v in self.request.cookies.items() + } + + def raw_data(self): + # type: () -> bytes + return self.request.get_data() + + def form(self): + # type: () -> ImmutableMultiDict[str, Any] + return self.request.form + + def files(self): + # type: () -> ImmutableMultiDict[str, Any] + return self.request.files + + def is_json(self): + # type: () -> bool + return self.request.is_json + + def json(self): + # type: () -> Any + return self.request.get_json(silent=True) + + def size_of_file(self, file): + # type: (FileStorage) -> int + return file.content_length + + +def _make_request_event_processor(app, request, integration): + # type: (Flask, Callable[[], 
Request], FlaskIntegration) -> EventProcessor + + def inner(event, hint): + # type: (Event, dict[str, Any]) -> Event + + # if the request is gone we are fine not logging the data from + # it. This might happen if the processor is pushed away to + # another thread. + if request is None: + return event + + with capture_internal_exceptions(): + FlaskRequestExtractor(request).extract_into_event(event) + + if should_send_default_pii(): + with capture_internal_exceptions(): + _add_user_to_event(event) + + return event + + return inner + + +@ensure_integration_enabled(FlaskIntegration) +def _capture_exception(sender, exception, **kwargs): + # type: (Flask, Union[ValueError, BaseException], **Any) -> None + event, hint = event_from_exception( + exception, + client_options=sentry_sdk_alpha.get_client().options, + mechanism={"type": "flask", "handled": False}, + ) + + sentry_sdk_alpha.capture_event(event, hint=hint) + + +def _add_user_to_event(event): + # type: (Event) -> None + if flask_login is None: + return + + user = flask_login.current_user + if user is None: + return + + with capture_internal_exceptions(): + # Access this object as late as possible as accessing the user + # is relatively costly + + user_info = event.setdefault("user", {}) + + try: + user_info.setdefault("id", user.get_id()) + # TODO: more configurable user attrs here + except AttributeError: + # might happen if: + # - flask_login could not be imported + # - flask_login is not configured + # - no user is logged in + pass + + # The following attribute accesses are ineffective for the general + # Flask-Login case, because the User interface of Flask-Login does not + # care about anything but the ID. However, Flask-User (based on + # Flask-Login) documents a few optional extra attributes. 
+ # + # https://github.com/lingthio/Flask-User/blob/a379fa0a281789618c484b459cb41236779b95b1/docs/source/data_models.rst#fixed-data-model-property-names + + try: + user_info.setdefault("email", user.email) + except Exception: + pass + + try: + user_info.setdefault("username", user.username) + except Exception: + pass diff --git a/src/sentry_sdk_alpha/integrations/gcp.py b/src/sentry_sdk_alpha/integrations/gcp.py new file mode 100644 index 00000000000000..7accee3e78d7ba --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/gcp.py @@ -0,0 +1,259 @@ +import functools +import sys +from copy import deepcopy +from datetime import datetime, timedelta, timezone +from os import environ + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP +from sentry_sdk_alpha.integrations import Integration +from sentry_sdk_alpha.integrations._wsgi_common import ( + _filter_headers, + _request_headers_to_span_attributes, +) +from sentry_sdk_alpha.scope import should_send_default_pii +from sentry_sdk_alpha.tracing import TransactionSource +from sentry_sdk_alpha.utils import ( + AnnotatedValue, + capture_internal_exceptions, + event_from_exception, + logger, + TimeoutThread, + reraise, +) + +from typing import TYPE_CHECKING + +# Constants +TIMEOUT_WARNING_BUFFER = 1.5 # Buffer time required to send timeout warning to Sentry +MILLIS_TO_SECONDS = 1000.0 + +if TYPE_CHECKING: + from typing import Any + from typing import TypeVar + from typing import Callable + from typing import Optional + + from sentry_sdk_alpha._types import EventProcessor, Event, Hint + + F = TypeVar("F", bound=Callable[..., Any]) + + +def _wrap_func(func): + # type: (F) -> F + @functools.wraps(func) + def sentry_func(functionhandler, gcp_event, *args, **kwargs): + # type: (Any, Any, *Any, **Any) -> Any + client = sentry_sdk_alpha.get_client() + + integration = client.get_integration(GcpIntegration) + if integration is None: + return func(functionhandler, gcp_event, *args, **kwargs) + + configured_time = 
environ.get("FUNCTION_TIMEOUT_SEC") + if not configured_time: + logger.debug( + "The configured timeout could not be fetched from Cloud Functions configuration." + ) + return func(functionhandler, gcp_event, *args, **kwargs) + + configured_time = int(configured_time) + + initial_time = datetime.now(timezone.utc) + + with sentry_sdk_alpha.isolation_scope() as scope: + with capture_internal_exceptions(): + scope.clear_breadcrumbs() + scope.add_event_processor( + _make_request_event_processor( + gcp_event, configured_time, initial_time + ) + ) + scope.set_tag("gcp_region", environ.get("FUNCTION_REGION")) + timeout_thread = None + if ( + integration.timeout_warning + and configured_time > TIMEOUT_WARNING_BUFFER + ): + waiting_time = configured_time - TIMEOUT_WARNING_BUFFER + + timeout_thread = TimeoutThread(waiting_time, configured_time) + + # Starting the thread to raise timeout warning exception + timeout_thread.start() + + headers = {} + if hasattr(gcp_event, "headers"): + headers = gcp_event.headers + + with sentry_sdk_alpha.continue_trace(headers): + with sentry_sdk_alpha.start_span( + op=OP.FUNCTION_GCP, + name=environ.get("FUNCTION_NAME", ""), + source=TransactionSource.COMPONENT, + origin=GcpIntegration.origin, + attributes=_prepopulate_attributes(gcp_event), + ): + try: + return func(functionhandler, gcp_event, *args, **kwargs) + except Exception: + exc_info = sys.exc_info() + sentry_event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "gcp", "handled": False}, + ) + sentry_sdk_alpha.capture_event(sentry_event, hint=hint) + reraise(*exc_info) + finally: + if timeout_thread: + timeout_thread.stop() + # Flush out the event queue + client.flush() + + return sentry_func # type: ignore + + +class GcpIntegration(Integration): + identifier = "gcp" + origin = f"auto.function.{identifier}" + + def __init__(self, timeout_warning=False): + # type: (bool) -> None + self.timeout_warning = timeout_warning + + @staticmethod + 
def setup_once(): + # type: () -> None + import __main__ as gcp_functions + + if not hasattr(gcp_functions, "worker_v1"): + logger.warning( + "GcpIntegration currently supports only Python 3.7 runtime environment." + ) + return + + worker1 = gcp_functions.worker_v1 + + worker1.FunctionHandler.invoke_user_function = _wrap_func( + worker1.FunctionHandler.invoke_user_function + ) + + +def _make_request_event_processor(gcp_event, configured_timeout, initial_time): + # type: (Any, Any, Any) -> EventProcessor + + def event_processor(event, hint): + # type: (Event, Hint) -> Optional[Event] + + final_time = datetime.now(timezone.utc) + time_diff = final_time - initial_time + + execution_duration_in_millis = time_diff / timedelta(milliseconds=1) + + extra = event.setdefault("extra", {}) + extra["google cloud functions"] = { + "function_name": environ.get("FUNCTION_NAME"), + "function_entry_point": environ.get("ENTRY_POINT"), + "function_identity": environ.get("FUNCTION_IDENTITY"), + "function_region": environ.get("FUNCTION_REGION"), + "function_project": environ.get("GCP_PROJECT"), + "execution_duration_in_millis": execution_duration_in_millis, + "configured_timeout_in_seconds": configured_timeout, + } + + extra["google cloud logs"] = { + "url": _get_google_cloud_logs_url(final_time), + } + + request = event.get("request", {}) + + request["url"] = "gcp:///{}".format(environ.get("FUNCTION_NAME")) + + if hasattr(gcp_event, "method"): + request["method"] = gcp_event.method + + if hasattr(gcp_event, "query_string"): + request["query_string"] = gcp_event.query_string.decode("utf-8") + + if hasattr(gcp_event, "headers"): + request["headers"] = _filter_headers(gcp_event.headers) + + if should_send_default_pii(): + if hasattr(gcp_event, "data"): + request["data"] = gcp_event.data + else: + if hasattr(gcp_event, "data"): + # Unfortunately couldn't find a way to get structured body from GCP + # event. Meaning every body is unstructured to us. 
+ request["data"] = AnnotatedValue.removed_because_raw_data() + + event["request"] = deepcopy(request) + + return event + + return event_processor + + +def _get_google_cloud_logs_url(final_time): + # type: (datetime) -> str + """ + Generates a Google Cloud Logs console URL based on the environment variables + Arguments: + final_time {datetime} -- Final time + Returns: + str -- Google Cloud Logs Console URL to logs. + """ + hour_ago = final_time - timedelta(hours=1) + formatstring = "%Y-%m-%dT%H:%M:%SZ" + + url = ( + "https://console.cloud.google.com/logs/viewer?project={project}&resource=cloud_function" + "%2Ffunction_name%2F{function_name}%2Fregion%2F{region}&minLogLevel=0&expandAll=false" + "&timestamp={timestamp_end}&customFacets=&limitCustomFacetWidth=true" + "&dateRangeStart={timestamp_start}&dateRangeEnd={timestamp_end}" + "&interval=PT1H&scrollTimestamp={timestamp_end}" + ).format( + project=environ.get("GCP_PROJECT"), + function_name=environ.get("FUNCTION_NAME"), + region=environ.get("FUNCTION_REGION"), + timestamp_end=final_time.strftime(formatstring), + timestamp_start=hour_ago.strftime(formatstring), + ) + + return url + + +ENV_TO_ATTRIBUTE = { + "FUNCTION_NAME": "faas.name", + "ENTRY_POINT": "gcp.function.entry_point", + "FUNCTION_IDENTITY": "gcp.function.identity", + "FUNCTION_REGION": "faas.region", + "GCP_PROJECT": "gcp.function.project", +} + +EVENT_TO_ATTRIBUTE = { + "method": "http.request.method", + "query_string": "url.query", +} + + +def _prepopulate_attributes(gcp_event): + # type: (Any) -> dict[str, Any] + attributes = { + "cloud.provider": "gcp", + } + + for key, attr in ENV_TO_ATTRIBUTE.items(): + if environ.get(key): + attributes[attr] = environ[key] + + for key, attr in EVENT_TO_ATTRIBUTE.items(): + if getattr(gcp_event, key, None): + attributes[attr] = getattr(gcp_event, key) + + if hasattr(gcp_event, "headers"): + headers = gcp_event.headers + attributes.update(_request_headers_to_span_attributes(headers)) + + return attributes diff --git 
a/src/sentry_sdk_alpha/integrations/gnu_backtrace.py b/src/sentry_sdk_alpha/integrations/gnu_backtrace.py new file mode 100644 index 00000000000000..cc6f8cb76ce275 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/gnu_backtrace.py @@ -0,0 +1,107 @@ +import re + +import sentry_sdk_alpha +from sentry_sdk_alpha.integrations import Integration +from sentry_sdk_alpha.scope import add_global_event_processor +from sentry_sdk_alpha.utils import capture_internal_exceptions + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from sentry_sdk_alpha._types import Event + + +MODULE_RE = r"[a-zA-Z0-9/._:\\-]+" +TYPE_RE = r"[a-zA-Z0-9._:<>,-]+" +HEXVAL_RE = r"[A-Fa-f0-9]+" + + +FRAME_RE = r""" +^(?P<index>\d+)\.\s +(?P<package>{MODULE_RE})\( + (?P<retval>{TYPE_RE}\ )? + ((?P<function>{TYPE_RE}) + (?P<args>\(.*\))? + )? + ((?P<constoffset>\ const)?\+0x(?P<offset>{HEXVAL_RE}))? +\)\s +\[0x(?P<retaddr>{HEXVAL_RE})\]$ +""".format( + MODULE_RE=MODULE_RE, HEXVAL_RE=HEXVAL_RE, TYPE_RE=TYPE_RE +) + +FRAME_RE = re.compile(FRAME_RE, re.MULTILINE | re.VERBOSE) + + +class GnuBacktraceIntegration(Integration): + identifier = "gnu_backtrace" + + @staticmethod + def setup_once(): + # type: () -> None + @add_global_event_processor + def process_gnu_backtrace(event, hint): + # type: (Event, dict[str, Any]) -> Event + with capture_internal_exceptions(): + return _process_gnu_backtrace(event, hint) + + +def _process_gnu_backtrace(event, hint): + # type: (Event, dict[str, Any]) -> Event + if sentry_sdk_alpha.get_client().get_integration(GnuBacktraceIntegration) is None: + return event + + exc_info = hint.get("exc_info", None) + + if exc_info is None: + return event + + exception = event.get("exception", None) + + if exception is None: + return event + + values = exception.get("values", None) + + if values is None: + return event + + for exception in values: + frames = exception.get("stacktrace", {}).get("frames", []) + if not frames: + continue + + msg = exception.get("value", None) + if not msg: + continue + + additional_frames = [] 
+ new_msg = [] + + for line in msg.splitlines(): + match = FRAME_RE.match(line) + if match: + additional_frames.append( + ( + int(match.group("index")), + { + "package": match.group("package") or None, + "function": match.group("function") or None, + "platform": "native", + }, + ) + ) + else: + # Put garbage lines back into message, not sure what to do with them. + new_msg.append(line) + + if additional_frames: + additional_frames.sort(key=lambda x: -x[0]) + for _, frame in additional_frames: + frames.append(frame) + + new_msg.append("") + exception["value"] = "\n".join(new_msg) + + return event diff --git a/src/sentry_sdk_alpha/integrations/gql.py b/src/sentry_sdk_alpha/integrations/gql.py new file mode 100644 index 00000000000000..dd00900e613174 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/gql.py @@ -0,0 +1,145 @@ +import sentry_sdk_alpha +from sentry_sdk_alpha.utils import ( + event_from_exception, + ensure_integration_enabled, + parse_version, +) + +from sentry_sdk_alpha.integrations import _check_minimum_version, DidNotEnable, Integration +from sentry_sdk_alpha.scope import should_send_default_pii + +try: + import gql # type: ignore[import-not-found] + from graphql import ( + print_ast, + get_operation_ast, + DocumentNode, + VariableDefinitionNode, + ) + from gql.transport import Transport, AsyncTransport # type: ignore[import-not-found] + from gql.transport.exceptions import TransportQueryError # type: ignore[import-not-found] +except ImportError: + raise DidNotEnable("gql is not installed") + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Dict, Tuple, Union + from sentry_sdk_alpha._types import Event, EventProcessor + + EventDataType = Dict[str, Union[str, Tuple[VariableDefinitionNode, ...]]] + + +class GQLIntegration(Integration): + identifier = "gql" + + @staticmethod + def setup_once(): + # type: () -> None + gql_version = parse_version(gql.__version__) + _check_minimum_version(GQLIntegration, gql_version) + + 
_patch_execute() + + +def _data_from_document(document): + # type: (DocumentNode) -> EventDataType + try: + operation_ast = get_operation_ast(document) + data = {"query": print_ast(document)} # type: EventDataType + + if operation_ast is not None: + data["variables"] = operation_ast.variable_definitions + if operation_ast.name is not None: + data["operationName"] = operation_ast.name.value + + return data + except (AttributeError, TypeError): + return dict() + + +def _transport_method(transport): + # type: (Union[Transport, AsyncTransport]) -> str + """ + The RequestsHTTPTransport allows defining the HTTP method; all + other transports use POST. + """ + try: + return transport.method + except AttributeError: + return "POST" + + +def _request_info_from_transport(transport): + # type: (Union[Transport, AsyncTransport, None]) -> Dict[str, str] + if transport is None: + return {} + + request_info = { + "method": _transport_method(transport), + } + + try: + request_info["url"] = transport.url + except AttributeError: + pass + + return request_info + + +def _patch_execute(): + # type: () -> None + real_execute = gql.Client.execute + + @ensure_integration_enabled(GQLIntegration, real_execute) + def sentry_patched_execute(self, document, *args, **kwargs): + # type: (gql.Client, DocumentNode, Any, Any) -> Any + scope = sentry_sdk_alpha.get_isolation_scope() + scope.add_event_processor(_make_gql_event_processor(self, document)) + + try: + return real_execute(self, document, *args, **kwargs) + except TransportQueryError as e: + event, hint = event_from_exception( + e, + client_options=sentry_sdk_alpha.get_client().options, + mechanism={"type": "gql", "handled": False}, + ) + + sentry_sdk_alpha.capture_event(event, hint) + raise e + + gql.Client.execute = sentry_patched_execute + + +def _make_gql_event_processor(client, document): + # type: (gql.Client, DocumentNode) -> EventProcessor + def processor(event, hint): + # type: (Event, dict[str, Any]) -> Event + try: + errors = 
hint["exc_info"][1].errors + except (AttributeError, KeyError): + errors = None + + request = event.setdefault("request", {}) + request.update( + { + "api_target": "graphql", + **_request_info_from_transport(client.transport), + } + ) + + if should_send_default_pii(): + request["data"] = _data_from_document(document) + contexts = event.setdefault("contexts", {}) + response = contexts.setdefault("response", {}) + response.update( + { + "data": {"errors": errors}, + "type": response, + } + ) + + return event + + return processor diff --git a/src/sentry_sdk_alpha/integrations/graphene.py b/src/sentry_sdk_alpha/integrations/graphene.py new file mode 100644 index 00000000000000..4bbb4005d718f9 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/graphene.py @@ -0,0 +1,144 @@ +from contextlib import contextmanager + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP +from sentry_sdk_alpha.integrations import _check_minimum_version, DidNotEnable, Integration +from sentry_sdk_alpha.scope import should_send_default_pii +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + ensure_integration_enabled, + event_from_exception, + package_version, +) + +try: + from graphene.types import schema as graphene_schema # type: ignore +except ImportError: + raise DidNotEnable("graphene is not installed") + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from collections.abc import Generator + from typing import Any, Dict, Union + from graphene.language.source import Source # type: ignore + from graphql.execution import ExecutionResult + from graphql.type import GraphQLSchema + from sentry_sdk_alpha._types import Event + + +class GrapheneIntegration(Integration): + identifier = "graphene" + + @staticmethod + def setup_once(): + # type: () -> None + version = package_version("graphene") + _check_minimum_version(GrapheneIntegration, version) + + _patch_graphql() + + +def _patch_graphql(): + # type: () -> None + old_graphql_sync = 
graphene_schema.graphql_sync + old_graphql_async = graphene_schema.graphql + + @ensure_integration_enabled(GrapheneIntegration, old_graphql_sync) + def _sentry_patched_graphql_sync(schema, source, *args, **kwargs): + # type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult + scope = sentry_sdk_alpha.get_isolation_scope() + scope.add_event_processor(_event_processor) + + with graphql_span(schema, source, kwargs): + result = old_graphql_sync(schema, source, *args, **kwargs) + + with capture_internal_exceptions(): + client = sentry_sdk_alpha.get_client() + for error in result.errors or []: + event, hint = event_from_exception( + error, + client_options=client.options, + mechanism={ + "type": GrapheneIntegration.identifier, + "handled": False, + }, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) + + return result + + async def _sentry_patched_graphql_async(schema, source, *args, **kwargs): + # type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult + integration = sentry_sdk_alpha.get_client().get_integration(GrapheneIntegration) + if integration is None: + return await old_graphql_async(schema, source, *args, **kwargs) + + scope = sentry_sdk_alpha.get_isolation_scope() + scope.add_event_processor(_event_processor) + + with graphql_span(schema, source, kwargs): + result = await old_graphql_async(schema, source, *args, **kwargs) + + with capture_internal_exceptions(): + client = sentry_sdk_alpha.get_client() + for error in result.errors or []: + event, hint = event_from_exception( + error, + client_options=client.options, + mechanism={ + "type": GrapheneIntegration.identifier, + "handled": False, + }, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) + + return result + + graphene_schema.graphql_sync = _sentry_patched_graphql_sync + graphene_schema.graphql = _sentry_patched_graphql_async + + +def _event_processor(event, hint): + # type: (Event, Dict[str, Any]) -> Event + if should_send_default_pii(): + request_info = 
event.setdefault("request", {}) + request_info["api_target"] = "graphql" + + elif event.get("request", {}).get("data"): + del event["request"]["data"] + + return event + + +@contextmanager +def graphql_span(schema, source, kwargs): + # type: (GraphQLSchema, Union[str, Source], Dict[str, Any]) -> Generator[None, None, None] + operation_name = kwargs.get("operation_name") + + operation_type = "query" + op = OP.GRAPHQL_QUERY + if source.strip().startswith("mutation"): + operation_type = "mutation" + op = OP.GRAPHQL_MUTATION + elif source.strip().startswith("subscription"): + operation_type = "subscription" + op = OP.GRAPHQL_SUBSCRIPTION + + sentry_sdk_alpha.add_breadcrumb( + crumb={ + "data": { + "operation_name": operation_name, + "operation_type": operation_type, + }, + "category": "graphql.operation", + }, + ) + + with sentry_sdk_alpha.start_span( + op=op, name=operation_name, only_if_parent=True + ) as graphql_span: + graphql_span.set_attribute("graphql.document", source) + graphql_span.set_attribute("graphql.operation.name", operation_name) + graphql_span.set_attribute("graphql.operation.type", operation_type) + yield diff --git a/src/sentry_sdk_alpha/integrations/grpc/__init__.py b/src/sentry_sdk_alpha/integrations/grpc/__init__.py new file mode 100644 index 00000000000000..583bf71f80219b --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/grpc/__init__.py @@ -0,0 +1,151 @@ +from functools import wraps + +import grpc +from grpc import Channel, Server, intercept_channel +from grpc.aio import Channel as AsyncChannel +from grpc.aio import Server as AsyncServer + +from sentry_sdk_alpha.integrations import Integration + +from .client import ClientInterceptor +from .server import ServerInterceptor +from .aio.server import ServerInterceptor as AsyncServerInterceptor +from .aio.client import ( + SentryUnaryUnaryClientInterceptor as AsyncUnaryUnaryClientInterceptor, +) +from .aio.client import ( + SentryUnaryStreamClientInterceptor as AsyncUnaryStreamClientIntercetor, 
+) + +from typing import TYPE_CHECKING, Any, Optional, Sequence + +# Hack to get new Python features working in older versions +# without introducing a hard dependency on `typing_extensions` +# from: https://stackoverflow.com/a/71944042/300572 +if TYPE_CHECKING: + from typing import ParamSpec, Callable +else: + # Fake ParamSpec + class ParamSpec: + def __init__(self, _): + self.args = None + self.kwargs = None + + # Callable[anything] will return None + class _Callable: + def __getitem__(self, _): + return None + + # Make instances + Callable = _Callable() + +P = ParamSpec("P") + + +def _wrap_channel_sync(func: Callable[P, Channel]) -> Callable[P, Channel]: + "Wrapper for synchronous secure and insecure channel." + + @wraps(func) + def patched_channel(*args: Any, **kwargs: Any) -> Channel: + channel = func(*args, **kwargs) + if not ClientInterceptor._is_intercepted: + ClientInterceptor._is_intercepted = True + return intercept_channel(channel, ClientInterceptor()) + else: + return channel + + return patched_channel + + +def _wrap_intercept_channel(func: Callable[P, Channel]) -> Callable[P, Channel]: + @wraps(func) + def patched_intercept_channel( + channel: Channel, *interceptors: grpc.ServerInterceptor + ) -> Channel: + if ClientInterceptor._is_intercepted: + interceptors = tuple( + [ + interceptor + for interceptor in interceptors + if not isinstance(interceptor, ClientInterceptor) + ] + ) + else: + interceptors = interceptors + return intercept_channel(channel, *interceptors) + + return patched_intercept_channel # type: ignore + + +def _wrap_channel_async(func: Callable[P, AsyncChannel]) -> Callable[P, AsyncChannel]: + "Wrapper for asynchronous secure and insecure channel." 
+ + @wraps(func) + def patched_channel( # type: ignore + *args: P.args, + interceptors: Optional[Sequence[grpc.aio.ClientInterceptor]] = None, + **kwargs: P.kwargs, + ) -> Channel: + sentry_interceptors = [ + AsyncUnaryUnaryClientInterceptor(), + AsyncUnaryStreamClientIntercetor(), + ] + interceptors = [*sentry_interceptors, *(interceptors or [])] + return func(*args, interceptors=interceptors, **kwargs) # type: ignore + + return patched_channel # type: ignore + + +def _wrap_sync_server(func: Callable[P, Server]) -> Callable[P, Server]: + """Wrapper for synchronous server.""" + + @wraps(func) + def patched_server( # type: ignore + *args: P.args, + interceptors: Optional[Sequence[grpc.ServerInterceptor]] = None, + **kwargs: P.kwargs, + ) -> Server: + interceptors = [ + interceptor + for interceptor in interceptors or [] + if not isinstance(interceptor, ServerInterceptor) + ] + server_interceptor = ServerInterceptor() + interceptors = [server_interceptor, *(interceptors or [])] + return func(*args, interceptors=interceptors, **kwargs) # type: ignore + + return patched_server # type: ignore + + +def _wrap_async_server(func: Callable[P, AsyncServer]) -> Callable[P, AsyncServer]: + """Wrapper for asynchronous server.""" + + @wraps(func) + def patched_aio_server( # type: ignore + *args: P.args, + interceptors: Optional[Sequence[grpc.ServerInterceptor]] = None, + **kwargs: P.kwargs, + ) -> Server: + server_interceptor = AsyncServerInterceptor() + interceptors = (server_interceptor, *(interceptors or [])) + return func(*args, interceptors=interceptors, **kwargs) # type: ignore + + return patched_aio_server # type: ignore + + +class GRPCIntegration(Integration): + identifier = "grpc" + + @staticmethod + def setup_once() -> None: + import grpc + + grpc.insecure_channel = _wrap_channel_sync(grpc.insecure_channel) + grpc.secure_channel = _wrap_channel_sync(grpc.secure_channel) + grpc.intercept_channel = _wrap_intercept_channel(grpc.intercept_channel) + + 
grpc.aio.insecure_channel = _wrap_channel_async(grpc.aio.insecure_channel) + grpc.aio.secure_channel = _wrap_channel_async(grpc.aio.secure_channel) + + grpc.server = _wrap_sync_server(grpc.server) + grpc.aio.server = _wrap_async_server(grpc.aio.server) diff --git a/src/sentry_sdk_alpha/integrations/grpc/aio/__init__.py b/src/sentry_sdk_alpha/integrations/grpc/aio/__init__.py new file mode 100644 index 00000000000000..5b9e3b99495673 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/grpc/aio/__init__.py @@ -0,0 +1,7 @@ +from .server import ServerInterceptor +from .client import ClientInterceptor + +__all__ = [ + "ClientInterceptor", + "ServerInterceptor", +] diff --git a/src/sentry_sdk_alpha/integrations/grpc/aio/client.py b/src/sentry_sdk_alpha/integrations/grpc/aio/client.py new file mode 100644 index 00000000000000..47c2084a3e5316 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/grpc/aio/client.py @@ -0,0 +1,100 @@ +from typing import Callable, Union, AsyncIterable, Any + +from grpc.aio import ( + UnaryUnaryClientInterceptor, + UnaryStreamClientInterceptor, + ClientCallDetails, + UnaryUnaryCall, + UnaryStreamCall, + Metadata, +) +from google.protobuf.message import Message + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP +from sentry_sdk_alpha.integrations.grpc.consts import SPAN_ORIGIN + + +class ClientInterceptor: + @staticmethod + def _update_client_call_details_metadata_from_scope( + client_call_details: ClientCallDetails, + ) -> ClientCallDetails: + if client_call_details.metadata is None: + client_call_details = client_call_details._replace(metadata=Metadata()) + elif not isinstance(client_call_details.metadata, Metadata): + # This is a workaround for a GRPC bug, which was fixed in grpcio v1.60.0 + # See https://github.com/grpc/grpc/issues/34298. 
+ client_call_details = client_call_details._replace( + metadata=Metadata.from_tuple(client_call_details.metadata) + ) + for ( + key, + value, + ) in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers(): + client_call_details.metadata.add(key, value) + return client_call_details + + +class SentryUnaryUnaryClientInterceptor(ClientInterceptor, UnaryUnaryClientInterceptor): # type: ignore + async def intercept_unary_unary( + self, + continuation: Callable[[ClientCallDetails, Message], UnaryUnaryCall], + client_call_details: ClientCallDetails, + request: Message, + ) -> Union[UnaryUnaryCall, Message]: + method = client_call_details.method + if isinstance(method, bytes): + method = method.decode() + + with sentry_sdk_alpha.start_span( + op=OP.GRPC_CLIENT, + name="unary unary call to %s" % method, + origin=SPAN_ORIGIN, + only_if_parent=True, + ) as span: + span.set_attribute("type", "unary unary") + span.set_attribute("method", method) + + client_call_details = self._update_client_call_details_metadata_from_scope( + client_call_details + ) + + response = await continuation(client_call_details, request) + status_code = await response.code() + span.set_attribute("code", status_code.name) + + return response + + +class SentryUnaryStreamClientInterceptor( + ClientInterceptor, UnaryStreamClientInterceptor # type: ignore +): + async def intercept_unary_stream( + self, + continuation: Callable[[ClientCallDetails, Message], UnaryStreamCall], + client_call_details: ClientCallDetails, + request: Message, + ) -> Union[AsyncIterable[Any], UnaryStreamCall]: + method = client_call_details.method + if isinstance(method, bytes): + method = method.decode() + + with sentry_sdk_alpha.start_span( + op=OP.GRPC_CLIENT, + name="unary stream call to %s" % method, + origin=SPAN_ORIGIN, + only_if_parent=True, + ) as span: + span.set_attribute("type", "unary stream") + span.set_attribute("method", method) + + client_call_details = 
self._update_client_call_details_metadata_from_scope( + client_call_details + ) + + response = await continuation(client_call_details, request) + # status_code = await response.code() + # span.set_attribute("code", status_code) + + return response diff --git a/src/sentry_sdk_alpha/integrations/grpc/aio/server.py b/src/sentry_sdk_alpha/integrations/grpc/aio/server.py new file mode 100644 index 00000000000000..4dc06777ba3a12 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/grpc/aio/server.py @@ -0,0 +1,98 @@ +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP +from sentry_sdk_alpha.integrations import DidNotEnable +from sentry_sdk_alpha.integrations.grpc.consts import SPAN_ORIGIN +from sentry_sdk_alpha.tracing import TransactionSource +from sentry_sdk_alpha.utils import event_from_exception + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from collections.abc import Awaitable, Callable + from typing import Any, Optional + + +try: + import grpc + from grpc import HandlerCallDetails, RpcMethodHandler + from grpc.aio import AbortError, ServicerContext +except ImportError: + raise DidNotEnable("grpcio is not installed") + + +class ServerInterceptor(grpc.aio.ServerInterceptor): # type: ignore + def __init__(self, find_name=None): + # type: (ServerInterceptor, Callable[[ServicerContext], str] | None) -> None + self._find_method_name = find_name or self._find_name + + super().__init__() + + async def intercept_service(self, continuation, handler_call_details): + # type: (ServerInterceptor, Callable[[HandlerCallDetails], Awaitable[RpcMethodHandler]], HandlerCallDetails) -> Optional[Awaitable[RpcMethodHandler]] + self._handler_call_details = handler_call_details + handler = await continuation(handler_call_details) + if handler is None: + return None + + if not handler.request_streaming and not handler.response_streaming: + handler_factory = grpc.unary_unary_rpc_method_handler + + async def wrapped(request, context): + # type: (Any, ServicerContext) 
-> Any + name = self._find_method_name(context) + if not name: + return await handler(request, context) + + # What if the headers are empty? + with sentry_sdk_alpha.continue_trace(dict(context.invocation_metadata())): + with sentry_sdk_alpha.start_span( + op=OP.GRPC_SERVER, + name=name, + source=TransactionSource.CUSTOM, + origin=SPAN_ORIGIN, + ): + try: + return await handler.unary_unary(request, context) + except AbortError: + raise + except Exception as exc: + event, hint = event_from_exception( + exc, + mechanism={"type": "grpc", "handled": False}, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) + raise + + elif not handler.request_streaming and handler.response_streaming: + handler_factory = grpc.unary_stream_rpc_method_handler + + async def wrapped(request, context): # type: ignore + # type: (Any, ServicerContext) -> Any + async for r in handler.unary_stream(request, context): + yield r + + elif handler.request_streaming and not handler.response_streaming: + handler_factory = grpc.stream_unary_rpc_method_handler + + async def wrapped(request, context): + # type: (Any, ServicerContext) -> Any + response = handler.stream_unary(request, context) + return await response + + elif handler.request_streaming and handler.response_streaming: + handler_factory = grpc.stream_stream_rpc_method_handler + + async def wrapped(request, context): # type: ignore + # type: (Any, ServicerContext) -> Any + async for r in handler.stream_stream(request, context): + yield r + + return handler_factory( + wrapped, + request_deserializer=handler.request_deserializer, + response_serializer=handler.response_serializer, + ) + + def _find_name(self, context): + # type: (ServicerContext) -> str + return self._handler_call_details.method diff --git a/src/sentry_sdk_alpha/integrations/grpc/client.py b/src/sentry_sdk_alpha/integrations/grpc/client.py new file mode 100644 index 00000000000000..1198613d719e84 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/grpc/client.py @@ -0,0 
+1,94 @@ +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP +from sentry_sdk_alpha.integrations import DidNotEnable +from sentry_sdk_alpha.integrations.grpc.consts import SPAN_ORIGIN + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Callable, Iterator, Iterable, Union + +try: + import grpc + from grpc import ClientCallDetails, Call + from grpc._interceptor import _UnaryOutcome + from grpc.aio._interceptor import UnaryStreamCall + from google.protobuf.message import Message +except ImportError: + raise DidNotEnable("grpcio is not installed") + + +class ClientInterceptor( + grpc.UnaryUnaryClientInterceptor, grpc.UnaryStreamClientInterceptor # type: ignore +): + _is_intercepted = False + + def intercept_unary_unary(self, continuation, client_call_details, request): + # type: (ClientInterceptor, Callable[[ClientCallDetails, Message], _UnaryOutcome], ClientCallDetails, Message) -> _UnaryOutcome + method = client_call_details.method + + with sentry_sdk_alpha.start_span( + op=OP.GRPC_CLIENT, + name="unary unary call to %s" % method, + origin=SPAN_ORIGIN, + only_if_parent=True, + ) as span: + span.set_attribute("type", "unary unary") + span.set_attribute("method", method) + + client_call_details = self._update_client_call_details_metadata_from_scope( + client_call_details + ) + + response = continuation(client_call_details, request) + span.set_attribute("code", response.code().name) + + return response + + def intercept_unary_stream(self, continuation, client_call_details, request): + # type: (ClientInterceptor, Callable[[ClientCallDetails, Message], Union[Iterable[Any], UnaryStreamCall]], ClientCallDetails, Message) -> Union[Iterator[Message], Call] + method = client_call_details.method + + with sentry_sdk_alpha.start_span( + op=OP.GRPC_CLIENT, + name="unary stream call to %s" % method, + origin=SPAN_ORIGIN, + only_if_parent=True, + ) as span: + span.set_attribute("type", "unary stream") + span.set_attribute("method", method) 
+ + client_call_details = self._update_client_call_details_metadata_from_scope( + client_call_details + ) + + response = continuation( + client_call_details, request + ) # type: UnaryStreamCall + # Setting code on unary-stream leads to execution getting stuck + # span.set_attribute("code", response.code().name) + + return response + + @staticmethod + def _update_client_call_details_metadata_from_scope(client_call_details): + # type: (ClientCallDetails) -> ClientCallDetails + metadata = ( + list(client_call_details.metadata) if client_call_details.metadata else [] + ) + for ( + key, + value, + ) in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers(): + metadata.append((key, value)) + + client_call_details = grpc._interceptor._ClientCallDetails( + method=client_call_details.method, + timeout=client_call_details.timeout, + metadata=metadata, + credentials=client_call_details.credentials, + wait_for_ready=client_call_details.wait_for_ready, + compression=client_call_details.compression, + ) + + return client_call_details diff --git a/src/sentry_sdk_alpha/integrations/grpc/consts.py b/src/sentry_sdk_alpha/integrations/grpc/consts.py new file mode 100644 index 00000000000000..9fdb975caf4cb4 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/grpc/consts.py @@ -0,0 +1 @@ +SPAN_ORIGIN = "auto.grpc.grpc" diff --git a/src/sentry_sdk_alpha/integrations/grpc/server.py b/src/sentry_sdk_alpha/integrations/grpc/server.py new file mode 100644 index 00000000000000..4467ea3075b180 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/grpc/server.py @@ -0,0 +1,64 @@ +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP +from sentry_sdk_alpha.integrations import DidNotEnable +from sentry_sdk_alpha.integrations.grpc.consts import SPAN_ORIGIN +from sentry_sdk_alpha.tracing import TransactionSource + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Callable, Optional + from google.protobuf.message import Message + +try: + import 
grpc + from grpc import ServicerContext, HandlerCallDetails, RpcMethodHandler +except ImportError: + raise DidNotEnable("grpcio is not installed") + + +class ServerInterceptor(grpc.ServerInterceptor): # type: ignore + def __init__(self, find_name=None): + # type: (ServerInterceptor, Optional[Callable[[ServicerContext], str]]) -> None + self._find_method_name = find_name or ServerInterceptor._find_name + + super().__init__() + + def intercept_service(self, continuation, handler_call_details): + # type: (ServerInterceptor, Callable[[HandlerCallDetails], RpcMethodHandler], HandlerCallDetails) -> RpcMethodHandler + handler = continuation(handler_call_details) + if not handler or not handler.unary_unary: + return handler + + def behavior(request, context): + # type: (Message, ServicerContext) -> Message + with sentry_sdk_alpha.isolation_scope(): + name = self._find_method_name(context) + + if name: + metadata = dict(context.invocation_metadata()) + + with sentry_sdk_alpha.continue_trace(metadata): + with sentry_sdk_alpha.start_span( + op=OP.GRPC_SERVER, + name=name, + source=TransactionSource.CUSTOM, + origin=SPAN_ORIGIN, + ): + try: + return handler.unary_unary(request, context) + except BaseException as e: + raise e + else: + return handler.unary_unary(request, context) + + return grpc.unary_unary_rpc_method_handler( + behavior, + request_deserializer=handler.request_deserializer, + response_serializer=handler.response_serializer, + ) + + @staticmethod + def _find_name(context): + # type: (ServicerContext) -> str + return context._rpc_event.call_details.method.decode() diff --git a/src/sentry_sdk_alpha/integrations/httpx.py b/src/sentry_sdk_alpha/integrations/httpx.py new file mode 100644 index 00000000000000..68466ba00908eb --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/httpx.py @@ -0,0 +1,202 @@ +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP, SPANDATA, BAGGAGE_HEADER_NAME +from sentry_sdk_alpha.integrations import Integration, DidNotEnable 
+from sentry_sdk_alpha.tracing_utils import Baggage, should_propagate_trace +from sentry_sdk_alpha.utils import ( + SENSITIVE_DATA_SUBSTITUTE, + capture_internal_exceptions, + ensure_integration_enabled, + http_client_status_to_breadcrumb_level, + logger, + parse_url, + set_thread_info_from_span, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from collections.abc import MutableMapping + from typing import Any + + +try: + from httpx import AsyncClient, Client, Request, Response # type: ignore +except ImportError: + raise DidNotEnable("httpx is not installed") + +__all__ = ["HttpxIntegration"] + + +class HttpxIntegration(Integration): + identifier = "httpx" + origin = f"auto.http.{identifier}" + + @staticmethod + def setup_once(): + # type: () -> None + """ + httpx has its own transport layer and can be customized when needed, + so patch Client.send and AsyncClient.send to support both synchronous and async interfaces. + """ + _install_httpx_client() + _install_httpx_async_client() + + +def _install_httpx_client(): + # type: () -> None + real_send = Client.send + + @ensure_integration_enabled(HttpxIntegration, real_send) + def send(self, request, **kwargs): + # type: (Client, Request, **Any) -> Response + parsed_url = None + with capture_internal_exceptions(): + parsed_url = parse_url(str(request.url), sanitize=False) + + with sentry_sdk_alpha.start_span( + op=OP.HTTP_CLIENT, + name="%s %s" + % ( + request.method, + parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE, + ), + origin=HttpxIntegration.origin, + only_if_parent=True, + ) as span: + data = { + SPANDATA.HTTP_METHOD: request.method, + } + set_thread_info_from_span(data, span) + + if parsed_url is not None: + data["url"] = parsed_url.url + data[SPANDATA.HTTP_QUERY] = parsed_url.query + data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment + + for key, value in data.items(): + span.set_attribute(key, value) + + if should_propagate_trace(sentry_sdk_alpha.get_client(), str(request.url)): + 
for ( + key, + value, + ) in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers(): + logger.debug( + "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format( + key=key, value=value, url=request.url + ) + ) + + if key == BAGGAGE_HEADER_NAME: + _add_sentry_baggage_to_headers(request.headers, value) + else: + request.headers[key] = value + + rv = real_send(self, request, **kwargs) + + span.set_http_status(rv.status_code) + span.set_attribute("reason", rv.reason_phrase) + + data[SPANDATA.HTTP_STATUS_CODE] = rv.status_code + data["reason"] = rv.reason_phrase + + sentry_sdk_alpha.add_breadcrumb( + type="http", + category="httplib", + data=data, + level=http_client_status_to_breadcrumb_level(rv.status_code), + ) + + return rv + + Client.send = send + + +def _install_httpx_async_client(): + # type: () -> None + real_send = AsyncClient.send + + async def send(self, request, **kwargs): + # type: (AsyncClient, Request, **Any) -> Response + if sentry_sdk_alpha.get_client().get_integration(HttpxIntegration) is None: + return await real_send(self, request, **kwargs) + + parsed_url = None + with capture_internal_exceptions(): + parsed_url = parse_url(str(request.url), sanitize=False) + + with sentry_sdk_alpha.start_span( + op=OP.HTTP_CLIENT, + name="%s %s" + % ( + request.method, + parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE, + ), + origin=HttpxIntegration.origin, + only_if_parent=True, + ) as span: + data = { + SPANDATA.HTTP_METHOD: request.method, + } + if parsed_url is not None: + data["url"] = parsed_url.url + data[SPANDATA.HTTP_QUERY] = parsed_url.query + data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment + + for key, value in data.items(): + span.set_attribute(key, value) + + if should_propagate_trace(sentry_sdk_alpha.get_client(), str(request.url)): + for ( + key, + value, + ) in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers(): + logger.debug( + "[Tracing] Adding `{key}` header {value} to 
outgoing request to {url}.".format( + key=key, value=value, url=request.url + ) + ) + if key == BAGGAGE_HEADER_NAME and request.headers.get( + BAGGAGE_HEADER_NAME + ): + # do not overwrite any existing baggage, just append to it + request.headers[key] += "," + value + else: + request.headers[key] = value + + rv = await real_send(self, request, **kwargs) + + span.set_http_status(rv.status_code) + span.set_attribute("reason", rv.reason_phrase) + + data[SPANDATA.HTTP_STATUS_CODE] = rv.status_code + data["reason"] = rv.reason_phrase + + sentry_sdk_alpha.add_breadcrumb( + type="http", + category="httplib", + data=data, + level=http_client_status_to_breadcrumb_level(rv.status_code), + ) + + return rv + + AsyncClient.send = send + + +def _add_sentry_baggage_to_headers(headers, sentry_baggage): + # type: (MutableMapping[str, str], str) -> None + """Add the Sentry baggage to the headers. + + This function directly mutates the provided headers. The provided sentry_baggage + is appended to the existing baggage. If the baggage already contains Sentry items, + they are stripped out first. 
+ """ + existing_baggage = headers.get(BAGGAGE_HEADER_NAME, "") + stripped_existing_baggage = Baggage.strip_sentry_baggage(existing_baggage) + + separator = "," if len(stripped_existing_baggage) > 0 else "" + + headers[BAGGAGE_HEADER_NAME] = ( + stripped_existing_baggage + separator + sentry_baggage + ) diff --git a/src/sentry_sdk_alpha/integrations/huey.py b/src/sentry_sdk_alpha/integrations/huey.py new file mode 100644 index 00000000000000..4a18aace248a03 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/huey.py @@ -0,0 +1,178 @@ +import sys +from datetime import datetime + +import sentry_sdk_alpha +from sentry_sdk_alpha.api import get_baggage, get_traceparent +from sentry_sdk_alpha.consts import ( + OP, + SPANSTATUS, + BAGGAGE_HEADER_NAME, + SENTRY_TRACE_HEADER_NAME, + TransactionSource, +) +from sentry_sdk_alpha.integrations import DidNotEnable, Integration +from sentry_sdk_alpha.scope import should_send_default_pii +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + ensure_integration_enabled, + event_from_exception, + SENSITIVE_DATA_SUBSTITUTE, + reraise, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Callable, Optional, Union, TypeVar + + from sentry_sdk_alpha._types import EventProcessor, Event, Hint + from sentry_sdk_alpha.utils import ExcInfo + + F = TypeVar("F", bound=Callable[..., Any]) + +try: + from huey.api import Huey, Result, ResultGroup, Task, PeriodicTask + from huey.exceptions import CancelExecution, RetryTask, TaskLockedException +except ImportError: + raise DidNotEnable("Huey is not installed") + + +HUEY_CONTROL_FLOW_EXCEPTIONS = (CancelExecution, RetryTask, TaskLockedException) + + +class HueyIntegration(Integration): + identifier = "huey" + origin = f"auto.queue.{identifier}" + + @staticmethod + def setup_once(): + # type: () -> None + patch_enqueue() + patch_execute() + + +def patch_enqueue(): + # type: () -> None + old_enqueue = Huey.enqueue + + 
@ensure_integration_enabled(HueyIntegration, old_enqueue) + def _sentry_enqueue(self, task): + # type: (Huey, Task) -> Optional[Union[Result, ResultGroup]] + with sentry_sdk_alpha.start_span( + op=OP.QUEUE_SUBMIT_HUEY, + name=task.name, + origin=HueyIntegration.origin, + only_if_parent=True, + ): + if not isinstance(task, PeriodicTask): + # Attach trace propagation data to task kwargs. We do + # not do this for periodic tasks, as these don't + # really have an originating transaction. + task.kwargs["sentry_headers"] = { + BAGGAGE_HEADER_NAME: get_baggage(), + SENTRY_TRACE_HEADER_NAME: get_traceparent(), + } + return old_enqueue(self, task) + + Huey.enqueue = _sentry_enqueue + + +def _make_event_processor(task): + # type: (Any) -> EventProcessor + def event_processor(event, hint): + # type: (Event, Hint) -> Optional[Event] + + with capture_internal_exceptions(): + tags = event.setdefault("tags", {}) + tags["huey_task_id"] = task.id + tags["huey_task_retry"] = task.default_retries > task.retries + extra = event.setdefault("extra", {}) + extra["huey-job"] = { + "task": task.name, + "args": ( + task.args + if should_send_default_pii() + else SENSITIVE_DATA_SUBSTITUTE + ), + "kwargs": ( + task.kwargs + if should_send_default_pii() + else SENSITIVE_DATA_SUBSTITUTE + ), + "retry": (task.default_retries or 0) - task.retries, + } + + return event + + return event_processor + + +def _capture_exception(exc_info): + # type: (ExcInfo) -> None + scope = sentry_sdk_alpha.get_current_scope() + + if scope.root_span is not None: + if exc_info[0] in HUEY_CONTROL_FLOW_EXCEPTIONS: + scope.root_span.set_status(SPANSTATUS.ABORTED) + return + + scope.root_span.set_status(SPANSTATUS.INTERNAL_ERROR) + + event, hint = event_from_exception( + exc_info, + client_options=sentry_sdk_alpha.get_client().options, + mechanism={"type": HueyIntegration.identifier, "handled": False}, + ) + scope.capture_event(event, hint=hint) + + +def _wrap_task_execute(func): + # type: (F) -> F + + 
@ensure_integration_enabled(HueyIntegration, func) + def _sentry_execute(*args, **kwargs): + # type: (*Any, **Any) -> Any + try: + result = func(*args, **kwargs) + except Exception: + exc_info = sys.exc_info() + _capture_exception(exc_info) + reraise(*exc_info) + + root_span = sentry_sdk_alpha.get_current_scope().root_span + if root_span is not None: + root_span.set_status(SPANSTATUS.OK) + + return result + + return _sentry_execute # type: ignore + + +def patch_execute(): + # type: () -> None + old_execute = Huey._execute + + @ensure_integration_enabled(HueyIntegration, old_execute) + def _sentry_execute(self, task, timestamp=None): + # type: (Huey, Task, Optional[datetime]) -> Any + with sentry_sdk_alpha.isolation_scope() as scope: + with capture_internal_exceptions(): + scope._name = "huey" + scope.clear_breadcrumbs() + scope.add_event_processor(_make_event_processor(task)) + + if not getattr(task, "_sentry_is_patched", False): + task.execute = _wrap_task_execute(task.execute) + task._sentry_is_patched = True + + sentry_headers = task.kwargs.pop("sentry_headers", {}) + with sentry_sdk_alpha.continue_trace(sentry_headers): + with sentry_sdk_alpha.start_span( + name=task.name, + op=OP.QUEUE_TASK_HUEY, + source=TransactionSource.TASK, + origin=HueyIntegration.origin, + ): + return old_execute(self, task, timestamp) + + Huey._execute = _sentry_execute diff --git a/src/sentry_sdk_alpha/integrations/huggingface_hub.py b/src/sentry_sdk_alpha/integrations/huggingface_hub.py new file mode 100644 index 00000000000000..2ac1dedb5e50d0 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/huggingface_hub.py @@ -0,0 +1,176 @@ +from functools import wraps + +from sentry_sdk_alpha import consts +from sentry_sdk_alpha.ai.monitoring import record_token_usage +from sentry_sdk_alpha.ai.utils import set_data_normalized +from sentry_sdk_alpha.consts import SPANDATA + +from typing import Any, Iterable, Callable + +import sentry_sdk_alpha +from sentry_sdk_alpha.scope import 
should_send_default_pii +from sentry_sdk_alpha.integrations import DidNotEnable, Integration +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + event_from_exception, +) + +try: + import huggingface_hub.inference._client + + from huggingface_hub import ChatCompletionStreamOutput, TextGenerationOutput +except ImportError: + raise DidNotEnable("Huggingface not installed") + + +class HuggingfaceHubIntegration(Integration): + identifier = "huggingface_hub" + origin = f"auto.ai.{identifier}" + + def __init__(self, include_prompts=True): + # type: (HuggingfaceHubIntegration, bool) -> None + self.include_prompts = include_prompts + + @staticmethod + def setup_once(): + # type: () -> None + huggingface_hub.inference._client.InferenceClient.text_generation = ( + _wrap_text_generation( + huggingface_hub.inference._client.InferenceClient.text_generation + ) + ) + + +def _capture_exception(exc): + # type: (Any) -> None + event, hint = event_from_exception( + exc, + client_options=sentry_sdk_alpha.get_client().options, + mechanism={"type": "huggingface_hub", "handled": False}, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) + + +def _wrap_text_generation(f): + # type: (Callable[..., Any]) -> Callable[..., Any] + @wraps(f) + def new_text_generation(*args, **kwargs): + # type: (*Any, **Any) -> Any + integration = sentry_sdk_alpha.get_client().get_integration(HuggingfaceHubIntegration) + if integration is None: + return f(*args, **kwargs) + + if "prompt" in kwargs: + prompt = kwargs["prompt"] + elif len(args) >= 2: + kwargs["prompt"] = args[1] + prompt = kwargs["prompt"] + args = (args[0],) + args[2:] + else: + # invalid call, let it return error + return f(*args, **kwargs) + + model = kwargs.get("model") + streaming = kwargs.get("stream") + + span = sentry_sdk_alpha.start_span( + op=consts.OP.HUGGINGFACE_HUB_CHAT_COMPLETIONS_CREATE, + name="Text Generation", + origin=HuggingfaceHubIntegration.origin, + only_if_parent=True, + ) + span.__enter__() + try: + 
res = f(*args, **kwargs) + except Exception as e: + _capture_exception(e) + span.__exit__(None, None, None) + raise e from None + + with capture_internal_exceptions(): + if should_send_default_pii() and integration.include_prompts: + set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, prompt) + + set_data_normalized(span, SPANDATA.AI_MODEL_ID, model) + set_data_normalized(span, SPANDATA.AI_STREAMING, streaming) + + if isinstance(res, str): + if should_send_default_pii() and integration.include_prompts: + set_data_normalized( + span, + SPANDATA.AI_RESPONSES, + [res], + ) + span.__exit__(None, None, None) + return res + + if isinstance(res, TextGenerationOutput): + if should_send_default_pii() and integration.include_prompts: + set_data_normalized( + span, + SPANDATA.AI_RESPONSES, + [res.generated_text], + ) + if res.details is not None and res.details.generated_tokens > 0: + record_token_usage(span, total_tokens=res.details.generated_tokens) + span.__exit__(None, None, None) + return res + + if not isinstance(res, Iterable): + # we only know how to deal with strings and iterables, ignore + set_data_normalized(span, "unknown_response", True) + span.__exit__(None, None, None) + return res + + if kwargs.get("details", False): + # res is Iterable[TextGenerationStreamOutput] + def new_details_iterator(): + # type: () -> Iterable[ChatCompletionStreamOutput] + with capture_internal_exceptions(): + tokens_used = 0 + data_buf: list[str] = [] + for x in res: + if hasattr(x, "token") and hasattr(x.token, "text"): + data_buf.append(x.token.text) + if hasattr(x, "details") and hasattr( + x.details, "generated_tokens" + ): + tokens_used = x.details.generated_tokens + yield x + if ( + len(data_buf) > 0 + and should_send_default_pii() + and integration.include_prompts + ): + set_data_normalized( + span, SPANDATA.AI_RESPONSES, "".join(data_buf) + ) + if tokens_used > 0: + record_token_usage(span, total_tokens=tokens_used) + span.__exit__(None, None, None) + + return 
new_details_iterator() + else: + # res is Iterable[str] + + def new_iterator(): + # type: () -> Iterable[str] + data_buf: list[str] = [] + with capture_internal_exceptions(): + for s in res: + if isinstance(s, str): + data_buf.append(s) + yield s + if ( + len(data_buf) > 0 + and should_send_default_pii() + and integration.include_prompts + ): + set_data_normalized( + span, SPANDATA.AI_RESPONSES, "".join(data_buf) + ) + span.__exit__(None, None, None) + + return new_iterator() + + return new_text_generation diff --git a/src/sentry_sdk_alpha/integrations/langchain.py b/src/sentry_sdk_alpha/integrations/langchain.py new file mode 100644 index 00000000000000..3accb4c0614933 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/langchain.py @@ -0,0 +1,472 @@ +from collections import OrderedDict +from functools import wraps + +import sentry_sdk_alpha +from sentry_sdk_alpha.ai.monitoring import set_ai_pipeline_name, record_token_usage +from sentry_sdk_alpha.consts import OP, SPANDATA, SPANSTATUS +from sentry_sdk_alpha.ai.utils import set_data_normalized +from sentry_sdk_alpha.scope import should_send_default_pii +from sentry_sdk_alpha.tracing import Span +from sentry_sdk_alpha.integrations import DidNotEnable, Integration +from sentry_sdk_alpha.utils import logger, capture_internal_exceptions + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, List, Callable, Dict, Union, Optional + from uuid import UUID + +try: + from langchain_core.messages import BaseMessage + from langchain_core.outputs import LLMResult + from langchain_core.callbacks import ( + manager, + BaseCallbackHandler, + ) + from langchain_core.agents import AgentAction, AgentFinish +except ImportError: + raise DidNotEnable("langchain not installed") + + +DATA_FIELDS = { + "temperature": SPANDATA.AI_TEMPERATURE, + "top_p": SPANDATA.AI_TOP_P, + "top_k": SPANDATA.AI_TOP_K, + "function_call": SPANDATA.AI_FUNCTION_CALL, + "tool_calls": SPANDATA.AI_TOOL_CALLS, + "tools": 
SPANDATA.AI_TOOLS, + "response_format": SPANDATA.AI_RESPONSE_FORMAT, + "logit_bias": SPANDATA.AI_LOGIT_BIAS, + "tags": SPANDATA.AI_TAGS, +} + +# To avoid double collecting tokens, we do *not* measure +# token counts for models for which we have an explicit integration +NO_COLLECT_TOKEN_MODELS = [ + "openai-chat", + "anthropic-chat", + "cohere-chat", + "huggingface_endpoint", +] + + +class LangchainIntegration(Integration): + identifier = "langchain" + origin = f"auto.ai.{identifier}" + + # The most number of spans (e.g., LLM calls) that can be processed at the same time. + max_spans = 1024 + + def __init__( + self, include_prompts=True, max_spans=1024, tiktoken_encoding_name=None + ): + # type: (LangchainIntegration, bool, int, Optional[str]) -> None + self.include_prompts = include_prompts + self.max_spans = max_spans + self.tiktoken_encoding_name = tiktoken_encoding_name + + @staticmethod + def setup_once(): + # type: () -> None + manager._configure = _wrap_configure(manager._configure) + + +class WatchedSpan: + num_completion_tokens = 0 # type: int + num_prompt_tokens = 0 # type: int + no_collect_tokens = False # type: bool + children = [] # type: List[WatchedSpan] + is_pipeline = False # type: bool + + def __init__(self, span): + # type: (Span) -> None + self.span = span + + +class SentryLangchainCallback(BaseCallbackHandler): # type: ignore[misc] + """Base callback handler that can be used to handle callbacks from langchain.""" + + span_map = OrderedDict() # type: OrderedDict[UUID, WatchedSpan] + + max_span_map_size = 0 + + def __init__(self, max_span_map_size, include_prompts, tiktoken_encoding_name=None): + # type: (int, bool, Optional[str]) -> None + self.max_span_map_size = max_span_map_size + self.include_prompts = include_prompts + + self.tiktoken_encoding = None + if tiktoken_encoding_name is not None: + import tiktoken # type: ignore + + self.tiktoken_encoding = tiktoken.get_encoding(tiktoken_encoding_name) + + def count_tokens(self, s): + # type: 
(str) -> int + if self.tiktoken_encoding is not None: + return len(self.tiktoken_encoding.encode_ordinary(s)) + return 0 + + def gc_span_map(self): + # type: () -> None + + while len(self.span_map) > self.max_span_map_size: + run_id, watched_span = self.span_map.popitem(last=False) + self._exit_span(watched_span, run_id) + + def _handle_error(self, run_id, error): + # type: (UUID, Any) -> None + if not run_id or run_id not in self.span_map: + return + + span_data = self.span_map[run_id] + if not span_data: + return + sentry_sdk_alpha.capture_exception(error) + span_data.span.set_status(SPANSTATUS.INTERNAL_ERROR) + span_data.span.finish() + del self.span_map[run_id] + + def _normalize_langchain_message(self, message): + # type: (BaseMessage) -> Any + parsed = {"content": message.content, "role": message.type} + parsed.update(message.additional_kwargs) + return parsed + + def _create_span(self, run_id, parent_id, **kwargs): + # type: (SentryLangchainCallback, UUID, Optional[Any], Any) -> WatchedSpan + + parent_watched_span = self.span_map.get(parent_id) if parent_id else None + sentry_span = sentry_sdk_alpha.start_span( + parent_span=parent_watched_span.span if parent_watched_span else None, + only_if_parent=True, + **kwargs, + ) + watched_span = WatchedSpan(sentry_span) + if parent_watched_span: + parent_watched_span.children.append(watched_span) + + if kwargs.get("op", "").startswith("ai.pipeline."): + if kwargs.get("name"): + set_ai_pipeline_name(kwargs.get("name")) + watched_span.is_pipeline = True + + # the same run_id is reused for the pipeline it seems + # so we need to end the older span to avoid orphan spans + existing_span_data = self.span_map.get(run_id) + if existing_span_data is not None: + self._exit_span(existing_span_data, run_id) + + self.span_map[run_id] = watched_span + self.gc_span_map() + return watched_span + + def _exit_span(self, span_data, run_id): + # type: (SentryLangchainCallback, WatchedSpan, UUID) -> None + + if span_data.is_pipeline: + 
set_ai_pipeline_name(None) + + span_data.span.set_status(SPANSTATUS.OK) + span_data.span.finish() + del self.span_map[run_id] + + def on_llm_start( + self, + serialized, + prompts, + *, + run_id, + tags=None, + parent_run_id=None, + metadata=None, + **kwargs, + ): + # type: (SentryLangchainCallback, Dict[str, Any], List[str], UUID, Optional[List[str]], Optional[UUID], Optional[Dict[str, Any]], Any) -> Any + """Run when LLM starts running.""" + with capture_internal_exceptions(): + if not run_id: + return + all_params = kwargs.get("invocation_params", {}) + all_params.update(serialized.get("kwargs", {})) + watched_span = self._create_span( + run_id, + kwargs.get("parent_run_id"), + op=OP.LANGCHAIN_RUN, + name=kwargs.get("name") or "Langchain LLM call", + origin=LangchainIntegration.origin, + ) + span = watched_span.span + if should_send_default_pii() and self.include_prompts: + set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, prompts) + for k, v in DATA_FIELDS.items(): + if k in all_params: + set_data_normalized(span, v, all_params[k]) + + def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs): + # type: (SentryLangchainCallback, Dict[str, Any], List[List[BaseMessage]], UUID, Any) -> Any + """Run when Chat Model starts running.""" + with capture_internal_exceptions(): + if not run_id: + return + all_params = kwargs.get("invocation_params", {}) + all_params.update(serialized.get("kwargs", {})) + watched_span = self._create_span( + run_id, + kwargs.get("parent_run_id"), + op=OP.LANGCHAIN_CHAT_COMPLETIONS_CREATE, + name=kwargs.get("name") or "Langchain Chat Model", + origin=LangchainIntegration.origin, + ) + span = watched_span.span + model = all_params.get( + "model", all_params.get("model_name", all_params.get("model_id")) + ) + watched_span.no_collect_tokens = any( + x in all_params.get("_type", "") for x in NO_COLLECT_TOKEN_MODELS + ) + + if not model and "anthropic" in all_params.get("_type"): + model = "claude-2" + if model: + 
span.set_attribute(SPANDATA.AI_MODEL_ID, model) + if should_send_default_pii() and self.include_prompts: + set_data_normalized( + span, + SPANDATA.AI_INPUT_MESSAGES, + [ + [self._normalize_langchain_message(x) for x in list_] + for list_ in messages + ], + ) + for k, v in DATA_FIELDS.items(): + if k in all_params: + set_data_normalized(span, v, all_params[k]) + if not watched_span.no_collect_tokens: + for list_ in messages: + for message in list_: + self.span_map[run_id].num_prompt_tokens += self.count_tokens( + message.content + ) + self.count_tokens(message.type) + + def on_llm_new_token(self, token, *, run_id, **kwargs): + # type: (SentryLangchainCallback, str, UUID, Any) -> Any + """Run on new LLM token. Only available when streaming is enabled.""" + with capture_internal_exceptions(): + if not run_id or run_id not in self.span_map: + return + span_data = self.span_map[run_id] + if not span_data or span_data.no_collect_tokens: + return + span_data.num_completion_tokens += self.count_tokens(token) + + def on_llm_end(self, response, *, run_id, **kwargs): + # type: (SentryLangchainCallback, LLMResult, UUID, Any) -> Any + """Run when LLM ends running.""" + with capture_internal_exceptions(): + if not run_id: + return + + token_usage = ( + response.llm_output.get("token_usage") if response.llm_output else None + ) + + span_data = self.span_map[run_id] + if not span_data: + return + + if should_send_default_pii() and self.include_prompts: + set_data_normalized( + span_data.span, + SPANDATA.AI_RESPONSES, + [[x.text for x in list_] for list_ in response.generations], + ) + + if not span_data.no_collect_tokens: + if token_usage: + record_token_usage( + span_data.span, + token_usage.get("prompt_tokens"), + token_usage.get("completion_tokens"), + token_usage.get("total_tokens"), + ) + else: + record_token_usage( + span_data.span, + span_data.num_prompt_tokens, + span_data.num_completion_tokens, + ) + + self._exit_span(span_data, run_id) + + def on_llm_error(self, error, 
*, run_id, **kwargs): + # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any + """Run when LLM errors.""" + with capture_internal_exceptions(): + self._handle_error(run_id, error) + + def on_chain_start(self, serialized, inputs, *, run_id, **kwargs): + # type: (SentryLangchainCallback, Dict[str, Any], Dict[str, Any], UUID, Any) -> Any + """Run when chain starts running.""" + with capture_internal_exceptions(): + if not run_id: + return + watched_span = self._create_span( + run_id, + kwargs.get("parent_run_id"), + op=( + OP.LANGCHAIN_RUN + if kwargs.get("parent_run_id") is not None + else OP.LANGCHAIN_PIPELINE + ), + name=kwargs.get("name") or "Chain execution", + origin=LangchainIntegration.origin, + ) + metadata = kwargs.get("metadata") + if metadata: + set_data_normalized(watched_span.span, SPANDATA.AI_METADATA, metadata) + + def on_chain_end(self, outputs, *, run_id, **kwargs): + # type: (SentryLangchainCallback, Dict[str, Any], UUID, Any) -> Any + """Run when chain ends running.""" + with capture_internal_exceptions(): + if not run_id or run_id not in self.span_map: + return + + span_data = self.span_map[run_id] + if not span_data: + return + self._exit_span(span_data, run_id) + + def on_chain_error(self, error, *, run_id, **kwargs): + # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any + """Run when chain errors.""" + self._handle_error(run_id, error) + + def on_agent_action(self, action, *, run_id, **kwargs): + # type: (SentryLangchainCallback, AgentAction, UUID, Any) -> Any + with capture_internal_exceptions(): + if not run_id: + return + watched_span = self._create_span( + run_id, + kwargs.get("parent_run_id"), + op=OP.LANGCHAIN_AGENT, + name=action.tool or "AI tool usage", + origin=LangchainIntegration.origin, + ) + if action.tool_input and should_send_default_pii() and self.include_prompts: + set_data_normalized( + watched_span.span, SPANDATA.AI_INPUT_MESSAGES, action.tool_input + ) 
+ + def on_agent_finish(self, finish, *, run_id, **kwargs): + # type: (SentryLangchainCallback, AgentFinish, UUID, Any) -> Any + with capture_internal_exceptions(): + if not run_id: + return + + span_data = self.span_map[run_id] + if not span_data: + return + if should_send_default_pii() and self.include_prompts: + set_data_normalized( + span_data.span, SPANDATA.AI_RESPONSES, finish.return_values.items() + ) + self._exit_span(span_data, run_id) + + def on_tool_start(self, serialized, input_str, *, run_id, **kwargs): + # type: (SentryLangchainCallback, Dict[str, Any], str, UUID, Any) -> Any + """Run when tool starts running.""" + with capture_internal_exceptions(): + if not run_id: + return + watched_span = self._create_span( + run_id, + kwargs.get("parent_run_id"), + op=OP.LANGCHAIN_TOOL, + name=serialized.get("name") or kwargs.get("name") or "AI tool usage", + origin=LangchainIntegration.origin, + ) + if should_send_default_pii() and self.include_prompts: + set_data_normalized( + watched_span.span, + SPANDATA.AI_INPUT_MESSAGES, + kwargs.get("inputs", [input_str]), + ) + if kwargs.get("metadata"): + set_data_normalized( + watched_span.span, SPANDATA.AI_METADATA, kwargs.get("metadata") + ) + + def on_tool_end(self, output, *, run_id, **kwargs): + # type: (SentryLangchainCallback, str, UUID, Any) -> Any + """Run when tool ends running.""" + with capture_internal_exceptions(): + if not run_id or run_id not in self.span_map: + return + + span_data = self.span_map[run_id] + if not span_data: + return + if should_send_default_pii() and self.include_prompts: + set_data_normalized(span_data.span, SPANDATA.AI_RESPONSES, output) + self._exit_span(span_data, run_id) + + def on_tool_error(self, error, *args, run_id, **kwargs): + # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any + """Run when tool errors.""" + self._handle_error(run_id, error) + + +def _wrap_configure(f): + # type: (Callable[..., Any]) -> Callable[..., Any] + + @wraps(f) + 
def new_configure(*args, **kwargs): + # type: (Any, Any) -> Any + + integration = sentry_sdk_alpha.get_client().get_integration(LangchainIntegration) + if integration is None: + return f(*args, **kwargs) + + with capture_internal_exceptions(): + new_callbacks = [] # type: List[BaseCallbackHandler] + if "local_callbacks" in kwargs: + existing_callbacks = kwargs["local_callbacks"] + kwargs["local_callbacks"] = new_callbacks + elif len(args) > 2: + existing_callbacks = args[2] + args = ( + args[0], + args[1], + new_callbacks, + ) + args[3:] + else: + existing_callbacks = [] + + if existing_callbacks: + if isinstance(existing_callbacks, list): + for cb in existing_callbacks: + new_callbacks.append(cb) + elif isinstance(existing_callbacks, BaseCallbackHandler): + new_callbacks.append(existing_callbacks) + else: + logger.debug("Unknown callback type: %s", existing_callbacks) + + already_added = False + for callback in new_callbacks: + if isinstance(callback, SentryLangchainCallback): + already_added = True + + if not already_added: + new_callbacks.append( + SentryLangchainCallback( + integration.max_spans, + integration.include_prompts, + integration.tiktoken_encoding_name, + ) + ) + return f(*args, **kwargs) + + return new_configure diff --git a/src/sentry_sdk_alpha/integrations/launchdarkly.py b/src/sentry_sdk_alpha/integrations/launchdarkly.py new file mode 100644 index 00000000000000..59390720bfc0fb --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/launchdarkly.py @@ -0,0 +1,62 @@ +from typing import TYPE_CHECKING + +from sentry_sdk_alpha.feature_flags import add_feature_flag +from sentry_sdk_alpha.integrations import DidNotEnable, Integration + +try: + import ldclient + from ldclient.hook import Hook, Metadata + + if TYPE_CHECKING: + from ldclient import LDClient + from ldclient.hook import EvaluationSeriesContext + from ldclient.evaluation import EvaluationDetail + + from typing import Any +except ImportError: + raise DidNotEnable("LaunchDarkly is not 
installed") + + +class LaunchDarklyIntegration(Integration): + identifier = "launchdarkly" + + def __init__(self, ld_client=None): + # type: (LDClient | None) -> None + """ + :param client: An initialized LDClient instance. If a client is not provided, this + integration will attempt to use the shared global instance. + """ + try: + client = ld_client or ldclient.get() + except Exception as exc: + raise DidNotEnable("Error getting LaunchDarkly client. " + repr(exc)) + + if not client.is_initialized(): + raise DidNotEnable("LaunchDarkly client is not initialized.") + + # Register the flag collection hook with the LD client. + client.add_hook(LaunchDarklyHook()) + + @staticmethod + def setup_once(): + # type: () -> None + pass + + +class LaunchDarklyHook(Hook): + + @property + def metadata(self): + # type: () -> Metadata + return Metadata(name="sentry-flag-auditor") + + def after_evaluation(self, series_context, data, detail): + # type: (EvaluationSeriesContext, dict[Any, Any], EvaluationDetail) -> dict[Any, Any] + if isinstance(detail.value, bool): + add_feature_flag(series_context.key, detail.value) + + return data + + def before_evaluation(self, series_context, data): + # type: (EvaluationSeriesContext, dict[Any, Any]) -> dict[Any, Any] + return data # No-op. 
diff --git a/src/sentry_sdk_alpha/integrations/litestar.py b/src/sentry_sdk_alpha/integrations/litestar.py new file mode 100644 index 00000000000000..7993f99ecb4843 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/litestar.py @@ -0,0 +1,308 @@ +from collections.abc import Set +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP, TransactionSource, SOURCE_FOR_STYLE +from sentry_sdk_alpha.integrations import ( + _DEFAULT_FAILED_REQUEST_STATUS_CODES, + DidNotEnable, + Integration, +) +from sentry_sdk_alpha.integrations.asgi import SentryAsgiMiddleware +from sentry_sdk_alpha.integrations.logging import ignore_logger +from sentry_sdk_alpha.scope import should_send_default_pii +from sentry_sdk_alpha.utils import ( + ensure_integration_enabled, + event_from_exception, + transaction_from_function, +) + +try: + from litestar import Request, Litestar # type: ignore + from litestar.handlers.base import BaseRouteHandler # type: ignore + from litestar.middleware import DefineMiddleware # type: ignore + from litestar.routes.http import HTTPRoute # type: ignore + from litestar.data_extractors import ConnectionDataExtractor # type: ignore + from litestar.exceptions import HTTPException # type: ignore +except ImportError: + raise DidNotEnable("Litestar is not installed") + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Optional, Union + from litestar.types.asgi_types import ASGIApp # type: ignore + from litestar.types import ( # type: ignore + HTTPReceiveMessage, + HTTPScope, + Message, + Middleware, + Receive, + Scope as LitestarScope, + Send, + WebSocketReceiveMessage, + ) + from litestar.middleware import MiddlewareProtocol + from sentry_sdk_alpha._types import Event, Hint + +_DEFAULT_TRANSACTION_NAME = "generic Litestar request" + + +class LitestarIntegration(Integration): + identifier = "litestar" + origin = f"auto.http.{identifier}" + + def __init__( + self, + failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES, 
# type: Set[int]
+    ) -> None:
+        self.failed_request_status_codes = failed_request_status_codes
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        patch_app_init()
+        patch_middlewares()
+        patch_http_route_handle()
+
+        # The following line follows the pattern found in other integrations such as `DjangoIntegration.setup_once`.
+        # The Litestar `ExceptionHandlerMiddleware.__call__` catches exceptions and does the following
+        # (among other things):
+        # 1. Logs them, some at least (such as 500s) as errors
+        # 2. Calls after_exception hooks
+        # The `LitestarIntegration` provides an after_exception hook (see `patch_app_init` below) to create a Sentry event
+        # from an exception, which ends up being called during step 2 above. However, the Sentry `LoggingIntegration` will
+        # by default create a Sentry event from error logs made in step 1 if we do not prevent it from doing so.
+        ignore_logger("litestar")
+
+
+class SentryLitestarASGIMiddleware(SentryAsgiMiddleware):
+    def __init__(self, app, span_origin=LitestarIntegration.origin):
+        # type: (ASGIApp, str) -> None
+
+        super().__init__(
+            app=app,
+            unsafe_context_data=False,
+            transaction_style="endpoint",
+            mechanism_type="asgi",
+            span_origin=span_origin,
+        )
+
+
+def patch_app_init():
+    # type: () -> None
+    """
+    Replaces the Litestar class's `__init__` function in order to inject `after_exception` handlers and set the
+    `SentryLitestarASGIMiddleware` as the outermost middleware in the stack.
+ See: + - https://docs.litestar.dev/2/usage/applications.html#after-exception + - https://docs.litestar.dev/2/usage/middleware/using-middleware.html + """ + old__init__ = Litestar.__init__ + + @ensure_integration_enabled(LitestarIntegration, old__init__) + def injection_wrapper(self, *args, **kwargs): + # type: (Litestar, *Any, **Any) -> None + kwargs["after_exception"] = [ + exception_handler, + *(kwargs.get("after_exception") or []), + ] + + SentryLitestarASGIMiddleware.__call__ = SentryLitestarASGIMiddleware._run_asgi3 # type: ignore + middleware = kwargs.get("middleware") or [] + kwargs["middleware"] = [SentryLitestarASGIMiddleware, *middleware] + old__init__(self, *args, **kwargs) + + Litestar.__init__ = injection_wrapper + + +def patch_middlewares(): + # type: () -> None + old_resolve_middleware_stack = BaseRouteHandler.resolve_middleware + + @ensure_integration_enabled(LitestarIntegration, old_resolve_middleware_stack) + def resolve_middleware_wrapper(self): + # type: (BaseRouteHandler) -> list[Middleware] + return [ + enable_span_for_middleware(middleware) + for middleware in old_resolve_middleware_stack(self) + ] + + BaseRouteHandler.resolve_middleware = resolve_middleware_wrapper + + +def enable_span_for_middleware(middleware): + # type: (Middleware) -> Middleware + if ( + not hasattr(middleware, "__call__") # noqa: B004 + or middleware is SentryLitestarASGIMiddleware + ): + return middleware + + if isinstance(middleware, DefineMiddleware): + old_call = middleware.middleware.__call__ # type: ASGIApp + else: + old_call = middleware.__call__ + + async def _create_span_call(self, scope, receive, send): + # type: (MiddlewareProtocol, LitestarScope, Receive, Send) -> None + if sentry_sdk_alpha.get_client().get_integration(LitestarIntegration) is None: + return await old_call(self, scope, receive, send) + + middleware_name = self.__class__.__name__ + with sentry_sdk_alpha.start_span( + op=OP.MIDDLEWARE_LITESTAR, + name=middleware_name, + 
origin=LitestarIntegration.origin, + only_if_parent=True, + ) as middleware_span: + middleware_span.set_tag("litestar.middleware_name", middleware_name) + + # Creating spans for the "receive" callback + async def _sentry_receive(*args, **kwargs): + # type: (*Any, **Any) -> Union[HTTPReceiveMessage, WebSocketReceiveMessage] + if sentry_sdk_alpha.get_client().get_integration(LitestarIntegration) is None: + return await receive(*args, **kwargs) + with sentry_sdk_alpha.start_span( + op=OP.MIDDLEWARE_LITESTAR_RECEIVE, + name=getattr(receive, "__qualname__", str(receive)), + origin=LitestarIntegration.origin, + only_if_parent=True, + ) as span: + span.set_tag("litestar.middleware_name", middleware_name) + return await receive(*args, **kwargs) + + receive_name = getattr(receive, "__name__", str(receive)) + receive_patched = receive_name == "_sentry_receive" + new_receive = _sentry_receive if not receive_patched else receive + + # Creating spans for the "send" callback + async def _sentry_send(message): + # type: (Message) -> None + if sentry_sdk_alpha.get_client().get_integration(LitestarIntegration) is None: + return await send(message) + with sentry_sdk_alpha.start_span( + op=OP.MIDDLEWARE_LITESTAR_SEND, + name=getattr(send, "__qualname__", str(send)), + origin=LitestarIntegration.origin, + only_if_parent=True, + ) as span: + span.set_tag("litestar.middleware_name", middleware_name) + return await send(message) + + send_name = getattr(send, "__name__", str(send)) + send_patched = send_name == "_sentry_send" + new_send = _sentry_send if not send_patched else send + + return await old_call(self, scope, new_receive, new_send) + + not_yet_patched = old_call.__name__ not in ["_create_span_call"] + + if not_yet_patched: + if isinstance(middleware, DefineMiddleware): + middleware.middleware.__call__ = _create_span_call + else: + middleware.__call__ = _create_span_call + + return middleware + + +def patch_http_route_handle(): + # type: () -> None + old_handle = HTTPRoute.handle 
+ + async def handle_wrapper(self, scope, receive, send): + # type: (HTTPRoute, HTTPScope, Receive, Send) -> None + if sentry_sdk_alpha.get_client().get_integration(LitestarIntegration) is None: + return await old_handle(self, scope, receive, send) + + sentry_scope = sentry_sdk_alpha.get_isolation_scope() + request = scope["app"].request_class( + scope=scope, receive=receive, send=send + ) # type: Request[Any, Any] + extracted_request_data = ConnectionDataExtractor( + parse_body=True, parse_query=True + )(request) + body = extracted_request_data.pop("body") + + request_data = await body + + def event_processor(event, _): + # type: (Event, Hint) -> Event + route_handler = scope.get("route_handler") + + request_info = event.get("request", {}) + request_info["content_length"] = len(scope.get("_body", b"")) + if should_send_default_pii(): + request_info["cookies"] = extracted_request_data["cookies"] + if request_data is not None: + request_info["data"] = request_data + + func = None + if route_handler.name is not None: + tx_name = route_handler.name + # Accounts for use of type `Ref` in earlier versions of litestar without the need to reference it as a type + elif hasattr(route_handler.fn, "value"): + func = route_handler.fn.value + else: + func = route_handler.fn + if func is not None: + tx_name = transaction_from_function(func) + + tx_info = {"source": SOURCE_FOR_STYLE["endpoint"]} + + if not tx_name: + tx_name = _DEFAULT_TRANSACTION_NAME + tx_info = {"source": TransactionSource.ROUTE} + + event.update( + { + "request": request_info, + "transaction": tx_name, + "transaction_info": tx_info, + } + ) + return event + + sentry_scope._name = LitestarIntegration.identifier + sentry_scope.add_event_processor(event_processor) + + return await old_handle(self, scope, receive, send) + + HTTPRoute.handle = handle_wrapper + + +def retrieve_user_from_scope(scope): + # type: (LitestarScope) -> Optional[dict[str, Any]] + scope_user = scope.get("user") + if isinstance(scope_user, 
dict): + return scope_user + if hasattr(scope_user, "asdict"): # dataclasses + return scope_user.asdict() + + return None + + +@ensure_integration_enabled(LitestarIntegration) +def exception_handler(exc, scope): + # type: (Exception, LitestarScope) -> None + user_info = None # type: Optional[dict[str, Any]] + if should_send_default_pii(): + user_info = retrieve_user_from_scope(scope) + if user_info and isinstance(user_info, dict): + sentry_scope = sentry_sdk_alpha.get_isolation_scope() + sentry_scope.set_user(user_info) + + if isinstance(exc, HTTPException): + integration = sentry_sdk_alpha.get_client().get_integration(LitestarIntegration) + if ( + integration is not None + and exc.status_code not in integration.failed_request_status_codes + ): + return + + event, hint = event_from_exception( + exc, + client_options=sentry_sdk_alpha.get_client().options, + mechanism={"type": LitestarIntegration.identifier, "handled": False}, + ) + + sentry_sdk_alpha.capture_event(event, hint=hint) diff --git a/src/sentry_sdk_alpha/integrations/logging.py b/src/sentry_sdk_alpha/integrations/logging.py new file mode 100644 index 00000000000000..350a2209938dc1 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/logging.py @@ -0,0 +1,413 @@ +import logging +import sys +from datetime import datetime, timezone +from fnmatch import fnmatch + +import sentry_sdk_alpha +from sentry_sdk_alpha.client import BaseClient +from sentry_sdk_alpha.utils import ( + safe_repr, + to_string, + event_from_exception, + current_stacktrace, + capture_internal_exceptions, +) +from sentry_sdk_alpha.integrations import Integration + +from typing import TYPE_CHECKING, Tuple + +if TYPE_CHECKING: + from collections.abc import MutableMapping + from logging import LogRecord + from typing import Any + from typing import Dict + from typing import Optional + +DEFAULT_LEVEL = logging.INFO +DEFAULT_EVENT_LEVEL = None # None means no events are captured +LOGGING_TO_EVENT_LEVEL = { + logging.NOTSET: "notset", + 
logging.DEBUG: "debug",
+    logging.INFO: "info",
+    logging.WARN: "warning",  # WARN is the same as WARNING
+    logging.WARNING: "warning",
+    logging.ERROR: "error",
+    logging.FATAL: "fatal",
+    logging.CRITICAL: "fatal",  # CRITICAL is the same as FATAL
+}
+
+# Capturing events from those loggers causes recursion errors. We cannot allow
+# the user to unconditionally create events from those loggers under any
+# circumstances.
+#
+# Note: Ignoring by logger name here is better than mucking with thread-locals.
+# We do not necessarily know whether thread-locals work 100% correctly in the user's environment.
+_IGNORED_LOGGERS = set(
+    [
+        "sentry_sdk.errors",
+        "urllib3.connectionpool",
+        "urllib3.connection",
+        "opentelemetry.*",
+    ]
+)
+
+
+def ignore_logger(
+    name,  # type: str
+):
+    # type: (...) -> None
+    """This disables recording (both in breadcrumbs and as events) calls to
+    a logger of a specific name. Among other uses, many of our integrations
+    use this to prevent their actions being recorded as breadcrumbs. Exposed
+    to users as a way to quiet spammy loggers.
+
+    :param name: The name of the logger to ignore (same string you would pass to ``logging.getLogger``).
+ """ + _IGNORED_LOGGERS.add(name) + + +class LoggingIntegration(Integration): + identifier = "logging" + + def __init__( + self, + level=DEFAULT_LEVEL, + event_level=DEFAULT_EVENT_LEVEL, + sentry_logs_level=DEFAULT_LEVEL, + ): + # type: (Optional[int], Optional[int], Optional[int]) -> None + self._handler = None + self._breadcrumb_handler = None + self._sentry_logs_handler = None + + if level is not None: + self._breadcrumb_handler = BreadcrumbHandler(level=level) + + if sentry_logs_level is not None: + self._sentry_logs_handler = SentryLogsHandler(level=sentry_logs_level) + + if event_level is not None: + self._handler = EventHandler(level=event_level) + + def _handle_record(self, record): + # type: (LogRecord) -> None + if self._handler is not None and record.levelno >= self._handler.level: + self._handler.handle(record) + + if ( + self._breadcrumb_handler is not None + and record.levelno >= self._breadcrumb_handler.level + ): + self._breadcrumb_handler.handle(record) + + if ( + self._sentry_logs_handler is not None + and record.levelno >= self._sentry_logs_handler.level + ): + self._sentry_logs_handler.handle(record) + + @staticmethod + def setup_once(): + # type: () -> None + old_callhandlers = logging.Logger.callHandlers + + def sentry_patched_callhandlers(self, record): + # type: (Any, LogRecord) -> Any + # keeping a local reference because the + # global might be discarded on shutdown + ignored_loggers = _IGNORED_LOGGERS + + try: + return old_callhandlers(self, record) + finally: + # This check is done twice, once also here before we even get + # the integration. Otherwise we have a high chance of getting + # into a recursion error when the integration is resolved + # (this also is slower). 
+ if ignored_loggers is not None and record.name not in ignored_loggers: + integration = sentry_sdk_alpha.get_client().get_integration( + LoggingIntegration + ) + if integration is not None: + integration._handle_record(record) + + logging.Logger.callHandlers = sentry_patched_callhandlers # type: ignore + + +class _BaseHandler(logging.Handler): + COMMON_RECORD_ATTRS = frozenset( + ( + "args", + "created", + "exc_info", + "exc_text", + "filename", + "funcName", + "levelname", + "levelno", + "linenno", + "lineno", + "message", + "module", + "msecs", + "msg", + "name", + "pathname", + "process", + "processName", + "relativeCreated", + "stack", + "tags", + "taskName", + "thread", + "threadName", + "stack_info", + ) + ) + + def _can_record(self, record): + # type: (LogRecord) -> bool + """Prevents ignored loggers from recording""" + for logger in _IGNORED_LOGGERS: + if fnmatch(record.name, logger): + return False + return True + + def _logging_to_event_level(self, record): + # type: (LogRecord) -> str + return LOGGING_TO_EVENT_LEVEL.get( + record.levelno, record.levelname.lower() if record.levelname else "" + ) + + def _extra_from_record(self, record): + # type: (LogRecord) -> MutableMapping[str, object] + return { + k: v + for k, v in vars(record).items() + if k not in self.COMMON_RECORD_ATTRS + and (not isinstance(k, str) or not k.startswith("_")) + } + + +class EventHandler(_BaseHandler): + """ + A logging handler that emits Sentry events for each log record + + Note that you do not have to use this class if the logging integration is enabled, which it is by default. 
+ """ + + def emit(self, record): + # type: (LogRecord) -> Any + with capture_internal_exceptions(): + self.format(record) + return self._emit(record) + + def _emit(self, record): + # type: (LogRecord) -> None + if not self._can_record(record): + return + + client = sentry_sdk_alpha.get_client() + if not client.is_active(): + return + + client_options = client.options + + # exc_info might be None or (None, None, None) + # + # exc_info may also be any falsy value due to Python stdlib being + # liberal with what it receives and Celery's billiard being "liberal" + # with what it sends. See + # https://github.com/getsentry/sentry-python/issues/904 + if record.exc_info and record.exc_info[0] is not None: + event, hint = event_from_exception( + record.exc_info, + client_options=client_options, + mechanism={"type": "logging", "handled": True}, + ) + elif (record.exc_info and record.exc_info[0] is None) or record.stack_info: + event = {} + hint = {} + with capture_internal_exceptions(): + event["threads"] = { + "values": [ + { + "stacktrace": current_stacktrace( + include_local_variables=client_options[ + "include_local_variables" + ], + max_value_length=client_options["max_value_length"], + ), + "crashed": False, + "current": True, + } + ] + } + else: + event = {} + hint = {} + + hint["log_record"] = record + + level = self._logging_to_event_level(record) + if level in {"debug", "info", "warning", "error", "critical", "fatal"}: + event["level"] = level # type: ignore[typeddict-item] + event["logger"] = record.name + + if ( + sys.version_info < (3, 11) + and record.name == "py.warnings" + and record.msg == "%s" + ): + # warnings module on Python 3.10 and below sets record.msg to "%s" + # and record.args[0] to the actual warning message. + # This was fixed in https://github.com/python/cpython/pull/30975. 
+ message = record.args[0] + params = () + else: + message = record.msg + params = record.args + + event["logentry"] = { + "message": to_string(message), + "formatted": record.getMessage(), + "params": params, + } + + event["extra"] = self._extra_from_record(record) + + sentry_sdk_alpha.capture_event(event, hint=hint) + + +# Legacy name +SentryHandler = EventHandler + + +class BreadcrumbHandler(_BaseHandler): + """ + A logging handler that records breadcrumbs for each log record. + + Note that you do not have to use this class if the logging integration is enabled, which it is by default. + """ + + def emit(self, record): + # type: (LogRecord) -> Any + with capture_internal_exceptions(): + self.format(record) + return self._emit(record) + + def _emit(self, record): + # type: (LogRecord) -> None + if not self._can_record(record): + return + + sentry_sdk_alpha.add_breadcrumb( + self._breadcrumb_from_record(record), hint={"log_record": record} + ) + + def _breadcrumb_from_record(self, record): + # type: (LogRecord) -> Dict[str, Any] + return { + "type": "log", + "level": self._logging_to_event_level(record), + "category": record.name, + "message": record.message, + "timestamp": datetime.fromtimestamp(record.created, timezone.utc), + "data": self._extra_from_record(record), + } + + +def _python_level_to_otel(record_level): + # type: (int) -> Tuple[int, str] + for py_level, otel_severity_number, otel_severity_text in [ + (50, 21, "fatal"), + (40, 17, "error"), + (30, 13, "warn"), + (20, 9, "info"), + (10, 5, "debug"), + (5, 1, "trace"), + ]: + if record_level >= py_level: + return otel_severity_number, otel_severity_text + return 0, "default" + + +class SentryLogsHandler(_BaseHandler): + """ + A logging handler that records Sentry logs for each Python log record. + + Note that you do not have to use this class if the logging integration is enabled, which it is by default. 
+ """ + + def emit(self, record): + # type: (LogRecord) -> Any + with capture_internal_exceptions(): + self.format(record) + if not self._can_record(record): + return + + client = sentry_sdk_alpha.get_client() + if not client.is_active(): + return + + if not client.options["_experiments"].get("enable_logs", False): + return + + SentryLogsHandler._capture_log_from_record(client, record) + + @staticmethod + def _capture_log_from_record(client, record): + # type: (BaseClient, LogRecord) -> None + scope = sentry_sdk_alpha.get_current_scope() + otel_severity_number, otel_severity_text = _python_level_to_otel(record.levelno) + project_root = client.options["project_root"] + attrs = { + "sentry.origin": "auto.logger.log", + } # type: dict[str, str | bool | float | int] + if isinstance(record.msg, str): + attrs["sentry.message.template"] = record.msg + if record.args is not None: + if isinstance(record.args, tuple): + for i, arg in enumerate(record.args): + attrs[f"sentry.message.parameters.{i}"] = ( + arg + if isinstance(arg, str) + or isinstance(arg, float) + or isinstance(arg, int) + or isinstance(arg, bool) + else safe_repr(arg) + ) + if record.lineno: + attrs["code.line.number"] = record.lineno + if record.pathname: + if project_root is not None and record.pathname.startswith(project_root): + attrs["code.file.path"] = record.pathname[len(project_root) + 1 :] + else: + attrs["code.file.path"] = record.pathname + if record.funcName: + attrs["code.function.name"] = record.funcName + + if record.thread: + attrs["thread.id"] = record.thread + if record.threadName: + attrs["thread.name"] = record.threadName + + if record.process: + attrs["process.pid"] = record.process + if record.processName: + attrs["process.executable.name"] = record.processName + if record.name: + attrs["logger.name"] = record.name + + # noinspection PyProtectedMember + client._capture_experimental_log( + scope, + { + "severity_text": otel_severity_text, + "severity_number": otel_severity_number, + 
"body": record.message, + "attributes": attrs, + "time_unix_nano": int(record.created * 1e9), + "trace_id": None, + }, + ) diff --git a/src/sentry_sdk_alpha/integrations/loguru.py b/src/sentry_sdk_alpha/integrations/loguru.py new file mode 100644 index 00000000000000..3cbec227e698a4 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/loguru.py @@ -0,0 +1,130 @@ +import enum + +from sentry_sdk_alpha.integrations import Integration, DidNotEnable +from sentry_sdk_alpha.integrations.logging import ( + BreadcrumbHandler, + EventHandler, + _BaseHandler, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from logging import LogRecord + from typing import Optional, Tuple, Any + +try: + import loguru + from loguru import logger + from loguru._defaults import LOGURU_FORMAT as DEFAULT_FORMAT +except ImportError: + raise DidNotEnable("LOGURU is not installed") + + +class LoggingLevels(enum.IntEnum): + TRACE = 5 + DEBUG = 10 + INFO = 20 + SUCCESS = 25 + WARNING = 30 + ERROR = 40 + CRITICAL = 50 + + +SENTRY_LEVEL_FROM_LOGURU_LEVEL = { + "TRACE": "DEBUG", + "DEBUG": "DEBUG", + "INFO": "INFO", + "SUCCESS": "INFO", + "WARNING": "WARNING", + "ERROR": "ERROR", + "CRITICAL": "CRITICAL", +} + +DEFAULT_LEVEL = LoggingLevels.INFO.value +DEFAULT_EVENT_LEVEL = LoggingLevels.ERROR.value +# We need to save the handlers to be able to remove them later +# in tests (they call `LoguruIntegration.__init__` multiple times, +# and we can't use `setup_once` because it's called before +# than we get configuration). 
+_ADDED_HANDLERS = (None, None) # type: Tuple[Optional[int], Optional[int]] + + +class LoguruIntegration(Integration): + identifier = "loguru" + + def __init__( + self, + level=DEFAULT_LEVEL, + event_level=DEFAULT_EVENT_LEVEL, + breadcrumb_format=DEFAULT_FORMAT, + event_format=DEFAULT_FORMAT, + ): + # type: (Optional[int], Optional[int], str | loguru.FormatFunction, str | loguru.FormatFunction) -> None + global _ADDED_HANDLERS + breadcrumb_handler, event_handler = _ADDED_HANDLERS + + if breadcrumb_handler is not None: + logger.remove(breadcrumb_handler) + breadcrumb_handler = None + if event_handler is not None: + logger.remove(event_handler) + event_handler = None + + if level is not None: + breadcrumb_handler = logger.add( + LoguruBreadcrumbHandler(level=level), + level=level, + format=breadcrumb_format, + ) + + if event_level is not None: + event_handler = logger.add( + LoguruEventHandler(level=event_level), + level=event_level, + format=event_format, + ) + + _ADDED_HANDLERS = (breadcrumb_handler, event_handler) + + @staticmethod + def setup_once(): + # type: () -> None + pass # we do everything in __init__ + + +class _LoguruBaseHandler(_BaseHandler): + def _logging_to_event_level(self, record): + # type: (LogRecord) -> str + try: + return SENTRY_LEVEL_FROM_LOGURU_LEVEL[ + LoggingLevels(record.levelno).name + ].lower() + except (ValueError, KeyError): + return record.levelname.lower() if record.levelname else "" + + +class LoguruEventHandler(_LoguruBaseHandler, EventHandler): + """Modified version of :class:`sentry_sdk.integrations.logging.EventHandler` to use loguru's level names.""" + + def __init__(self, *args, **kwargs): + # type: (*Any, **Any) -> None + if kwargs.get("level"): + kwargs["level"] = SENTRY_LEVEL_FROM_LOGURU_LEVEL.get( + kwargs.get("level", ""), DEFAULT_LEVEL + ) + + super().__init__(*args, **kwargs) + + +class LoguruBreadcrumbHandler(_LoguruBaseHandler, BreadcrumbHandler): + """Modified version of 
:class:`sentry_sdk.integrations.logging.BreadcrumbHandler` to use loguru's level names.""" + + def __init__(self, *args, **kwargs): + # type: (*Any, **Any) -> None + if kwargs.get("level"): + kwargs["level"] = SENTRY_LEVEL_FROM_LOGURU_LEVEL.get( + kwargs.get("level", ""), DEFAULT_LEVEL + ) + + super().__init__(*args, **kwargs) diff --git a/src/sentry_sdk_alpha/integrations/modules.py b/src/sentry_sdk_alpha/integrations/modules.py new file mode 100644 index 00000000000000..6109613299d592 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/modules.py @@ -0,0 +1,29 @@ +import sentry_sdk_alpha +from sentry_sdk_alpha.integrations import Integration +from sentry_sdk_alpha.scope import add_global_event_processor +from sentry_sdk_alpha.utils import _get_installed_modules + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from sentry_sdk_alpha._types import Event + + +class ModulesIntegration(Integration): + identifier = "modules" + + @staticmethod + def setup_once(): + # type: () -> None + @add_global_event_processor + def processor(event, hint): + # type: (Event, Any) -> Event + if event.get("type") == "transaction": + return event + + if sentry_sdk_alpha.get_client().get_integration(ModulesIntegration) is None: + return event + + event["modules"] = _get_installed_modules() + return event diff --git a/src/sentry_sdk_alpha/integrations/openai.py b/src/sentry_sdk_alpha/integrations/openai.py new file mode 100644 index 00000000000000..87c112de74cd38 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/openai.py @@ -0,0 +1,431 @@ +from functools import wraps + +import sentry_sdk_alpha +from sentry_sdk_alpha import consts +from sentry_sdk_alpha.ai.monitoring import record_token_usage +from sentry_sdk_alpha.ai.utils import set_data_normalized +from sentry_sdk_alpha.consts import SPANDATA +from sentry_sdk_alpha.integrations import DidNotEnable, Integration +from sentry_sdk_alpha.scope import should_send_default_pii +from sentry_sdk_alpha.utils 
class OpenAIIntegration(Integration):
    """Instrument OpenAI chat-completion and embedding calls with spans."""

    identifier = "openai"
    origin = f"auto.ai.{identifier}"

    def __init__(self, include_prompts=True, tiktoken_encoding_name=None):
        # type: (OpenAIIntegration, bool, Optional[str]) -> None
        self.include_prompts = include_prompts

        # Only load tiktoken when the user asked for local token counting.
        self.tiktoken_encoding = None
        if tiktoken_encoding_name is not None:
            import tiktoken  # type: ignore

            self.tiktoken_encoding = tiktoken.get_encoding(tiktoken_encoding_name)

    @staticmethod
    def setup_once():
        # type: () -> None
        # Patch both the sync and the async client entry points.
        Completions.create = _wrap_chat_completion_create(Completions.create)
        AsyncCompletions.create = _wrap_async_chat_completion_create(
            AsyncCompletions.create
        )

        Embeddings.create = _wrap_embeddings_create(Embeddings.create)
        AsyncEmbeddings.create = _wrap_async_embeddings_create(AsyncEmbeddings.create)

    def count_tokens(self, s):
        # type: (OpenAIIntegration, str) -> int
        """Count tokens in *s* with tiktoken; 0 when no encoding is configured."""
        if self.tiktoken_encoding is None:
            return 0
        return len(self.tiktoken_encoding.encode_ordinary(s))


def _capture_exception(exc):
    # type: (Any) -> None
    """Report *exc* to Sentry, flagged as an unhandled OpenAI error."""
    client = sentry_sdk_alpha.get_client()
    event, hint = event_from_exception(
        exc,
        client_options=client.options,
        mechanism={"type": "openai", "handled": False},
    )
    sentry_sdk_alpha.capture_event(event, hint=hint)
def _calculate_chat_completion_usage(
    messages, response, span, streaming_message_responses, count_tokens
):
    # type: (Iterable[ChatCompletionMessageParam], Any, Span, Optional[List[str]], Callable[..., Any]) -> None
    """Record prompt/completion/total token counts on *span*.

    Prefers the usage block reported by the API; falls back to counting
    tokens locally via *count_tokens* when the API did not report them.
    """
    completion_tokens = 0  # type: Optional[int]
    prompt_tokens = 0  # type: Optional[int]
    total_tokens = 0  # type: Optional[int]

    if hasattr(response, "usage"):
        usage = response.usage
        if isinstance(getattr(usage, "completion_tokens", None), int):
            completion_tokens = usage.completion_tokens
        if isinstance(getattr(usage, "prompt_tokens", None), int):
            prompt_tokens = usage.prompt_tokens
        if isinstance(getattr(usage, "total_tokens", None), int):
            total_tokens = usage.total_tokens

    if prompt_tokens == 0:
        # The API reported nothing -- count the input messages ourselves.
        prompt_tokens = sum(
            count_tokens(message["content"])
            for message in messages
            if "content" in message
        )

    if completion_tokens == 0:
        if streaming_message_responses is not None:
            completion_tokens = sum(
                count_tokens(message) for message in streaming_message_responses
            )
        elif hasattr(response, "choices"):
            completion_tokens = sum(
                count_tokens(choice.message)
                for choice in response.choices
                if hasattr(choice, "message")
            )

    # Zero means "unknown" -- report absent rather than 0.
    prompt_tokens = prompt_tokens or None
    completion_tokens = completion_tokens or None
    total_tokens = total_tokens or None
    record_token_usage(span, prompt_tokens, completion_tokens, total_tokens)
def _new_chat_completion_common(f, *args, **kwargs):
    # type: (Any, *Any, **Any) -> Any
    """Generator implementing the shared (sync/async) chat-completion hook.

    Yields ``(f, args, kwargs)`` exactly once so the calling wrapper can run
    the real API call, receives the result back via ``send()``, and then
    records prompts, responses, and token usage on a span.  For streaming
    responses the span is closed lazily, once the stream is exhausted.
    """
    integration = sentry_sdk_alpha.get_client().get_integration(OpenAIIntegration)
    if integration is None:
        return f(*args, **kwargs)

    if "messages" not in kwargs:
        # invalid call (in all versions of openai), let it return error
        return f(*args, **kwargs)

    try:
        iter(kwargs["messages"])
    except TypeError:
        # invalid call (in all versions), messages must be iterable
        return f(*args, **kwargs)

    kwargs["messages"] = list(kwargs["messages"])
    messages = kwargs["messages"]
    model = kwargs.get("model")
    streaming = kwargs.get("stream")

    span = sentry_sdk_alpha.start_span(
        op=consts.OP.OPENAI_CHAT_COMPLETIONS_CREATE,
        name="Chat Completion",
        origin=OpenAIIntegration.origin,
        only_if_parent=True,
    )
    # Entered manually because a streaming response outlives this function.
    span.__enter__()

    res = yield f, args, kwargs

    with capture_internal_exceptions():
        if should_send_default_pii() and integration.include_prompts:
            set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, messages)

        set_data_normalized(span, SPANDATA.AI_MODEL_ID, model)
        set_data_normalized(span, SPANDATA.AI_STREAMING, streaming)

        if hasattr(res, "choices"):
            # Non-streaming response: everything is available right away.
            if should_send_default_pii() and integration.include_prompts:
                set_data_normalized(
                    span,
                    SPANDATA.AI_RESPONSES,
                    list(map(lambda x: x.message, res.choices)),
                )
            _calculate_chat_completion_usage(
                messages, res, span, None, integration.count_tokens
            )
            span.__exit__(None, None, None)
        elif hasattr(res, "_iterator"):
            # Streaming response: wrap the underlying iterator so the span is
            # only finalized after the stream has been fully consumed.
            data_buf: list[list[str]] = []  # one for each choice

            old_iterator = res._iterator

            def new_iterator():
                # type: () -> Iterator[ChatCompletionChunk]
                with capture_internal_exceptions():
                    for x in old_iterator:
                        if hasattr(x, "choices"):
                            choice_index = 0
                            for choice in x.choices:
                                if hasattr(choice, "delta") and hasattr(
                                    choice.delta, "content"
                                ):
                                    content = choice.delta.content
                                    if len(data_buf) <= choice_index:
                                        data_buf.append([])
                                    data_buf[choice_index].append(content or "")
                                choice_index += 1
                        yield x
                    if len(data_buf) > 0:
                        all_responses = list(
                            map(lambda chunk: "".join(chunk), data_buf)
                        )
                        if should_send_default_pii() and integration.include_prompts:
                            set_data_normalized(
                                span, SPANDATA.AI_RESPONSES, all_responses
                            )
                        _calculate_chat_completion_usage(
                            messages,
                            res,
                            span,
                            all_responses,
                            integration.count_tokens,
                        )
                    span.__exit__(None, None, None)

            async def new_iterator_async():
                # type: () -> AsyncIterator[ChatCompletionChunk]
                with capture_internal_exceptions():
                    async for x in old_iterator:
                        if hasattr(x, "choices"):
                            choice_index = 0
                            for choice in x.choices:
                                if hasattr(choice, "delta") and hasattr(
                                    choice.delta, "content"
                                ):
                                    content = choice.delta.content
                                    if len(data_buf) <= choice_index:
                                        data_buf.append([])
                                    data_buf[choice_index].append(content or "")
                                choice_index += 1
                        yield x
                    if len(data_buf) > 0:
                        all_responses = list(
                            map(lambda chunk: "".join(chunk), data_buf)
                        )
                        if should_send_default_pii() and integration.include_prompts:
                            set_data_normalized(
                                span, SPANDATA.AI_RESPONSES, all_responses
                            )
                        _calculate_chat_completion_usage(
                            messages,
                            res,
                            span,
                            all_responses,
                            integration.count_tokens,
                        )
                    span.__exit__(None, None, None)

            # BUG FIX: this comparison was `str(type(res._iterator)) == ""`,
            # which is never true, so async streams were wrapped with the
            # *sync* iterator and streaming instrumentation broke for async
            # clients.  Dispatch on the AsyncStream type name instead.
            if "AsyncStream" in str(type(res._iterator)):
                res._iterator = new_iterator_async()
            else:
                res._iterator = new_iterator()

        else:
            set_data_normalized(span, "unknown_response", True)
            span.__exit__(None, None, None)
    return res


def _wrap_chat_completion_create(f):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    """Wrap the sync ``Completions.create`` with Sentry instrumentation."""

    def _execute_sync(f, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        gen = _new_chat_completion_common(f, *args, **kwargs)

        try:
            f, args, kwargs = next(gen)
        except StopIteration as e:
            # The common generator bailed out early (no integration/messages).
            return e.value

        try:
            try:
                result = f(*args, **kwargs)
            except Exception as e:
                _capture_exception(e)
                raise e from None

            return gen.send(result)
        except StopIteration as e:
            return e.value

    @wraps(f)
    def _sentry_patched_create_sync(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        integration = sentry_sdk_alpha.get_client().get_integration(OpenAIIntegration)
        if integration is None or "messages" not in kwargs:
            # no "messages" means invalid call (in all versions of openai), let it return error
            return f(*args, **kwargs)

        return _execute_sync(f, *args, **kwargs)

    return _sentry_patched_create_sync
def _wrap_async_chat_completion_create(f):
    # type: (Callable[..., Any]) -> Callable[..., Any]
    """Wrap the async ``AsyncCompletions.create`` with Sentry instrumentation."""

    async def _execute_async(f, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        gen = _new_chat_completion_common(f, *args, **kwargs)

        try:
            f, args, kwargs = next(gen)
        except StopIteration as e:
            # Early bail-out: the generator returned the un-awaited call.
            return await e.value

        try:
            try:
                result = await f(*args, **kwargs)
            except Exception as e:
                _capture_exception(e)
                raise e from None

            return gen.send(result)
        except StopIteration as e:
            return e.value

    @wraps(f)
    async def _sentry_patched_create_async(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        integration = sentry_sdk_alpha.get_client().get_integration(OpenAIIntegration)
        if integration is None or "messages" not in kwargs:
            # no "messages" means invalid call (in all versions of openai), let it return error
            return await f(*args, **kwargs)

        return await _execute_async(f, *args, **kwargs)

    return _sentry_patched_create_async


def _new_embeddings_create_common(f, *args, **kwargs):
    # type: (Any, *Any, **Any) -> Any
    """Generator implementing the shared (sync/async) embeddings hook."""
    integration = sentry_sdk_alpha.get_client().get_integration(OpenAIIntegration)
    if integration is None:
        return f(*args, **kwargs)

    with sentry_sdk_alpha.start_span(
        op=consts.OP.OPENAI_EMBEDDINGS_CREATE,
        description="OpenAI Embedding Creation",
        origin=OpenAIIntegration.origin,
        only_if_parent=True,
    ) as span:
        send_pii = should_send_default_pii() and integration.include_prompts
        if "input" in kwargs and send_pii:
            embedding_input = kwargs["input"]
            if isinstance(embedding_input, str):
                set_data_normalized(
                    span, SPANDATA.AI_INPUT_MESSAGES, [embedding_input]
                )
            elif (
                isinstance(embedding_input, list)
                and len(embedding_input) > 0
                and isinstance(embedding_input[0], str)
            ):
                set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, embedding_input)
        if "model" in kwargs:
            set_data_normalized(span, SPANDATA.AI_MODEL_ID, kwargs["model"])

        response = yield f, args, kwargs

        prompt_tokens = 0
        total_tokens = 0
        if hasattr(response, "usage"):
            usage = response.usage
            if isinstance(getattr(usage, "prompt_tokens", None), int):
                prompt_tokens = usage.prompt_tokens
            if isinstance(getattr(usage, "total_tokens", None), int):
                total_tokens = usage.total_tokens

        if prompt_tokens == 0:
            # The API did not report usage; count the input tokens ourselves.
            prompt_tokens = integration.count_tokens(kwargs["input"] or "")

        record_token_usage(span, prompt_tokens, None, total_tokens or prompt_tokens)

        return response
def _wrap_embeddings_create(f):
    # type: (Any) -> Any
    """Wrap the sync ``Embeddings.create`` with Sentry instrumentation."""

    def _execute_sync(f, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        gen = _new_embeddings_create_common(f, *args, **kwargs)

        try:
            f, args, kwargs = next(gen)
        except StopIteration as e:
            # The common generator bailed out early (no integration).
            return e.value

        try:
            try:
                result = f(*args, **kwargs)
            except Exception as e:
                _capture_exception(e)
                raise e from None

            return gen.send(result)
        except StopIteration as e:
            return e.value

    @wraps(f)
    def _sentry_patched_create_sync(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        if sentry_sdk_alpha.get_client().get_integration(OpenAIIntegration) is None:
            return f(*args, **kwargs)

        return _execute_sync(f, *args, **kwargs)

    return _sentry_patched_create_sync


def _wrap_async_embeddings_create(f):
    # type: (Any) -> Any
    """Wrap the async ``AsyncEmbeddings.create`` with Sentry instrumentation."""

    async def _execute_async(f, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        gen = _new_embeddings_create_common(f, *args, **kwargs)

        try:
            f, args, kwargs = next(gen)
        except StopIteration as e:
            # Early bail-out: the generator returned the un-awaited call.
            return await e.value

        try:
            try:
                result = await f(*args, **kwargs)
            except Exception as e:
                _capture_exception(e)
                raise e from None

            return gen.send(result)
        except StopIteration as e:
            return e.value

    @wraps(f)
    async def _sentry_patched_create_async(*args, **kwargs):
        # type: (*Any, **Any) -> Any
        if sentry_sdk_alpha.get_client().get_integration(OpenAIIntegration) is None:
            return await f(*args, **kwargs)

        return await _execute_async(f, *args, **kwargs)

    return _sentry_patched_create_async
class OpenFeatureIntegration(Integration):
    """Track OpenFeature flag evaluations as Sentry feature flags."""

    identifier = "openfeature"

    @staticmethod
    def setup_once():
        # type: () -> None
        # Register the hook within the global openfeature hooks list.
        api.add_hooks(hooks=[OpenFeatureHook()])


class OpenFeatureHook(Hook):
    """OpenFeature hook that records boolean flag evaluations."""

    def after(self, hook_context, details, hints):
        # type: (HookContext, FlagEvaluationDetails[bool], HookHints) -> None
        value = details.value
        if isinstance(value, bool):
            add_feature_flag(details.flag_key, value)

    def error(self, hook_context, exception, hints):
        # type: (HookContext, Exception, HookHints) -> None
        # On evaluation error the provider falls back to the default value.
        default = hook_context.default_value
        if isinstance(default, bool):
            add_feature_flag(hook_context.flag_key, default)
class PureEvalIntegration(Integration):
    """Enrich stack-trace frames with values evaluated by pure_eval."""

    identifier = "pure_eval"

    @staticmethod
    def setup_once():
        # type: () -> None

        @add_global_event_processor
        def add_executing_info(event, hint):
            # type: (Event, Optional[Hint]) -> Optional[Event]
            if sentry_sdk_alpha.get_client().get_integration(PureEvalIntegration) is None:
                return event

            if hint is None:
                return event

            exc_info = hint.get("exc_info", None)
            if exc_info is None:
                return event

            exception = event.get("exception", None)
            if exception is None:
                return event

            values = exception.get("values", None)
            if values is None:
                return event

            # The SDK lists exceptions oldest-first, the traceback walk
            # yields them newest-first -- hence the reversed().
            for exception, (_exc_type, _exc_value, exc_tb) in zip(
                reversed(values), walk_exception_chain(exc_info)
            ):
                sentry_frames = [
                    frame
                    for frame in exception.get("stacktrace", {}).get("frames", [])
                    if frame.get("function")
                ]
                tbs = list(iter_stacks(exc_tb))
                # Only annotate when SDK frames and traceback frames line up 1:1.
                if len(sentry_frames) != len(tbs):
                    continue

                for sentry_frame, tb in zip(sentry_frames, tbs):
                    sentry_frame["vars"] = (
                        pure_eval_frame(tb.tb_frame) or sentry_frame["vars"]
                    )
            return event
def pure_eval_frame(frame):
    # type: (FrameType) -> Dict[str, Any]
    """Evaluate interesting expressions in *frame* and serialize them."""
    source = executing.Source.for_frame(frame)
    if not source.tree:
        return {}

    statements = source.statements_at_line(frame.f_lineno)
    if not statements:
        return {}

    scope = stmt = list(statements)[0]
    # Walk up to the enclosing function/class/module.  Get the parent first
    # in case the original statement is already a function definition, e.g.
    # if we're calling a decorator -- in that case we still want the
    # surrounding scope, not that function.
    while True:
        scope = scope.parent
        if isinstance(scope, (ast.FunctionDef, ast.ClassDef, ast.Module)):
            break

    evaluator = pure_eval.Evaluator.from_frame(frame)
    expressions = evaluator.interesting_expressions_grouped(scope)

    def closeness(expression):
        # type: (Tuple[List[Any], Any]) -> Tuple[int, int]
        # Prioritise expressions with a node closer to the statement executed
        # without being after that statement.  A higher return value is
        # better -- the expression will appear earlier in the list of values
        # and is less likely to be trimmed.
        nodes, _value = expression

        def start(n):
            # type: (ast.expr) -> Tuple[int, int]
            return (n.lineno, n.col_offset)

        nodes_before_stmt = [
            node for node in nodes if start(node) < stmt.last_token.end  # type: ignore
        ]
        if nodes_before_stmt:
            # The position of the last node before or in the statement.
            return max(start(node) for node in nodes_before_stmt)

        # The position of the first node after the statement.  Negative means
        # it's always lower priority than nodes that come before; less
        # negative means closer to the statement and higher priority.
        lineno, col_offset = min(start(node) for node in nodes)
        return (-lineno, -col_offset)

    # This adds the first_token and last_token attributes to nodes.
    atok = source.asttokens()

    expressions.sort(key=closeness, reverse=True)
    frame_vars = {
        atok.get_text(nodes[0]): value
        for nodes, value in expressions[: serializer.MAX_DATABAG_BREADTH]
    }
    return serializer.serialize(frame_vars, is_vars=True)
SPANSTATUS, SPANDATA, OP +from sentry_sdk_alpha.integrations import DidNotEnable, Integration +from sentry_sdk_alpha.scope import should_send_default_pii +from sentry_sdk_alpha.tracing import Span +from sentry_sdk_alpha.utils import capture_internal_exceptions, _serialize_span_attribute + +try: + from pymongo import monitoring +except ImportError: + raise DidNotEnable("Pymongo not installed") + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Dict, Union + + from pymongo.monitoring import ( + CommandFailedEvent, + CommandStartedEvent, + CommandSucceededEvent, + ) + + +SAFE_COMMAND_ATTRIBUTES = [ + "insert", + "ordered", + "find", + "limit", + "singleBatch", + "aggregate", + "createIndexes", + "indexes", + "delete", + "findAndModify", + "renameCollection", + "to", + "drop", +] + + +def _strip_pii(command): + # type: (Dict[str, Any]) -> Dict[str, Any] + for key in command: + is_safe_field = key in SAFE_COMMAND_ATTRIBUTES + if is_safe_field: + # Skip if safe key + continue + + update_db_command = key == "update" and "findAndModify" not in command + if update_db_command: + # Also skip "update" db command because it is save. + # There is also an "update" key in the "findAndModify" command, which is NOT safe! 
+ continue + + # Special stripping for documents + is_document = key == "documents" + if is_document: + for doc in command[key]: + for doc_key in doc: + doc[doc_key] = "%s" + continue + + # Special stripping for dict style fields + is_dict_field = key in ["filter", "query", "update"] + if is_dict_field: + for item_key in command[key]: + command[key][item_key] = "%s" + continue + + # For pipeline fields strip the `$match` dict + is_pipeline_field = key == "pipeline" + if is_pipeline_field: + for pipeline in command[key]: + for match_key in pipeline["$match"] if "$match" in pipeline else []: + pipeline["$match"][match_key] = "%s" + continue + + # Default stripping + command[key] = "%s" + + return command + + +def _get_db_data(event): + # type: (Any) -> Dict[str, Any] + data = {} + + data[SPANDATA.DB_SYSTEM] = "mongodb" + + db_name = event.database_name + if db_name is not None: + data[SPANDATA.DB_NAME] = db_name + + server_address = event.connection_id[0] + if server_address is not None: + data[SPANDATA.SERVER_ADDRESS] = server_address + + server_port = event.connection_id[1] + if server_port is not None: + data[SPANDATA.SERVER_PORT] = server_port + + return data + + +class CommandTracer(monitoring.CommandListener): + def __init__(self): + # type: () -> None + self._ongoing_operations = {} # type: Dict[int, Span] + + def _operation_key(self, event): + # type: (Union[CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent]) -> int + return event.request_id + + def started(self, event): + # type: (CommandStartedEvent) -> None + if sentry_sdk_alpha.get_client().get_integration(PyMongoIntegration) is None: + return + + with capture_internal_exceptions(): + command = dict(copy.deepcopy(event.command)) + + command.pop("$db", None) + command.pop("$clusterTime", None) + command.pop("$signature", None) + + data = { + SPANDATA.DB_NAME: event.database_name, + SPANDATA.DB_SYSTEM: "mongodb", + SPANDATA.DB_OPERATION: event.command_name, + SPANDATA.DB_MONGODB_COLLECTION: 
class CommandTracer(monitoring.CommandListener):
    """PyMongo command listener creating one Sentry span per db command."""

    def __init__(self):
        # type: () -> None
        # Maps request_id -> open span so success/failure can close it.
        self._ongoing_operations = {}  # type: Dict[int, Span]

    def _operation_key(self, event):
        # type: (Union[CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent]) -> int
        return event.request_id

    def started(self, event):
        # type: (CommandStartedEvent) -> None
        if sentry_sdk_alpha.get_client().get_integration(PyMongoIntegration) is None:
            return

        with capture_internal_exceptions():
            command = dict(copy.deepcopy(event.command))

            # Internal bookkeeping fields are never interesting in a span.
            command.pop("$db", None)
            command.pop("$clusterTime", None)
            command.pop("$signature", None)

            data = {
                SPANDATA.DB_NAME: event.database_name,
                SPANDATA.DB_SYSTEM: "mongodb",
                SPANDATA.DB_OPERATION: event.command_name,
                SPANDATA.DB_MONGODB_COLLECTION: command.get(event.command_name),
            }

            try:
                data["net.peer.name"] = event.connection_id[0]
                data["net.peer.port"] = str(event.connection_id[1])
            except TypeError:
                # connection_id may not be subscriptable.
                pass

            try:
                lsid = command.pop("lsid")["id"]
                data["session_id"] = str(lsid)
            except KeyError:
                pass

            if not should_send_default_pii():
                command = _strip_pii(command)

            query = _serialize_span_attribute(command)
            span = sentry_sdk_alpha.start_span(
                op=OP.DB,
                name=query,
                origin=PyMongoIntegration.origin,
                only_if_parent=True,
            )

            with capture_internal_exceptions():
                sentry_sdk_alpha.add_breadcrumb(
                    message=query, category="query", type=OP.DB, data=data
                )

            for key, value in data.items():
                span.set_attribute(key, value)

            for key, value in _get_db_data(event).items():
                span.set_attribute(key, value)

            span.set_attribute("operation_id", event.operation_id)
            span.set_attribute("request_id", event.request_id)

            self._ongoing_operations[self._operation_key(event)] = span.__enter__()

    def failed(self, event):
        # type: (CommandFailedEvent) -> None
        if sentry_sdk_alpha.get_client().get_integration(PyMongoIntegration) is None:
            return

        try:
            span = self._ongoing_operations.pop(self._operation_key(event))
        except KeyError:
            return
        span.set_status(SPANSTATUS.INTERNAL_ERROR)
        span.__exit__(None, None, None)

    def succeeded(self, event):
        # type: (CommandSucceededEvent) -> None
        if sentry_sdk_alpha.get_client().get_integration(PyMongoIntegration) is None:
            return

        try:
            span = self._ongoing_operations.pop(self._operation_key(event))
        except KeyError:
            return
        span.set_status(SPANSTATUS.OK)
        span.__exit__(None, None, None)


class PyMongoIntegration(Integration):
    """Trace MongoDB commands issued through PyMongo."""

    identifier = "pymongo"
    origin = f"auto.db.{identifier}"

    @staticmethod
    def setup_once():
        # type: () -> None
        monitoring.register(CommandTracer())
if getattr(Request, "authenticated_userid", None):

    def authenticated_userid(request):
        # type: (Request) -> Optional[Any]
        """Return the authenticated user id via the modern Pyramid request API."""
        return request.authenticated_userid

else:
    # bw-compat for pyramid < 1.5
    from pyramid.security import authenticated_userid  # type: ignore


TRANSACTION_STYLE_VALUES = ("route_name", "route_pattern")
class PyramidIntegration(Integration):
    """Capture errors and name transactions for Pyramid applications."""

    identifier = "pyramid"
    origin = f"auto.http.{identifier}"

    # Overwritten per-instance in __init__; class default kept for typing.
    transaction_style = ""

    def __init__(self, transaction_style="route_name"):
        # type: (str) -> None
        if transaction_style not in TRANSACTION_STYLE_VALUES:
            raise ValueError(
                "Invalid value for transaction_style: %s (must be in %s)"
                % (transaction_style, TRANSACTION_STYLE_VALUES)
            )
        self.transaction_style = transaction_style

    @staticmethod
    def setup_once():
        # type: () -> None
        from pyramid import router

        # Patch view dispatch so every request names its transaction and
        # installs a request-scoped event processor.
        old_call_view = router._call_view

        @functools.wraps(old_call_view)
        def sentry_patched_call_view(registry, request, *args, **kwargs):
            # type: (Any, Request, *Any, **Any) -> Response
            integration = sentry_sdk_alpha.get_client().get_integration(PyramidIntegration)
            if integration is None:
                return old_call_view(registry, request, *args, **kwargs)

            _set_transaction_name_and_source(
                sentry_sdk_alpha.get_current_scope(), integration.transaction_style, request
            )
            # weakref.ref so the processor does not keep the request alive.
            scope = sentry_sdk_alpha.get_isolation_scope()
            scope.add_event_processor(
                _make_event_processor(weakref.ref(request), integration)
            )

            return old_call_view(registry, request, *args, **kwargs)

        router._call_view = sentry_patched_call_view

        # Patch exception views so errors that Pyramid converts to a 500
        # response are still reported.
        if hasattr(Request, "invoke_exception_view"):
            old_invoke_exception_view = Request.invoke_exception_view

            def sentry_patched_invoke_exception_view(self, *args, **kwargs):
                # type: (Request, *Any, **Any) -> Any
                rv = old_invoke_exception_view(self, *args, **kwargs)

                if (
                    self.exc_info
                    and all(self.exc_info)
                    and rv.status_int == 500
                    and sentry_sdk_alpha.get_client().get_integration(PyramidIntegration)
                    is not None
                ):
                    _capture_exception(self.exc_info)

                return rv

            Request.invoke_exception_view = sentry_patched_invoke_exception_view

        # Wrap the WSGI entry point so uncaught exceptions are captured and
        # re-raised, and requests run inside Sentry's WSGI middleware.
        old_wsgi_call = router.Router.__call__

        @ensure_integration_enabled(PyramidIntegration, old_wsgi_call)
        def sentry_patched_wsgi_call(self, environ, start_response):
            # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
            def sentry_patched_inner_wsgi_call(environ, start_response):
                # type: (Dict[str, Any], Callable[..., Any]) -> Any
                try:
                    return old_wsgi_call(self, environ, start_response)
                except Exception:
                    einfo = sys.exc_info()
                    _capture_exception(einfo)
                    reraise(*einfo)

            middleware = SentryWsgiMiddleware(
                sentry_patched_inner_wsgi_call,
                span_origin=PyramidIntegration.origin,
            )
            return middleware(environ, start_response)

        router.Router.__call__ = sentry_patched_wsgi_call
@ensure_integration_enabled(PyramidIntegration)
def _capture_exception(exc_info):
    # type: (ExcInfo) -> None
    """Report *exc_info* to Sentry unless it is a Pyramid HTTP exception."""
    # HTTPExceptions are Pyramid's way of producing responses (redirects,
    # 404s, ...) -- they are not errors.
    if exc_info[0] is None or issubclass(exc_info[0], HTTPException):
        return

    event, hint = event_from_exception(
        exc_info,
        client_options=sentry_sdk_alpha.get_client().options,
        mechanism={"type": "pyramid", "handled": False},
    )

    sentry_sdk_alpha.capture_event(event, hint=hint)


def _set_transaction_name_and_source(scope, transaction_style, request):
    # type: (sentry_sdk_alpha.Scope, str, Request) -> None
    """Name the transaction from the matched route; never raise."""
    try:
        name_for_style = {
            "route_name": request.matched_route.name,
            "route_pattern": request.matched_route.pattern,
        }
        scope.set_transaction_name(
            name_for_style[transaction_style],
            source=SOURCE_FOR_STYLE[transaction_style],
        )
    except Exception:
        # e.g. no matched route -- leave the transaction name unchanged.
        pass


class PyramidRequestExtractor(RequestExtractor):
    """Adapt a Pyramid/WebOb request to the SDK's RequestExtractor interface."""

    def url(self):
        # type: () -> str
        return self.request.path_url

    def env(self):
        # type: () -> Dict[str, str]
        return self.request.environ

    def cookies(self):
        # type: () -> RequestCookies
        return self.request.cookies

    def raw_data(self):
        # type: () -> str
        return self.request.text

    def form(self):
        # type: () -> Dict[str, str]
        # Plain form fields only; anything with a filename is a file upload.
        return {
            key: value
            for key, value in self.request.POST.items()
            if not getattr(value, "filename", None)
        }

    def files(self):
        # type: () -> Dict[str, _FieldStorageWithFile]
        return {
            key: value
            for key, value in self.request.POST.items()
            if getattr(value, "filename", None)
        }

    def size_of_file(self, postdata):
        # type: (_FieldStorageWithFile) -> int
        """Best-effort size of an uploaded file; 0 when it cannot be stat'ed."""
        file = postdata.file
        try:
            return os.fstat(file.fileno()).st_size
        except Exception:
            return 0


def _make_event_processor(weak_request, integration):
    # type: (Callable[[], Request], PyramidIntegration) -> EventProcessor
    """Build an event processor that attaches request data to events."""
    def pyramid_event_processor(event, hint):
        # type: (Event, Dict[str, Any]) -> Event
        # The request is held weakly; it may already be gone by event time.
        request = weak_request()
        if request is None:
            return event

        with capture_internal_exceptions():
            PyramidRequestExtractor(request).extract_into_event(event)

        if should_send_default_pii():
            with capture_internal_exceptions():
                user_info = event.setdefault("user", {})
                user_info.setdefault("id", authenticated_userid(request))

        return event

    return pyramid_event_processor
class QuartIntegration(Integration):
    """Capture errors and name transactions for Quart applications."""

    identifier = "quart"
    origin = f"auto.http.{identifier}"

    transaction_style = ""

    def __init__(self, transaction_style="endpoint"):
        # type: (str) -> None
        if transaction_style not in TRANSACTION_STYLE_VALUES:
            raise ValueError(
                "Invalid value for transaction_style: %s (must be in %s)"
                % (transaction_style, TRANSACTION_STYLE_VALUES)
            )
        self.transaction_style = transaction_style

    @staticmethod
    def setup_once():
        # type: () -> None
        # Hook every signal Quart emits for request/websocket lifecycle
        # and for raised exceptions.
        request_started.connect(_request_websocket_started)
        websocket_started.connect(_request_websocket_started)
        got_background_exception.connect(_capture_exception)
        got_request_exception.connect(_capture_exception)
        got_websocket_exception.connect(_capture_exception)

        patch_asgi_app()
        patch_scaffold_route()


def patch_asgi_app():
    # type: () -> None
    """Route Quart's ASGI entry point through Sentry's ASGI middleware."""
    old_app = Quart.__call__

    async def sentry_patched_asgi_app(self, scope, receive, send):
        # type: (Any, Any, Any, Any) -> Any
        if sentry_sdk_alpha.get_client().get_integration(QuartIntegration) is None:
            return await old_app(self, scope, receive, send)

        middleware = SentryAsgiMiddleware(
            lambda *a, **kw: old_app(self, *a, **kw),
            span_origin=QuartIntegration.origin,
        )
        middleware.__call__ = middleware._run_asgi3
        return await middleware(scope, receive, send)

    Quart.__call__ = sentry_patched_asgi_app
@ensure_integration_enabled(QuartIntegration, old_func) + def _sentry_func(*args, **kwargs): + # type: (*Any, **Any) -> Any + current_scope = sentry_sdk_alpha.get_current_scope() + if current_scope.root_span is not None: + current_scope.root_span.update_active_thread() + + sentry_scope = sentry_sdk_alpha.get_isolation_scope() + if sentry_scope.profile is not None: + sentry_scope.profile.update_active_thread_id() + + return old_func(*args, **kwargs) + + return old_decorator(_sentry_func) + + return old_decorator(old_func) + + return decorator + + Scaffold.route = _sentry_route + + +def _set_transaction_name_and_source(scope, transaction_style, request): + # type: (sentry_sdk.Scope, str, Request) -> None + + try: + name_for_style = { + "url": request.url_rule.rule, + "endpoint": request.url_rule.endpoint, + } + scope.set_transaction_name( + name_for_style[transaction_style], + source=SOURCE_FOR_STYLE[transaction_style], + ) + except Exception: + pass + + +async def _request_websocket_started(app, **kwargs): + # type: (Quart, **Any) -> None + integration = sentry_sdk_alpha.get_client().get_integration(QuartIntegration) + if integration is None: + return + + if has_request_context(): + request_websocket = request._get_current_object() + if has_websocket_context(): + request_websocket = websocket._get_current_object() + + # Set the transaction name here, but rely on ASGI middleware + # to actually start the transaction + _set_transaction_name_and_source( + sentry_sdk_alpha.get_current_scope(), integration.transaction_style, request_websocket + ) + + scope = sentry_sdk_alpha.get_isolation_scope() + evt_processor = _make_request_event_processor(app, request_websocket, integration) + scope.add_event_processor(evt_processor) + + +def _make_request_event_processor(app, request, integration): + # type: (Quart, Request, QuartIntegration) -> EventProcessor + def inner(event, hint): + # type: (Event, dict[str, Any]) -> Event + # if the request is gone we are fine not logging the 
data from + # it. This might happen if the processor is pushed away to + # another thread. + if request is None: + return event + + with capture_internal_exceptions(): + # TODO: Figure out what to do with request body. Methods on request + # are async, but event processors are not. + + request_info = event.setdefault("request", {}) + request_info["url"] = request.url + request_info["query_string"] = request.query_string + request_info["method"] = request.method + request_info["headers"] = _filter_headers(dict(request.headers)) + + if should_send_default_pii(): + request_info["env"] = {"REMOTE_ADDR": request.access_route[0]} + _add_user_to_event(event) + + return event + + return inner + + +async def _capture_exception(sender, exception, **kwargs): + # type: (Quart, Union[ValueError, BaseException], **Any) -> None + integration = sentry_sdk_alpha.get_client().get_integration(QuartIntegration) + if integration is None: + return + + event, hint = event_from_exception( + exception, + client_options=sentry_sdk_alpha.get_client().options, + mechanism={"type": "quart", "handled": False}, + ) + + sentry_sdk_alpha.capture_event(event, hint=hint) + + +def _add_user_to_event(event): + # type: (Event) -> None + if quart_auth is None: + return + + user = quart_auth.current_user + if user is None: + return + + with capture_internal_exceptions(): + user_info = event.setdefault("user", {}) + + user_info["id"] = quart_auth.current_user._auth_id diff --git a/src/sentry_sdk_alpha/integrations/ray.py b/src/sentry_sdk_alpha/integrations/ray.py new file mode 100644 index 00000000000000..03bcd9fb15708e --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/ray.py @@ -0,0 +1,147 @@ +import inspect +import sys + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP, SPANSTATUS +from sentry_sdk_alpha.integrations import _check_minimum_version, DidNotEnable, Integration +from sentry_sdk_alpha.tracing import TransactionSource +from sentry_sdk_alpha.utils import ( + 
event_from_exception, + logger, + package_version, + qualname_from_function, + reraise, +) + +try: + import ray # type: ignore[import-not-found] +except ImportError: + raise DidNotEnable("Ray not installed.") +import functools + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from collections.abc import Callable + from typing import Any, Optional + from sentry_sdk_alpha.utils import ExcInfo + +DEFAULT_TRANSACTION_NAME = "unknown Ray function" + + +def _check_sentry_initialized(): + # type: () -> None + if sentry_sdk_alpha.get_client().is_active(): + return + + logger.debug( + "[Tracing] Sentry not initialized in ray cluster worker, performance data will be discarded." + ) + + +def _patch_ray_remote(): + # type: () -> None + old_remote = ray.remote + + @functools.wraps(old_remote) + def new_remote(f, *args, **kwargs): + # type: (Callable[..., Any], *Any, **Any) -> Callable[..., Any] + if inspect.isclass(f): + # Ray Actors + # (https://docs.ray.io/en/latest/ray-core/actors.html) + # are not supported + # (Only Ray Tasks are supported) + return old_remote(f, *args, *kwargs) + + def _f(*f_args, _tracing=None, **f_kwargs): + # type: (Any, Optional[dict[str, Any]], Any) -> Any + """ + Ray Worker + """ + _check_sentry_initialized() + + root_span_name = qualname_from_function(f) or DEFAULT_TRANSACTION_NAME + sentry_sdk_alpha.get_current_scope().set_transaction_name( + root_span_name, + source=TransactionSource.TASK, + ) + with sentry_sdk_alpha.continue_trace(_tracing or {}): + with sentry_sdk_alpha.start_span( + op=OP.QUEUE_TASK_RAY, + name=root_span_name, + origin=RayIntegration.origin, + source=TransactionSource.TASK, + ) as root_span: + try: + result = f(*f_args, **f_kwargs) + root_span.set_status(SPANSTATUS.OK) + except Exception: + root_span.set_status(SPANSTATUS.INTERNAL_ERROR) + exc_info = sys.exc_info() + _capture_exception(exc_info) + reraise(*exc_info) + + return result + + rv = old_remote(_f, *args, *kwargs) + old_remote_method = rv.remote + + def 
_remote_method_with_header_propagation(*args, **kwargs): + # type: (*Any, **Any) -> Any + """ + Ray Client + """ + with sentry_sdk_alpha.start_span( + op=OP.QUEUE_SUBMIT_RAY, + name=qualname_from_function(f), + origin=RayIntegration.origin, + only_if_parent=True, + ) as span: + tracing = { + k: v + for k, v in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers() + } + try: + result = old_remote_method(*args, **kwargs, _tracing=tracing) + span.set_status(SPANSTATUS.OK) + except Exception: + span.set_status(SPANSTATUS.INTERNAL_ERROR) + exc_info = sys.exc_info() + _capture_exception(exc_info) + reraise(*exc_info) + + return result + + rv.remote = _remote_method_with_header_propagation + + return rv + + ray.remote = new_remote + + +def _capture_exception(exc_info, **kwargs): + # type: (ExcInfo, **Any) -> None + client = sentry_sdk_alpha.get_client() + + event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={ + "handled": False, + "type": RayIntegration.identifier, + }, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) + + +class RayIntegration(Integration): + identifier = "ray" + origin = f"auto.queue.{identifier}" + + @staticmethod + def setup_once(): + # type: () -> None + version = package_version("ray") + _check_minimum_version(RayIntegration, version) + + _patch_ray_remote() diff --git a/src/sentry_sdk_alpha/integrations/redis/__init__.py b/src/sentry_sdk_alpha/integrations/redis/__init__.py new file mode 100644 index 00000000000000..63e523b1f5f7ae --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/redis/__init__.py @@ -0,0 +1,38 @@ +from sentry_sdk_alpha.integrations import Integration, DidNotEnable +from sentry_sdk_alpha.integrations.redis.consts import _DEFAULT_MAX_DATA_SIZE +from sentry_sdk_alpha.integrations.redis.rb import _patch_rb +from sentry_sdk_alpha.integrations.redis.redis import _patch_redis +from sentry_sdk_alpha.integrations.redis.redis_cluster import _patch_redis_cluster +from 
sentry_sdk_alpha.integrations.redis.redis_py_cluster_legacy import _patch_rediscluster +from sentry_sdk_alpha.utils import logger + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Optional + + +class RedisIntegration(Integration): + identifier = "redis" + + def __init__(self, max_data_size=_DEFAULT_MAX_DATA_SIZE, cache_prefixes=None): + # type: (int, Optional[list[str]]) -> None + self.max_data_size = max_data_size + self.cache_prefixes = cache_prefixes if cache_prefixes is not None else [] + + @staticmethod + def setup_once(): + # type: () -> None + try: + from redis import StrictRedis, client + except ImportError: + raise DidNotEnable("Redis client not installed") + + _patch_redis(StrictRedis, client) + _patch_redis_cluster() + _patch_rb() + + try: + _patch_rediscluster() + except Exception: + logger.exception("Error occurred while patching `rediscluster` library") diff --git a/src/sentry_sdk_alpha/integrations/redis/_async_common.py b/src/sentry_sdk_alpha/integrations/redis/_async_common.py new file mode 100644 index 00000000000000..0200aa45423c02 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/redis/_async_common.py @@ -0,0 +1,120 @@ +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP +from sentry_sdk_alpha.integrations.redis.consts import SPAN_ORIGIN +from sentry_sdk_alpha.integrations.redis.modules.caches import ( + _compile_cache_span_properties, + _get_cache_data, +) +from sentry_sdk_alpha.integrations.redis.modules.queries import _compile_db_span_properties +from sentry_sdk_alpha.integrations.redis.utils import ( + _create_breadcrumb, + _get_client_data, + _get_pipeline_data, + _update_span, +) +from sentry_sdk_alpha.utils import capture_internal_exceptions + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from collections.abc import Callable + from typing import Any, Union + from redis.asyncio.client import Pipeline, StrictRedis + from redis.asyncio.cluster import ClusterPipeline, RedisCluster + + 
def patch_redis_async_pipeline(
    pipeline_cls, is_cluster, get_command_args_fn, get_db_data_fn
):
    # type: (Union[type[Pipeline[Any]], type[ClusterPipeline[Any]]], bool, Any, Callable[[Any], dict[str, Any]]) -> None
    """Wrap an async redis pipeline's ``execute`` in a DB span + breadcrumb.

    No-ops (calls straight through) when the RedisIntegration is not enabled.
    """
    old_execute = pipeline_cls.execute

    from sentry_sdk_alpha.integrations.redis import RedisIntegration

    async def _sentry_execute(self, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        if sentry_sdk_alpha.get_client().get_integration(RedisIntegration) is None:
            return await old_execute(self, *args, **kwargs)

        with sentry_sdk_alpha.start_span(
            op=OP.DB_REDIS,
            name="redis.pipeline.execute",
            origin=SPAN_ORIGIN,
            only_if_parent=True,
        ) as span:
            with capture_internal_exceptions():
                span_data = get_db_data_fn(self)
                # Cluster pipelines have no transactions and store commands
                # on a differently named attribute.
                pipeline_data = _get_pipeline_data(
                    is_cluster=is_cluster,
                    get_command_args_fn=get_command_args_fn,
                    is_transaction=False if is_cluster else self.is_transaction,
                    command_stack=(
                        self._command_stack if is_cluster else self.command_stack
                    ),
                )
                _update_span(span, span_data, pipeline_data)
                _create_breadcrumb("redis.pipeline.execute", span_data, pipeline_data)

            return await old_execute(self, *args, **kwargs)

    pipeline_cls.execute = _sentry_execute  # type: ignore


def patch_redis_async_client(cls, is_cluster, get_db_data_fn):
    # type: (Union[type[StrictRedis[Any]], type[RedisCluster[Any]]], bool, Callable[[Any], dict[str, Any]]) -> None
    """Wrap an async redis client's ``execute_command`` with DB/cache spans.

    A cache span is only opened for commands recognized as cache operations;
    the DB span is always opened. Spans are entered manually because the
    cache span must stay open across the awaited command to record the result.
    """
    old_execute_command = cls.execute_command

    from sentry_sdk_alpha.integrations.redis import RedisIntegration

    async def _sentry_execute_command(self, name, *args, **kwargs):
        # type: (Any, str, *Any, **Any) -> Any
        integration = sentry_sdk_alpha.get_client().get_integration(RedisIntegration)
        if integration is None:
            return await old_execute_command(self, name, *args, **kwargs)

        cache_properties = _compile_cache_span_properties(
            name,
            args,
            kwargs,
            integration,
        )

        cache_span = None
        if cache_properties["is_cache_key"] and cache_properties["op"] is not None:
            cache_span = sentry_sdk_alpha.start_span(
                op=cache_properties["op"],
                name=cache_properties["description"],
                origin=SPAN_ORIGIN,
                only_if_parent=True,
            )
            cache_span.__enter__()

        db_properties = _compile_db_span_properties(integration, name, args)

        db_span = sentry_sdk_alpha.start_span(
            op=db_properties["op"],
            name=db_properties["description"],
            origin=SPAN_ORIGIN,
            only_if_parent=True,
        )
        db_span.__enter__()

        db_span_data = get_db_data_fn(self)
        db_client_span_data = _get_client_data(is_cluster, name, *args)
        _update_span(db_span, db_span_data, db_client_span_data)
        _create_breadcrumb(
            db_properties["description"], db_span_data, db_client_span_data
        )

        try:
            value = await old_execute_command(self, name, *args, **kwargs)
        except BaseException as exc:
            # BUGFIX: previously the manually-entered spans were never exited
            # when the command raised, leaking open spans. Close them (in
            # reverse entry order) as a `with` statement would, then re-raise.
            exc_info = (type(exc), exc, exc.__traceback__)
            db_span.__exit__(*exc_info)
            if cache_span:
                cache_span.__exit__(*exc_info)
            raise

        db_span.__exit__(None, None, None)

        if cache_span:
            cache_span_data = _get_cache_data(self, cache_properties, value)
            _update_span(cache_span, cache_span_data)
            cache_span.__exit__(None, None, None)

        return value

    cls.execute_command = _sentry_execute_command  # type: ignore
import Callable + from typing import Any + + +def patch_redis_pipeline( + pipeline_cls, + is_cluster, + get_command_args_fn, + get_db_data_fn, +): + # type: (Any, bool, Any, Callable[[Any], dict[str, Any]]) -> None + old_execute = pipeline_cls.execute + + from sentry_sdk_alpha.integrations.redis import RedisIntegration + + def sentry_patched_execute(self, *args, **kwargs): + # type: (Any, *Any, **Any) -> Any + if sentry_sdk_alpha.get_client().get_integration(RedisIntegration) is None: + return old_execute(self, *args, **kwargs) + + with sentry_sdk_alpha.start_span( + op=OP.DB_REDIS, + name="redis.pipeline.execute", + origin=SPAN_ORIGIN, + only_if_parent=True, + ) as span: + with capture_internal_exceptions(): + span_data = get_db_data_fn(self) + pipeline_data = _get_pipeline_data( + is_cluster=is_cluster, + get_command_args_fn=get_command_args_fn, + is_transaction=False if is_cluster else self.transaction, + command_stack=self.command_stack, + ) + _update_span(span, span_data, pipeline_data) + _create_breadcrumb("redis.pipeline.execute", span_data, pipeline_data) + + return old_execute(self, *args, **kwargs) + + pipeline_cls.execute = sentry_patched_execute + + +def patch_redis_client(cls, is_cluster, get_db_data_fn): + # type: (Any, bool, Callable[[Any], dict[str, Any]]) -> None + """ + This function can be used to instrument custom redis client classes or + subclasses. 
+ """ + old_execute_command = cls.execute_command + + from sentry_sdk_alpha.integrations.redis import RedisIntegration + + def sentry_patched_execute_command(self, name, *args, **kwargs): + # type: (Any, str, *Any, **Any) -> Any + integration = sentry_sdk_alpha.get_client().get_integration(RedisIntegration) + if integration is None: + return old_execute_command(self, name, *args, **kwargs) + + cache_properties = _compile_cache_span_properties( + name, + args, + kwargs, + integration, + ) + + cache_span = None + if cache_properties["is_cache_key"] and cache_properties["op"] is not None: + cache_span = sentry_sdk_alpha.start_span( + op=cache_properties["op"], + name=cache_properties["description"], + origin=SPAN_ORIGIN, + only_if_parent=True, + ) + cache_span.__enter__() + + db_properties = _compile_db_span_properties(integration, name, args) + + db_span = sentry_sdk_alpha.start_span( + op=db_properties["op"], + name=db_properties["description"], + origin=SPAN_ORIGIN, + only_if_parent=True, + ) + db_span.__enter__() + + db_span_data = get_db_data_fn(self) + db_client_span_data = _get_client_data(is_cluster, name, *args) + _update_span(db_span, db_span_data, db_client_span_data) + _create_breadcrumb( + db_properties["description"], db_span_data, db_client_span_data + ) + + value = old_execute_command(self, name, *args, **kwargs) + + db_span.__exit__(None, None, None) + + if cache_span: + cache_span_data = _get_cache_data(self, cache_properties, value) + _update_span(cache_span, cache_span_data) + cache_span.__exit__(None, None, None) + + return value + + cls.execute_command = sentry_patched_execute_command diff --git a/src/sentry_sdk_alpha/integrations/redis/consts.py b/src/sentry_sdk_alpha/integrations/redis/consts.py new file mode 100644 index 00000000000000..737e8297352158 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/redis/consts.py @@ -0,0 +1,19 @@ +SPAN_ORIGIN = "auto.db.redis" + +_SINGLE_KEY_COMMANDS = frozenset( + ["decr", "decrby", "get", "incr", 
"incrby", "pttl", "set", "setex", "setnx", "ttl"], +) +_MULTI_KEY_COMMANDS = frozenset( + [ + "del", + "touch", + "unlink", + "mget", + ], +) +_COMMANDS_INCLUDING_SENSITIVE_DATA = [ + "auth", +] +_MAX_NUM_ARGS = 10 # Trim argument lists to this many values +_MAX_NUM_COMMANDS = 10 # Trim command lists to this many values +_DEFAULT_MAX_DATA_SIZE = 1024 diff --git a/src/sentry_sdk_alpha/integrations/redis/modules/__init__.py b/src/sentry_sdk_alpha/integrations/redis/modules/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/src/sentry_sdk_alpha/integrations/redis/modules/caches.py b/src/sentry_sdk_alpha/integrations/redis/modules/caches.py new file mode 100644 index 00000000000000..63b983a688738e --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/redis/modules/caches.py @@ -0,0 +1,124 @@ +""" +Code used for the Caches module in Sentry +""" + +from sentry_sdk_alpha.consts import OP, SPANDATA +from sentry_sdk_alpha.integrations.redis.utils import _get_safe_key, _key_as_string +from sentry_sdk_alpha.utils import capture_internal_exceptions + +GET_COMMANDS = ("get", "mget") +SET_COMMANDS = ("set", "setex") + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from sentry_sdk_alpha.integrations.redis import RedisIntegration + from typing import Any, Optional + + +def _get_op(name): + # type: (str) -> Optional[str] + op = None + if name.lower() in GET_COMMANDS: + op = OP.CACHE_GET + elif name.lower() in SET_COMMANDS: + op = OP.CACHE_PUT + + return op + + +def _compile_cache_span_properties(redis_command, args, kwargs, integration): + # type: (str, tuple[Any, ...], dict[str, Any], RedisIntegration) -> dict[str, Any] + key = _get_safe_key(redis_command, args, kwargs) + key_as_string = _key_as_string(key) + keys_as_string = key_as_string.split(", ") + + is_cache_key = False + for prefix in integration.cache_prefixes: + for kee in keys_as_string: + if kee.startswith(prefix): + is_cache_key = True + break + if is_cache_key: + break + + 
value = None + if redis_command.lower() in SET_COMMANDS: + value = args[-1] + + properties = { + "op": _get_op(redis_command), + "description": _get_cache_span_description( + redis_command, args, kwargs, integration + ), + "key": key, + "key_as_string": key_as_string, + "redis_command": redis_command.lower(), + "is_cache_key": is_cache_key, + "value": value, + } + + return properties + + +def _get_cache_span_description(redis_command, args, kwargs, integration): + # type: (str, tuple[Any, ...], dict[str, Any], RedisIntegration) -> str + description = _key_as_string(_get_safe_key(redis_command, args, kwargs)) + + data_should_be_truncated = ( + integration.max_data_size and len(description) > integration.max_data_size + ) + if data_should_be_truncated: + description = description[: integration.max_data_size - len("...")] + "..." + + return description + + +def _get_cache_data(redis_client, properties, return_value): + # type: (Any, dict[str, Any], Optional[Any]) -> dict[str, Any] + data = {} + + with capture_internal_exceptions(): + data[SPANDATA.CACHE_KEY] = properties["key"] + + if properties["redis_command"] in GET_COMMANDS: + if return_value is not None: + data[SPANDATA.CACHE_HIT] = True + size = ( + len(str(return_value).encode("utf-8")) + if not isinstance(return_value, bytes) + else len(return_value) + ) + data[SPANDATA.CACHE_ITEM_SIZE] = size + else: + data[SPANDATA.CACHE_HIT] = False + + elif properties["redis_command"] in SET_COMMANDS: + if properties["value"] is not None: + size = ( + len(properties["value"].encode("utf-8")) + if not isinstance(properties["value"], bytes) + else len(properties["value"]) + ) + data[SPANDATA.CACHE_ITEM_SIZE] = size + + try: + connection_params = redis_client.connection_pool.connection_kwargs + except AttributeError: + # If it is a cluster, there is no connection_pool attribute so we + # need to get the default node from the cluster instance + default_node = redis_client.get_default_node() + connection_params = { + "host": 
default_node.host, + "port": default_node.port, + } + + host = connection_params.get("host") + if host is not None: + data[SPANDATA.NETWORK_PEER_ADDRESS] = host + + port = connection_params.get("port") + if port is not None: + data[SPANDATA.NETWORK_PEER_PORT] = port + + return data diff --git a/src/sentry_sdk_alpha/integrations/redis/modules/queries.py b/src/sentry_sdk_alpha/integrations/redis/modules/queries.py new file mode 100644 index 00000000000000..ec4db8756960c6 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/redis/modules/queries.py @@ -0,0 +1,71 @@ +""" +Code used for the Queries module in Sentry +""" + +from sentry_sdk_alpha.consts import OP, SPANDATA +from sentry_sdk_alpha.integrations.redis.utils import _get_safe_command +from sentry_sdk_alpha.utils import capture_internal_exceptions + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from redis import Redis + from sentry_sdk_alpha.integrations.redis import RedisIntegration + from typing import Any + + +def _compile_db_span_properties(integration, redis_command, args): + # type: (RedisIntegration, str, tuple[Any, ...]) -> dict[str, Any] + description = _get_db_span_description(integration, redis_command, args) + + properties = { + "op": OP.DB_REDIS, + "description": description, + } + + return properties + + +def _get_db_span_description(integration, command_name, args): + # type: (RedisIntegration, str, tuple[Any, ...]) -> str + description = command_name + + with capture_internal_exceptions(): + description = _get_safe_command(command_name, args) + + data_should_be_truncated = ( + integration.max_data_size and len(description) > integration.max_data_size + ) + if data_should_be_truncated: + description = description[: integration.max_data_size - len("...")] + "..." 
+ + return description + + +def _get_connection_data(connection_params): + # type: (dict[str, Any]) -> dict[str, Any] + data = { + SPANDATA.DB_SYSTEM: "redis", + } + + db = connection_params.get("db") + if db is not None: + data[SPANDATA.DB_NAME] = str(db) + + host = connection_params.get("host") + if host is not None: + data[SPANDATA.SERVER_ADDRESS] = host + + port = connection_params.get("port") + if port is not None: + data[SPANDATA.SERVER_PORT] = port + + return data + + +def _get_db_data(redis_instance): + # type: (Redis[Any]) -> dict[str, Any] + try: + return _get_connection_data(redis_instance.connection_pool.connection_kwargs) + except AttributeError: + return {} # connections_kwargs may be missing in some cases diff --git a/src/sentry_sdk_alpha/integrations/redis/rb.py b/src/sentry_sdk_alpha/integrations/redis/rb.py new file mode 100644 index 00000000000000..9a8e9af2283b50 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/redis/rb.py @@ -0,0 +1,32 @@ +""" +Instrumentation for Redis Blaster (rb) + +https://github.com/getsentry/rb +""" + +from sentry_sdk_alpha.integrations.redis._sync_common import patch_redis_client +from sentry_sdk_alpha.integrations.redis.modules.queries import _get_db_data + + +def _patch_rb(): + # type: () -> None + try: + import rb.clients # type: ignore + except ImportError: + pass + else: + patch_redis_client( + rb.clients.FanoutClient, + is_cluster=False, + get_db_data_fn=_get_db_data, + ) + patch_redis_client( + rb.clients.MappingClient, + is_cluster=False, + get_db_data_fn=_get_db_data, + ) + patch_redis_client( + rb.clients.RoutingClient, + is_cluster=False, + get_db_data_fn=_get_db_data, + ) diff --git a/src/sentry_sdk_alpha/integrations/redis/redis.py b/src/sentry_sdk_alpha/integrations/redis/redis.py new file mode 100644 index 00000000000000..560ff7f24f12d6 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/redis/redis.py @@ -0,0 +1,69 @@ +""" +Instrumentation for Redis + +https://github.com/redis/redis-py +""" + +from 
sentry_sdk_alpha.integrations.redis._sync_common import ( + patch_redis_client, + patch_redis_pipeline, +) +from sentry_sdk_alpha.integrations.redis.modules.queries import _get_db_data + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Sequence + + +def _get_redis_command_args(command): + # type: (Any) -> Sequence[Any] + return command[0] + + +def _patch_redis(StrictRedis, client): # noqa: N803 + # type: (Any, Any) -> None + patch_redis_client( + StrictRedis, + is_cluster=False, + get_db_data_fn=_get_db_data, + ) + patch_redis_pipeline( + client.Pipeline, + is_cluster=False, + get_command_args_fn=_get_redis_command_args, + get_db_data_fn=_get_db_data, + ) + try: + strict_pipeline = client.StrictPipeline + except AttributeError: + pass + else: + patch_redis_pipeline( + strict_pipeline, + is_cluster=False, + get_command_args_fn=_get_redis_command_args, + get_db_data_fn=_get_db_data, + ) + + try: + import redis.asyncio + except ImportError: + pass + else: + from sentry_sdk_alpha.integrations.redis._async_common import ( + patch_redis_async_client, + patch_redis_async_pipeline, + ) + + patch_redis_async_client( + redis.asyncio.client.StrictRedis, + is_cluster=False, + get_db_data_fn=_get_db_data, + ) + patch_redis_async_pipeline( + redis.asyncio.client.Pipeline, + False, + _get_redis_command_args, + get_db_data_fn=_get_db_data, + ) diff --git a/src/sentry_sdk_alpha/integrations/redis/redis_cluster.py b/src/sentry_sdk_alpha/integrations/redis/redis_cluster.py new file mode 100644 index 00000000000000..9bd393872388ae --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/redis/redis_cluster.py @@ -0,0 +1,101 @@ +""" +Instrumentation for RedisCluster +This is part of the main redis-py client. 
+ +https://github.com/redis/redis-py/blob/master/redis/cluster.py +""" + +from sentry_sdk_alpha.integrations.redis._sync_common import ( + patch_redis_client, + patch_redis_pipeline, +) +from sentry_sdk_alpha.integrations.redis.modules.queries import _get_connection_data +from sentry_sdk_alpha.integrations.redis.utils import _parse_rediscluster_command + +from sentry_sdk_alpha.utils import capture_internal_exceptions + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from redis import RedisCluster + from redis.asyncio.cluster import ( + RedisCluster as AsyncRedisCluster, + ClusterPipeline as AsyncClusterPipeline, + ) + + +def _get_async_cluster_db_data(async_redis_cluster_instance): + # type: (AsyncRedisCluster[Any]) -> dict[str, Any] + default_node = async_redis_cluster_instance.get_default_node() + if default_node is not None and default_node.connection_kwargs is not None: + return _get_connection_data(default_node.connection_kwargs) + else: + return {} + + +def _get_async_cluster_pipeline_db_data(async_redis_cluster_pipeline_instance): + # type: (AsyncClusterPipeline[Any]) -> dict[str, Any] + with capture_internal_exceptions(): + return _get_async_cluster_db_data( + # the AsyncClusterPipeline has always had a `_client` attr but it is private so potentially problematic and mypy + # does not recognize it - see https://github.com/redis/redis-py/blame/v5.0.0/redis/asyncio/cluster.py#L1386 + async_redis_cluster_pipeline_instance._client, # type: ignore[attr-defined] + ) + + +def _get_cluster_db_data(redis_cluster_instance): + # type: (RedisCluster[Any]) -> dict[str, Any] + default_node = redis_cluster_instance.get_default_node() + + if default_node is not None: + connection_params = { + "host": default_node.host, + "port": default_node.port, + } + return _get_connection_data(connection_params) + else: + return {} + + +def _patch_redis_cluster(): + # type: () -> None + """Patches the cluster module on redis SDK (as opposed to 
rediscluster library)""" + try: + from redis import RedisCluster, cluster + except ImportError: + pass + else: + patch_redis_client( + RedisCluster, + is_cluster=True, + get_db_data_fn=_get_cluster_db_data, + ) + patch_redis_pipeline( + cluster.ClusterPipeline, + is_cluster=True, + get_command_args_fn=_parse_rediscluster_command, + get_db_data_fn=_get_cluster_db_data, + ) + + try: + from redis.asyncio import cluster as async_cluster + except ImportError: + pass + else: + from sentry_sdk_alpha.integrations.redis._async_common import ( + patch_redis_async_client, + patch_redis_async_pipeline, + ) + + patch_redis_async_client( + async_cluster.RedisCluster, + is_cluster=True, + get_db_data_fn=_get_async_cluster_db_data, + ) + patch_redis_async_pipeline( + async_cluster.ClusterPipeline, + is_cluster=True, + get_command_args_fn=_parse_rediscluster_command, + get_db_data_fn=_get_async_cluster_pipeline_db_data, + ) diff --git a/src/sentry_sdk_alpha/integrations/redis/redis_py_cluster_legacy.py b/src/sentry_sdk_alpha/integrations/redis/redis_py_cluster_legacy.py new file mode 100644 index 00000000000000..5380b3d03a56eb --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/redis/redis_py_cluster_legacy.py @@ -0,0 +1,50 @@ +""" +Instrumentation for redis-py-cluster +The project redis-py-cluster is EOL and was integrated into redis-py starting from version 4.1.0 (Dec 26, 2021). 
+ +https://github.com/grokzen/redis-py-cluster +""" + +from sentry_sdk_alpha.integrations.redis._sync_common import ( + patch_redis_client, + patch_redis_pipeline, +) +from sentry_sdk_alpha.integrations.redis.modules.queries import _get_db_data +from sentry_sdk_alpha.integrations.redis.utils import _parse_rediscluster_command + + +def _patch_rediscluster(): + # type: () -> None + try: + import rediscluster # type: ignore + except ImportError: + return + + patch_redis_client( + rediscluster.RedisCluster, + is_cluster=True, + get_db_data_fn=_get_db_data, + ) + + # up to v1.3.6, __version__ attribute is a tuple + # from v2.0.0, __version__ is a string and VERSION a tuple + version = getattr(rediscluster, "VERSION", rediscluster.__version__) + + # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0 + # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst + if (0, 2, 0) < version < (2, 0, 0): + pipeline_cls = rediscluster.pipeline.StrictClusterPipeline + patch_redis_client( + rediscluster.StrictRedisCluster, + is_cluster=True, + get_db_data_fn=_get_db_data, + ) + else: + pipeline_cls = rediscluster.pipeline.ClusterPipeline + + patch_redis_pipeline( + pipeline_cls, + is_cluster=True, + get_command_args_fn=_parse_rediscluster_command, + get_db_data_fn=_get_db_data, + ) diff --git a/src/sentry_sdk_alpha/integrations/redis/utils.py b/src/sentry_sdk_alpha/integrations/redis/utils.py new file mode 100644 index 00000000000000..3b7a57bb8e0aa0 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/redis/utils.py @@ -0,0 +1,188 @@ +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import SPANDATA +from sentry_sdk_alpha.integrations.redis.consts import ( + _COMMANDS_INCLUDING_SENSITIVE_DATA, + _MAX_NUM_ARGS, + _MAX_NUM_COMMANDS, + _MULTI_KEY_COMMANDS, + _SINGLE_KEY_COMMANDS, +) +from sentry_sdk_alpha.scope import should_send_default_pii +from sentry_sdk_alpha.utils import SENSITIVE_DATA_SUBSTITUTE + +from typing import TYPE_CHECKING + +if 
TYPE_CHECKING: + from typing import Any, Optional, Sequence + from sentry_sdk_alpha.tracing import Span + + +TAG_KEYS = [ + "redis.command", + "redis.is_cluster", + "redis.key", + "redis.transaction", + SPANDATA.DB_OPERATION, +] + + +def _update_span(span, *data_bags): + # type: (Span, *dict[str, Any]) -> None + """ + Set tags and data on the given span to data from the given data bags. + """ + for data in data_bags: + for key, value in data.items(): + if key in TAG_KEYS: + span.set_tag(key, value) + else: + span.set_attribute(key, value) + + +def _create_breadcrumb(message, *data_bags): + # type: (str, *dict[str, Any]) -> None + """ + Create a breadcrumb containing the tags data from the given data bags. + """ + data = {} + for data_bag in data_bags: + for key, value in data_bag.items(): + if key in TAG_KEYS: + data[key] = value + + sentry_sdk_alpha.add_breadcrumb( + message=message, + type="redis", + category="redis", + data=data, + ) + + +def _get_safe_command(name, args): + # type: (str, Sequence[Any]) -> str + command_parts = [name] + + for i, arg in enumerate(args): + if i > _MAX_NUM_ARGS: + break + + name_low = name.lower() + + if name_low in _COMMANDS_INCLUDING_SENSITIVE_DATA: + command_parts.append(SENSITIVE_DATA_SUBSTITUTE) + continue + + arg_is_the_key = i == 0 + if arg_is_the_key: + command_parts.append(repr(arg)) + + else: + if should_send_default_pii(): + command_parts.append(repr(arg)) + else: + command_parts.append(SENSITIVE_DATA_SUBSTITUTE) + + command = " ".join(command_parts) + return command + + +def _safe_decode(key): + # type: (Any) -> str + if isinstance(key, bytes): + try: + return key.decode() + except UnicodeDecodeError: + return "" + + return str(key) + + +def _key_as_string(key): + # type: (Any) -> str + if isinstance(key, (dict, list, tuple)): + key = ", ".join(_safe_decode(x) for x in key) + elif isinstance(key, bytes): + key = _safe_decode(key) + elif key is None: + key = "" + else: + key = str(key) + + return key + + +def 
_get_safe_key(method_name, args, kwargs): + # type: (str, Optional[tuple[Any, ...]], Optional[dict[str, Any]]) -> Optional[tuple[str, ...]] + """ + Gets the key (or keys) from the given method_name. + The method_name could be a redis command or a django caching command + """ + key = None + + if args is not None and method_name.lower() in _MULTI_KEY_COMMANDS: + # for example redis "mget" + key = tuple(args) + + elif args is not None and len(args) >= 1: + # for example django "set_many/get_many" or redis "get" + if isinstance(args[0], (dict, list, tuple)): + key = tuple(args[0]) + else: + key = (args[0],) + + elif kwargs is not None and "key" in kwargs: + # this is a legacy case for older versions of Django + if isinstance(kwargs["key"], (list, tuple)): + if len(kwargs["key"]) > 0: + key = tuple(kwargs["key"]) + else: + if kwargs["key"] is not None: + key = (kwargs["key"],) + + return key + + +def _parse_rediscluster_command(command): + # type: (Any) -> Sequence[Any] + return command.args + + +def _get_pipeline_data(is_cluster, get_command_args_fn, is_transaction, command_stack): + # type: (bool, Any, bool, Sequence[Any]) -> dict[str, Any] + data = { + "redis.is_cluster": is_cluster, + "redis.transaction": is_transaction, + } # type: dict[str, Any] + + commands = [] + for i, arg in enumerate(command_stack): + if i >= _MAX_NUM_COMMANDS: + break + + command = get_command_args_fn(arg) + commands.append(_get_safe_command(command[0], command[1:])) + + data["redis.commands.count"] = len(command_stack) + data["redis.commands.first_ten"] = commands + + return data + + +def _get_client_data(is_cluster, name, *args): + # type: (bool, str, *Any) -> dict[str, Any] + data = { + "redis.is_cluster": is_cluster, + } # type: dict[str, Any] + + if name: + data["redis.command"] = name + data[SPANDATA.DB_OPERATION] = name + + if name and args: + name_low = name.lower() + if (name_low in _SINGLE_KEY_COMMANDS) or ( + name_low in _MULTI_KEY_COMMANDS and len(args) == 1 + ): + 
data["redis.key"] = args[0] + + return data diff --git a/src/sentry_sdk_alpha/integrations/rq.py b/src/sentry_sdk_alpha/integrations/rq.py new file mode 100644 index 00000000000000..112075972157f2 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/rq.py @@ -0,0 +1,206 @@ +import weakref + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP +from sentry_sdk_alpha.integrations import _check_minimum_version, DidNotEnable, Integration +from sentry_sdk_alpha.integrations.logging import ignore_logger +from sentry_sdk_alpha.tracing import TransactionSource +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + ensure_integration_enabled, + event_from_exception, + format_timestamp, + parse_version, +) + +try: + from rq.queue import Queue + from rq.timeouts import JobTimeoutException + from rq.version import VERSION as RQ_VERSION + from rq.worker import Worker + from rq.job import JobStatus +except ImportError: + raise DidNotEnable("RQ not installed") + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Callable + + from sentry_sdk_alpha._types import Event, EventProcessor + from sentry_sdk_alpha.utils import ExcInfo + + from rq.job import Job + +DEFAULT_TRANSACTION_NAME = "unknown RQ task" + + +JOB_PROPERTY_TO_ATTRIBUTE = { + "id": "messaging.message.id", +} + +QUEUE_PROPERTY_TO_ATTRIBUTE = { + "name": "messaging.destination.name", +} + + +class RqIntegration(Integration): + identifier = "rq" + origin = f"auto.queue.{identifier}" + + @staticmethod + def setup_once(): + # type: () -> None + version = parse_version(RQ_VERSION) + _check_minimum_version(RqIntegration, version) + + old_perform_job = Worker.perform_job + + @ensure_integration_enabled(RqIntegration, old_perform_job) + def sentry_patched_perform_job(self, job, queue, *args, **kwargs): + # type: (Any, Job, Queue, *Any, **Any) -> bool + with sentry_sdk_alpha.new_scope() as scope: + try: + transaction_name = job.func_name or DEFAULT_TRANSACTION_NAME + 
except AttributeError: + transaction_name = DEFAULT_TRANSACTION_NAME + + scope.set_transaction_name( + transaction_name, source=TransactionSource.TASK + ) + scope.clear_breadcrumbs() + scope.add_event_processor(_make_event_processor(weakref.ref(job))) + + with sentry_sdk_alpha.continue_trace( + job.meta.get("_sentry_trace_headers") or {} + ): + with sentry_sdk_alpha.start_span( + op=OP.QUEUE_TASK_RQ, + name=transaction_name, + source=TransactionSource.TASK, + origin=RqIntegration.origin, + attributes=_prepopulate_attributes(job, queue), + ): + rv = old_perform_job(self, job, queue, *args, **kwargs) + + if self.is_horse: + # We're inside of a forked process and RQ is + # about to call `os._exit`. Make sure that our + # events get sent out. + sentry_sdk_alpha.get_client().flush() + + return rv + + Worker.perform_job = sentry_patched_perform_job + + old_handle_exception = Worker.handle_exception + + def sentry_patched_handle_exception(self, job, *exc_info, **kwargs): + # type: (Worker, Any, *Any, **Any) -> Any + retry = ( + hasattr(job, "retries_left") + and job.retries_left + and job.retries_left > 0 + ) + failed = job._status == JobStatus.FAILED or job.is_failed + if failed and not retry: + _capture_exception(exc_info) + + return old_handle_exception(self, job, *exc_info, **kwargs) + + Worker.handle_exception = sentry_patched_handle_exception + + old_enqueue_job = Queue.enqueue_job + + @ensure_integration_enabled(RqIntegration, old_enqueue_job) + def sentry_patched_enqueue_job(self, job, **kwargs): + # type: (Queue, Any, **Any) -> Any + job.meta["_sentry_trace_headers"] = dict( + sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers() + ) + + return old_enqueue_job(self, job, **kwargs) + + Queue.enqueue_job = sentry_patched_enqueue_job + + ignore_logger("rq.worker") + + +def _make_event_processor(weak_job): + # type: (Callable[[], Job]) -> EventProcessor + def event_processor(event, hint): + # type: (Event, dict[str, Any]) -> Event + job = weak_job() + 
if job is not None: + with capture_internal_exceptions(): + extra = event.setdefault("extra", {}) + rq_job = { + "job_id": job.id, + "func": job.func_name, + "args": job.args, + "kwargs": job.kwargs, + "description": job.description, + } + + if job.enqueued_at: + rq_job["enqueued_at"] = format_timestamp(job.enqueued_at) + if job.started_at: + rq_job["started_at"] = format_timestamp(job.started_at) + + extra["rq-job"] = rq_job + + if "exc_info" in hint: + with capture_internal_exceptions(): + if issubclass(hint["exc_info"][0], JobTimeoutException): + event["fingerprint"] = ["rq", "JobTimeoutException", job.func_name] + + return event + + return event_processor + + +def _capture_exception(exc_info, **kwargs): + # type: (ExcInfo, **Any) -> None + client = sentry_sdk_alpha.get_client() + + event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "rq", "handled": False}, + ) + + sentry_sdk_alpha.capture_event(event, hint=hint) + + +def _prepopulate_attributes(job, queue): + # type: (Job, Queue) -> dict[str, Any] + attributes = { + "messaging.system": "rq", + "rq.job.id": job.id, + } + + for prop, attr in JOB_PROPERTY_TO_ATTRIBUTE.items(): + if getattr(job, prop, None) is not None: + attributes[attr] = getattr(job, prop) + + for prop, attr in QUEUE_PROPERTY_TO_ATTRIBUTE.items(): + if getattr(queue, prop, None) is not None: + attributes[attr] = getattr(queue, prop) + + if getattr(job, "args", None): + for i, arg in enumerate(job.args): + with capture_internal_exceptions(): + attributes[f"rq.job.args.{i}"] = str(arg) + + if getattr(job, "kwargs", None): + for kwarg, value in job.kwargs.items(): + with capture_internal_exceptions(): + attributes[f"rq.job.kwargs.{kwarg}"] = str(value) + + func = job.func + if callable(func): + func = func.__name__ + + attributes["rq.job.func"] = str(func) + + return attributes diff --git a/src/sentry_sdk_alpha/integrations/rust_tracing.py b/src/sentry_sdk_alpha/integrations/rust_tracing.py new 
file mode 100644 index 00000000000000..cb12e9241e8f8e --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/rust_tracing.py @@ -0,0 +1,269 @@ +""" +This integration ingests tracing data from native extensions written in Rust. + +Using it requires additional setup on the Rust side to accept a +`RustTracingLayer` Python object and register it with the `tracing-subscriber` +using an adapter from the `pyo3-python-tracing-subscriber` crate. For example: +```rust +#[pyfunction] +pub fn initialize_tracing(py_impl: Bound<'_, PyAny>) { + tracing_subscriber::registry() + .with(pyo3_python_tracing_subscriber::PythonCallbackLayerBridge::new(py_impl)) + .init(); +} +``` + +Usage in Python would then look like: +``` +sentry_sdk.init( + dsn=sentry_dsn, + integrations=[ + RustTracingIntegration( + "demo_rust_extension", + demo_rust_extension.initialize_tracing, + event_type_mapping=event_type_mapping, + ) + ], +) +``` + +Each native extension requires its own integration. +""" + +import json +from enum import Enum, auto +from typing import Any, Callable, Dict, Optional + +import sentry_sdk_alpha +from sentry_sdk_alpha.integrations import Integration +from sentry_sdk_alpha.scope import should_send_default_pii +from sentry_sdk_alpha.tracing import Span +from sentry_sdk_alpha.utils import SENSITIVE_DATA_SUBSTITUTE + + +class RustTracingLevel(Enum): + Trace = "TRACE" + Debug = "DEBUG" + Info = "INFO" + Warn = "WARN" + Error = "ERROR" + + +class EventTypeMapping(Enum): + Ignore = auto() + Exc = auto() + Breadcrumb = auto() + Event = auto() + + +def tracing_level_to_sentry_level(level): + # type: (str) -> sentry_sdk_alpha._types.LogLevelStr + level = RustTracingLevel(level) + if level in (RustTracingLevel.Trace, RustTracingLevel.Debug): + return "debug" + elif level == RustTracingLevel.Info: + return "info" + elif level == RustTracingLevel.Warn: + return "warning" + elif level == RustTracingLevel.Error: + return "error" + else: + # Better this than crashing + return "info" + + +def 
extract_contexts(event: Dict[str, Any]) -> Dict[str, Any]: + metadata = event.get("metadata", {}) + contexts = {} + + location = {} + for field in ["module_path", "file", "line"]: + if field in metadata: + location[field] = metadata[field] + if len(location) > 0: + contexts["rust_tracing_location"] = location + + fields = {} + for field in metadata.get("fields", []): + fields[field] = event.get(field) + if len(fields) > 0: + contexts["rust_tracing_fields"] = fields + + return contexts + + +def process_event(event: Dict[str, Any]) -> None: + metadata = event.get("metadata", {}) + + logger = metadata.get("target") + level = tracing_level_to_sentry_level(metadata.get("level")) + message = event.get("message") # type: sentry_sdk_alpha._types.Any + contexts = extract_contexts(event) + + sentry_event = { + "logger": logger, + "level": level, + "message": message, + "contexts": contexts, + } # type: sentry_sdk_alpha._types.Event + + sentry_sdk_alpha.capture_event(sentry_event) + + +def process_exception(event: Dict[str, Any]) -> None: + process_event(event) + + +def process_breadcrumb(event: Dict[str, Any]) -> None: + level = tracing_level_to_sentry_level(event.get("metadata", {}).get("level")) + message = event.get("message") + + sentry_sdk_alpha.add_breadcrumb(level=level, message=message) + + +def default_span_filter(metadata: Dict[str, Any]) -> bool: + return RustTracingLevel(metadata.get("level")) in ( + RustTracingLevel.Error, + RustTracingLevel.Warn, + RustTracingLevel.Info, + ) + + +def default_event_type_mapping(metadata: Dict[str, Any]) -> EventTypeMapping: + level = RustTracingLevel(metadata.get("level")) + if level == RustTracingLevel.Error: + return EventTypeMapping.Exc + elif level in (RustTracingLevel.Warn, RustTracingLevel.Info): + return EventTypeMapping.Breadcrumb + elif level in (RustTracingLevel.Debug, RustTracingLevel.Trace): + return EventTypeMapping.Ignore + else: + return EventTypeMapping.Ignore + + +class RustTracingLayer: + def __init__( + self, + origin: 
str, + event_type_mapping: Callable[ + [Dict[str, Any]], EventTypeMapping + ] = default_event_type_mapping, + span_filter: Callable[[Dict[str, Any]], bool] = default_span_filter, + include_tracing_fields: Optional[bool] = None, + ): + self.origin = origin + self.event_type_mapping = event_type_mapping + self.span_filter = span_filter + self.include_tracing_fields = include_tracing_fields + + def _include_tracing_fields(self) -> bool: + """ + By default, the values of tracing fields are not included in case they + contain PII. A user may override that by passing `True` for the + `include_tracing_fields` keyword argument of this integration or by + setting `send_default_pii` to `True` in their Sentry client options. + """ + return ( + should_send_default_pii() + if self.include_tracing_fields is None + else self.include_tracing_fields + ) + + def on_event(self, event: str, _span_state: Optional[Span]) -> None: + deserialized_event = json.loads(event) + metadata = deserialized_event.get("metadata", {}) + + event_type = self.event_type_mapping(metadata) + if event_type == EventTypeMapping.Ignore: + return + elif event_type == EventTypeMapping.Exc: + process_exception(deserialized_event) + elif event_type == EventTypeMapping.Breadcrumb: + process_breadcrumb(deserialized_event) + elif event_type == EventTypeMapping.Event: + process_event(deserialized_event) + + def on_new_span(self, attrs: str, span_id: str) -> Optional[Span]: + attrs = json.loads(attrs) + metadata = attrs.get("metadata", {}) + + if not self.span_filter(metadata): + return None + + module_path = metadata.get("module_path") + name = metadata.get("name") + message = attrs.get("message") + + if message is not None: + sentry_span_name = message + elif module_path is not None and name is not None: + sentry_span_name = f"{module_path}::{name}" # noqa: E231 + elif name is not None: + sentry_span_name = name + else: + sentry_span_name = "" + + span = sentry_sdk_alpha.start_span( + op="function", + 
name=sentry_span_name, + origin=self.origin, + only_if_parent=True, + ) + span.__enter__() + + fields = metadata.get("fields", []) + for field in fields: + if self._include_tracing_fields(): + span.set_attribute(field, attrs.get(field)) + else: + span.set_attribute(field, SENSITIVE_DATA_SUBSTITUTE) + + return span + + def on_close(self, span_id: str, span: Optional[Span]) -> None: + if span is not None: + span.__exit__(None, None, None) + + def on_record(self, span_id: str, values: str, span: Optional[Span]) -> None: + if span is not None: + deserialized_values = json.loads(values) + for key, value in deserialized_values.items(): + if self._include_tracing_fields(): + span.set_attribute(key, value) + else: + span.set_attribute(key, SENSITIVE_DATA_SUBSTITUTE) + + +class RustTracingIntegration(Integration): + """ + Ingests tracing data from a Rust native extension's `tracing` instrumentation. + + If a project uses more than one Rust native extension, each one will need + its own instance of `RustTracingIntegration` with an initializer function + specific to that extension. + + Since all of the setup for this integration requires instance-specific state + which is not available in `setup_once()`, setup instead happens in `__init__()`. 
+ """ + + def __init__( + self, + identifier: str, + initializer: Callable[[RustTracingLayer], None], + event_type_mapping: Callable[ + [Dict[str, Any]], EventTypeMapping + ] = default_event_type_mapping, + span_filter: Callable[[Dict[str, Any]], bool] = default_span_filter, + include_tracing_fields: Optional[bool] = None, + ): + self.identifier = identifier + origin = f"auto.function.rust_tracing.{identifier}" + self.tracing_layer = RustTracingLayer( + origin, event_type_mapping, span_filter, include_tracing_fields + ) + + initializer(self.tracing_layer) + + @staticmethod + def setup_once() -> None: + pass diff --git a/src/sentry_sdk_alpha/integrations/sanic.py b/src/sentry_sdk_alpha/integrations/sanic.py new file mode 100644 index 00000000000000..dfe27fb754713b --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/sanic.py @@ -0,0 +1,378 @@ +import sys +import weakref +from inspect import isawaitable +from urllib.parse import urlsplit + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP +from sentry_sdk_alpha.integrations import _check_minimum_version, Integration, DidNotEnable +from sentry_sdk_alpha.integrations._wsgi_common import RequestExtractor, _filter_headers +from sentry_sdk_alpha.integrations.logging import ignore_logger +from sentry_sdk_alpha.tracing import TransactionSource +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + ensure_integration_enabled, + event_from_exception, + HAS_REAL_CONTEXTVARS, + CONTEXTVARS_ERROR_MESSAGE, + parse_version, + reraise, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from collections.abc import Container + from typing import Any + from typing import Callable + from typing import Optional + from typing import Union + from typing import Dict + + from sanic.request import Request, RequestParameters + from sanic.response import BaseHTTPResponse + + from sentry_sdk_alpha._types import Event, EventProcessor, ExcInfo, Hint + from sanic.router import Route + +try: + from sanic 
import Sanic, __version__ as SANIC_VERSION + from sanic.exceptions import SanicException + from sanic.router import Router + from sanic.handlers import ErrorHandler +except ImportError: + raise DidNotEnable("Sanic not installed") + +old_error_handler_lookup = ErrorHandler.lookup +old_handle_request = Sanic.handle_request +old_router_get = Router.get + +try: + # This method was introduced in Sanic v21.9 + old_startup = Sanic._startup +except AttributeError: + pass + + +class SanicIntegration(Integration): + identifier = "sanic" + origin = f"auto.http.{identifier}" + version = None + + def __init__(self, unsampled_statuses=frozenset({404})): + # type: (Optional[Container[int]]) -> None + """ + The unsampled_statuses parameter can be used to specify for which HTTP statuses the + transactions should not be sent to Sentry. By default, transactions are sent for all + HTTP statuses, except 404. Set unsampled_statuses to None to send transactions for all + HTTP statuses, including 404. + """ + self._unsampled_statuses = unsampled_statuses or set() + + @staticmethod + def setup_once(): + # type: () -> None + SanicIntegration.version = parse_version(SANIC_VERSION) + _check_minimum_version(SanicIntegration, SanicIntegration.version) + + if not HAS_REAL_CONTEXTVARS: + # We better have contextvars or we're going to leak state between + # requests. + raise DidNotEnable( + "The sanic integration for Sentry requires Python 3.7+ " + " or the aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE + ) + + if SANIC_VERSION.startswith("0.8."): + # Sanic 0.8 and older creates a logger named "root" and puts a + # stringified version of every exception in there (without exc_info), + # which our error deduplication can't detect. + # + # We explicitly check the version here because it is a very + # invasive step to ignore this logger and not necessary in newer + # versions at all. 
+ # + # https://github.com/huge-success/sanic/issues/1332 + ignore_logger("root") + + if SanicIntegration.version is not None and SanicIntegration.version < (21, 9): + _setup_legacy_sanic() + return + + _setup_sanic() + + +class SanicRequestExtractor(RequestExtractor): + def content_length(self): + # type: () -> int + if self.request.body is None: + return 0 + return len(self.request.body) + + def cookies(self): + # type: () -> Dict[str, str] + return dict(self.request.cookies) + + def raw_data(self): + # type: () -> bytes + return self.request.body + + def form(self): + # type: () -> RequestParameters + return self.request.form + + def is_json(self): + # type: () -> bool + raise NotImplementedError() + + def json(self): + # type: () -> Optional[Any] + return self.request.json + + def files(self): + # type: () -> RequestParameters + return self.request.files + + def size_of_file(self, file): + # type: (Any) -> int + return len(file.body or ()) + + +def _setup_sanic(): + # type: () -> None + Sanic._startup = _startup + ErrorHandler.lookup = _sentry_error_handler_lookup + + +def _setup_legacy_sanic(): + # type: () -> None + Sanic.handle_request = _legacy_handle_request + Router.get = _legacy_router_get + ErrorHandler.lookup = _sentry_error_handler_lookup + + +async def _startup(self): + # type: (Sanic) -> None + # This happens about as early in the lifecycle as possible, just after the + # Request object is created. The body has not yet been consumed. + self.signal("http.lifecycle.request")(_context_enter) + + # This happens after the handler is complete. In v21.9 this signal is not + # dispatched when there is an exception. Therefore we need to close out + # and call _context_exit from the custom exception handler as well. + # See https://github.com/sanic-org/sanic/issues/2297 + self.signal("http.lifecycle.response")(_context_exit) + + # This happens inside of request handling immediately after the route + # has been identified by the router. 
+ self.signal("http.routing.after")(_set_transaction) + + # The above signals need to be declared before this can be called. + await old_startup(self) + + +async def _context_enter(request): + # type: (Request) -> None + request.ctx._sentry_do_integration = ( + sentry_sdk_alpha.get_client().get_integration(SanicIntegration) is not None + ) + + if not request.ctx._sentry_do_integration: + return + + weak_request = weakref.ref(request) + request.ctx._sentry_scope_manager = sentry_sdk_alpha.isolation_scope() + scope = request.ctx._sentry_scope_manager.__enter__() + request.ctx._sentry_scope = scope + + scope.set_transaction_name(request.path, TransactionSource.URL) + scope.clear_breadcrumbs() + scope.add_event_processor(_make_request_processor(weak_request)) + + # TODO-neel-potel test if this works + request.ctx._sentry_continue_trace = sentry_sdk_alpha.continue_trace( + dict(request.headers) + ) + request.ctx._sentry_continue_trace.__enter__() + request.ctx._sentry_transaction = sentry_sdk_alpha.start_span( + op=OP.HTTP_SERVER, + # Unless the request results in a 404 error, the name and source will get overwritten in _set_transaction + name=request.path, + source=TransactionSource.URL, + origin=SanicIntegration.origin, + ).__enter__() + + +async def _context_exit(request, response=None): + # type: (Request, Optional[BaseHTTPResponse]) -> None + with capture_internal_exceptions(): + if not request.ctx._sentry_do_integration: + return + + integration = sentry_sdk_alpha.get_client().get_integration(SanicIntegration) + + response_status = None if response is None else response.status + + # This capture_internal_exceptions block has been intentionally nested here, so that in case an exception + # happens while trying to end the transaction, we still attempt to exit the scope. 
+ with capture_internal_exceptions(): + request.ctx._sentry_transaction.set_http_status(response_status) + + if ( + isinstance(integration, SanicIntegration) + and response_status in integration._unsampled_statuses + ): + # drop the event in an event processor + request.ctx._sentry_scope.add_event_processor( + lambda _event, _hint: None + ) + + request.ctx._sentry_transaction.__exit__(None, None, None) + request.ctx._sentry_continue_trace.__exit__(None, None, None) + + request.ctx._sentry_scope_manager.__exit__(None, None, None) + + +async def _set_transaction(request, route, **_): + # type: (Request, Route, **Any) -> None + if request.ctx._sentry_do_integration: + with capture_internal_exceptions(): + scope = sentry_sdk_alpha.get_current_scope() + route_name = route.name.replace(request.app.name, "").strip(".") + scope.set_transaction_name(route_name, source=TransactionSource.COMPONENT) + + +def _sentry_error_handler_lookup(self, exception, *args, **kwargs): + # type: (Any, Exception, *Any, **Any) -> Optional[object] + _capture_exception(exception) + old_error_handler = old_error_handler_lookup(self, exception, *args, **kwargs) + + if old_error_handler is None: + return None + + if sentry_sdk_alpha.get_client().get_integration(SanicIntegration) is None: + return old_error_handler + + async def sentry_wrapped_error_handler(request, exception): + # type: (Request, Exception) -> Any + try: + response = old_error_handler(request, exception) + if isawaitable(response): + response = await response + return response + except Exception: + # Report errors that occur in Sanic error handler. These + # exceptions will not even show up in Sanic's + # `sanic.exceptions` logger. 
+ exc_info = sys.exc_info() + _capture_exception(exc_info) + reraise(*exc_info) + finally: + # As mentioned in previous comment in _startup, this can be removed + # after https://github.com/sanic-org/sanic/issues/2297 is resolved + if SanicIntegration.version and SanicIntegration.version == (21, 9): + await _context_exit(request) + + return sentry_wrapped_error_handler + + +async def _legacy_handle_request(self, request, *args, **kwargs): + # type: (Any, Request, *Any, **Any) -> Any + if sentry_sdk_alpha.get_client().get_integration(SanicIntegration) is None: + return await old_handle_request(self, request, *args, **kwargs) + + weak_request = weakref.ref(request) + + with sentry_sdk_alpha.isolation_scope() as scope: + scope.clear_breadcrumbs() + scope.add_event_processor(_make_request_processor(weak_request)) + + response = old_handle_request(self, request, *args, **kwargs) + if isawaitable(response): + response = await response + + return response + + +def _legacy_router_get(self, *args): + # type: (Any, Union[Any, Request]) -> Any + rv = old_router_get(self, *args) + if sentry_sdk_alpha.get_client().get_integration(SanicIntegration) is not None: + with capture_internal_exceptions(): + scope = sentry_sdk_alpha.get_isolation_scope() + if SanicIntegration.version and SanicIntegration.version >= (21, 3): + # Sanic versions above and including 21.3 append the app name to the + # route name, and so we need to remove it from Route name so the + # transaction name is consistent across all versions + sanic_app_name = self.ctx.app.name + sanic_route = rv[0].name + + if sanic_route.startswith("%s." 
% sanic_app_name): + # We add a 1 to the len of the sanic_app_name because there is a dot + # that joins app name and the route name + # Format: app_name.route_name + sanic_route = sanic_route[len(sanic_app_name) + 1 :] + + scope.set_transaction_name( + sanic_route, source=TransactionSource.COMPONENT + ) + else: + scope.set_transaction_name( + rv[0].__name__, source=TransactionSource.COMPONENT + ) + + return rv + + +@ensure_integration_enabled(SanicIntegration) +def _capture_exception(exception): + # type: (Union[ExcInfo, BaseException]) -> None + with capture_internal_exceptions(): + event, hint = event_from_exception( + exception, + client_options=sentry_sdk_alpha.get_client().options, + mechanism={"type": "sanic", "handled": False}, + ) + + if hint and hasattr(hint["exc_info"][0], "quiet") and hint["exc_info"][0].quiet: + return + + sentry_sdk_alpha.capture_event(event, hint=hint) + + +def _make_request_processor(weak_request): + # type: (Callable[[], Request]) -> EventProcessor + def sanic_processor(event, hint): + # type: (Event, Optional[Hint]) -> Optional[Event] + + try: + if hint and issubclass(hint["exc_info"][0], SanicException): + return None + except KeyError: + pass + + request = weak_request() + if request is None: + return event + + with capture_internal_exceptions(): + extractor = SanicRequestExtractor(request) + extractor.extract_into_event(event) + + request_info = event["request"] + urlparts = urlsplit(request.url) + + request_info["url"] = "%s://%s%s" % ( + urlparts.scheme, + urlparts.netloc, + urlparts.path, + ) + + request_info["query_string"] = urlparts.query + request_info["method"] = request.method + request_info["env"] = {"REMOTE_ADDR": request.remote_addr} + request_info["headers"] = _filter_headers(dict(request.headers)) + + return event + + return sanic_processor diff --git a/src/sentry_sdk_alpha/integrations/serverless.py b/src/sentry_sdk_alpha/integrations/serverless.py new file mode 100644 index 00000000000000..1c2c4bcaa1947b --- 
/dev/null +++ b/src/sentry_sdk_alpha/integrations/serverless.py @@ -0,0 +1,76 @@ +import sys +from functools import wraps + +import sentry_sdk_alpha +from sentry_sdk_alpha.utils import event_from_exception, reraise + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import Callable + from typing import TypeVar + from typing import Union + from typing import Optional + from typing import overload + + F = TypeVar("F", bound=Callable[..., Any]) + +else: + + def overload(x): + # type: (F) -> F + return x + + +@overload +def serverless_function(f, flush=True): + # type: (F, bool) -> F + pass + + +@overload +def serverless_function(f=None, flush=True): # noqa: F811 + # type: (None, bool) -> Callable[[F], F] + pass + + +def serverless_function(f=None, flush=True): # noqa + # type: (Optional[F], bool) -> Union[F, Callable[[F], F]] + def wrapper(f): + # type: (F) -> F + @wraps(f) + def inner(*args, **kwargs): + # type: (*Any, **Any) -> Any + with sentry_sdk_alpha.isolation_scope() as scope: + scope.clear_breadcrumbs() + + try: + return f(*args, **kwargs) + except Exception: + _capture_and_reraise() + finally: + if flush: + sentry_sdk_alpha.flush() + + return inner # type: ignore + + if f is None: + return wrapper + else: + return wrapper(f) + + +def _capture_and_reraise(): + # type: () -> None + exc_info = sys.exc_info() + client = sentry_sdk_alpha.get_client() + if client.is_active(): + event, hint = event_from_exception( + exc_info, + client_options=client.options, + mechanism={"type": "serverless", "handled": False}, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) + + reraise(*exc_info) diff --git a/src/sentry_sdk_alpha/integrations/socket.py b/src/sentry_sdk_alpha/integrations/socket.py new file mode 100644 index 00000000000000..ce1200407e5e39 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/socket.py @@ -0,0 +1,100 @@ +import socket + +import sentry_sdk_alpha +from sentry_sdk_alpha._types import MYPY +from 
sentry_sdk_alpha.consts import OP +from sentry_sdk_alpha.integrations import Integration + +if MYPY: + from socket import AddressFamily, SocketKind + from typing import Tuple, Optional, Union, List + +__all__ = ["SocketIntegration"] + + +class SocketIntegration(Integration): + identifier = "socket" + origin = f"auto.socket.{identifier}" + + @staticmethod + def setup_once(): + # type: () -> None + """ + patches two of the most used functions of socket: create_connection and getaddrinfo(dns resolver) + """ + _patch_create_connection() + _patch_getaddrinfo() + + +def _get_span_description(host, port): + # type: (Union[bytes, str, None], Union[bytes, str, int, None]) -> str + + try: + host = host.decode() # type: ignore + except (UnicodeDecodeError, AttributeError): + pass + + try: + port = port.decode() # type: ignore + except (UnicodeDecodeError, AttributeError): + pass + + description = "%s:%s" % (host, port) # type: ignore + return description + + +def _patch_create_connection(): + # type: () -> None + real_create_connection = socket.create_connection + + def create_connection( + address, + timeout=socket._GLOBAL_DEFAULT_TIMEOUT, # type: ignore + source_address=None, + ): + # type: (Tuple[Optional[str], int], Optional[float], Optional[Tuple[Union[bytearray, bytes, str], int]])-> socket.socket + integration = sentry_sdk_alpha.get_client().get_integration(SocketIntegration) + if integration is None: + return real_create_connection(address, timeout, source_address) + + with sentry_sdk_alpha.start_span( + op=OP.SOCKET_CONNECTION, + name=_get_span_description(address[0], address[1]), + origin=SocketIntegration.origin, + only_if_parent=True, + ) as span: + host, port = address + span.set_attribute("address.host", host) + span.set_attribute("address.port", port) + span.set_attribute("timeout", timeout) + span.set_attribute("source_address", source_address) + + return real_create_connection( + address=address, timeout=timeout, source_address=source_address + ) + + 
socket.create_connection = create_connection # type: ignore + + +def _patch_getaddrinfo(): + # type: () -> None + real_getaddrinfo = socket.getaddrinfo + + def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0): + # type: (Union[bytes, str, None], Union[bytes, str, int, None], int, int, int, int) -> List[Tuple[AddressFamily, SocketKind, int, str, Union[Tuple[str, int], Tuple[str, int, int, int], Tuple[int, bytes]]]] + integration = sentry_sdk_alpha.get_client().get_integration(SocketIntegration) + if integration is None: + return real_getaddrinfo(host, port, family, type, proto, flags) + + with sentry_sdk_alpha.start_span( + op=OP.SOCKET_DNS, + name=_get_span_description(host, port), + origin=SocketIntegration.origin, + only_if_parent=True, + ) as span: + span.set_attribute("host", host) + span.set_attribute("port", port) + + return real_getaddrinfo(host, port, family, type, proto, flags) + + socket.getaddrinfo = getaddrinfo diff --git a/src/sentry_sdk_alpha/integrations/spark/__init__.py b/src/sentry_sdk_alpha/integrations/spark/__init__.py new file mode 100644 index 00000000000000..6f573a5e012a32 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/spark/__init__.py @@ -0,0 +1,4 @@ +from sentry_sdk_alpha.integrations.spark.spark_driver import SparkIntegration +from sentry_sdk_alpha.integrations.spark.spark_worker import SparkWorkerIntegration + +__all__ = ["SparkIntegration", "SparkWorkerIntegration"] diff --git a/src/sentry_sdk_alpha/integrations/spark/spark_driver.py b/src/sentry_sdk_alpha/integrations/spark/spark_driver.py new file mode 100644 index 00000000000000..eb12a5ce474a42 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/spark/spark_driver.py @@ -0,0 +1,315 @@ +import sentry_sdk_alpha +from sentry_sdk_alpha.integrations import Integration +from sentry_sdk_alpha.utils import capture_internal_exceptions, ensure_integration_enabled + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import Optional 
+ + from sentry_sdk_alpha._types import Event, Hint + from pyspark import SparkContext + + +class SparkIntegration(Integration): + identifier = "spark" + + @staticmethod + def setup_once(): + # type: () -> None + _setup_sentry_tracing() + + +def _set_app_properties(): + # type: () -> None + """ + Set properties in driver that propagate to worker processes, allowing for workers to have access to those properties. + This allows worker integration to have access to app_name and application_id. + """ + from pyspark import SparkContext + + spark_context = SparkContext._active_spark_context + if spark_context: + spark_context.setLocalProperty( + "sentry_app_name", + spark_context.appName, + ) + spark_context.setLocalProperty( + "sentry_application_id", + spark_context.applicationId, + ) + + +def _start_sentry_listener(sc): + # type: (SparkContext) -> None + """ + Start java gateway server to add custom `SparkListener` + """ + from pyspark.java_gateway import ensure_callback_server_started + + gw = sc._gateway + ensure_callback_server_started(gw) + listener = SentryListener() + sc._jsc.sc().addSparkListener(listener) + + +def _add_event_processor(sc): + # type: (SparkContext) -> None + scope = sentry_sdk_alpha.get_isolation_scope() + + @scope.add_event_processor + def process_event(event, hint): + # type: (Event, Hint) -> Optional[Event] + with capture_internal_exceptions(): + if sentry_sdk_alpha.get_client().get_integration(SparkIntegration) is None: + return event + + if sc._active_spark_context is None: + return event + + event.setdefault("user", {}).setdefault("id", sc.sparkUser()) + + event.setdefault("tags", {}).setdefault( + "executor.id", sc._conf.get("spark.executor.id") + ) + event["tags"].setdefault( + "spark-submit.deployMode", + sc._conf.get("spark.submit.deployMode"), + ) + event["tags"].setdefault("driver.host", sc._conf.get("spark.driver.host")) + event["tags"].setdefault("driver.port", sc._conf.get("spark.driver.port")) + 
event["tags"].setdefault("spark_version", sc.version) + event["tags"].setdefault("app_name", sc.appName) + event["tags"].setdefault("application_id", sc.applicationId) + event["tags"].setdefault("master", sc.master) + event["tags"].setdefault("spark_home", sc.sparkHome) + + event.setdefault("extra", {}).setdefault("web_url", sc.uiWebUrl) + + return event + + +def _activate_integration(sc): + # type: (SparkContext) -> None + + _start_sentry_listener(sc) + _set_app_properties() + _add_event_processor(sc) + + +def _patch_spark_context_init(): + # type: () -> None + from pyspark import SparkContext + + spark_context_init = SparkContext._do_init + + @ensure_integration_enabled(SparkIntegration, spark_context_init) + def _sentry_patched_spark_context_init(self, *args, **kwargs): + # type: (SparkContext, *Any, **Any) -> Optional[Any] + rv = spark_context_init(self, *args, **kwargs) + _activate_integration(self) + return rv + + SparkContext._do_init = _sentry_patched_spark_context_init + + +def _setup_sentry_tracing(): + # type: () -> None + from pyspark import SparkContext + + if SparkContext._active_spark_context is not None: + _activate_integration(SparkContext._active_spark_context) + return + _patch_spark_context_init() + + +class SparkListener: + def onApplicationEnd(self, applicationEnd): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onApplicationStart(self, applicationStart): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onBlockManagerAdded(self, blockManagerAdded): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onBlockManagerRemoved(self, blockManagerRemoved): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onBlockUpdated(self, blockUpdated): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onEnvironmentUpdate(self, environmentUpdate): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onExecutorAdded(self, executorAdded): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onExecutorBlacklisted(self, 
executorBlacklisted): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onExecutorBlacklistedForStage( # noqa: N802 + self, executorBlacklistedForStage # noqa: N803 + ): + # type: (Any) -> None + pass + + def onExecutorMetricsUpdate(self, executorMetricsUpdate): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onExecutorRemoved(self, executorRemoved): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onJobEnd(self, jobEnd): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onJobStart(self, jobStart): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onNodeBlacklisted(self, nodeBlacklisted): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onNodeBlacklistedForStage(self, nodeBlacklistedForStage): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onNodeUnblacklisted(self, nodeUnblacklisted): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onOtherEvent(self, event): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onSpeculativeTaskSubmitted(self, speculativeTask): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onStageCompleted(self, stageCompleted): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onStageSubmitted(self, stageSubmitted): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onTaskEnd(self, taskEnd): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onTaskGettingResult(self, taskGettingResult): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onTaskStart(self, taskStart): # noqa: N802,N803 + # type: (Any) -> None + pass + + def onUnpersistRDD(self, unpersistRDD): # noqa: N802,N803 + # type: (Any) -> None + pass + + class Java: + implements = ["org.apache.spark.scheduler.SparkListenerInterface"] + + +class SentryListener(SparkListener): + def _add_breadcrumb( + self, + level, # type: str + message, # type: str + data=None, # type: Optional[dict[str, Any]] + ): + # type: (...) 
-> None + sentry_sdk_alpha.get_isolation_scope().add_breadcrumb( + level=level, message=message, data=data + ) + + def onJobStart(self, jobStart): # noqa: N802,N803 + # type: (Any) -> None + sentry_sdk_alpha.get_isolation_scope().clear_breadcrumbs() + + message = "Job {} Started".format(jobStart.jobId()) + self._add_breadcrumb(level="info", message=message) + _set_app_properties() + + def onJobEnd(self, jobEnd): # noqa: N802,N803 + # type: (Any) -> None + level = "" + message = "" + data = {"result": jobEnd.jobResult().toString()} + + if jobEnd.jobResult().toString() == "JobSucceeded": + level = "info" + message = "Job {} Ended".format(jobEnd.jobId()) + else: + level = "warning" + message = "Job {} Failed".format(jobEnd.jobId()) + + self._add_breadcrumb(level=level, message=message, data=data) + + def onStageSubmitted(self, stageSubmitted): # noqa: N802,N803 + # type: (Any) -> None + stage_info = stageSubmitted.stageInfo() + message = "Stage {} Submitted".format(stage_info.stageId()) + + data = {"name": stage_info.name()} + attempt_id = _get_attempt_id(stage_info) + if attempt_id is not None: + data["attemptId"] = attempt_id + + self._add_breadcrumb(level="info", message=message, data=data) + _set_app_properties() + + def onStageCompleted(self, stageCompleted): # noqa: N802,N803 + # type: (Any) -> None + from py4j.protocol import Py4JJavaError # type: ignore + + stage_info = stageCompleted.stageInfo() + message = "" + level = "" + + data = {"name": stage_info.name()} + attempt_id = _get_attempt_id(stage_info) + if attempt_id is not None: + data["attemptId"] = attempt_id + + # Have to Try Except because stageInfo.failureReason() is typed with Scala Option + try: + data["reason"] = stage_info.failureReason().get() + message = "Stage {} Failed".format(stage_info.stageId()) + level = "warning" + except Py4JJavaError: + message = "Stage {} Completed".format(stage_info.stageId()) + level = "info" + + self._add_breadcrumb(level=level, message=message, data=data) + + +def 
_get_attempt_id(stage_info): + # type: (Any) -> Optional[int] + try: + return stage_info.attemptId() + except Exception: + pass + + try: + return stage_info.attemptNumber() + except Exception: + pass + + return None diff --git a/src/sentry_sdk_alpha/integrations/spark/spark_worker.py b/src/sentry_sdk_alpha/integrations/spark/spark_worker.py new file mode 100644 index 00000000000000..f0ed84b9b8f84e --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/spark/spark_worker.py @@ -0,0 +1,116 @@ +import sys + +import sentry_sdk_alpha +from sentry_sdk_alpha.integrations import Integration +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + exc_info_from_error, + single_exception_from_error_tuple, + walk_exception_chain, + event_hint_with_exc_info, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import Optional + + from sentry_sdk_alpha._types import ExcInfo, Event, Hint + + +class SparkWorkerIntegration(Integration): + identifier = "spark_worker" + + @staticmethod + def setup_once(): + # type: () -> None + import pyspark.daemon as original_daemon + + original_daemon.worker_main = _sentry_worker_main + + +def _capture_exception(exc_info): + # type: (ExcInfo) -> None + client = sentry_sdk_alpha.get_client() + + mechanism = {"type": "spark", "handled": False} + + exc_info = exc_info_from_error(exc_info) + + exc_type, exc_value, tb = exc_info + rv = [] + + # On Exception worker will call sys.exit(-1), so we can ignore SystemExit and similar errors + for exc_type, exc_value, tb in walk_exception_chain(exc_info): + if exc_type not in (SystemExit, EOFError, ConnectionResetError): + rv.append( + single_exception_from_error_tuple( + exc_type, exc_value, tb, client.options, mechanism + ) + ) + + if rv: + rv.reverse() + hint = event_hint_with_exc_info(exc_info) + event = {"level": "error", "exception": {"values": rv}} # type: Event + + _tag_task_context() + + sentry_sdk_alpha.capture_event(event, hint=hint) + + 
+def _tag_task_context(): + # type: () -> None + from pyspark.taskcontext import TaskContext + + scope = sentry_sdk_alpha.get_isolation_scope() + + @scope.add_event_processor + def process_event(event, hint): + # type: (Event, Hint) -> Optional[Event] + with capture_internal_exceptions(): + integration = sentry_sdk_alpha.get_client().get_integration( + SparkWorkerIntegration + ) + task_context = TaskContext.get() + + if integration is None or task_context is None: + return event + + event.setdefault("tags", {}).setdefault( + "stageId", str(task_context.stageId()) + ) + event["tags"].setdefault("partitionId", str(task_context.partitionId())) + event["tags"].setdefault("attemptNumber", str(task_context.attemptNumber())) + event["tags"].setdefault("taskAttemptId", str(task_context.taskAttemptId())) + + if task_context._localProperties: + if "sentry_app_name" in task_context._localProperties: + event["tags"].setdefault( + "app_name", task_context._localProperties["sentry_app_name"] + ) + event["tags"].setdefault( + "application_id", + task_context._localProperties["sentry_application_id"], + ) + + if "callSite.short" in task_context._localProperties: + event.setdefault("extra", {}).setdefault( + "callSite", task_context._localProperties["callSite.short"] + ) + + return event + + +def _sentry_worker_main(*args, **kwargs): + # type: (*Optional[Any], **Optional[Any]) -> None + import pyspark.worker as original_worker + + try: + original_worker.main(*args, **kwargs) + except SystemExit: + if sentry_sdk_alpha.get_client().get_integration(SparkWorkerIntegration) is not None: + exc_info = sys.exc_info() + with capture_internal_exceptions(): + _capture_exception(exc_info) diff --git a/src/sentry_sdk_alpha/integrations/sqlalchemy.py b/src/sentry_sdk_alpha/integrations/sqlalchemy.py new file mode 100644 index 00000000000000..2a1237eb8fe288 --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/sqlalchemy.py @@ -0,0 +1,146 @@ +from sentry_sdk_alpha.consts import SPANSTATUS, 
class SqlalchemyIntegration(Integration):
    """Creates DB spans for SQL statements executed through SQLAlchemy engines."""

    identifier = "sqlalchemy"
    origin = f"auto.db.{identifier}"

    @staticmethod
    def setup_once():
        # type: () -> None
        version = parse_version(SQLALCHEMY_VERSION)
        _check_minimum_version(SqlalchemyIntegration, version)

        # Engine-level events fire for every statement on every engine.
        listen(Engine, "before_cursor_execute", _before_cursor_execute)
        listen(Engine, "after_cursor_execute", _after_cursor_execute)
        listen(Engine, "handle_error", _handle_error)


@ensure_integration_enabled(SqlalchemyIntegration)
def _before_cursor_execute(
    conn, cursor, statement, parameters, context, executemany, *args
):
    # type: (Any, Any, Any, Any, Any, bool, *Any) -> None
    """Open a query span and stash it (and its context manager) on the execution context."""
    ctx_mgr = record_sql_queries(
        cursor,
        statement,
        parameters,
        # context or its dialect may be absent; fall back to no paramstyle then
        paramstyle=context and context.dialect and context.dialect.paramstyle or None,
        executemany=executemany,
        span_origin=SqlalchemyIntegration.origin,
    )
    context._sentry_sql_span_manager = ctx_mgr

    span = ctx_mgr.__enter__()

    if span is not None:
        _set_db_data(span, conn)
        context._sentry_sql_span = span


@ensure_integration_enabled(SqlalchemyIntegration)
def _after_cursor_execute(conn, cursor, statement, parameters, context, *args):
    # type: (Any, Any, Any, Any, Any, *Any) -> None
    """Close the span opened in _before_cursor_execute, attaching query-source info."""
    ctx_mgr = getattr(
        context, "_sentry_sql_span_manager", None
    )  # type: Optional[ContextManager[Any]]

    span = getattr(context, "_sentry_sql_span", None)  # type: Optional[Span]
    if span is not None:
        with capture_internal_exceptions():
            add_query_source(span)

    if ctx_mgr is not None:
        # Clear the reference before exiting so the manager cannot be closed twice.
        context._sentry_sql_span_manager = None
        ctx_mgr.__exit__(None, None, None)


def _handle_error(context, *args):
    # type: (Any, *Any) -> None
    """Mark the in-flight query span as errored and close it."""
    execution_context = context.execution_context
    if execution_context is None:
        return

    span = getattr(execution_context, "_sentry_sql_span", None)  # type: Optional[Span]

    if span is not None:
        span.set_status(SPANSTATUS.INTERNAL_ERROR)

    # _after_cursor_execute does not get called for crashing SQL stmts. Judging
    # from SQLAlchemy codebase it does seem like any error coming into this
    # handler is going to be fatal.
    ctx_mgr = getattr(
        execution_context, "_sentry_sql_span_manager", None
    )  # type: Optional[ContextManager[Any]]

    if ctx_mgr is not None:
        execution_context._sentry_sql_span_manager = None
        ctx_mgr.__exit__(None, None, None)


# See: https://docs.sqlalchemy.org/en/20/dialects/index.html
def _get_db_system(name):
    # type: (str) -> Optional[str]
    """Map a SQLAlchemy dialect name to a canonical db.system value, or None."""
    name = str(name)

    if "sqlite" in name:
        return "sqlite"

    if "postgres" in name:
        return "postgresql"

    # NOTE: "mariadb" is checked before "mysql" since a dialect name may
    # contain both substrings.
    if "mariadb" in name:
        return "mariadb"

    if "mysql" in name:
        return "mysql"

    if "oracle" in name:
        return "oracle"

    return None


def _set_db_data(span, conn):
    # type: (Span, Any) -> None
    """Attach db.system/db.name and server address/port attributes to *span*."""
    db_system = _get_db_system(conn.engine.name)
    if db_system is not None:
        span.set_attribute(SPANDATA.DB_SYSTEM, db_system)

    if conn.engine.url is None:
        return

    db_name = conn.engine.url.database
    if db_name is not None:
        span.set_attribute(SPANDATA.DB_NAME, db_name)

    server_address = conn.engine.url.host
    if server_address is not None:
        span.set_attribute(SPANDATA.SERVER_ADDRESS, server_address)

    server_port = conn.engine.url.port
    if server_port is not None:
        span.set_attribute(SPANDATA.SERVER_PORT, server_port)
class StarletteIntegration(Integration):
    """Auto-instrumentation for Starlette applications.

    Patches middlewares, the ASGI app entry point, request/response handlers
    and (on Starlette >= 0.24) template rendering.
    """

    identifier = "starlette"
    origin = f"auto.http.{identifier}"

    transaction_style = ""

    def __init__(
        self,
        transaction_style="url",  # type: str
        failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES,  # type: Set[int]
        middleware_spans=True,  # type: bool
        http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE,  # type: tuple[str, ...]
    ):
        # type: (...) -> None
        if transaction_style not in TRANSACTION_STYLE_VALUES:
            raise ValueError(
                "Invalid value for transaction_style: %s (must be in %s)"
                % (transaction_style, TRANSACTION_STYLE_VALUES)
            )
        self.transaction_style = transaction_style
        self.middleware_spans = middleware_spans
        # Normalize to upper case so membership checks match ASGI scope methods.
        self.http_methods_to_capture = tuple(map(str.upper, http_methods_to_capture))

        self.failed_request_status_codes = failed_request_status_codes

    @staticmethod
    def setup_once():
        # type: () -> None
        version = parse_version(STARLETTE_VERSION)

        if version is None:
            raise DidNotEnable(
                "Unparsable Starlette version: {}".format(STARLETTE_VERSION)
            )

        patch_middlewares()
        patch_asgi_app()
        patch_request_response()

        # Jinja2Templates patching relies on APIs added in Starlette 0.24.
        if version >= (0, 24):
            patch_templates()
@ensure_integration_enabled(StarletteIntegration)
def _capture_exception(exception, handled=False):
    # type: (BaseException, bool) -> None
    """Report *exception* to Sentry, tagged with the Starlette mechanism."""
    event, hint = event_from_exception(
        exception,
        client_options=sentry_sdk_alpha.get_client().options,
        mechanism={"type": StarletteIntegration.identifier, "handled": handled},
    )

    sentry_sdk_alpha.capture_event(event, hint=hint)
+ """ + old_middleware_init = middleware_class.__init__ + + not_yet_patched = "_sentry_middleware_init" not in str(old_middleware_init) + + if not_yet_patched: + + def _sentry_middleware_init(self, *args, **kwargs): + # type: (Any, Any, Any) -> None + old_middleware_init(self, *args, **kwargs) + + # Patch existing exception handlers + old_handlers = self._exception_handlers.copy() + + async def _sentry_patched_exception_handler(self, *args, **kwargs): + # type: (Any, Any, Any) -> None + integration = sentry_sdk_alpha.get_client().get_integration( + StarletteIntegration + ) + + exp = args[0] + + if integration is not None: + is_http_server_error = ( + hasattr(exp, "status_code") + and isinstance(exp.status_code, int) + and exp.status_code in integration.failed_request_status_codes + ) + if is_http_server_error: + _capture_exception(exp, handled=True) + + # Find a matching handler + old_handler = None + for cls in type(exp).__mro__: + if cls in old_handlers: + old_handler = old_handlers[cls] + break + + if old_handler is None: + return + + if _is_async_callable(old_handler): + return await old_handler(self, *args, **kwargs) + else: + return old_handler(self, *args, **kwargs) + + for key in self._exception_handlers.keys(): + self._exception_handlers[key] = _sentry_patched_exception_handler + + middleware_class.__init__ = _sentry_middleware_init + + old_call = middleware_class.__call__ + + async def _sentry_exceptionmiddleware_call(self, scope, receive, send): + # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None + # Also add the user (that was eventually set by be Authentication middle + # that was called before this middleware). This is done because the authentication + # middleware sets the user in the scope and then (in the same function) + # calls this exception middelware. 
@ensure_integration_enabled(StarletteIntegration)
def _add_user_to_sentry_scope(scope):
    # type: (Dict[str, Any]) -> None
    """
    Copy user information from the ASGI scope onto Sentry's isolation
    scope — but only when sending PII is allowed.
    """
    if "user" not in scope:
        return

    if not should_send_default_pii():
        return

    starlette_user = scope["user"]

    user_info = {}  # type: Dict[str, Any]
    # Pick up the common identity attributes, skipping falsy/absent ones.
    for attribute in ("username", "id", "email"):
        value = getattr(starlette_user, attribute, None)
        if value:
            user_info.setdefault(attribute, value)

    sentry_sdk_alpha.get_isolation_scope().set_user(user_info)
def patch_middlewares():
    # type: () -> None
    """
    Patches Starlette's `Middleware` class to record
    spans for every middleware invoked.
    """
    old_middleware_init = Middleware.__init__

    # Guard against double patching: the wrapper's name shows up in str().
    not_yet_patched = "_sentry_middleware_init" not in str(old_middleware_init)

    if not_yet_patched:

        def _sentry_middleware_init(self, cls, *args, **kwargs):
            # type: (Any, Any, Any, Any) -> None
            # Never wrap our own ASGI middleware in a span-recording shim.
            if cls == SentryAsgiMiddleware:
                return old_middleware_init(self, cls, *args, **kwargs)

            span_enabled_cls = _enable_span_for_middleware(cls)
            old_middleware_init(self, span_enabled_cls, *args, **kwargs)

            if cls == AuthenticationMiddleware:
                patch_authentication_middleware(cls)

            if cls == ExceptionMiddleware:
                patch_exception_middleware(cls)

        Middleware.__init__ = _sentry_middleware_init
# This was vendored in from Starlette to support Starlette 0.19.1 because
# this function was only introduced in 0.20.x
def _is_async_callable(obj):
    # type: (Any) -> bool
    """Return True if *obj* (or the callable it ultimately wraps) is async."""
    # functools.partial may be nested arbitrarily deep; unwrap to the target.
    target = obj
    while isinstance(target, functools.partial):
        target = target.func

    if asyncio.iscoroutinefunction(target):
        return True
    return callable(target) and asyncio.iscoroutinefunction(target.__call__)
extractor.extract_request_info() + + def _make_request_event_processor(req, integration): + # type: (Any, Any) -> Callable[[Event, dict[str, Any]], Event] + def event_processor(event, hint): + # type: (Event, Dict[str, Any]) -> Event + + # Add info from request to event + request_info = event.get("request", {}) + if info: + if "cookies" in info: + request_info["cookies"] = info["cookies"] + if "data" in info: + request_info["data"] = info["data"] + event["request"] = deepcopy(request_info) + + return event + + return event_processor + + sentry_scope._name = StarletteIntegration.identifier + sentry_scope.add_event_processor( + _make_request_event_processor(request, integration) + ) + + return await old_func(*args, **kwargs) + + func = _sentry_async_func + + else: + + @functools.wraps(old_func) + def _sentry_sync_func(*args, **kwargs): + # type: (*Any, **Any) -> Any + integration = sentry_sdk_alpha.get_client().get_integration( + StarletteIntegration + ) + if integration is None: + return old_func(*args, **kwargs) + + current_scope = sentry_sdk_alpha.get_current_scope() + if current_scope.root_span is not None: + current_scope.root_span.update_active_thread() + + sentry_scope = sentry_sdk_alpha.get_isolation_scope() + if sentry_scope.profile is not None: + sentry_scope.profile.update_active_thread_id() + + request = args[0] + + _set_transaction_name_and_source( + sentry_scope, integration.transaction_style, request + ) + + extractor = StarletteRequestExtractor(request) + cookies = extractor.extract_cookies_from_request() + + def _make_request_event_processor(req, integration): + # type: (Any, Any) -> Callable[[Event, dict[str, Any]], Event] + def event_processor(event, hint): + # type: (Event, dict[str, Any]) -> Event + + # Extract information from request + request_info = event.get("request", {}) + if cookies: + request_info["cookies"] = cookies + + event["request"] = deepcopy(request_info) + + return event + + return event_processor + + sentry_scope._name = 
def patch_templates():
    # type: () -> None
    """
    Patch starlette.templating.Jinja2Templates so templates get a
    `sentry_trace_meta` context variable with trace-propagation meta tags.
    """

    # If markupsafe is not installed, then Jinja2 is not installed
    # (markupsafe is a dependency of Jinja2)
    # In this case we do not need to patch the Jinja2Templates class
    try:
        from markupsafe import Markup
    except ImportError:
        return  # Nothing to do

    from starlette.templating import Jinja2Templates  # type: ignore

    old_jinja2templates_init = Jinja2Templates.__init__

    # Guard against double patching: the wrapper's name shows up in str().
    not_yet_patched = "_sentry_jinja2templates_init" not in str(
        old_jinja2templates_init
    )

    if not_yet_patched:

        def _sentry_jinja2templates_init(self, *args, **kwargs):
            # type: (Jinja2Templates, *Any, **Any) -> None
            def add_sentry_trace_meta(request):
                # type: (Request) -> Dict[str, Any]
                trace_meta = Markup(
                    sentry_sdk_alpha.get_current_scope().trace_propagation_meta()
                )
                return {
                    "sentry_trace_meta": trace_meta,
                }

            kwargs.setdefault("context_processors", [])

            # Only register our context processor once per Templates instance.
            if add_sentry_trace_meta not in kwargs["context_processors"]:
                kwargs["context_processors"].append(add_sentry_trace_meta)

            return old_jinja2templates_init(self, *args, **kwargs)

        Jinja2Templates.__init__ = _sentry_jinja2templates_init
    # The Starlette Request being inspected; set in __init__.
    request = None  # type: Request

    def __init__(self, request):
        # type: (StarletteRequestExtractor, Request) -> None
        self.request = request

    def extract_cookies_from_request(self):
        # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
        """Return the request cookies, but only when sending PII is allowed."""
        cookies = None  # type: Optional[Dict[str, Any]]
        if should_send_default_pii():
            cookies = self.cookies()

        return cookies

    async def extract_request_info(self):
        # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
        """
        Collect cookies and a best-effort representation of the request body
        (JSON, form data, or a removal annotation) for the event payload.
        """
        client = sentry_sdk_alpha.get_client()

        request_info = {}  # type: Dict[str, Any]

        with capture_internal_exceptions():
            # Add cookies
            if should_send_default_pii():
                request_info["cookies"] = self.cookies()

            # If there is no body, just return the cookies
            content_length = await self.content_length()
            if not content_length:
                return request_info

            # Add annotation if body is too big
            if content_length and not request_body_within_bounds(
                client, content_length
            ):
                request_info["data"] = AnnotatedValue.removed_because_over_size_limit()
                return request_info

            # Add JSON body, if it is a JSON request
            json = await self.json()
            if json:
                request_info["data"] = json
                return request_info

            # Add form as key/value pairs, if request has form data
            form = await self.form()
            if form:
                form_data = {}
                for key, val in form.items():
                    is_file = isinstance(val, UploadFile)
                    # File uploads are never sent raw; replace with annotation.
                    form_data[key] = (
                        val
                        if not is_file
                        else AnnotatedValue.removed_because_raw_data()
                    )

                request_info["data"] = form_data
                return request_info

            # Raw data, do not add body just an annotation
            request_info["data"] = AnnotatedValue.removed_because_raw_data()
            return request_info

    async def content_length(self):
        # type: (StarletteRequestExtractor) -> Optional[int]
        """Parse the Content-Length header, or None when it is absent."""
        if "content-length" in self.request.headers:
            return int(self.request.headers["content-length"])

        return None
def _transaction_name_from_router(scope):
    # type: (StarletteScope) -> Optional[str]
    """Resolve the matched route path for *scope* from the app router, if any."""
    router = scope.get("router")
    if not router:
        return None

    for route in router.routes:
        match = route.matches(scope)
        if match[0] == Match.FULL:
            try:
                return route.path
            except AttributeError:
                # routes added via app.host() won't have a path attribute
                return scope.get("path")

    return None


def _set_transaction_name_and_source(scope, transaction_style, request):
    # type: (sentry_sdk_alpha.Scope, str, Any) -> None
    """Set the transaction name on *scope* per the configured transaction_style."""
    name = None
    source = SOURCE_FOR_STYLE[transaction_style]

    if transaction_style == "endpoint":
        endpoint = request.scope.get("endpoint")
        if endpoint:
            name = transaction_from_function(endpoint) or None

    elif transaction_style == "url":
        name = _transaction_name_from_router(request.scope)

    if name is None:
        # Fall back to a generic name so the event still gets grouped somewhere.
        name = _DEFAULT_TRANSACTION_NAME
        source = TransactionSource.ROUTE

    scope.set_transaction_name(name, source=source)
    logger.debug(
        "[Starlette] Set transaction name and source on scope: %s / %s", name, source
    )
StarletteIntegration) -> Tuple[Optional[str], Optional[str]] + name = None + source = None + + if integration.transaction_style == "endpoint": + name = transaction_from_function(app.__class__) + source = TransactionSource.COMPONENT + elif integration.transaction_style == "url": + name = _transaction_name_from_router(asgi_scope) + source = TransactionSource.ROUTE + + return name, source diff --git a/src/sentry_sdk_alpha/integrations/starlite.py b/src/sentry_sdk_alpha/integrations/starlite.py new file mode 100644 index 00000000000000..26069c72b06caf --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/starlite.py @@ -0,0 +1,294 @@ +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP, SOURCE_FOR_STYLE, TransactionSource +from sentry_sdk_alpha.integrations import DidNotEnable, Integration +from sentry_sdk_alpha.integrations.asgi import SentryAsgiMiddleware +from sentry_sdk_alpha.scope import should_send_default_pii +from sentry_sdk_alpha.utils import ( + ensure_integration_enabled, + event_from_exception, + transaction_from_function, +) + +try: + from starlite import Request, Starlite, State # type: ignore + from starlite.handlers.base import BaseRouteHandler # type: ignore + from starlite.middleware import DefineMiddleware # type: ignore + from starlite.plugins.base import get_plugin_for_value # type: ignore + from starlite.routes.http import HTTPRoute # type: ignore + from starlite.utils import ConnectionDataExtractor, is_async_callable, Ref # type: ignore + from pydantic import BaseModel # type: ignore +except ImportError: + raise DidNotEnable("Starlite is not installed") + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Optional, Union + from starlite.types import ( # type: ignore + ASGIApp, + Hint, + HTTPReceiveMessage, + HTTPScope, + Message, + Middleware, + Receive, + Scope as StarliteScope, + Send, + WebSocketReceiveMessage, + ) + from starlite import MiddlewareProtocol + from sentry_sdk_alpha._types import Event + 
_DEFAULT_TRANSACTION_NAME = "generic Starlite request"


class StarliteIntegration(Integration):
    """Auto-instrumentation for Starlite applications."""

    identifier = "starlite"
    origin = f"auto.http.{identifier}"

    @staticmethod
    def setup_once():
        # type: () -> None
        patch_app_init()
        patch_middlewares()
        patch_http_route_handle()


class SentryStarliteASGIMiddleware(SentryAsgiMiddleware):
    """Preconfigured SentryAsgiMiddleware for Starlite apps."""

    def __init__(self, app, span_origin=StarliteIntegration.origin):
        # type: (ASGIApp, str) -> None
        super().__init__(
            app=app,
            unsafe_context_data=False,
            transaction_style="endpoint",
            mechanism_type="asgi",
            span_origin=span_origin,
        )


def patch_app_init():
    # type: () -> None
    """
    Replaces the Starlite class's `__init__` function in order to inject `after_exception` handlers and set the
    `SentryStarliteASGIMiddleware` as the outermost middleware in the stack.
    See:
    - https://starlite-api.github.io/starlite/usage/0-the-starlite-app/5-application-hooks/#after-exception
    - https://starlite-api.github.io/starlite/usage/7-middleware/0-middleware-intro/
    """
    old__init__ = Starlite.__init__

    @ensure_integration_enabled(StarliteIntegration, old__init__)
    def injection_wrapper(self, *args, **kwargs):
        # type: (Starlite, *Any, **Any) -> None
        # Prepend our exception handler to any user-supplied after_exception hooks.
        after_exception = kwargs.pop("after_exception", [])
        kwargs.update(
            after_exception=[
                exception_handler,
                *(
                    after_exception
                    if isinstance(after_exception, list)
                    else [after_exception]
                ),
            ]
        )

        SentryStarliteASGIMiddleware.__call__ = SentryStarliteASGIMiddleware._run_asgi3  # type: ignore
        middleware = kwargs.get("middleware") or []
        # Our ASGI middleware goes first so the transaction starts earliest.
        kwargs["middleware"] = [SentryStarliteASGIMiddleware, *middleware]
        old__init__(self, *args, **kwargs)

    Starlite.__init__ = injection_wrapper
def enable_span_for_middleware(middleware):
    # type: (Middleware) -> Middleware
    """
    Wrap *middleware*'s __call__ so each invocation (and its receive/send
    callbacks) is recorded as a span. Non-callable middlewares and our own
    ASGI middleware are returned unchanged.
    """
    if (
        not hasattr(middleware, "__call__")  # noqa: B004
        or middleware is SentryStarliteASGIMiddleware
    ):
        return middleware

    if isinstance(middleware, DefineMiddleware):
        old_call = middleware.middleware.__call__  # type: ASGIApp
    else:
        old_call = middleware.__call__

    async def _create_span_call(self, scope, receive, send):
        # type: (MiddlewareProtocol, StarliteScope, Receive, Send) -> None
        if sentry_sdk_alpha.get_client().get_integration(StarliteIntegration) is None:
            return await old_call(self, scope, receive, send)

        middleware_name = self.__class__.__name__
        with sentry_sdk_alpha.start_span(
            op=OP.MIDDLEWARE_STARLITE,
            name=middleware_name,
            origin=StarliteIntegration.origin,
            only_if_parent=True,
        ) as middleware_span:
            middleware_span.set_tag("starlite.middleware_name", middleware_name)

            # Creating spans for the "receive" callback
            async def _sentry_receive(*args, **kwargs):
                # type: (*Any, **Any) -> Union[HTTPReceiveMessage, WebSocketReceiveMessage]
                if sentry_sdk_alpha.get_client().get_integration(StarliteIntegration) is None:
                    return await receive(*args, **kwargs)
                with sentry_sdk_alpha.start_span(
                    op=OP.MIDDLEWARE_STARLITE_RECEIVE,
                    name=getattr(receive, "__qualname__", str(receive)),
                    origin=StarliteIntegration.origin,
                    only_if_parent=True,
                ) as span:
                    span.set_tag("starlite.middleware_name", middleware_name)
                    return await receive(*args, **kwargs)

            receive_name = getattr(receive, "__name__", str(receive))
            receive_patched = receive_name == "_sentry_receive"
            new_receive = _sentry_receive if not receive_patched else receive

            # Creating spans for the "send" callback
            async def _sentry_send(message):
                # type: (Message) -> None
                if sentry_sdk_alpha.get_client().get_integration(StarliteIntegration) is None:
                    return await send(message)
                with sentry_sdk_alpha.start_span(
                    op=OP.MIDDLEWARE_STARLITE_SEND,
                    name=getattr(send, "__qualname__", str(send)),
                    origin=StarliteIntegration.origin,
                    only_if_parent=True,
                ) as span:
                    span.set_tag("starlite.middleware_name", middleware_name)
                    return await send(message)

            send_name = getattr(send, "__name__", str(send))
            send_patched = send_name == "_sentry_send"
            new_send = _sentry_send if not send_patched else send

            return await old_call(self, scope, new_receive, new_send)

    # Avoid double-wrapping when __call__ has already been patched.
    not_yet_patched = old_call.__name__ not in ["_create_span_call"]

    if not_yet_patched:
        if isinstance(middleware, DefineMiddleware):
            middleware.middleware.__call__ = _create_span_call
        else:
            middleware.__call__ = _create_span_call

    return middleware
def retrieve_user_from_scope(scope):
    # type: (StarliteScope) -> Optional[dict[str, Any]]
    """Best-effort extraction of the request user from the ASGI scope as a dict."""
    user = scope.get("user")

    if not user:
        return None

    if isinstance(user, dict):
        # Already plain data; hand it back untouched.
        return user

    if isinstance(user, BaseModel):
        return user.dict()

    if hasattr(user, "asdict"):  # dataclasses
        return user.asdict()

    # Fall back to a Starlite serialization plugin, if one applies and its
    # to_dict is synchronous.
    plugin = get_plugin_for_value(user)
    if plugin and not is_async_callable(plugin.to_dict):
        return plugin.to_dict(user)

    return None
class StatsigIntegration(Integration):
    """Records statsig feature-gate evaluations as Sentry feature flags."""

    identifier = "statsig"

    @staticmethod
    def setup_once():
        # type: () -> None
        version = parse_version(STATSIG_VERSION)
        _check_minimum_version(StatsigIntegration, version, "statsig")

        # Wrap and patch evaluation method(s) in the statsig module
        old_check_gate = statsig_module.check_gate

        @wraps(old_check_gate)
        def sentry_check_gate(user, gate, *args, **kwargs):
            # type: (StatsigUser, str, *Any, **Any) -> Any
            enabled = old_check_gate(user, gate, *args, **kwargs)
            # Record the evaluated gate so it shows up on subsequent events.
            add_feature_flag(gate, enabled)
            return enabled

        statsig_module.check_gate = sentry_check_gate
capture_internal_exceptions, + ensure_integration_enabled, + get_current_thread_meta, + http_client_status_to_breadcrumb_level, + is_sentry_url, + logger, + safe_repr, + parse_url, + set_thread_info_from_span, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import Callable + from typing import Dict + from typing import Optional + from typing import List + + from sentry_sdk_alpha._types import Event, Hint + + +_RUNTIME_CONTEXT = { + "name": platform.python_implementation(), + "version": "%s.%s.%s" % (sys.version_info[:3]), + "build": sys.version, +} # type: dict[str, object] + + +class StdlibIntegration(Integration): + identifier = "stdlib" + + @staticmethod + def setup_once(): + # type: () -> None + _install_httplib() + _install_subprocess() + + @add_global_event_processor + def add_python_runtime_context(event, hint): + # type: (Event, Hint) -> Optional[Event] + if sentry_sdk_alpha.get_client().get_integration(StdlibIntegration) is not None: + contexts = event.setdefault("contexts", {}) + if isinstance(contexts, dict) and "runtime" not in contexts: + contexts["runtime"] = _RUNTIME_CONTEXT + + return event + + +def _install_httplib(): + # type: () -> None + real_putrequest = HTTPConnection.putrequest + real_getresponse = HTTPConnection.getresponse + + def putrequest(self, method, url, *args, **kwargs): + # type: (HTTPConnection, str, str, *Any, **Any) -> Any + host = self.host + port = self.port + default_port = self.default_port + + client = sentry_sdk_alpha.get_client() + if client.get_integration(StdlibIntegration) is None or is_sentry_url( + client, f"{host}:{port}" # noqa: E231 + ): + return real_putrequest(self, method, url, *args, **kwargs) + + real_url = url + if real_url is None or not real_url.startswith(("http://", "https://")): + real_url = "%s://%s%s%s" % ( + default_port == 443 and "https" or "http", + host, + port != default_port and ":%s" % port or "", + url, + ) + + parsed_url = None + with 
capture_internal_exceptions(): + parsed_url = parse_url(real_url, sanitize=False) + + span = sentry_sdk_alpha.start_span( + op=OP.HTTP_CLIENT, + name="%s %s" + % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE), + origin="auto.http.stdlib.httplib", + only_if_parent=True, + ) + span.__enter__() + + data = { + SPANDATA.HTTP_METHOD: method, + } + set_thread_info_from_span(data, span) + + if parsed_url is not None: + data["url"] = parsed_url.url + data[SPANDATA.HTTP_QUERY] = parsed_url.query + data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment + + for key, value in data.items(): + span.set_attribute(key, value) + + rv = real_putrequest(self, method, url, *args, **kwargs) + + if should_propagate_trace(client, real_url): + for ( + key, + value, + ) in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers( + span=span + ): + logger.debug( + "[Tracing] Adding `{key}` header {value} to outgoing request to {real_url}.".format( + key=key, value=value, real_url=real_url + ) + ) + self.putheader(key, value) + + self._sentrysdk_span = span # type: ignore[attr-defined] + self._sentrysdk_span_data = data # type: ignore[attr-defined] + + return rv + + def getresponse(self, *args, **kwargs): + # type: (HTTPConnection, *Any, **Any) -> Any + span = getattr(self, "_sentrysdk_span", None) + + if span is None: + return real_getresponse(self, *args, **kwargs) + + try: + rv = real_getresponse(self, *args, **kwargs) + + span_data = getattr(self, "_sentrysdk_span_data", {}) + span_data[SPANDATA.HTTP_STATUS_CODE] = int(rv.status) + span_data["reason"] = rv.reason + + status_code = int(rv.status) + span.set_http_status(status_code) + span.set_attribute("reason", rv.reason) + + sentry_sdk_alpha.add_breadcrumb( + type="http", + category="httplib", + data=span_data, + level=http_client_status_to_breadcrumb_level(status_code), + ) + finally: + span.__exit__(None, None, None) + + return rv + + HTTPConnection.putrequest = putrequest # type: ignore[method-assign] + 
HTTPConnection.getresponse = getresponse # type: ignore[method-assign] + + +def _init_argument(args, kwargs, name, position, setdefault_callback=None): + # type: (List[Any], Dict[Any, Any], str, int, Optional[Callable[[Any], Any]]) -> Any + """ + given (*args, **kwargs) of a function call, retrieve (and optionally set a + default for) an argument by either name or position. + + This is useful for wrapping functions with complex type signatures and + extracting a few arguments without needing to redefine that function's + entire type signature. + """ + + if name in kwargs: + rv = kwargs[name] + if setdefault_callback is not None: + rv = setdefault_callback(rv) + if rv is not None: + kwargs[name] = rv + elif position < len(args): + rv = args[position] + if setdefault_callback is not None: + rv = setdefault_callback(rv) + if rv is not None: + args[position] = rv + else: + rv = setdefault_callback and setdefault_callback(None) + if rv is not None: + kwargs[name] = rv + + return rv + + +def _install_subprocess(): + # type: () -> None + old_popen_init = subprocess.Popen.__init__ + + @ensure_integration_enabled(StdlibIntegration, old_popen_init) + def sentry_patched_popen_init(self, *a, **kw): + # type: (subprocess.Popen[Any], *Any, **Any) -> None + # Convert from tuple to list to be able to set values. + a = list(a) + + args = _init_argument(a, kw, "args", 0) or [] + cwd = _init_argument(a, kw, "cwd", 9) + + # if args is not a list or tuple (and e.g. some iterator instead), + # let's not use it at all. There are too many things that can go wrong + # when trying to collect an iterator into a list and setting that list + # into `a` again. + # + # Also invocations where `args` is not a sequence are not actually + # legal. They just happen to work under CPython. 
+ description = None + + if isinstance(args, (list, tuple)) and len(args) < 100: + with capture_internal_exceptions(): + description = " ".join(map(str, args)) + + if description is None: + description = safe_repr(args) + + env = None + + with sentry_sdk_alpha.start_span( + op=OP.SUBPROCESS, + name=description, + origin="auto.subprocess.stdlib.subprocess", + only_if_parent=True, + ) as span: + for k, v in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers( + span=span + ): + if env is None: + env = _init_argument( + a, + kw, + "env", + 10, + lambda x: dict(x if x is not None else os.environ), + ) + env["SUBPROCESS_" + k.upper().replace("-", "_")] = v + + if cwd: + span.set_attribute("subprocess.cwd", cwd) + + rv = old_popen_init(self, *a, **kw) + + span.set_tag("subprocess.pid", self.pid) + + with capture_internal_exceptions(): + thread_id, thread_name = get_current_thread_meta() + breadcrumb_data = { + "subprocess.pid": self.pid, + SPANDATA.THREAD_ID: thread_id, + SPANDATA.THREAD_NAME: thread_name, + } + if cwd: + breadcrumb_data["subprocess.cwd"] = cwd + + sentry_sdk_alpha.add_breadcrumb( + type="subprocess", + category="subprocess", + message=description, + data=breadcrumb_data, + ) + + return rv + + subprocess.Popen.__init__ = sentry_patched_popen_init # type: ignore + + old_popen_wait = subprocess.Popen.wait + + @ensure_integration_enabled(StdlibIntegration, old_popen_wait) + def sentry_patched_popen_wait(self, *a, **kw): + # type: (subprocess.Popen[Any], *Any, **Any) -> Any + with sentry_sdk_alpha.start_span( + op=OP.SUBPROCESS_WAIT, + origin="auto.subprocess.stdlib.subprocess", + only_if_parent=True, + ) as span: + span.set_tag("subprocess.pid", self.pid) + return old_popen_wait(self, *a, **kw) + + subprocess.Popen.wait = sentry_patched_popen_wait # type: ignore + + old_popen_communicate = subprocess.Popen.communicate + + @ensure_integration_enabled(StdlibIntegration, old_popen_communicate) + def sentry_patched_popen_communicate(self, *a, 
**kw): + # type: (subprocess.Popen[Any], *Any, **Any) -> Any + with sentry_sdk_alpha.start_span( + op=OP.SUBPROCESS_COMMUNICATE, + origin="auto.subprocess.stdlib.subprocess", + only_if_parent=True, + ) as span: + span.set_tag("subprocess.pid", self.pid) + return old_popen_communicate(self, *a, **kw) + + subprocess.Popen.communicate = sentry_patched_popen_communicate # type: ignore + + +def get_subprocess_traceparent_headers(): + # type: () -> EnvironHeaders + return EnvironHeaders(os.environ, prefix="SUBPROCESS_") diff --git a/src/sentry_sdk_alpha/integrations/strawberry.py b/src/sentry_sdk_alpha/integrations/strawberry.py new file mode 100644 index 00000000000000..09a7cfc3eebaaf --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/strawberry.py @@ -0,0 +1,374 @@ +import functools +import hashlib +from inspect import isawaitable + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import OP +from sentry_sdk_alpha.integrations import _check_minimum_version, Integration, DidNotEnable +from sentry_sdk_alpha.integrations.logging import ignore_logger +from sentry_sdk_alpha.scope import should_send_default_pii +from sentry_sdk_alpha.tracing import TransactionSource +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + ensure_integration_enabled, + event_from_exception, + logger, + package_version, + _get_installed_modules, +) + +try: + from functools import cached_property +except ImportError: + # The strawberry integration requires Python 3.8+. functools.cached_property + # was added in 3.8, so this check is technically not needed, but since this + # is an auto-enabling integration, we might get to executing this import in + # lower Python versions, so we need to deal with it. 
+ raise DidNotEnable("strawberry-graphql integration requires Python 3.8 or newer") + +try: + from strawberry import Schema + from strawberry.extensions import SchemaExtension + from strawberry.extensions.tracing.utils import ( + should_skip_tracing as strawberry_should_skip_tracing, + ) + from strawberry.http import async_base_view, sync_base_view +except ImportError: + raise DidNotEnable("strawberry-graphql is not installed") + +try: + from strawberry.extensions.tracing import ( + SentryTracingExtension as StrawberrySentryAsyncExtension, + SentryTracingExtensionSync as StrawberrySentrySyncExtension, + ) +except ImportError: + StrawberrySentryAsyncExtension = None + StrawberrySentrySyncExtension = None + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Callable, Generator, List, Optional + from graphql import GraphQLError, GraphQLResolveInfo + from strawberry.http import GraphQLHTTPResponse + from strawberry.types import ExecutionContext + from sentry_sdk_alpha._types import Event, EventProcessor + + +ignore_logger("strawberry.execution") + + +class StrawberryIntegration(Integration): + identifier = "strawberry" + origin = f"auto.graphql.{identifier}" + + def __init__(self, async_execution=None): + # type: (Optional[bool]) -> None + if async_execution not in (None, False, True): + raise ValueError( + 'Invalid value for async_execution: "{}" (must be bool)'.format( + async_execution + ) + ) + self.async_execution = async_execution + + @staticmethod + def setup_once(): + # type: () -> None + version = package_version("strawberry-graphql") + _check_minimum_version(StrawberryIntegration, version, "strawberry-graphql") + + _patch_schema_init() + _patch_views() + + +def _patch_schema_init(): + # type: () -> None + old_schema_init = Schema.__init__ + + @functools.wraps(old_schema_init) + def _sentry_patched_schema_init(self, *args, **kwargs): + # type: (Schema, Any, Any) -> None + integration = 
sentry_sdk_alpha.get_client().get_integration(StrawberryIntegration) + if integration is None: + return old_schema_init(self, *args, **kwargs) + + extensions = kwargs.get("extensions") or [] + + if integration.async_execution is not None: + should_use_async_extension = integration.async_execution + else: + # try to figure it out ourselves + should_use_async_extension = _guess_if_using_async(extensions) + + logger.info( + "Assuming strawberry is running %s. If not, initialize it as StrawberryIntegration(async_execution=%s).", + "async" if should_use_async_extension else "sync", + "False" if should_use_async_extension else "True", + ) + + # add our extension + extensions.append( + SentryAsyncExtension if should_use_async_extension else SentrySyncExtension + ) + + kwargs["extensions"] = extensions + + return old_schema_init(self, *args, **kwargs) + + Schema.__init__ = _sentry_patched_schema_init # type: ignore[method-assign] + + +class SentryAsyncExtension(SchemaExtension): + def __init__( + self, + *, + execution_context=None, + ): + # type: (Any, Optional[ExecutionContext]) -> None + if execution_context: + self.execution_context = execution_context + + @cached_property + def _resource_name(self): + # type: () -> str + query_hash = self.hash_query(self.execution_context.query) # type: ignore + + if self.execution_context.operation_name: + return "{}:{}".format(self.execution_context.operation_name, query_hash) + + return query_hash + + def hash_query(self, query): + # type: (str) -> str + return hashlib.md5(query.encode("utf-8")).hexdigest() + + def on_operation(self): + # type: () -> Generator[None, None, None] + self._operation_name = self.execution_context.operation_name + + operation_type = "query" + op = OP.GRAPHQL_QUERY + + if self.execution_context.query is None: + self.execution_context.query = "" + + if self.execution_context.query.strip().startswith("mutation"): + operation_type = "mutation" + op = OP.GRAPHQL_MUTATION + elif 
self.execution_context.query.strip().startswith("subscription"): + operation_type = "subscription" + op = OP.GRAPHQL_SUBSCRIPTION + + description = operation_type + if self._operation_name: + description += " {}".format(self._operation_name) + + sentry_sdk_alpha.add_breadcrumb( + category="graphql.operation", + data={ + "operation_name": self._operation_name, + "operation_type": operation_type, + }, + ) + + scope = sentry_sdk_alpha.get_isolation_scope() + event_processor = _make_request_event_processor(self.execution_context) + scope.add_event_processor(event_processor) + + with sentry_sdk_alpha.start_span( + op=op, + name=description, + origin=StrawberryIntegration.origin, + only_if_parent=True, + ) as graphql_span: + graphql_span.set_attribute("graphql.operation.type", operation_type) + graphql_span.set_attribute("graphql.document", self.execution_context.query) + graphql_span.set_attribute("graphql.resource_name", self._resource_name) + + yield + + # we might have a more accurate operation_name after the parsing + self._operation_name = self.execution_context.operation_name + + if self._operation_name is not None: + graphql_span.set_attribute( + "graphql.operation.name", self._operation_name + ) + + sentry_sdk_alpha.get_current_scope().set_transaction_name( + self._operation_name, + source=TransactionSource.COMPONENT, + ) + + root_span = graphql_span.root_span + if root_span: + root_span.op = op + + def on_validate(self): + # type: () -> Generator[None, None, None] + with sentry_sdk_alpha.start_span( + op=OP.GRAPHQL_VALIDATE, + name="validation", + origin=StrawberryIntegration.origin, + ): + yield + + def on_parse(self): + # type: () -> Generator[None, None, None] + with sentry_sdk_alpha.start_span( + op=OP.GRAPHQL_PARSE, + name="parsing", + origin=StrawberryIntegration.origin, + ): + yield + + def should_skip_tracing(self, _next, info): + # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], GraphQLResolveInfo) -> bool + return 
strawberry_should_skip_tracing(_next, info) + + async def _resolve(self, _next, root, info, *args, **kwargs): + # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any + result = _next(root, info, *args, **kwargs) + + if isawaitable(result): + result = await result + + return result + + async def resolve(self, _next, root, info, *args, **kwargs): + # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any + if self.should_skip_tracing(_next, info): + return await self._resolve(_next, root, info, *args, **kwargs) + + field_path = "{}.{}".format(info.parent_type, info.field_name) + + with sentry_sdk_alpha.start_span( + op=OP.GRAPHQL_RESOLVE, + name="resolving {}".format(field_path), + origin=StrawberryIntegration.origin, + ) as span: + span.set_attribute("graphql.field_name", info.field_name) + span.set_attribute("graphql.parent_type", info.parent_type.name) + span.set_attribute("graphql.field_path", field_path) + span.set_attribute("graphql.path", ".".join(map(str, info.path.as_list()))) + + return await self._resolve(_next, root, info, *args, **kwargs) + + +class SentrySyncExtension(SentryAsyncExtension): + def resolve(self, _next, root, info, *args, **kwargs): + # type: (Callable[[Any, Any, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any + if self.should_skip_tracing(_next, info): + return _next(root, info, *args, **kwargs) + + field_path = "{}.{}".format(info.parent_type, info.field_name) + + with sentry_sdk_alpha.start_span( + op=OP.GRAPHQL_RESOLVE, + name="resolving {}".format(field_path), + origin=StrawberryIntegration.origin, + ) as span: + span.set_attribute("graphql.field_name", info.field_name) + span.set_attribute("graphql.parent_type", info.parent_type.name) + span.set_attribute("graphql.field_path", field_path) + span.set_attribute("graphql.path", ".".join(map(str, info.path.as_list()))) + + return _next(root, info, *args, **kwargs) + + +def 
_patch_views(): + # type: () -> None + old_async_view_handle_errors = async_base_view.AsyncBaseHTTPView._handle_errors + old_sync_view_handle_errors = sync_base_view.SyncBaseHTTPView._handle_errors + + def _sentry_patched_async_view_handle_errors(self, errors, response_data): + # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None + old_async_view_handle_errors(self, errors, response_data) + _sentry_patched_handle_errors(self, errors, response_data) + + def _sentry_patched_sync_view_handle_errors(self, errors, response_data): + # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None + old_sync_view_handle_errors(self, errors, response_data) + _sentry_patched_handle_errors(self, errors, response_data) + + @ensure_integration_enabled(StrawberryIntegration) + def _sentry_patched_handle_errors(self, errors, response_data): + # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None + if not errors: + return + + scope = sentry_sdk_alpha.get_isolation_scope() + event_processor = _make_response_event_processor(response_data) + scope.add_event_processor(event_processor) + + with capture_internal_exceptions(): + for error in errors: + event, hint = event_from_exception( + error, + client_options=sentry_sdk_alpha.get_client().options, + mechanism={ + "type": StrawberryIntegration.identifier, + "handled": False, + }, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) + + async_base_view.AsyncBaseHTTPView._handle_errors = ( # type: ignore[method-assign] + _sentry_patched_async_view_handle_errors + ) + sync_base_view.SyncBaseHTTPView._handle_errors = ( # type: ignore[method-assign] + _sentry_patched_sync_view_handle_errors + ) + + +def _make_request_event_processor(execution_context): + # type: (ExecutionContext) -> EventProcessor + + def inner(event, hint): + # type: (Event, dict[str, Any]) -> Event + with capture_internal_exceptions(): + if should_send_default_pii(): + request_data = event.setdefault("request", {}) + request_data["api_target"] = 
"graphql" + + if not request_data.get("data"): + data = {"query": execution_context.query} # type: dict[str, Any] + if execution_context.variables: + data["variables"] = execution_context.variables + if execution_context.operation_name: + data["operationName"] = execution_context.operation_name + + request_data["data"] = data + + else: + try: + del event["request"]["data"] + except (KeyError, TypeError): + pass + + return event + + return inner + + +def _make_response_event_processor(response_data): + # type: (GraphQLHTTPResponse) -> EventProcessor + + def inner(event, hint): + # type: (Event, dict[str, Any]) -> Event + with capture_internal_exceptions(): + if should_send_default_pii(): + contexts = event.setdefault("contexts", {}) + contexts["response"] = {"data": response_data} + + return event + + return inner + + +def _guess_if_using_async(extensions): + # type: (List[SchemaExtension]) -> bool + return bool( + {"starlette", "starlite", "litestar", "fastapi"} & set(_get_installed_modules()) + ) diff --git a/src/sentry_sdk_alpha/integrations/sys_exit.py b/src/sentry_sdk_alpha/integrations/sys_exit.py new file mode 100644 index 00000000000000..3ced5fe5db1fdc --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/sys_exit.py @@ -0,0 +1,70 @@ +import functools +import sys + +import sentry_sdk_alpha +from sentry_sdk_alpha.utils import capture_internal_exceptions, event_from_exception +from sentry_sdk_alpha.integrations import Integration +from sentry_sdk_alpha._types import TYPE_CHECKING + +if TYPE_CHECKING: + from collections.abc import Callable + from typing import NoReturn, Union + + +class SysExitIntegration(Integration): + """Captures sys.exit calls and sends them as events to Sentry. + + By default, SystemExit exceptions are not captured by the SDK. Enabling this integration will capture SystemExit + exceptions generated by sys.exit calls and send them to Sentry. 
+ + This integration, in its default configuration, only captures the sys.exit call if the exit code is a non-zero and + non-None value (unsuccessful exits). Pass `capture_successful_exits=True` to capture successful exits as well. + Note that the integration does not capture SystemExit exceptions raised outside a call to sys.exit. + """ + + identifier = "sys_exit" + + def __init__(self, *, capture_successful_exits=False): + # type: (bool) -> None + self._capture_successful_exits = capture_successful_exits + + @staticmethod + def setup_once(): + # type: () -> None + SysExitIntegration._patch_sys_exit() + + @staticmethod + def _patch_sys_exit(): + # type: () -> None + old_exit = sys.exit # type: Callable[[Union[str, int, None]], NoReturn] + + @functools.wraps(old_exit) + def sentry_patched_exit(__status=0): + # type: (Union[str, int, None]) -> NoReturn + # @ensure_integration_enabled ensures that this is non-None + integration = sentry_sdk_alpha.get_client().get_integration(SysExitIntegration) + if integration is None: + old_exit(__status) + + try: + old_exit(__status) + except SystemExit as e: + with capture_internal_exceptions(): + if integration._capture_successful_exits or __status not in ( + 0, + None, + ): + _capture_exception(e) + raise e + + sys.exit = sentry_patched_exit + + +def _capture_exception(exc): + # type: (SystemExit) -> None + event, hint = event_from_exception( + exc, + client_options=sentry_sdk_alpha.get_client().options, + mechanism={"type": SysExitIntegration.identifier, "handled": False}, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) diff --git a/src/sentry_sdk_alpha/integrations/threading.py b/src/sentry_sdk_alpha/integrations/threading.py new file mode 100644 index 00000000000000..4ee77c65065b9c --- /dev/null +++ b/src/sentry_sdk_alpha/integrations/threading.py @@ -0,0 +1,134 @@ +import sys +import warnings +from functools import wraps +from threading import Thread, current_thread + +import sentry_sdk_alpha +from sentry_sdk_alpha 
class ThreadingIntegration(Integration):
    """Propagates Sentry scopes into threads started via ``Thread.start``.

    With ``propagate_scope=True`` (the default) the starting thread's
    isolation/current scopes are carried into the new thread; otherwise the
    new thread gets fresh, empty scopes.
    """

    identifier = "threading"

    def __init__(self, propagate_scope=True):
        # type: (bool) -> None
        self.propagate_scope = propagate_scope

    @staticmethod
    def setup_once():
        # type: () -> None
        # Monkeypatch Thread.start so each thread's run() is wrapped with the
        # scopes captured at start() time.
        old_start = Thread.start

        # Detect the Django-channels-on-old-Python combination up front; the
        # warning below only applies to that setup.
        try:
            from django import VERSION as django_version  # noqa: N811
            import channels  # type: ignore[import-not-found]

            channels_version = channels.__version__
        except ImportError:
            django_version = None
            channels_version = None

        @wraps(old_start)
        def sentry_start(self, *a, **kw):
            # type: (Thread, *Any, **Any) -> Any
            integration = sentry_sdk_alpha.get_client().get_integration(ThreadingIntegration)
            if integration is None:
                return old_start(self, *a, **kw)

            if integration.propagate_scope:
                if (
                    sys.version_info < (3, 9)
                    and channels_version is not None
                    and channels_version < "4.0.0"
                    and django_version is not None
                    and django_version >= (3, 0)
                    and django_version < (4, 0)
                ):
                    warnings.warn(
                        "There is a known issue with Django channels 2.x and 3.x when using Python 3.8 or older. "
                        "(Async support is emulated using threads and some Sentry data may be leaked between those threads.) "
                        "Please either upgrade to Django channels 4.0+, use Django's async features "
                        "available in Django 3.1+ instead of Django channels, or upgrade to Python 3.9+.",
                        stacklevel=2,
                    )
                    # Known-problematic combination: pass the live scopes
                    # through unforked instead of forking them.
                    isolation_scope = sentry_sdk_alpha.get_isolation_scope()
                    current_scope = sentry_sdk_alpha.get_current_scope()

                else:
                    isolation_scope = sentry_sdk_alpha.get_isolation_scope().fork()
                    current_scope = sentry_sdk_alpha.get_current_scope().fork()
            else:
                # No propagation requested: the thread starts with empty scopes.
                isolation_scope = Scope(ty=ScopeType.ISOLATION)
                current_scope = Scope(ty=ScopeType.CURRENT)

            # Patching instance methods in `start()` creates a reference cycle if
            # done in a naive way. See
            # https://github.com/getsentry/sentry-python/pull/434
            #
            # In threading module, using current_thread API will access current thread instance
            # without holding it to avoid a reference cycle in an easier way.
            with capture_internal_exceptions():
                new_run = _wrap_run(
                    isolation_scope,
                    current_scope,
                    getattr(self.run, "__func__", self.run),
                )
                self.run = new_run  # type: ignore

            return old_start(self, *a, **kw)

        Thread.start = sentry_start  # type: ignore
# Maps tornado.httputil.HTTPServerRequest attribute names to the span
# attribute names they are recorded under in _prepopulate_attributes().
REQUEST_PROPERTY_TO_ATTRIBUTE = {
    "method": "http.request.method",
    "path": "url.path",
    "query": "url.query",
    "protocol": "url.scheme",
}
@contextlib.contextmanager
def _handle_request_impl(self):
    # type: (RequestHandler) -> Generator[None, None, None]
    """Context manager wrapped around Tornado's ``RequestHandler._execute``.

    Sets up an isolation scope, an event processor bound to the handler, and
    an HTTP server span for the duration of the request. If the integration
    is not active, the request runs completely uninstrumented.
    """
    integration = sentry_sdk_alpha.get_client().get_integration(TornadoIntegration)

    if integration is None:
        yield
        # Bug fix: without this return, the generator would fall through to
        # the instrumented path below and yield a second time, making
        # @contextlib.contextmanager raise RuntimeError("generator didn't
        # stop") when the caller's with-block exits.
        return

    # Weak reference so the event processor does not keep the handler (and
    # its request/response buffers) alive past the request lifecycle.
    weak_handler = weakref.ref(self)

    with sentry_sdk_alpha.isolation_scope() as scope:
        headers = self.request.headers

        scope.clear_breadcrumbs()
        processor = _make_event_processor(weak_handler)
        scope.add_event_processor(processor)

        with sentry_sdk_alpha.continue_trace(headers):
            with sentry_sdk_alpha.start_span(
                op=OP.HTTP_SERVER,
                # Like with all other integrations, this is our
                # fallback transaction in case there is no route.
                # sentry_urldispatcher_resolve is responsible for
                # setting a transaction name later.
                name="generic Tornado request",
                source=TransactionSource.ROUTE,
                origin=TornadoIntegration.origin,
                attributes=_prepopulate_attributes(self.request),
            ):
                yield
class TornadoRequestExtractor(RequestExtractor):
    """Extracts body/cookie/file details from a tornado HTTPServerRequest."""

    def content_length(self):
        # type: () -> int
        body = self.request.body
        return len(body) if body is not None else 0

    def cookies(self):
        # type: () -> Dict[str, str]
        # http.cookies morsels -> plain string values
        return dict(
            (name, morsel.value) for name, morsel in self.request.cookies.items()
        )

    def raw_data(self):
        # type: () -> bytes
        return self.request.body

    def form(self):
        # type: () -> Dict[str, Any]
        decoded = {}
        for name, values in self.request.body_arguments.items():
            decoded[name] = [value.decode("latin1", "replace") for value in values]
        return decoded

    def is_json(self):
        # type: () -> bool
        content_type = self.request.headers.get("content-type")
        return _is_json_content_type(content_type)

    def files(self):
        # type: () -> Dict[str, Any]
        # Only the first upload per field name is reported.
        return dict(
            (name, uploads[0])
            for name, uploads in self.request.files.items()
            if uploads
        )

    def size_of_file(self, file):
        # type: (Any) -> int
        body = file.body
        return len(body) if body else 0
class TrytondWSGIIntegration(Integration):
    """Wraps the trytond WSGI app and reports unhandled Tryton errors."""

    identifier = "trytond_wsgi"
    origin = f"auto.http.{identifier}"

    def __init__(self):  # type: () -> None
        pass

    @staticmethod
    def setup_once():  # type: () -> None
        _check_minimum_version(TrytondWSGIIntegration, trytond_version)

        app.wsgi_app = SentryWsgiMiddleware(
            app.wsgi_app,
            span_origin=TrytondWSGIIntegration.origin,
        )

        @ensure_integration_enabled(TrytondWSGIIntegration)
        def error_handler(e):  # type: (Exception) -> None
            # Tryton's own exceptions are expected control flow, not errors.
            if isinstance(e, TrytonException):
                return

            client = sentry_sdk_alpha.get_client()
            event, hint = event_from_exception(
                e,
                client_options=client.options,
                mechanism={"type": "trytond", "handled": False},
            )
            sentry_sdk_alpha.capture_event(event, hint=hint)

        # Expected error handlers signature was changed
        # when the error_handler decorator was introduced
        # in Tryton-5.4
        if hasattr(app, "error_handler"):

            @app.error_handler
            def _(app, request, e):  # type: ignore
                error_handler(e)

        else:
            app.error_handlers.append(error_handler)
def _make_excepthook(old_excepthook):
    # type: (Excepthook) -> Excepthook
    """Wrap typer's except hook so unhandled errors are reported to Sentry.

    Always delegates to the original hook afterwards, so typer's own error
    rendering is preserved.
    """

    def sentry_sdk_excepthook(type_, value, traceback):
        # type: (Type[BaseException], BaseException, Optional[TracebackType]) -> None
        client = sentry_sdk_alpha.get_client()

        # Note: If we replace this with ensure_integration_enabled then
        # we break the exceptiongroup backport;
        # See: https://github.com/getsentry/sentry-python/issues/3097
        if client.get_integration(TyperIntegration) is None:
            return old_excepthook(type_, value, traceback)

        with capture_internal_exceptions():
            exc_info = (type_, value, traceback)
            event, hint = event_from_exception(
                exc_info,
                client_options=client.options,
                mechanism={"type": "typer", "handled": False},
            )
            sentry_sdk_alpha.capture_event(event, hint=hint)

        return old_excepthook(type_, value, traceback)

    return sentry_sdk_excepthook
class UnleashIntegration(Integration):
    """Records every ``UnleashClient.is_enabled`` evaluation as a feature flag."""

    identifier = "unleash"

    @staticmethod
    def setup_once():
        # type: () -> None
        # Patch the evaluation method on the class itself so every client
        # instance is covered.
        original_is_enabled = UnleashClient.is_enabled

        @wraps(original_is_enabled)
        def sentry_is_enabled(self, feature, *args, **kwargs):
            # type: (UnleashClient, str, *Any, **Any) -> Any
            enabled = original_is_enabled(self, feature, *args, **kwargs)

            # We have no way of knowing what type of unleash feature this is, so we have to treat
            # it as a boolean / toggle feature.
            add_feature_flag(feature, enabled)

            return enabled

        UnleashClient.is_enabled = sentry_is_enabled  # type: ignore
def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
    # type: (str, str, str) -> str
    """Re-decode a WSGI "native string".

    WSGI environ values are str objects carrying latin-1 bytes; round-trip
    through latin-1 to recover the text in the real charset.
    """
    raw_bytes = s.encode("latin1")
    return raw_bytes.decode(charset, errors)
    def __call__(self, environ, start_response):
        # type: (Dict[str, str], Callable[..., Any]) -> _ScopedResponse
        """WSGI entry point: run the wrapped app inside Sentry scope/span setup."""
        # Guard against nested application of this middleware (the ContextVar
        # is set while we are inside our own call).
        if _wsgi_middleware_applied.get(False):
            return self.app(environ, start_response)

        _wsgi_middleware_applied.set(True)
        try:
            with sentry_sdk_alpha.isolation_scope() as scope:
                scope.set_transaction_name(
                    DEFAULT_TRANSACTION_NAME, source=TransactionSource.ROUTE
                )

                with track_session(scope, session_mode="request"):
                    # Scope setup must never break the request, hence the
                    # capture_internal_exceptions() shield.
                    with capture_internal_exceptions():
                        scope.clear_breadcrumbs()
                        scope._name = "wsgi"
                        scope.add_event_processor(
                            _make_wsgi_event_processor(
                                environ, self.use_x_forwarded_for
                            )
                        )
                    # Only methods listed in http_methods_to_capture get a span;
                    # other requests still run, just without tracing.
                    method = environ.get("REQUEST_METHOD", "").upper()
                    should_trace = method in self.http_methods_to_capture
                    if should_trace:
                        with sentry_sdk_alpha.continue_trace(environ):
                            with sentry_sdk_alpha.start_span(
                                op=OP.HTTP_SERVER,
                                name=DEFAULT_TRANSACTION_NAME,
                                source=TransactionSource.ROUTE,
                                origin=self.span_origin,
                                attributes=_prepopulate_attributes(
                                    environ, self.use_x_forwarded_for
                                ),
                            ) as span:
                                response = self._run_original_app(
                                    environ, start_response, span
                                )
                    else:
                        response = self._run_original_app(environ, start_response, None)

        finally:
            _wsgi_middleware_applied.set(False)

        # `scope` and `response` intentionally outlive their with-blocks here;
        # _ScopedResponse re-enters the scope for each streamed chunk.
        return _ScopedResponse(scope, response)
def get_client_ip(environ):
    # type: (Dict[str, str]) -> Optional[Any]
    """
    Best-effort guess of the client IP from proxy headers.

    The value can be forged by the client, so it must not be used for
    anything security sensitive — it is only event payload metadata.
    """
    # Proxies put the original client first in X-Forwarded-For.
    if "HTTP_X_FORWARDED_FOR" in environ:
        return environ["HTTP_X_FORWARDED_FOR"].split(",")[0].strip()

    if "HTTP_X_REAL_IP" in environ:
        return environ["HTTP_X_REAL_IP"]

    return environ.get("REMOTE_ADDR")
+ """ + exc_info = sys.exc_info() + e = exc_info[1] + + # SystemExit(0) is the only uncaught exception that is expected behavior + should_skip_capture = isinstance(e, SystemExit) and e.code in (0, None) + if not should_skip_capture: + event, hint = event_from_exception( + exc_info, + client_options=sentry_sdk_alpha.get_client().options, + mechanism={"type": "wsgi", "handled": False}, + ) + sentry_sdk_alpha.capture_event(event, hint=hint) + + return exc_info + + +class _ScopedResponse: + """ + Users a separate scope for each response chunk. + + This will make WSGI apps more tolerant against: + - WSGI servers streaming responses from a different thread/from + different threads than the one that called start_response + - close() not being called + - WSGI servers streaming responses interleaved from the same thread + """ + + __slots__ = ("_response", "_scope") + + def __init__(self, scope, response): + # type: (sentry_sdk.Scope, Iterator[bytes]) -> None + self._scope = scope + self._response = response + + def __iter__(self): + # type: () -> Iterator[bytes] + iterator = iter(self._response) + + while True: + with sentry_sdk_alpha.use_isolation_scope(self._scope): + try: + chunk = next(iterator) + except StopIteration: + break + except BaseException: + reraise(*_capture_exception()) + + yield chunk + + def close(self): + # type: () -> None + with sentry_sdk_alpha.use_isolation_scope(self._scope): + try: + self._response.close() # type: ignore + except AttributeError: + pass + except BaseException: + reraise(*_capture_exception()) + + +def _make_wsgi_event_processor(environ, use_x_forwarded_for): + # type: (Dict[str, str], bool) -> EventProcessor + # It's a bit unfortunate that we have to extract and parse the request data + # from the environ so eagerly, but there are a few good reasons for this. + # + # We might be in a situation where the scope never gets torn down + # properly. 
In that case we will have an unnecessary strong reference to + # all objects in the environ (some of which may take a lot of memory) when + # we're really just interested in a few of them. + # + # Keeping the environment around for longer than the request lifecycle is + # also not necessarily something uWSGI can deal with: + # https://github.com/unbit/uwsgi/issues/1950 + + client_ip = get_client_ip(environ) + request_url = get_request_url(environ, use_x_forwarded_for) + query_string = environ.get("QUERY_STRING") + method = environ.get("REQUEST_METHOD") + env = dict(_get_environ(environ)) + headers = _filter_headers(dict(_get_headers(environ))) + + def event_processor(event, hint): + # type: (Event, Dict[str, Any]) -> Event + with capture_internal_exceptions(): + # if the code below fails halfway through we at least have some data + request_info = event.setdefault("request", {}) + + if should_send_default_pii(): + user_info = event.setdefault("user", {}) + if client_ip: + user_info.setdefault("ip_address", client_ip) + + request_info["url"] = request_url + request_info["query_string"] = query_string + request_info["method"] = method + request_info["env"] = env + request_info["headers"] = headers + + return event + + return event_processor + + +def _prepopulate_attributes(wsgi_environ, use_x_forwarded_for=False): + # type: (dict[str, str], bool) -> dict[str, str] + """Extract span attributes from the WSGI environment.""" + attributes = {} + + for property, attr in ENVIRON_TO_ATTRIBUTE.items(): + if wsgi_environ.get(property) is not None: + attributes[attr] = wsgi_environ[property] + + if wsgi_environ.get("SERVER_PROTOCOL") is not None: + try: + proto, version = wsgi_environ["SERVER_PROTOCOL"].split("/") + attributes["network.protocol.name"] = proto + attributes["network.protocol.version"] = version + except Exception: + attributes["network.protocol.name"] = wsgi_environ["SERVER_PROTOCOL"] + + with capture_internal_exceptions(): + url = get_request_url(wsgi_environ, 
def _capture_log(severity_text, severity_number, template, **kwargs):
    # type: (str, int, str, **Any) -> None
    """Format *template* with **kwargs and hand the log to the client.

    Extra keyword arguments are recorded as ``sentry.message.parameters.*``
    attributes; an explicit ``attributes`` kwarg is merged in as-is.
    Non-primitive attribute values are stringified via safe_repr.
    """
    client = get_client()
    scope = get_current_scope()

    attrs = {
        "sentry.message.template": template,
    }  # type: dict[str, str | bool | float | int]
    if "attributes" in kwargs:
        attrs.update(kwargs.pop("attributes"))
    for key, value in kwargs.items():
        attrs["sentry.message.parameters." + key] = value

    # Only primitives survive as-is; everything else is safely repr'd.
    normalized = {}  # type: dict[str, str | bool | float | int]
    for key, value in attrs.items():
        if isinstance(value, (str, int, bool, float)):
            normalized[key] = value
        else:
            normalized[key] = safe_repr(value)

    # noinspection PyProtectedMember
    client._capture_experimental_log(
        scope,
        {
            "severity_text": severity_text,
            "severity_number": severity_number,
            "attributes": normalized,
            "body": template.format(**kwargs),
            "time_unix_nano": time.time_ns(),
            "trace_id": None,
        },
    )
00000000000000..08820a7adb035d --- /dev/null +++ b/src/sentry_sdk_alpha/monitor.py @@ -0,0 +1,124 @@ +import os +import time +from threading import Thread, Lock + +import sentry_sdk_alpha +from sentry_sdk_alpha.utils import logger + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Optional + + +MAX_DOWNSAMPLE_FACTOR = 10 + + +class Monitor: + """ + Performs health checks in a separate thread once every interval seconds + and updates the internal state. Other parts of the SDK only read this state + and act accordingly. + """ + + name = "sentry.monitor" + + def __init__(self, transport, interval=10): + # type: (sentry_sdk.transport.Transport, float) -> None + self.transport = transport # type: sentry_sdk.transport.Transport + self.interval = interval # type: float + + self._healthy = True + self._downsample_factor = 0 # type: int + + self._thread = None # type: Optional[Thread] + self._thread_lock = Lock() + self._thread_for_pid = None # type: Optional[int] + self._running = True + + def _ensure_running(self): + # type: () -> None + """ + Check that the monitor has an active thread to run in, or create one if not. + + Note that this might fail (e.g. in Python 3.12 it's not possible to + spawn new threads at interpreter shutdown). In that case self._running + will be False after running this function. + """ + if self._thread_for_pid == os.getpid() and self._thread is not None: + return None + + with self._thread_lock: + if self._thread_for_pid == os.getpid() and self._thread is not None: + return None + + def _thread(): + # type: (...) -> None + while self._running: + time.sleep(self.interval) + if self._running: + self.run() + + thread = Thread(name=self.name, target=_thread) + thread.daemon = True + try: + thread.start() + except RuntimeError: + # Unfortunately at this point the interpreter is in a state that no + # longer allows us to spawn a thread and we have to bail. 
+ self._running = False + return None + + self._thread = thread + self._thread_for_pid = os.getpid() + + return None + + def run(self): + # type: () -> None + self.check_health() + self.set_downsample_factor() + + def set_downsample_factor(self): + # type: () -> None + if self._healthy: + if self._downsample_factor > 0: + logger.debug( + "[Monitor] health check positive, reverting to normal sampling" + ) + self._downsample_factor = 0 + else: + if self.downsample_factor < MAX_DOWNSAMPLE_FACTOR: + self._downsample_factor += 1 + logger.debug( + "[Monitor] health check negative, downsampling with a factor of %d", + self._downsample_factor, + ) + + def check_health(self): + # type: () -> None + """ + Perform the actual health checks, + currently only checks if the transport is rate-limited. + TODO: augment in the future with more checks. + """ + self._healthy = self.transport.is_healthy() + + def is_healthy(self): + # type: () -> bool + self._ensure_running() + return self._healthy + + @property + def downsample_factor(self): + # type: () -> int + self._ensure_running() + return self._downsample_factor + + def kill(self): + # type: () -> None + self._running = False + + def __del__(self): + # type: () -> None + self.kill() diff --git a/src/sentry_sdk_alpha/opentelemetry/__init__.py b/src/sentry_sdk_alpha/opentelemetry/__init__.py new file mode 100644 index 00000000000000..299496a7ca4f8d --- /dev/null +++ b/src/sentry_sdk_alpha/opentelemetry/__init__.py @@ -0,0 +1,9 @@ +from sentry_sdk_alpha.opentelemetry.propagator import SentryPropagator +from sentry_sdk_alpha.opentelemetry.sampler import SentrySampler +from sentry_sdk_alpha.opentelemetry.span_processor import SentrySpanProcessor + +__all__ = [ + "SentryPropagator", + "SentrySampler", + "SentrySpanProcessor", +] diff --git a/src/sentry_sdk_alpha/opentelemetry/consts.py b/src/sentry_sdk_alpha/opentelemetry/consts.py new file mode 100644 index 00000000000000..8da847b4c5159e --- /dev/null +++ 
b/src/sentry_sdk_alpha/opentelemetry/consts.py @@ -0,0 +1,33 @@ +from opentelemetry.context import create_key +from sentry_sdk_alpha.tracing_utils import Baggage + + +# propagation keys +SENTRY_TRACE_KEY = create_key("sentry-trace") +SENTRY_BAGGAGE_KEY = create_key("sentry-baggage") + +# scope management keys +SENTRY_SCOPES_KEY = create_key("sentry_scopes") +SENTRY_FORK_ISOLATION_SCOPE_KEY = create_key("sentry_fork_isolation_scope") +SENTRY_USE_CURRENT_SCOPE_KEY = create_key("sentry_use_current_scope") +SENTRY_USE_ISOLATION_SCOPE_KEY = create_key("sentry_use_isolation_scope") + +# trace state keys +TRACESTATE_SAMPLED_KEY = Baggage.SENTRY_PREFIX + "sampled" +TRACESTATE_SAMPLE_RATE_KEY = Baggage.SENTRY_PREFIX + "sample_rate" +TRACESTATE_SAMPLE_RAND_KEY = Baggage.SENTRY_PREFIX + "sample_rand" + +# misc +OTEL_SENTRY_CONTEXT = "otel" +SPAN_ORIGIN = "auto.otel" + + +class SentrySpanAttribute: + DESCRIPTION = "sentry.description" + OP = "sentry.op" + ORIGIN = "sentry.origin" + TAG = "sentry.tag" + NAME = "sentry.name" + SOURCE = "sentry.source" + CONTEXT = "sentry.context" + CUSTOM_SAMPLED = "sentry.custom_sampled" # used for saving start_span(sampled=X) diff --git a/src/sentry_sdk_alpha/opentelemetry/contextvars_context.py b/src/sentry_sdk_alpha/opentelemetry/contextvars_context.py new file mode 100644 index 00000000000000..4f489ad11d8add --- /dev/null +++ b/src/sentry_sdk_alpha/opentelemetry/contextvars_context.py @@ -0,0 +1,73 @@ +from typing import cast, TYPE_CHECKING + +from opentelemetry.trace import set_span_in_context +from opentelemetry.context import Context, get_value, set_value +from opentelemetry.context.contextvars_context import ContextVarsRuntimeContext + +import sentry_sdk_alpha +from sentry_sdk_alpha.opentelemetry.consts import ( + SENTRY_SCOPES_KEY, + SENTRY_FORK_ISOLATION_SCOPE_KEY, + SENTRY_USE_CURRENT_SCOPE_KEY, + SENTRY_USE_ISOLATION_SCOPE_KEY, +) + +if TYPE_CHECKING: + from typing import Optional + from contextvars import Token + import 
sentry_sdk_alpha.opentelemetry.scope as scope + + +class SentryContextVarsRuntimeContext(ContextVarsRuntimeContext): + def attach(self, context): + # type: (Context) -> Token[Context] + scopes = get_value(SENTRY_SCOPES_KEY, context) + + should_fork_isolation_scope = context.pop( + SENTRY_FORK_ISOLATION_SCOPE_KEY, False + ) + should_fork_isolation_scope = cast("bool", should_fork_isolation_scope) + + should_use_isolation_scope = context.pop(SENTRY_USE_ISOLATION_SCOPE_KEY, None) + should_use_isolation_scope = cast( + "Optional[scope.PotelScope]", should_use_isolation_scope + ) + + should_use_current_scope = context.pop(SENTRY_USE_CURRENT_SCOPE_KEY, None) + should_use_current_scope = cast( + "Optional[scope.PotelScope]", should_use_current_scope + ) + + if scopes: + scopes = cast("tuple[scope.PotelScope, scope.PotelScope]", scopes) + (current_scope, isolation_scope) = scopes + else: + current_scope = sentry_sdk_alpha.get_current_scope() + isolation_scope = sentry_sdk_alpha.get_isolation_scope() + + new_context = context + + if should_use_current_scope: + new_scope = should_use_current_scope + + # the main case where we use use_scope is for + # scope propagation in the ThreadingIntegration + # so we need to carry forward the span reference explicitly too + span = should_use_current_scope.span + if span: + new_context = set_span_in_context(span._otel_span, new_context) + + else: + new_scope = current_scope.fork() + + if should_use_isolation_scope: + new_isolation_scope = should_use_isolation_scope + elif should_fork_isolation_scope: + new_isolation_scope = isolation_scope.fork() + else: + new_isolation_scope = isolation_scope + + new_scopes = (new_scope, new_isolation_scope) + + new_context = set_value(SENTRY_SCOPES_KEY, new_scopes, new_context) + return super().attach(new_context) diff --git a/src/sentry_sdk_alpha/opentelemetry/propagator.py b/src/sentry_sdk_alpha/opentelemetry/propagator.py new file mode 100644 index 00000000000000..b4fb6bc73ce23b --- /dev/null +++ 
b/src/sentry_sdk_alpha/opentelemetry/propagator.py @@ -0,0 +1,108 @@ +from typing import cast + +from opentelemetry import trace +from opentelemetry.context import ( + Context, + get_current, + get_value, + set_value, +) +from opentelemetry.propagators.textmap import ( + CarrierT, + Getter, + Setter, + TextMapPropagator, + default_getter, + default_setter, +) +from opentelemetry.trace import ( + NonRecordingSpan, + SpanContext, + TraceFlags, +) + +from sentry_sdk_alpha.consts import ( + BAGGAGE_HEADER_NAME, + SENTRY_TRACE_HEADER_NAME, +) +from sentry_sdk_alpha.opentelemetry.consts import ( + SENTRY_BAGGAGE_KEY, + SENTRY_TRACE_KEY, + SENTRY_SCOPES_KEY, +) +from sentry_sdk_alpha.tracing_utils import Baggage, extract_sentrytrace_data + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Optional, Set + import sentry_sdk_alpha.opentelemetry.scope as scope + + +class SentryPropagator(TextMapPropagator): + """ + Propagates tracing headers for Sentry's tracing system in a way OTel understands.
+ """ + + def extract(self, carrier, context=None, getter=default_getter): + # type: (CarrierT, Optional[Context], Getter[CarrierT]) -> Context + if context is None: + context = get_current() + + # TODO-neel-potel cleanup with continue_trace / isolation_scope + sentry_trace = getter.get(carrier, SENTRY_TRACE_HEADER_NAME) + if not sentry_trace: + return context + + sentrytrace = extract_sentrytrace_data(sentry_trace[0]) + if not sentrytrace: + return context + + context = set_value(SENTRY_TRACE_KEY, sentrytrace, context) + + trace_id, span_id = sentrytrace["trace_id"], sentrytrace["parent_span_id"] + + span_context = SpanContext( + trace_id=int(trace_id, 16), # type: ignore + span_id=int(span_id, 16), # type: ignore + # we simulate a sampled trace on the otel side and leave the sampling to sentry + trace_flags=TraceFlags(TraceFlags.SAMPLED), + is_remote=True, + ) + + baggage_header = getter.get(carrier, BAGGAGE_HEADER_NAME) + + if baggage_header: + baggage = Baggage.from_incoming_header(baggage_header[0]) + else: + # If there's an incoming sentry-trace but no incoming baggage header, + # for instance in traces coming from older SDKs, + # baggage will be empty and frozen and won't be populated as head SDK. 
+ baggage = Baggage(sentry_items={}) + + baggage.freeze() + context = set_value(SENTRY_BAGGAGE_KEY, baggage, context) + + span = NonRecordingSpan(span_context) + modified_context = trace.set_span_in_context(span, context) + return modified_context + + def inject(self, carrier, context=None, setter=default_setter): + # type: (CarrierT, Optional[Context], Setter[CarrierT]) -> None + if context is None: + context = get_current() + + scopes = get_value(SENTRY_SCOPES_KEY, context) + if scopes: + scopes = cast("tuple[scope.PotelScope, scope.PotelScope]", scopes) + (current_scope, _) = scopes + + # TODO-neel-potel check trace_propagation_targets + # TODO-neel-potel test propagator works with twp + for key, value in current_scope.iter_trace_propagation_headers(): + setter.set(carrier, key, value) + + @property + def fields(self): + # type: () -> Set[str] + return {SENTRY_TRACE_HEADER_NAME, BAGGAGE_HEADER_NAME} diff --git a/src/sentry_sdk_alpha/opentelemetry/sampler.py b/src/sentry_sdk_alpha/opentelemetry/sampler.py new file mode 100644 index 00000000000000..2bf2b4cadcb10f --- /dev/null +++ b/src/sentry_sdk_alpha/opentelemetry/sampler.py @@ -0,0 +1,326 @@ +from decimal import Decimal +from typing import cast + +from opentelemetry import trace +from opentelemetry.sdk.trace.sampling import Sampler, SamplingResult, Decision +from opentelemetry.trace.span import TraceState + +import sentry_sdk_alpha +from sentry_sdk_alpha.opentelemetry.consts import ( + TRACESTATE_SAMPLED_KEY, + TRACESTATE_SAMPLE_RAND_KEY, + TRACESTATE_SAMPLE_RATE_KEY, + SentrySpanAttribute, +) +from sentry_sdk_alpha.tracing_utils import ( + _generate_sample_rand, + has_tracing_enabled, +) +from sentry_sdk_alpha.utils import is_valid_sample_rate, logger + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Optional, Sequence, Union + from opentelemetry.context import Context + from opentelemetry.trace import Link, SpanKind + from opentelemetry.trace.span import SpanContext + from 
opentelemetry.util.types import Attributes + + +def get_parent_sampled(parent_context, trace_id): + # type: (Optional[SpanContext], int) -> Optional[bool] + if parent_context is None: + return None + + is_span_context_valid = parent_context is not None and parent_context.is_valid + + # Only inherit sample rate if `traceId` is the same + if is_span_context_valid and parent_context.trace_id == trace_id: + # this is getSamplingDecision in JS + # if there was no sampling flag, defer the decision + dsc_sampled = parent_context.trace_state.get(TRACESTATE_SAMPLED_KEY) + if dsc_sampled == "deferred": + return None + + if parent_context.trace_flags.sampled is not None: + return parent_context.trace_flags.sampled + + if dsc_sampled == "true": + return True + elif dsc_sampled == "false": + return False + + return None + + +def get_parent_sample_rate(parent_context, trace_id): + # type: (Optional[SpanContext], int) -> Optional[float] + if parent_context is None: + return None + + is_span_context_valid = parent_context is not None and parent_context.is_valid + + if is_span_context_valid and parent_context.trace_id == trace_id: + parent_sample_rate = parent_context.trace_state.get(TRACESTATE_SAMPLE_RATE_KEY) + if parent_sample_rate is None: + return None + + try: + return float(parent_sample_rate) + except Exception: + return None + + return None + + +def get_parent_sample_rand(parent_context, trace_id): + # type: (Optional[SpanContext], int) -> Optional[Decimal] + if parent_context is None: + return None + + is_span_context_valid = parent_context is not None and parent_context.is_valid + + if is_span_context_valid and parent_context.trace_id == trace_id: + parent_sample_rand = parent_context.trace_state.get(TRACESTATE_SAMPLE_RAND_KEY) + if parent_sample_rand is None: + return None + + return Decimal(parent_sample_rand) + + return None + + +def dropped_result(span_context, attributes, sample_rate=None, sample_rand=None): + # type: (SpanContext, Attributes, Optional[float], 
Optional[Decimal]) -> SamplingResult + """ + React to a span getting unsampled and return a DROP SamplingResult. + + Update the trace_state with the effective sampled, sample_rate and sample_rand, + record that we dropped the event for client report purposes, and return + an OTel SamplingResult with Decision.DROP. + + See for more info about OTel sampling: + https://opentelemetry-python.readthedocs.io/en/latest/sdk/trace.sampling.html + """ + trace_state = _update_trace_state( + span_context, sampled=False, sample_rate=sample_rate, sample_rand=sample_rand + ) + + is_root_span = not (span_context.is_valid and not span_context.is_remote) + if is_root_span: + # Tell Sentry why we dropped the transaction/root-span + client = sentry_sdk_alpha.get_client() + if client.monitor and client.monitor.downsample_factor > 0: + reason = "backpressure" + else: + reason = "sample_rate" + + if client.transport and has_tracing_enabled(client.options): + client.transport.record_lost_event(reason, data_category="transaction") + + # Only one span (the transaction itself) is discarded, since we did not record any spans here. + client.transport.record_lost_event(reason, data_category="span") + + return SamplingResult( + Decision.DROP, + attributes=attributes, + trace_state=trace_state, + ) + + +def sampled_result(span_context, attributes, sample_rate=None, sample_rand=None): + # type: (SpanContext, Attributes, Optional[float], Optional[Decimal]) -> SamplingResult + """ + React to a span being sampled and return a sampled SamplingResult. + + Update the trace_state with the effective sampled, sample_rate and sample_rand, + and return an OTel SamplingResult with Decision.RECORD_AND_SAMPLE. 
+ + See for more info about OTel sampling: + https://opentelemetry-python.readthedocs.io/en/latest/sdk/trace.sampling.html + """ + trace_state = _update_trace_state( + span_context, sampled=True, sample_rate=sample_rate, sample_rand=sample_rand + ) + + return SamplingResult( + Decision.RECORD_AND_SAMPLE, + attributes=attributes, + trace_state=trace_state, + ) + + +def _update_trace_state(span_context, sampled, sample_rate=None, sample_rand=None): + # type: (SpanContext, bool, Optional[float], Optional[Decimal]) -> TraceState + trace_state = span_context.trace_state + + sampled = "true" if sampled else "false" + if TRACESTATE_SAMPLED_KEY not in trace_state: + trace_state = trace_state.add(TRACESTATE_SAMPLED_KEY, sampled) + elif trace_state.get(TRACESTATE_SAMPLED_KEY) == "deferred": + trace_state = trace_state.update(TRACESTATE_SAMPLED_KEY, sampled) + + if sample_rate is not None: + trace_state = trace_state.update(TRACESTATE_SAMPLE_RATE_KEY, str(sample_rate)) + + if sample_rand is not None: + trace_state = trace_state.update( + TRACESTATE_SAMPLE_RAND_KEY, f"{sample_rand:.6f}" # noqa: E231 + ) + + return trace_state + + +class SentrySampler(Sampler): + def should_sample( + self, + parent_context, # type: Optional[Context] + trace_id, # type: int + name, # type: str + kind=None, # type: Optional[SpanKind] + attributes=None, # type: Attributes + links=None, # type: Optional[Sequence[Link]] + trace_state=None, # type: Optional[TraceState] + ): + # type: (...) 
-> SamplingResult + client = sentry_sdk_alpha.get_client() + + parent_span_context = trace.get_current_span(parent_context).get_span_context() + + attributes = attributes or {} + + # No tracing enabled, thus no sampling + if not has_tracing_enabled(client.options): + return dropped_result(parent_span_context, attributes) + + # parent_span_context.is_valid means this span has a parent, remote or local + is_root_span = not parent_span_context.is_valid or parent_span_context.is_remote + + sample_rate = None + + parent_sampled = get_parent_sampled(parent_span_context, trace_id) + parent_sample_rate = get_parent_sample_rate(parent_span_context, trace_id) + parent_sample_rand = get_parent_sample_rand(parent_span_context, trace_id) + + if parent_sample_rand is not None: + # We have a sample_rand on the incoming trace or we already backfilled + # it in PropagationContext + sample_rand = parent_sample_rand + else: + # We are the head SDK and we need to generate a new sample_rand + sample_rand = cast(Decimal, _generate_sample_rand(str(trace_id), (0, 1))) + + # Explicit sampled value provided at start_span + custom_sampled = cast( + "Optional[bool]", attributes.get(SentrySpanAttribute.CUSTOM_SAMPLED) + ) + if custom_sampled is not None: + if is_root_span: + sample_rate = float(custom_sampled) + if sample_rate > 0: + return sampled_result( + parent_span_context, + attributes, + sample_rate=sample_rate, + sample_rand=sample_rand, + ) + else: + return dropped_result( + parent_span_context, + attributes, + sample_rate=sample_rate, + sample_rand=sample_rand, + ) + else: + logger.debug( + f"[Tracing.Sampler] Ignoring sampled param for non-root span {name}" + ) + + # Check if there is a traces_sampler + # Traces_sampler is responsible to check parent sampled to have full transactions. 
+ has_traces_sampler = callable(client.options.get("traces_sampler")) + + sample_rate_to_propagate = None + + if is_root_span and has_traces_sampler: + sampling_context = create_sampling_context( + name, attributes, parent_span_context, trace_id + ) + sample_rate = client.options["traces_sampler"](sampling_context) + sample_rate_to_propagate = sample_rate + else: + # Check if there is a parent with a sampling decision + if parent_sampled is not None: + sample_rate = bool(parent_sampled) + sample_rate_to_propagate = ( + parent_sample_rate if parent_sample_rate else sample_rate + ) + else: + # Check if there is a traces_sample_rate + sample_rate = client.options.get("traces_sample_rate") + sample_rate_to_propagate = sample_rate + + # If the sample rate is invalid, drop the span + if not is_valid_sample_rate(sample_rate, source=self.__class__.__name__): + logger.warning( + f"[Tracing.Sampler] Discarding {name} because of invalid sample rate." + ) + return dropped_result(parent_span_context, attributes) + + # Down-sample in case of back pressure monitor says so + if is_root_span and client.monitor: + sample_rate /= 2**client.monitor.downsample_factor + if client.monitor.downsample_factor > 0: + sample_rate_to_propagate = sample_rate + + # Compare sample_rand to sample_rate to make the final sampling decision + sample_rate = float(cast("Union[bool, float, int]", sample_rate)) + sampled = sample_rand < Decimal.from_float(sample_rate) + + if sampled: + if is_root_span: + logger.debug( + f"[Tracing.Sampler] Sampled #{name} with sample_rate: {sample_rate} and sample_rand: {sample_rand}" + ) + + return sampled_result( + parent_span_context, + attributes, + sample_rate=sample_rate_to_propagate, + sample_rand=None if sample_rand == parent_sample_rand else sample_rand, + ) + else: + if is_root_span: + logger.debug( + f"[Tracing.Sampler] Dropped #{name} with sample_rate: {sample_rate} and sample_rand: {sample_rand}" + ) + + return dropped_result( + parent_span_context, + 
attributes, + sample_rate=sample_rate_to_propagate, + sample_rand=None if sample_rand == parent_sample_rand else sample_rand, + ) + + def get_description(self) -> str: + return self.__class__.__name__ + + +def create_sampling_context(name, attributes, parent_span_context, trace_id): + # type: (str, Attributes, Optional[SpanContext], int) -> dict[str, Any] + sampling_context = { + "transaction_context": { + "name": name, + "op": attributes.get(SentrySpanAttribute.OP) if attributes else None, + "source": ( + attributes.get(SentrySpanAttribute.SOURCE) if attributes else None + ), + }, + "parent_sampled": get_parent_sampled(parent_span_context, trace_id), + } # type: dict[str, Any] + + if attributes is not None: + sampling_context.update(attributes) + + return sampling_context diff --git a/src/sentry_sdk_alpha/opentelemetry/scope.py b/src/sentry_sdk_alpha/opentelemetry/scope.py new file mode 100644 index 00000000000000..2c0b030ba3914c --- /dev/null +++ b/src/sentry_sdk_alpha/opentelemetry/scope.py @@ -0,0 +1,218 @@ +from typing import cast +from contextlib import contextmanager +import warnings + +from opentelemetry.context import ( + get_value, + set_value, + attach, + detach, + get_current, +) +from opentelemetry.trace import ( + SpanContext, + NonRecordingSpan, + TraceFlags, + TraceState, + use_span, +) + +from sentry_sdk_alpha.opentelemetry.consts import ( + SENTRY_SCOPES_KEY, + SENTRY_FORK_ISOLATION_SCOPE_KEY, + SENTRY_USE_CURRENT_SCOPE_KEY, + SENTRY_USE_ISOLATION_SCOPE_KEY, + TRACESTATE_SAMPLED_KEY, +) +from sentry_sdk_alpha.opentelemetry.contextvars_context import ( + SentryContextVarsRuntimeContext, +) +from sentry_sdk_alpha.opentelemetry.utils import trace_state_from_baggage +from sentry_sdk_alpha.scope import Scope, ScopeType +from sentry_sdk_alpha.tracing import Span +from sentry_sdk_alpha._types import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Tuple, Optional, Generator, Dict, Any + + +class PotelScope(Scope): + @classmethod + def 
_get_scopes(cls): + # type: () -> Optional[Tuple[PotelScope, PotelScope]] + """ + Returns the current scopes tuple on the otel context. Internal use only. + """ + return cast( + "Optional[Tuple[PotelScope, PotelScope]]", get_value(SENTRY_SCOPES_KEY) + ) + + @classmethod + def get_current_scope(cls): + # type: () -> PotelScope + """ + Returns the current scope. + """ + return cls._get_current_scope() or _INITIAL_CURRENT_SCOPE + + @classmethod + def _get_current_scope(cls): + # type: () -> Optional[PotelScope] + """ + Returns the current scope without creating a new one. Internal use only. + """ + scopes = cls._get_scopes() + return scopes[0] if scopes else None + + @classmethod + def get_isolation_scope(cls): + # type: () -> PotelScope + """ + Returns the isolation scope. + """ + return cls._get_isolation_scope() or _INITIAL_ISOLATION_SCOPE + + @classmethod + def _get_isolation_scope(cls): + # type: () -> Optional[PotelScope] + """ + Returns the isolation scope without creating a new one. Internal use only. + """ + scopes = cls._get_scopes() + return scopes[1] if scopes else None + + @contextmanager + def continue_trace(self, environ_or_headers): + # type: (Dict[str, Any]) -> Generator[None, None, None] + """ + Sets the propagation context from environment or headers to continue an incoming trace. + Any span started within this context manager will use the same trace_id, parent_span_id + and inherit the sampling decision from the incoming trace. 
+ """ + self.generate_propagation_context(environ_or_headers) + + span_context = self._incoming_otel_span_context() + if span_context is None: + yield + else: + with use_span(NonRecordingSpan(span_context)): + yield + + def _incoming_otel_span_context(self): + # type: () -> Optional[SpanContext] + if self._propagation_context is None: + return None + # If sentry-trace extraction didn't have a parent_span_id, we don't have an upstream header + if self._propagation_context.parent_span_id is None: + return None + + trace_flags = TraceFlags( + TraceFlags.SAMPLED + if self._propagation_context.parent_sampled + else TraceFlags.DEFAULT + ) + + if self._propagation_context.baggage: + trace_state = trace_state_from_baggage(self._propagation_context.baggage) + else: + trace_state = TraceState() + + # for twp to work, we also need to consider deferred sampling when the sampling + # flag is not present, so the above TraceFlags are not sufficient + if self._propagation_context.parent_sampled is None: + trace_state = trace_state.update(TRACESTATE_SAMPLED_KEY, "deferred") + + span_context = SpanContext( + trace_id=int(self._propagation_context.trace_id, 16), + span_id=int(self._propagation_context.parent_span_id, 16), + is_remote=True, + trace_flags=trace_flags, + trace_state=trace_state, + ) + + return span_context + + def start_transaction(self, **kwargs): + # type: (Any) -> Span + """ + .. deprecated:: 3.0.0 + This function is deprecated and will be removed in a future release. + Use :py:meth:`sentry_sdk.start_span` instead. 
+ """ + warnings.warn( + "The `start_transaction` method is deprecated, please use `sentry_sdk.start_span instead.`", + DeprecationWarning, + stacklevel=2, + ) + return self.start_span(**kwargs) + + def start_span(self, **kwargs): + # type: (Any) -> Span + return Span(**kwargs) + + +_INITIAL_CURRENT_SCOPE = PotelScope(ty=ScopeType.CURRENT) +_INITIAL_ISOLATION_SCOPE = PotelScope(ty=ScopeType.ISOLATION) + + +def setup_initial_scopes(): + # type: () -> None + global _INITIAL_CURRENT_SCOPE, _INITIAL_ISOLATION_SCOPE + _INITIAL_CURRENT_SCOPE = PotelScope(ty=ScopeType.CURRENT) + _INITIAL_ISOLATION_SCOPE = PotelScope(ty=ScopeType.ISOLATION) + + scopes = (_INITIAL_CURRENT_SCOPE, _INITIAL_ISOLATION_SCOPE) + attach(set_value(SENTRY_SCOPES_KEY, scopes)) + + +def setup_scope_context_management(): + # type: () -> None + import opentelemetry.context + + opentelemetry.context._RUNTIME_CONTEXT = SentryContextVarsRuntimeContext() + setup_initial_scopes() + + +@contextmanager +def isolation_scope(): + # type: () -> Generator[PotelScope, None, None] + context = set_value(SENTRY_FORK_ISOLATION_SCOPE_KEY, True) + token = attach(context) + try: + yield PotelScope.get_isolation_scope() + finally: + detach(token) + + +@contextmanager +def new_scope(): + # type: () -> Generator[PotelScope, None, None] + token = attach(get_current()) + try: + yield PotelScope.get_current_scope() + finally: + detach(token) + + +@contextmanager +def use_scope(scope): + # type: (PotelScope) -> Generator[PotelScope, None, None] + context = set_value(SENTRY_USE_CURRENT_SCOPE_KEY, scope) + token = attach(context) + + try: + yield scope + finally: + detach(token) + + +@contextmanager +def use_isolation_scope(isolation_scope): + # type: (PotelScope) -> Generator[PotelScope, None, None] + context = set_value(SENTRY_USE_ISOLATION_SCOPE_KEY, isolation_scope) + token = attach(context) + + try: + yield isolation_scope + finally: + detach(token) diff --git a/src/sentry_sdk_alpha/opentelemetry/span_processor.py 
b/src/sentry_sdk_alpha/opentelemetry/span_processor.py new file mode 100644 index 00000000000000..614db99f8a8756 --- /dev/null +++ b/src/sentry_sdk_alpha/opentelemetry/span_processor.py @@ -0,0 +1,329 @@ +from collections import deque, defaultdict +from typing import cast + +from opentelemetry.trace import ( + format_trace_id, + format_span_id, + get_current_span, + INVALID_SPAN, + Span as AbstractSpan, +) +from opentelemetry.context import Context +from opentelemetry.sdk.trace import Span, ReadableSpan, SpanProcessor + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import SPANDATA, DEFAULT_SPAN_ORIGIN +from sentry_sdk_alpha.utils import get_current_thread_meta +from sentry_sdk_alpha.opentelemetry.consts import ( + OTEL_SENTRY_CONTEXT, + SentrySpanAttribute, +) +from sentry_sdk_alpha.opentelemetry.sampler import create_sampling_context +from sentry_sdk_alpha.opentelemetry.utils import ( + is_sentry_span, + convert_from_otel_timestamp, + extract_span_attributes, + extract_span_data, + extract_transaction_name_source, + get_trace_context, + get_profile_context, + get_sentry_meta, + set_sentry_meta, +) +from sentry_sdk_alpha.profiler.continuous_profiler import ( + try_autostart_continuous_profiler, + get_profiler_id, + try_profile_lifecycle_trace_start, +) +from sentry_sdk_alpha.profiler.transaction_profiler import Profile +from sentry_sdk_alpha._types import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Optional, List, Any, Deque, DefaultDict + from sentry_sdk_alpha._types import Event + + +DEFAULT_MAX_SPANS = 1000 + + +class SentrySpanProcessor(SpanProcessor): + """ + Converts OTel spans into Sentry spans so they can be sent to the Sentry backend. 
+ """ + + def __new__(cls): + # type: () -> SentrySpanProcessor + if not hasattr(cls, "instance"): + cls.instance = super().__new__(cls) + + return cls.instance + + def __init__(self): + # type: () -> None + self._children_spans = defaultdict( + list + ) # type: DefaultDict[int, List[ReadableSpan]] + self._dropped_spans = defaultdict(lambda: 0) # type: DefaultDict[int, int] + + def on_start(self, span, parent_context=None): + # type: (Span, Optional[Context]) -> None + if is_sentry_span(span): + return + + self._add_root_span(span, get_current_span(parent_context)) + self._start_profile(span) + + def on_end(self, span): + # type: (ReadableSpan) -> None + if is_sentry_span(span): + return + + is_root_span = not span.parent or span.parent.is_remote + if is_root_span: + # if have a root span ending, stop the profiler, build a transaction and send it + self._stop_profile(span) + self._flush_root_span(span) + else: + self._append_child_span(span) + + # TODO-neel-potel not sure we need a clear like JS + def shutdown(self): + # type: () -> None + pass + + # TODO-neel-potel change default? this is 30 sec + # TODO-neel-potel call this in client.flush + def force_flush(self, timeout_millis=30000): + # type: (int) -> bool + return True + + def _add_root_span(self, span, parent_span): + # type: (Span, AbstractSpan) -> None + """ + This is required to make Span.root_span work + since we can't traverse back to the root purely with otel efficiently. 
+ """ + if parent_span != INVALID_SPAN and not parent_span.get_span_context().is_remote: + # child span points to parent's root or parent + parent_root_span = get_sentry_meta(parent_span, "root_span") + set_sentry_meta(span, "root_span", parent_root_span or parent_span) + else: + # root span points to itself + set_sentry_meta(span, "root_span", span) + + def _start_profile(self, span): + # type: (Span) -> None + try_autostart_continuous_profiler() + + profiler_id = get_profiler_id() + thread_id, thread_name = get_current_thread_meta() + + if profiler_id: + span.set_attribute(SPANDATA.PROFILER_ID, profiler_id) + if thread_id: + span.set_attribute(SPANDATA.THREAD_ID, str(thread_id)) + if thread_name: + span.set_attribute(SPANDATA.THREAD_NAME, thread_name) + + is_root_span = not span.parent or span.parent.is_remote + sampled = span.context and span.context.trace_flags.sampled + + if is_root_span and sampled: + # profiler uses time.perf_counter_ns() so we cannot use the + # unix timestamp that is on span.start_time + # setting it to 0 means the profiler will internally measure time on start + profile = Profile(sampled, 0) + + sampling_context = create_sampling_context( + span.name, span.attributes, span.parent, span.context.trace_id + ) + profile._set_initial_sampling_decision(sampling_context) + profile.__enter__() + set_sentry_meta(span, "profile", profile) + + continuous_profile = try_profile_lifecycle_trace_start() + profiler_id = get_profiler_id() + if profiler_id: + span.set_attribute(SPANDATA.PROFILER_ID, profiler_id) + set_sentry_meta(span, "continuous_profile", continuous_profile) + + def _stop_profile(self, span): + # type: (ReadableSpan) -> None + continuous_profiler = get_sentry_meta(span, "continuous_profile") + if continuous_profiler: + continuous_profiler.stop() + + def _flush_root_span(self, span): + # type: (ReadableSpan) -> None + transaction_event = self._root_span_to_transaction_event(span) + if not transaction_event: + return + + collected_spans, 
dropped_spans = self._collect_children(span) + spans = [] + for child in collected_spans: + span_json = self._span_to_json(child) + if span_json: + spans.append(span_json) + + transaction_event["spans"] = spans + if dropped_spans > 0: + transaction_event["_dropped_spans"] = dropped_spans + + # TODO-neel-potel sort and cutoff max spans + + sentry_sdk_alpha.capture_event(transaction_event) + + def _append_child_span(self, span): + # type: (ReadableSpan) -> None + if not span.parent: + return + + max_spans = ( + sentry_sdk_alpha.get_client().options["_experiments"].get("max_spans") + or DEFAULT_MAX_SPANS + ) + + children_spans = self._children_spans[span.parent.span_id] + if len(children_spans) < max_spans: + children_spans.append(span) + else: + self._dropped_spans[span.parent.span_id] += 1 + + def _collect_children(self, span): + # type: (ReadableSpan) -> tuple[List[ReadableSpan], int] + if not span.context: + return [], 0 + + children = [] + dropped_spans = 0 + bfs_queue = deque() # type: Deque[int] + bfs_queue.append(span.context.span_id) + + while bfs_queue: + parent_span_id = bfs_queue.popleft() + node_children = self._children_spans.pop(parent_span_id, []) + dropped_spans += self._dropped_spans.pop(parent_span_id, 0) + children.extend(node_children) + bfs_queue.extend( + [child.context.span_id for child in node_children if child.context] + ) + + return children, dropped_spans + + # we construct the event from scratch here + # and not use the current Transaction class for easier refactoring + def _root_span_to_transaction_event(self, span): + # type: (ReadableSpan) -> Optional[Event] + if not span.context: + return None + + event = self._common_span_transaction_attributes_as_json(span) + if event is None: + return None + + transaction_name, transaction_source = extract_transaction_name_source(span) + span_data = extract_span_data(span) + trace_context = get_trace_context(span, span_data=span_data) + contexts = {"trace": trace_context} + + profile_context = 
get_profile_context(span) + if profile_context: + contexts["profile"] = profile_context + + (_, description, _, http_status, _) = span_data + + if http_status: + contexts["response"] = {"status_code": http_status} + + if span.resource.attributes: + contexts[OTEL_SENTRY_CONTEXT] = {"resource": dict(span.resource.attributes)} + + event.update( + { + "type": "transaction", + "transaction": transaction_name or description, + "transaction_info": {"source": transaction_source or "custom"}, + "contexts": contexts, + } + ) + + profile = cast("Optional[Profile]", get_sentry_meta(span, "profile")) + if profile: + profile.__exit__(None, None, None) + if profile.valid(): + event["profile"] = profile + set_sentry_meta(span, "profile", None) + + return event + + def _span_to_json(self, span): + # type: (ReadableSpan) -> Optional[dict[str, Any]] + if not span.context: + return None + + # This is a safe cast because dict[str, Any] is a superset of Event + span_json = cast( + "dict[str, Any]", self._common_span_transaction_attributes_as_json(span) + ) + if span_json is None: + return None + + trace_id = format_trace_id(span.context.trace_id) + span_id = format_span_id(span.context.span_id) + parent_span_id = format_span_id(span.parent.span_id) if span.parent else None + + (op, description, status, _, origin) = extract_span_data(span) + + span_json.update( + { + "trace_id": trace_id, + "span_id": span_id, + "op": op, + "description": description, + "status": status, + "origin": origin or DEFAULT_SPAN_ORIGIN, + } + ) + + if parent_span_id: + span_json["parent_span_id"] = parent_span_id + + attributes = getattr(span, "attributes", {}) or {} + if attributes: + span_json["data"] = {} + for key, value in attributes.items(): + if not key.startswith("_"): + span_json["data"][key] = value + + return span_json + + def _common_span_transaction_attributes_as_json(self, span): + # type: (ReadableSpan) -> Optional[Event] + if not span.start_time or not span.end_time: + return None + + 
common_json = { + "start_timestamp": convert_from_otel_timestamp(span.start_time), + "timestamp": convert_from_otel_timestamp(span.end_time), + } # type: Event + + tags = extract_span_attributes(span, SentrySpanAttribute.TAG) + if tags: + common_json["tags"] = tags + + return common_json + + def _log_debug_info(self): + # type: () -> None + import pprint + + pprint.pprint( + { + format_span_id(span_id): [ + (format_span_id(child.context.span_id), child.name) + for child in children + ] + for span_id, children in self._children_spans.items() + } + ) diff --git a/src/sentry_sdk_alpha/opentelemetry/tracing.py b/src/sentry_sdk_alpha/opentelemetry/tracing.py new file mode 100644 index 00000000000000..0e7d8290605c47 --- /dev/null +++ b/src/sentry_sdk_alpha/opentelemetry/tracing.py @@ -0,0 +1,35 @@ +from opentelemetry import trace +from opentelemetry.propagate import set_global_textmap +from opentelemetry.sdk.trace import TracerProvider, Span, ReadableSpan + +from sentry_sdk_alpha.opentelemetry import ( + SentryPropagator, + SentrySampler, + SentrySpanProcessor, +) + + +def patch_readable_span(): + # type: () -> None + """ + We need to pass through sentry specific metadata/objects from Span to ReadableSpan + to work with them consistently in the SpanProcessor. 
+ """ + old_readable_span = Span._readable_span + + def sentry_patched_readable_span(self): + # type: (Span) -> ReadableSpan + readable_span = old_readable_span(self) + readable_span._sentry_meta = getattr(self, "_sentry_meta", {}) # type: ignore[attr-defined] + return readable_span + + Span._readable_span = sentry_patched_readable_span # type: ignore[method-assign] + + +def setup_sentry_tracing(): + # type: () -> None + provider = TracerProvider(sampler=SentrySampler()) + provider.add_span_processor(SentrySpanProcessor()) + trace.set_tracer_provider(provider) + + set_global_textmap(SentryPropagator()) diff --git a/src/sentry_sdk_alpha/opentelemetry/utils.py b/src/sentry_sdk_alpha/opentelemetry/utils.py new file mode 100644 index 00000000000000..609cee587687a0 --- /dev/null +++ b/src/sentry_sdk_alpha/opentelemetry/utils.py @@ -0,0 +1,476 @@ +import re +from typing import cast +from datetime import datetime, timezone + +from urllib3.util import parse_url as urlparse +from urllib.parse import quote, unquote +from opentelemetry.trace import ( + Span as AbstractSpan, + SpanKind, + StatusCode, + format_trace_id, + format_span_id, + TraceState, +) +from opentelemetry.semconv.trace import SpanAttributes +from opentelemetry.sdk.trace import ReadableSpan + +import sentry_sdk_alpha +from sentry_sdk_alpha.utils import Dsn +from sentry_sdk_alpha.consts import ( + SPANSTATUS, + OP, + SPANDATA, + DEFAULT_SPAN_ORIGIN, + LOW_QUALITY_TRANSACTION_SOURCES, +) +from sentry_sdk_alpha.opentelemetry.consts import SentrySpanAttribute +from sentry_sdk_alpha.tracing_utils import Baggage, get_span_status_from_http_code + +from sentry_sdk_alpha._types import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any, Optional, Mapping, Sequence, Union + from sentry_sdk_alpha._types import OtelExtractedSpanData + + +GRPC_ERROR_MAP = { + "1": SPANSTATUS.CANCELLED, + "2": SPANSTATUS.UNKNOWN_ERROR, + "3": SPANSTATUS.INVALID_ARGUMENT, + "4": SPANSTATUS.DEADLINE_EXCEEDED, + "5": 
SPANSTATUS.NOT_FOUND, + "6": SPANSTATUS.ALREADY_EXISTS, + "7": SPANSTATUS.PERMISSION_DENIED, + "8": SPANSTATUS.RESOURCE_EXHAUSTED, + "9": SPANSTATUS.FAILED_PRECONDITION, + "10": SPANSTATUS.ABORTED, + "11": SPANSTATUS.OUT_OF_RANGE, + "12": SPANSTATUS.UNIMPLEMENTED, + "13": SPANSTATUS.INTERNAL_ERROR, + "14": SPANSTATUS.UNAVAILABLE, + "15": SPANSTATUS.DATA_LOSS, + "16": SPANSTATUS.UNAUTHENTICATED, +} + + +def is_sentry_span(span): + # type: (ReadableSpan) -> bool + """ + Break infinite loop: + HTTP requests to Sentry are caught by OTel and sent again to Sentry. + """ + from sentry_sdk_alpha import get_client + + if not span.attributes: + return False + + span_url = span.attributes.get(SpanAttributes.HTTP_URL, None) + span_url = cast("Optional[str]", span_url) + + if not span_url: + return False + + dsn_url = None + client = get_client() + + if client.dsn: + try: + dsn_url = Dsn(client.dsn).netloc + except Exception: + pass + + if not dsn_url: + return False + + if dsn_url in span_url: + return True + + return False + + +def convert_from_otel_timestamp(time): + # type: (int) -> datetime + """Convert an OTel nanosecond-level timestamp to a datetime.""" + return datetime.fromtimestamp(time / 1e9, timezone.utc) + + +def convert_to_otel_timestamp(time): + # type: (Union[datetime, float]) -> int + """Convert a datetime to an OTel timestamp (with nanosecond precision).""" + if isinstance(time, datetime): + return int(time.timestamp() * 1e9) + return int(time * 1e9) + + +def extract_transaction_name_source(span): + # type: (ReadableSpan) -> tuple[Optional[str], Optional[str]] + if not span.attributes: + return (None, None) + return ( + cast("Optional[str]", span.attributes.get(SentrySpanAttribute.NAME)), + cast("Optional[str]", span.attributes.get(SentrySpanAttribute.SOURCE)), + ) + + +def extract_span_data(span): + # type: (ReadableSpan) -> OtelExtractedSpanData + op = span.name + description = span.name + status, http_status = extract_span_status(span) + origin = None + if 
span.attributes is None: + return (op, description, status, http_status, origin) + + attribute_op = cast("Optional[str]", span.attributes.get(SentrySpanAttribute.OP)) + op = attribute_op or op + description = cast( + "str", span.attributes.get(SentrySpanAttribute.DESCRIPTION) or description + ) + origin = cast("Optional[str]", span.attributes.get(SentrySpanAttribute.ORIGIN)) + + http_method = span.attributes.get(SpanAttributes.HTTP_METHOD) + http_method = cast("Optional[str]", http_method) + if http_method: + return span_data_for_http_method(span) + + db_query = span.attributes.get(SpanAttributes.DB_SYSTEM) + if db_query: + return span_data_for_db_query(span) + + rpc_service = span.attributes.get(SpanAttributes.RPC_SERVICE) + if rpc_service: + return ( + attribute_op or "rpc", + description, + status, + http_status, + origin, + ) + + messaging_system = span.attributes.get(SpanAttributes.MESSAGING_SYSTEM) + if messaging_system: + return ( + attribute_op or "message", + description, + status, + http_status, + origin, + ) + + faas_trigger = span.attributes.get(SpanAttributes.FAAS_TRIGGER) + if faas_trigger: + return (str(faas_trigger), description, status, http_status, origin) + + return (op, description, status, http_status, origin) + + +def span_data_for_http_method(span): + # type: (ReadableSpan) -> OtelExtractedSpanData + span_attributes = span.attributes or {} + + op = cast("Optional[str]", span_attributes.get(SentrySpanAttribute.OP)) + if op is None: + op = "http" + + if span.kind == SpanKind.SERVER: + op += ".server" + elif span.kind == SpanKind.CLIENT: + op += ".client" + + http_method = span_attributes.get(SpanAttributes.HTTP_METHOD) + route = span_attributes.get(SpanAttributes.HTTP_ROUTE) + target = span_attributes.get(SpanAttributes.HTTP_TARGET) + peer_name = span_attributes.get(SpanAttributes.NET_PEER_NAME) + + # TODO-neel-potel remove description completely + description = span_attributes.get( + SentrySpanAttribute.DESCRIPTION + ) or 
span_attributes.get(SentrySpanAttribute.NAME) + description = cast("Optional[str]", description) + if description is None: + description = f"{http_method}" + + if route: + description = f"{http_method} {route}" + elif target: + description = f"{http_method} {target}" + elif peer_name: + description = f"{http_method} {peer_name}" + else: + url = span_attributes.get(SpanAttributes.HTTP_URL) + url = cast("Optional[str]", url) + + if url: + parsed_url = urlparse(url) + url = "{}://{}{}".format( + parsed_url.scheme, parsed_url.netloc, parsed_url.path + ) + description = f"{http_method} {url}" + + status, http_status = extract_span_status(span) + + origin = cast("Optional[str]", span_attributes.get(SentrySpanAttribute.ORIGIN)) + + return (op, description, status, http_status, origin) + + +def span_data_for_db_query(span): + # type: (ReadableSpan) -> OtelExtractedSpanData + span_attributes = span.attributes or {} + + op = cast("str", span_attributes.get(SentrySpanAttribute.OP, OP.DB)) + + statement = span_attributes.get(SpanAttributes.DB_STATEMENT, None) + statement = cast("Optional[str]", statement) + + description = statement or span.name + origin = cast("Optional[str]", span_attributes.get(SentrySpanAttribute.ORIGIN)) + + return (op, description, None, None, origin) + + +def extract_span_status(span): + # type: (ReadableSpan) -> tuple[Optional[str], Optional[int]] + span_attributes = span.attributes or {} + status = span.status or None + + if status: + inferred_status, http_status = infer_status_from_attributes(span_attributes) + + if status.status_code == StatusCode.OK: + return (SPANSTATUS.OK, http_status) + elif status.status_code == StatusCode.ERROR: + if status.description is None: + if inferred_status: + return (inferred_status, http_status) + + if http_status is not None: + return (inferred_status, http_status) + + if ( + status.description is not None + and status.description in GRPC_ERROR_MAP.values() + ): + return (status.description, None) + else: + return 
(SPANSTATUS.UNKNOWN_ERROR, None) + + inferred_status, http_status = infer_status_from_attributes(span_attributes) + if inferred_status: + return (inferred_status, http_status) + + if status and status.status_code == StatusCode.UNSET: + return (None, None) + else: + return (SPANSTATUS.UNKNOWN_ERROR, None) + + +def infer_status_from_attributes(span_attributes): + # type: (Mapping[str, str | bool | int | float | Sequence[str] | Sequence[bool] | Sequence[int] | Sequence[float]]) -> tuple[Optional[str], Optional[int]] + http_status = get_http_status_code(span_attributes) + + if http_status: + return (get_span_status_from_http_code(http_status), http_status) + + grpc_status = span_attributes.get(SpanAttributes.RPC_GRPC_STATUS_CODE) + if grpc_status: + return (GRPC_ERROR_MAP.get(str(grpc_status), SPANSTATUS.UNKNOWN_ERROR), None) + + return (None, None) + + +def get_http_status_code(span_attributes): + # type: (Mapping[str, str | bool | int | float | Sequence[str] | Sequence[bool] | Sequence[int] | Sequence[float]]) -> Optional[int] + try: + http_status = span_attributes.get(SpanAttributes.HTTP_RESPONSE_STATUS_CODE) + except AttributeError: + # HTTP_RESPONSE_STATUS_CODE was added in 1.21, so if we're on an older + # OTel version SpanAttributes.HTTP_RESPONSE_STATUS_CODE will throw an + # AttributeError + http_status = None + + if http_status is None: + # Fall back to the deprecated attribute + http_status = span_attributes.get(SpanAttributes.HTTP_STATUS_CODE) + + http_status = cast("Optional[int]", http_status) + + return http_status + + +def extract_span_attributes(span, namespace): + # type: (ReadableSpan, str) -> dict[str, Any] + """ + Extract Sentry-specific span attributes and make them look the way Sentry expects. 
+ """ + extracted_attrs = {} # type: dict[str, Any] + + for attr, value in (span.attributes or {}).items(): + if attr.startswith(namespace): + key = attr[len(namespace) + 1 :] + extracted_attrs[key] = value + + return extracted_attrs + + +def get_trace_context(span, span_data=None): + # type: (ReadableSpan, Optional[OtelExtractedSpanData]) -> dict[str, Any] + if not span.context: + return {} + + trace_id = format_trace_id(span.context.trace_id) + span_id = format_span_id(span.context.span_id) + parent_span_id = format_span_id(span.parent.span_id) if span.parent else None + + if span_data is None: + span_data = extract_span_data(span) + + (op, _, status, _, origin) = span_data + + trace_context = { + "trace_id": trace_id, + "span_id": span_id, + "parent_span_id": parent_span_id, + "op": op, + "origin": origin or DEFAULT_SPAN_ORIGIN, + } # type: dict[str, Any] + + if status: + trace_context["status"] = status + + if span.attributes: + trace_context["data"] = dict(span.attributes) + + trace_state = get_trace_state(span) + trace_context["dynamic_sampling_context"] = dsc_from_trace_state(trace_state) + + # TODO-neel-potel profiler thread_id, thread_name + + return trace_context + + +def trace_state_from_baggage(baggage): + # type: (Baggage) -> TraceState + items = [] + for k, v in baggage.sentry_items.items(): + key = Baggage.SENTRY_PREFIX + quote(k) + val = quote(str(v)) + items.append((key, val)) + return TraceState(items) + + +def baggage_from_trace_state(trace_state): + # type: (TraceState) -> Baggage + return Baggage(dsc_from_trace_state(trace_state)) + + +def serialize_trace_state(trace_state): + # type: (TraceState) -> str + sentry_items = [] + for k, v in trace_state.items(): + if Baggage.SENTRY_PREFIX_REGEX.match(k): + sentry_items.append((k, v)) + return ",".join(key + "=" + value for key, value in sentry_items) + + +def dsc_from_trace_state(trace_state): + # type: (TraceState) -> dict[str, str] + dsc = {} + for k, v in trace_state.items(): + if 
Baggage.SENTRY_PREFIX_REGEX.match(k): + key = re.sub(Baggage.SENTRY_PREFIX_REGEX, "", k) + dsc[unquote(key)] = unquote(v) + return dsc + + +def has_incoming_trace(trace_state): + # type: (TraceState) -> bool + """ + The existence of a sentry-trace_id in the baggage implies we continued an upstream trace. + """ + return (Baggage.SENTRY_PREFIX + "trace_id") in trace_state + + +def get_trace_state(span): + # type: (Union[AbstractSpan, ReadableSpan]) -> TraceState + """ + Get the existing trace_state with sentry items + or populate it if we are the head SDK. + """ + span_context = span.get_span_context() + if not span_context: + return TraceState() + + trace_state = span_context.trace_state + + if has_incoming_trace(trace_state): + return trace_state + else: + client = sentry_sdk_alpha.get_client() + if not client.is_active(): + return trace_state + + options = client.options or {} + + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "trace_id", + quote(format_trace_id(span_context.trace_id)), + ) + + if options.get("environment"): + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "environment", quote(options["environment"]) + ) + + if options.get("release"): + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "release", quote(options["release"]) + ) + + if options.get("dsn"): + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "public_key", + quote(Dsn(options["dsn"]).public_key), + ) + + root_span = get_sentry_meta(span, "root_span") + if root_span and isinstance(root_span, ReadableSpan): + transaction_name, transaction_source = extract_transaction_name_source( + root_span + ) + + if ( + transaction_name + and transaction_source not in LOW_QUALITY_TRANSACTION_SOURCES + ): + trace_state = trace_state.update( + Baggage.SENTRY_PREFIX + "transaction", quote(transaction_name) + ) + + return trace_state + + +def get_sentry_meta(span, key): + # type: (Union[AbstractSpan, ReadableSpan], str) -> Any + sentry_meta = getattr(span, 
"_sentry_meta", None) + return sentry_meta.get(key) if sentry_meta else None + + +def set_sentry_meta(span, key, value): + # type: (Union[AbstractSpan, ReadableSpan], str, Any) -> None + sentry_meta = getattr(span, "_sentry_meta", {}) + sentry_meta[key] = value + span._sentry_meta = sentry_meta # type: ignore[union-attr] + + +def get_profile_context(span): + # type: (ReadableSpan) -> Optional[dict[str, str]] + if not span.attributes: + return None + + profiler_id = cast("Optional[str]", span.attributes.get(SPANDATA.PROFILER_ID)) + if profiler_id is None: + return None + + return {"profiler_id": profiler_id} diff --git a/src/sentry_sdk_alpha/profiler/__init__.py b/src/sentry_sdk_alpha/profiler/__init__.py new file mode 100644 index 00000000000000..853eea8233485d --- /dev/null +++ b/src/sentry_sdk_alpha/profiler/__init__.py @@ -0,0 +1,9 @@ +from sentry_sdk_alpha.profiler.continuous_profiler import ( + start_profiler, + stop_profiler, +) + +__all__ = [ + "start_profiler", + "stop_profiler", +] diff --git a/src/sentry_sdk_alpha/profiler/continuous_profiler.py b/src/sentry_sdk_alpha/profiler/continuous_profiler.py new file mode 100644 index 00000000000000..6945f84b3ddcc4 --- /dev/null +++ b/src/sentry_sdk_alpha/profiler/continuous_profiler.py @@ -0,0 +1,675 @@ +import atexit +import os +import random +import sys +import threading +import time +import uuid +from collections import deque +from datetime import datetime, timezone + +from sentry_sdk_alpha.consts import VERSION +from sentry_sdk_alpha.envelope import Envelope +from sentry_sdk_alpha._lru_cache import LRUCache +from sentry_sdk_alpha.profiler.utils import ( + DEFAULT_SAMPLING_FREQUENCY, + extract_stack, +) +from sentry_sdk_alpha.utils import ( + capture_internal_exception, + is_gevent, + logger, + now, + set_in_app_in_frames, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import Callable + from typing import Deque + from typing import Dict + from typing import 
List + from typing import Optional + from typing import Set + from typing import Type + from typing import Union + from typing_extensions import TypedDict + from sentry_sdk_alpha._types import ContinuousProfilerMode, SDKInfo + from sentry_sdk_alpha.profiler.utils import ( + ExtractedSample, + FrameId, + StackId, + ThreadId, + ProcessedFrame, + ProcessedStack, + ) + + ProcessedSample = TypedDict( + "ProcessedSample", + { + "timestamp": float, + "thread_id": ThreadId, + "stack_id": int, + }, + ) + + +try: + from gevent.monkey import get_original + from gevent.threadpool import ThreadPool as _ThreadPool + + ThreadPool = _ThreadPool # type: Optional[Type[_ThreadPool]] + thread_sleep = get_original("time", "sleep") +except ImportError: + thread_sleep = time.sleep + ThreadPool = None + + +_scheduler = None # type: Optional[ContinuousScheduler] + + +def setup_continuous_profiler(options, sdk_info, capture_func): + # type: (Dict[str, Any], SDKInfo, Callable[[Envelope], None]) -> bool + global _scheduler + + if _scheduler is not None: + logger.debug("[Profiling] Continuous Profiler is already setup") + return False + + if is_gevent(): + # If gevent has patched the threading modules then we cannot rely on + # them to spawn a native thread for sampling. + # Instead we default to the GeventContinuousScheduler which is capable of + # spawning native threads within gevent. 
+ default_profiler_mode = GeventContinuousScheduler.mode + else: + default_profiler_mode = ThreadContinuousScheduler.mode + + profiler_mode = default_profiler_mode + if options.get("profiler_mode") is not None: + profiler_mode = options["profiler_mode"] + + frequency = DEFAULT_SAMPLING_FREQUENCY + + if profiler_mode == ThreadContinuousScheduler.mode: + _scheduler = ThreadContinuousScheduler( + frequency, options, sdk_info, capture_func + ) + elif profiler_mode == GeventContinuousScheduler.mode: + _scheduler = GeventContinuousScheduler( + frequency, options, sdk_info, capture_func + ) + else: + raise ValueError("Unknown continuous profiler mode: {}".format(profiler_mode)) + + logger.debug( + "[Profiling] Setting up continuous profiler in {mode} mode".format( + mode=_scheduler.mode + ) + ) + + atexit.register(teardown_continuous_profiler) + + return True + + +def try_autostart_continuous_profiler(): + # type: () -> None + + # TODO: deprecate this as it'll be replaced by the auto lifecycle option + + if _scheduler is None: + return + + if not _scheduler.is_auto_start_enabled(): + return + + _scheduler.manual_start() + + +def try_profile_lifecycle_trace_start(): + # type: () -> Union[ContinuousProfile, None] + if _scheduler is None: + return None + + return _scheduler.auto_start() + + +def start_profiler(): + # type: () -> None + if _scheduler is None: + return + + _scheduler.manual_start() + + +def stop_profiler(): + # type: () -> None + if _scheduler is None: + return + + _scheduler.manual_stop() + + +def teardown_continuous_profiler(): + # type: () -> None + stop_profiler() + + global _scheduler + _scheduler = None + + +def get_profiler_id(): + # type: () -> Union[str, None] + if _scheduler is None: + return None + return _scheduler.profiler_id + + +def determine_profile_session_sampling_decision(sample_rate): + # type: (Union[float, None]) -> bool + + # `None` is treated as `0.0` + if not sample_rate: + return False + + return random.random() < float(sample_rate) + 
+ +class ContinuousProfile: + active: bool = True + + def stop(self): + # type: () -> None + self.active = False + + +class ContinuousScheduler: + mode = "unknown" # type: ContinuousProfilerMode + + def __init__(self, frequency, options, sdk_info, capture_func): + # type: (int, Dict[str, Any], SDKInfo, Callable[[Envelope], None]) -> None + self.interval = 1.0 / frequency + self.options = options + self.sdk_info = sdk_info + self.capture_func = capture_func + + self.lifecycle = self.options.get("profile_lifecycle") + profile_session_sample_rate = self.options.get("profile_session_sample_rate") + self.sampled = determine_profile_session_sampling_decision( + profile_session_sample_rate + ) + + self.sampler = self.make_sampler() + self.buffer = None # type: Optional[ProfileBuffer] + self.pid = None # type: Optional[int] + + self.running = False + + self.new_profiles = deque(maxlen=128) # type: Deque[ContinuousProfile] + self.active_profiles = set() # type: Set[ContinuousProfile] + + def is_auto_start_enabled(self): + # type: () -> bool + + # Ensure that the scheduler only autostarts once per process. + # This is necessary because many web servers use forks to spawn + # additional processes. And the profiler is only spawned on the + # master process, then it often only profiles the main process + # and not the ones where the requests are being handled. 
+ if self.pid == os.getpid(): + return False + + experiments = self.options.get("_experiments") + if not experiments: + return False + + return experiments.get("continuous_profiling_auto_start") + + def auto_start(self): + # type: () -> Union[ContinuousProfile, None] + if not self.sampled: + return None + + if self.lifecycle != "trace": + return None + + logger.debug("[Profiling] Auto starting profiler") + + profile = ContinuousProfile() + + self.new_profiles.append(profile) + self.ensure_running() + + return profile + + def manual_start(self): + # type: () -> None + if not self.sampled: + return + + if self.lifecycle != "manual": + return + + self.ensure_running() + + def manual_stop(self): + # type: () -> None + if self.lifecycle != "manual": + return + + self.teardown() + + def ensure_running(self): + # type: () -> None + raise NotImplementedError + + def teardown(self): + # type: () -> None + raise NotImplementedError + + def pause(self): + # type: () -> None + raise NotImplementedError + + def reset_buffer(self): + # type: () -> None + self.buffer = ProfileBuffer( + self.options, self.sdk_info, PROFILE_BUFFER_SECONDS, self.capture_func + ) + + @property + def profiler_id(self): + # type: () -> Union[str, None] + if self.buffer is None: + return None + return self.buffer.profiler_id + + def make_sampler(self): + # type: () -> Callable[..., None] + cwd = os.getcwd() + + cache = LRUCache(max_size=256) + + if self.lifecycle == "trace": + + def _sample_stack(*args, **kwargs): + # type: (*Any, **Any) -> None + """ + Take a sample of the stack on all the threads in the process. + This should be called at a regular interval to collect samples. + """ + + # no profiles taking place, so we can stop early + if not self.new_profiles and not self.active_profiles: + self.running = False + return + + # This is the number of profiles we want to pop off. 
+ # It's possible another thread adds a new profile to + # the list and we spend longer than we want inside + # the loop below. + # + # Also make sure to set this value before extracting + # frames so we do not write to any new profiles that + # were started after this point. + new_profiles = len(self.new_profiles) + + ts = now() + + try: + sample = [ + (str(tid), extract_stack(frame, cache, cwd)) + for tid, frame in sys._current_frames().items() + ] + except AttributeError: + # For some reason, the frame we get doesn't have certain attributes. + # When this happens, we abandon the current sample as it's bad. + capture_internal_exception(sys.exc_info()) + return + + # Move the new profiles into the active_profiles set. + # + # We cannot directly add them to the active_profiles set + # in `start_profiling` because it is called from other + # threads which can cause a RuntimeError when the + # set size changes during iteration without a lock. + # + # We also want to avoid using a lock here so threads + # that are starting profiles are not blocked until it + # can acquire the lock. + for _ in range(new_profiles): + self.active_profiles.add(self.new_profiles.popleft()) + inactive_profiles = [] + + for profile in self.active_profiles: + if profile.active: + pass + else: + # If a profile is marked inactive, we buffer it + # to `inactive_profiles` so it can be removed. + # We cannot remove it here as it would result + # in a RuntimeError. + inactive_profiles.append(profile) + + for profile in inactive_profiles: + self.active_profiles.remove(profile) + + if self.buffer is not None: + self.buffer.write(ts, sample) + + else: + + def _sample_stack(*args, **kwargs): + # type: (*Any, **Any) -> None + """ + Take a sample of the stack on all the threads in the process. + This should be called at a regular interval to collect samples. 
+ """ + + ts = now() + + try: + sample = [ + (str(tid), extract_stack(frame, cache, cwd)) + for tid, frame in sys._current_frames().items() + ] + except AttributeError: + # For some reason, the frame we get doesn't have certain attributes. + # When this happens, we abandon the current sample as it's bad. + capture_internal_exception(sys.exc_info()) + return + + if self.buffer is not None: + self.buffer.write(ts, sample) + + return _sample_stack + + def run(self): + # type: () -> None + last = time.perf_counter() + + while self.running: + self.sampler() + + # some time may have elapsed since the last time + # we sampled, so we need to account for that and + # not sleep for too long + elapsed = time.perf_counter() - last + if elapsed < self.interval: + thread_sleep(self.interval - elapsed) + + # after sleeping, make sure to take the current + # timestamp so we can use it next iteration + last = time.perf_counter() + + if self.buffer is not None: + self.buffer.flush() + self.buffer = None + + +class ThreadContinuousScheduler(ContinuousScheduler): + """ + This scheduler is based on running a daemon thread that will call + the sampler at a regular interval. 
+ """ + + mode = "thread" # type: ContinuousProfilerMode + name = "sentry.profiler.ThreadContinuousScheduler" + + def __init__(self, frequency, options, sdk_info, capture_func): + # type: (int, Dict[str, Any], SDKInfo, Callable[[Envelope], None]) -> None + super().__init__(frequency, options, sdk_info, capture_func) + + self.thread = None # type: Optional[threading.Thread] + self.lock = threading.Lock() + + def ensure_running(self): + # type: () -> None + + pid = os.getpid() + + # is running on the right process + if self.running and self.pid == pid: + return + + with self.lock: + # another thread may have tried to acquire the lock + # at the same time so it may start another thread + # make sure to check again before proceeding + if self.running and self.pid == pid: + return + + self.pid = pid + self.running = True + + # if the profiler thread is changing, + # we should create a new buffer along with it + self.reset_buffer() + + # make sure the thread is a daemon here otherwise this + # can keep the application running after other threads + # have exited + self.thread = threading.Thread(name=self.name, target=self.run, daemon=True) + + try: + self.thread.start() + except RuntimeError: + # Unfortunately at this point the interpreter is in a state that no + # longer allows us to spawn a thread and we have to bail. + self.running = False + self.thread = None + + def teardown(self): + # type: () -> None + if self.running: + self.running = False + + if self.thread is not None: + self.thread.join() + self.thread = None + + self.buffer = None + + +class GeventContinuousScheduler(ContinuousScheduler): + """ + This scheduler is based on the thread scheduler but adapted to work with + gevent. When using gevent, it may monkey patch the threading modules + (`threading` and `_thread`). This results in the use of greenlets instead + of native threads. + + This is an issue because the sampler CANNOT run in a greenlet because + 1. 
Other greenlets doing sync work will prevent the sampler from running + 2. The greenlet runs in the same thread as other greenlets so when taking + a sample, other greenlets will have been evicted from the thread. This + results in a sample containing only the sampler's code. + """ + + mode = "gevent" # type: ContinuousProfilerMode + + def __init__(self, frequency, options, sdk_info, capture_func): + # type: (int, Dict[str, Any], SDKInfo, Callable[[Envelope], None]) -> None + + if ThreadPool is None: + raise ValueError("Profiler mode: {} is not available".format(self.mode)) + + super().__init__(frequency, options, sdk_info, capture_func) + + self.thread = None # type: Optional[_ThreadPool] + self.lock = threading.Lock() + + def ensure_running(self): + # type: () -> None + pid = os.getpid() + + # is running on the right process + if self.running and self.pid == pid: + return + + with self.lock: + # another thread may have tried to acquire the lock + # at the same time so it may start another thread + # make sure to check again before proceeding + if self.running and self.pid == pid: + return + + self.pid = pid + self.running = True + + # if the profiler thread is changing, + # we should create a new buffer along with it + self.reset_buffer() + + self.thread = ThreadPool(1) # type: ignore[misc] + try: + self.thread.spawn(self.run) + except RuntimeError: + # Unfortunately at this point the interpreter is in a state that no + # longer allows us to spawn a thread and we have to bail. 
+ self.running = False + self.thread = None + + def teardown(self): + # type: () -> None + if self.running: + self.running = False + + if self.thread is not None: + self.thread.join() + self.thread = None + + self.buffer = None + + +PROFILE_BUFFER_SECONDS = 60 + + +class ProfileBuffer: + def __init__(self, options, sdk_info, buffer_size, capture_func): + # type: (Dict[str, Any], SDKInfo, int, Callable[[Envelope], None]) -> None + self.options = options + self.sdk_info = sdk_info + self.buffer_size = buffer_size + self.capture_func = capture_func + + self.profiler_id = uuid.uuid4().hex + self.chunk = ProfileChunk() + + # Make sure to use the same clock to compute a sample's monotonic timestamp + # to ensure the timestamps are correctly aligned. + self.start_monotonic_time = now() + + # Make sure the start timestamp is defined only once per profiler id. + # This prevents issues with clock drift within a single profiler session. + # + # Subtracting the start_monotonic_time here to find a fixed starting position + # for relative monotonic timestamps for each sample. 
+ self.start_timestamp = ( + datetime.now(timezone.utc).timestamp() - self.start_monotonic_time + ) + + def write(self, monotonic_time, sample): + # type: (float, ExtractedSample) -> None + if self.should_flush(monotonic_time): + self.flush() + self.chunk = ProfileChunk() + self.start_monotonic_time = now() + + self.chunk.write(self.start_timestamp + monotonic_time, sample) + + def should_flush(self, monotonic_time): + # type: (float) -> bool + + # If the delta between the new monotonic time and the start monotonic time + # exceeds the buffer size, it means we should flush the chunk + return monotonic_time - self.start_monotonic_time >= self.buffer_size + + def flush(self): + # type: () -> None + chunk = self.chunk.to_json(self.profiler_id, self.options, self.sdk_info) + envelope = Envelope() + envelope.add_profile_chunk(chunk) + self.capture_func(envelope) + + +class ProfileChunk: + def __init__(self): + # type: () -> None + self.chunk_id = uuid.uuid4().hex + + self.indexed_frames = {} # type: Dict[FrameId, int] + self.indexed_stacks = {} # type: Dict[StackId, int] + self.frames = [] # type: List[ProcessedFrame] + self.stacks = [] # type: List[ProcessedStack] + self.samples = [] # type: List[ProcessedSample] + + def write(self, ts, sample): + # type: (float, ExtractedSample) -> None + for tid, (stack_id, frame_ids, frames) in sample: + try: + # Check if the stack is indexed first, this lets us skip + # indexing frames if it's not necessary + if stack_id not in self.indexed_stacks: + for i, frame_id in enumerate(frame_ids): + if frame_id not in self.indexed_frames: + self.indexed_frames[frame_id] = len(self.indexed_frames) + self.frames.append(frames[i]) + + self.indexed_stacks[stack_id] = len(self.indexed_stacks) + self.stacks.append( + [self.indexed_frames[frame_id] for frame_id in frame_ids] + ) + + self.samples.append( + { + "timestamp": ts, + "thread_id": tid, + "stack_id": self.indexed_stacks[stack_id], + } + ) + except AttributeError: + # For some reason, 
the frame we get doesn't have certain attributes. + # When this happens, we abandon the current sample as it's bad. + capture_internal_exception(sys.exc_info()) + + def to_json(self, profiler_id, options, sdk_info): + # type: (str, Dict[str, Any], SDKInfo) -> Dict[str, Any] + profile = { + "frames": self.frames, + "stacks": self.stacks, + "samples": self.samples, + "thread_metadata": { + str(thread.ident): { + "name": str(thread.name), + } + for thread in threading.enumerate() + }, + } + + set_in_app_in_frames( + profile["frames"], + options["in_app_exclude"], + options["in_app_include"], + options["project_root"], + ) + + payload = { + "chunk_id": self.chunk_id, + "client_sdk": { + "name": sdk_info["name"], + "version": VERSION, + }, + "platform": "python", + "profile": profile, + "profiler_id": profiler_id, + "version": "2", + } + + for key in "release", "environment", "dist": + if options[key] is not None: + payload[key] = str(options[key]).strip() + + return payload diff --git a/src/sentry_sdk_alpha/profiler/transaction_profiler.py b/src/sentry_sdk_alpha/profiler/transaction_profiler.py new file mode 100644 index 00000000000000..266dc376734f34 --- /dev/null +++ b/src/sentry_sdk_alpha/profiler/transaction_profiler.py @@ -0,0 +1,786 @@ +""" +This file is originally based on code from https://github.com/nylas/nylas-perftools, +which is published under the following license: + +The MIT License (MIT) + +Copyright (c) 2014 Nylas + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial 
portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +""" + +import atexit +import os +import platform +import random +import sys +import threading +import time +import uuid +from abc import ABC, abstractmethod +from collections import deque + +import sentry_sdk_alpha +from sentry_sdk_alpha._lru_cache import LRUCache +from sentry_sdk_alpha.profiler.utils import ( + DEFAULT_SAMPLING_FREQUENCY, + extract_stack, +) +from sentry_sdk_alpha.utils import ( + capture_internal_exception, + get_current_thread_meta, + is_gevent, + is_valid_sample_rate, + logger, + set_in_app_in_frames, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import Callable + from typing import Deque + from typing import Dict + from typing import List + from typing import Optional + from typing import Set + from typing import Type + from typing_extensions import TypedDict + + from sentry_sdk_alpha.profiler.utils import ( + ProcessedStack, + ProcessedFrame, + ProcessedThreadMetadata, + FrameId, + StackId, + ThreadId, + ExtractedSample, + ) + from sentry_sdk_alpha._types import Event, SamplingContext, ProfilerMode + + ProcessedSample = TypedDict( + "ProcessedSample", + { + "elapsed_since_start_ns": str, + "thread_id": ThreadId, + "stack_id": int, + }, + ) + + ProcessedProfile = TypedDict( + "ProcessedProfile", + { + "frames": List[ProcessedFrame], + "stacks": List[ProcessedStack], + "samples": List[ProcessedSample], + "thread_metadata": Dict[ThreadId, ProcessedThreadMetadata], + }, + ) + + +try: + from gevent.monkey 
try:
    # When gevent is present, grab the *unpatched* time.sleep and gevent's
    # ThreadPool so the sampler can run on a real native thread.
    from gevent.monkey import get_original
    from gevent.threadpool import ThreadPool as _ThreadPool

    ThreadPool = _ThreadPool  # type: Optional[Type[_ThreadPool]]
    thread_sleep = get_original("time", "sleep")
except ImportError:
    thread_sleep = time.sleep

    ThreadPool = None


# Module-level singleton: the sampling scheduler created by setup_profiler()
# and discarded by teardown_profiler().
_scheduler = None  # type: Optional[Scheduler]


# A profile must contain at least this many unique samples to be considered
# valid and worth sending.
PROFILE_MINIMUM_SAMPLES = 2


def has_profiling_enabled(options):
    # type: (Dict[str, Any]) -> bool
    """Return True when the client options enable transaction profiling.

    Profiling counts as enabled when either a ``profiles_sampler`` callable
    is configured or ``profiles_sample_rate`` is a positive number.
    """
    if options["profiles_sampler"] is not None:
        return True

    rate = options["profiles_sample_rate"]
    return rate is not None and rate > 0


def setup_profiler(options):
    # type: (Dict[str, Any]) -> bool
    """Create and start the global sampling scheduler.

    Chooses a scheduler implementation from the ``profiler_mode`` option,
    defaulting to the gevent-aware scheduler when gevent has monkey-patched
    the threading modules, and registers teardown at interpreter exit.

    Returns True when a new scheduler was set up, False when one already
    exists. Raises ValueError for an unrecognized profiler mode.
    """
    global _scheduler

    if _scheduler is not None:
        logger.debug("[Profiling] Profiler is already setup")
        return False

    frequency = DEFAULT_SAMPLING_FREQUENCY

    # With gevent's monkey patching active, a plain daemon thread would run
    # as a greenlet and be starved by other greenlets; the GeventScheduler
    # can spawn an actual native thread instead.
    if is_gevent():
        default_profiler_mode = GeventScheduler.mode
    else:
        default_profiler_mode = ThreadScheduler.mode

    profiler_mode = default_profiler_mode
    if options.get("profiler_mode") is not None:
        profiler_mode = options["profiler_mode"]

    # "sleep" is accepted as a legacy alias for the thread scheduler.
    if profiler_mode in (ThreadScheduler.mode, "sleep"):
        _scheduler = ThreadScheduler(frequency=frequency)
    elif profiler_mode == GeventScheduler.mode:
        _scheduler = GeventScheduler(frequency=frequency)
    else:
        raise ValueError("Unknown profiler mode: {}".format(profiler_mode))

    logger.debug(
        "[Profiling] Setting up profiler in {mode} mode".format(mode=_scheduler.mode)
    )
    _scheduler.setup()

    atexit.register(teardown_profiler)

    return True


def teardown_profiler():
    # type: () -> None
    """Stop and discard the global scheduler, if any is installed."""
    global _scheduler

    if _scheduler is not None:
        _scheduler.teardown()

    _scheduler = None


# Samples past this offset are dropped and the profile is force-stopped.
MAX_PROFILE_DURATION_NS = int(3e10)  # 30 seconds
class Profile:
    """Accumulates stack samples for a single transaction.

    The scheduler writes samples into an active ``Profile`` via
    :py:meth:`write`; at the end, :py:meth:`to_json` serializes the data
    into the v1 profile payload attached to the transaction event.
    """

    def __init__(
        self,
        sampled,  # type: Optional[bool]
        start_ns,  # type: int
        scheduler=None,  # type: Optional[Scheduler]
    ):
        # type: (...) -> None
        self.scheduler = _scheduler if scheduler is None else scheduler

        self.event_id = uuid.uuid4().hex  # type: str

        self.sampled = sampled  # type: Optional[bool]

        # Framework integrations may overwrite the active thread id. When it
        # is still None at the end of the profile, this default is used.
        self._default_active_thread_id = get_current_thread_meta()[0] or 0  # type: int
        self.active_thread_id = None  # type: Optional[int]

        # NOTE(review): assigning a parameter cannot raise AttributeError,
        # so this fallback branch looks unreachable — kept for parity.
        try:
            self.start_ns = start_ns  # type: int
        except AttributeError:
            self.start_ns = 0

        self.stop_ns = 0  # type: int
        self.active = False  # type: bool

        # Interning tables: map frame/stack ids to indices into the flat
        # `frames`/`stacks` lists so repeated stacks stay compact.
        self.indexed_frames = {}  # type: Dict[FrameId, int]
        self.indexed_stacks = {}  # type: Dict[StackId, int]
        self.frames = []  # type: List[ProcessedFrame]
        self.stacks = []  # type: List[ProcessedStack]
        self.samples = []  # type: List[ProcessedSample]

        self.unique_samples = 0

    def update_active_thread_id(self):
        # type: () -> None
        """Record the calling thread as this profile's active thread."""
        self.active_thread_id = get_current_thread_meta()[0]
        logger.debug(
            "[Profiling] updating active thread id to {tid}".format(
                tid=self.active_thread_id
            )
        )

    def _set_initial_sampling_decision(self, sampling_context):
        # type: (SamplingContext) -> None
        """
        Sets the profile's sampling decision according to the following
        precedence rules:

        1. If the transaction to be profiled is not sampled, that decision
        will be used, regardless of anything else.

        2. Use `profiles_sample_rate` to decide.
        """
        # The corresponding transaction was not sampled, so no profile either.
        if not self.sampled:
            logger.debug(
                "[Profiling] Discarding profile because transaction is discarded."
            )
            self.sampled = False
            return

        # The profiler was never (successfully) initialized.
        if self.scheduler is None:
            logger.debug(
                "[Profiling] Discarding profile because profiler was not started."
            )
            self.sampled = False
            return

        client = sentry_sdk_alpha.get_client()
        if not client.is_active():
            self.sampled = False
            return

        options = client.options

        # A profiles_sampler callable takes precedence over the static rate.
        sample_rate = None
        if callable(options.get("profiles_sampler")):
            sample_rate = options["profiles_sampler"](sampling_context)
        elif options["profiles_sample_rate"] is not None:
            sample_rate = options["profiles_sample_rate"]

        # Neither option was set, so profiling was never enabled.
        if sample_rate is None:
            logger.debug(
                "[Profiling] Discarding profile because profiling was not enabled."
            )
            self.sampled = False
            return

        if not is_valid_sample_rate(sample_rate, source="Profiling"):
            logger.warning(
                "[Profiling] Discarding profile because of invalid sample rate."
            )
            self.sampled = False
            return

        # Roll the dice. random.random() is inclusive of 0 but not of 1, so
        # strict < is safe; float() also handles a boolean sample_rate.
        self.sampled = random.random() < float(sample_rate)

        if self.sampled:
            logger.debug("[Profiling] Initializing profile")
        else:
            logger.debug(
                "[Profiling] Discarding profile because it's not included in the random sample (sample rate = {sample_rate})".format(
                    sample_rate=float(sample_rate)
                )
            )

    def start(self):
        # type: () -> None
        """Begin collecting samples (no-op when unsampled or running)."""
        if not self.sampled or self.active:
            return

        assert self.scheduler, "No scheduler specified"
        logger.debug("[Profiling] Starting profile")
        self.active = True
        if not self.start_ns:
            self.start_ns = time.perf_counter_ns()
        self.scheduler.start_profiling(self)

    def stop(self):
        # type: () -> None
        """Stop collecting samples (no-op when unsampled or stopped)."""
        if not self.sampled or not self.active:
            return

        assert self.scheduler, "No scheduler specified"
        logger.debug("[Profiling] Stopping profile")
        self.active = False
        self.stop_ns = time.perf_counter_ns()

    def __enter__(self):
        # type: () -> Profile
        # Install this profile on the isolation scope, remembering the
        # previous one so __exit__ can restore it.
        scope = sentry_sdk_alpha.get_isolation_scope()
        old_profile = scope.profile
        scope.profile = self

        self._context_manager_state = (scope, old_profile)

        self.start()

        return self

    def __exit__(self, ty, value, tb):
        # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
        self.stop()

        scope, old_profile = self._context_manager_state
        del self._context_manager_state

        scope.profile = old_profile

    def write(self, ts, sample):
        # type: (int, ExtractedSample) -> None
        """Record one extracted sample taken at timestamp ``ts`` (ns)."""
        if not self.active:
            return

        # Samples that predate the profile are ignored.
        if ts < self.start_ns:
            return

        offset = ts - self.start_ns
        if offset > MAX_PROFILE_DURATION_NS:
            # Cap runaway profiles at the maximum duration.
            self.stop()
            return

        self.unique_samples += 1

        elapsed_since_start_ns = str(offset)

        for tid, (stack_id, frame_ids, frames) in sample:
            try:
                # Check stack interning first; if the stack is already known
                # we can skip indexing its frames entirely.
                if stack_id not in self.indexed_stacks:
                    for i, frame_id in enumerate(frame_ids):
                        if frame_id not in self.indexed_frames:
                            self.indexed_frames[frame_id] = len(self.indexed_frames)
                            self.frames.append(frames[i])

                    self.indexed_stacks[stack_id] = len(self.indexed_stacks)
                    self.stacks.append(
                        [self.indexed_frames[frame_id] for frame_id in frame_ids]
                    )

                self.samples.append(
                    {
                        "elapsed_since_start_ns": elapsed_since_start_ns,
                        "thread_id": tid,
                        "stack_id": self.indexed_stacks[stack_id],
                    }
                )
            except AttributeError:
                # Occasionally a frame lacks expected attributes; abandon the
                # bad sample instead of crashing the sampler.
                capture_internal_exception(sys.exc_info())

    def process(self):
        # type: () -> ProcessedProfile
        """Return the accumulated profile data in its processed form.

        Thread metadata is collected now, at the end of the profile, so any
        thread that terminated earlier has no metadata associated with it.
        """
        thread_metadata = {
            str(thread.ident): {"name": str(thread.name)}
            for thread in threading.enumerate()
        }  # type: Dict[str, ProcessedThreadMetadata]

        return {
            "frames": self.frames,
            "stacks": self.stacks,
            "samples": self.samples,
            "thread_metadata": thread_metadata,
        }

    def to_json(self, event_opt, options):
        # type: (Event, Dict[str, Any]) -> Dict[str, Any]
        """Serialize this profile into the v1 profile envelope payload."""
        profile = self.process()

        set_in_app_in_frames(
            profile["frames"],
            options["in_app_exclude"],
            options["in_app_include"],
            options["project_root"],
        )

        return {
            "environment": event_opt.get("environment"),
            "event_id": self.event_id,
            "platform": "python",
            "profile": profile,
            "release": event_opt.get("release", ""),
            "timestamp": event_opt["start_timestamp"],
            "version": "1",
            "device": {
                "architecture": platform.machine(),
            },
            "os": {
                "name": platform.system(),
                "version": platform.release(),
            },
            "runtime": {
                "name": platform.python_implementation(),
                "version": platform.python_version(),
            },
            "transactions": [
                {
                    "id": event_opt["event_id"],
                    "name": event_opt["transaction"],
                    # The transaction starts before the profile, so relative
                    # to the profile its start is pinned to 0 for now.
                    "relative_start_ns": "0",
                    # The transaction ends after the profile, so use the
                    # profile's duration instead of the transaction's.
                    "relative_end_ns": str(self.stop_ns - self.start_ns),
                    "trace_id": event_opt["contexts"]["trace"]["trace_id"],
                    "active_thread_id": str(
                        self._default_active_thread_id
                        if self.active_thread_id is None
                        else self.active_thread_id
                    ),
                }
            ],
        }

    def valid(self):
        # type: () -> bool
        """Return True when this profile should be sent to Sentry.

        Discarded profiles are reported to the transport as lost events.
        """
        client = sentry_sdk_alpha.get_client()
        if not client.is_active():
            return False

        if not has_profiling_enabled(client.options):
            return False

        # self.sampled may be None (no decision) or False — both discard.
        if not self.sampled:
            if client.transport:
                client.transport.record_lost_event(
                    "sample_rate", data_category="profile"
                )
            return False

        if self.unique_samples < PROFILE_MINIMUM_SAMPLES:
            if client.transport:
                client.transport.record_lost_event(
                    "insufficient_data", data_category="profile"
                )
            logger.debug("[Profiling] Discarding profile because insufficient samples.")
            return False

        return True
class Scheduler(ABC):
    """Base class for the sampling schedulers.

    A scheduler owns the sampler callable and tracks which profiles are
    currently being written to. Concrete subclasses decide how the sampler
    is driven (a native daemon thread vs. a gevent thread pool).
    """

    mode = "unknown"  # type: ProfilerMode

    def __init__(self, frequency):
        # type: (int) -> None
        self.interval = 1.0 / frequency

        self.sampler = self.make_sampler()

        # Bounded so a burst of new profiles cannot grow memory unboundedly.
        self.new_profiles = deque(maxlen=128)  # type: Deque[Profile]
        self.active_profiles = set()  # type: Set[Profile]

    def __enter__(self):
        # type: () -> Scheduler
        self.setup()
        return self

    def __exit__(self, ty, value, tb):
        # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
        self.teardown()

    @abstractmethod
    def setup(self):
        # type: () -> None
        pass

    @abstractmethod
    def teardown(self):
        # type: () -> None
        pass

    def ensure_running(self):
        # type: () -> None
        """
        Ensure the scheduler is running. By default, this method is a no-op.
        The method should be overridden by any implementation for which it is
        relevant.
        """
        return None

    def start_profiling(self, profile):
        # type: (Profile) -> None
        self.ensure_running()
        self.new_profiles.append(profile)

    def make_sampler(self):
        # type: () -> Callable[..., None]
        cwd = os.getcwd()

        cache = LRUCache(max_size=256)

        def _sample_stack(*args, **kwargs):
            # type: (*Any, **Any) -> None
            """
            Take a sample of the stack on all the threads in the process.
            This should be called at a regular interval to collect samples.
            """
            if not self.new_profiles and not self.active_profiles:
                # Nothing is profiling; skip the (expensive) stack walk.
                return

            # Snapshot how many pending profiles to drain *before* extracting
            # frames: another thread may append to new_profiles while we run,
            # and profiles started after this point must not receive this
            # sample.
            new_profiles = len(self.new_profiles)

            now = time.perf_counter_ns()

            try:
                sample = [
                    (str(tid), extract_stack(frame, cache, cwd))
                    for tid, frame in sys._current_frames().items()
                ]
            except AttributeError:
                # A frame was missing expected attributes; abandon this
                # sample rather than crash the sampler.
                capture_internal_exception(sys.exc_info())
                return

            # Drain pending profiles into the active set here instead of in
            # `start_profiling`: that method runs on other threads, and a
            # set whose size changes during iteration raises RuntimeError.
            # Doing it on this thread also avoids a lock that would block
            # profile starts.
            for _ in range(new_profiles):
                self.active_profiles.add(self.new_profiles.popleft())

            inactive_profiles = []

            for profile in self.active_profiles:
                if profile.active:
                    profile.write(now, sample)
                else:
                    # Buffer finished profiles for removal after the loop;
                    # removing during iteration would raise RuntimeError.
                    inactive_profiles.append(profile)

            for profile in inactive_profiles:
                self.active_profiles.remove(profile)

        return _sample_stack


class ThreadScheduler(Scheduler):
    """
    This scheduler is based on running a daemon thread that will call
    the sampler at a regular interval.
    """

    mode = "thread"  # type: ProfilerMode
    name = "sentry.profiler.ThreadScheduler"

    def __init__(self, frequency):
        # type: (int) -> None
        super().__init__(frequency=frequency)

        # `running` signals the sampler thread to keep going / stop.
        self.running = False
        self.thread = None  # type: Optional[threading.Thread]
        self.pid = None  # type: Optional[int]
        self.lock = threading.Lock()

    def setup(self):
        # type: () -> None
        pass

    def teardown(self):
        # type: () -> None
        if self.running:
            self.running = False
            if self.thread is not None:
                self.thread.join()

    def ensure_running(self):
        # type: () -> None
        """
        Check that the profiler has an active thread to run in, and start one if
        that's not the case.

        Note that this might fail (e.g. in Python 3.12 it's not possible to
        spawn new threads at interpreter shutdown). In that case self.running
        will be False after running this function.
        """
        pid = os.getpid()

        # Already running in this process (a fork changes the pid).
        if self.running and self.pid == pid:
            return

        with self.lock:
            # Another thread may have raced us here; re-check under the lock
            # so we don't start a second sampler thread.
            if self.running and self.pid == pid:
                return

            self.pid = pid
            self.running = True

            # Must be a daemon thread, otherwise it would keep the whole
            # application alive after every other thread has exited.
            self.thread = threading.Thread(name=self.name, target=self.run, daemon=True)
            try:
                self.thread.start()
            except RuntimeError:
                # The interpreter no longer allows spawning threads
                # (e.g. during shutdown); bail out.
                self.running = False
                self.thread = None
                return

    def run(self):
        # type: () -> None
        last = time.perf_counter()

        while self.running:
            self.sampler()

            # Sleep only for the remainder of the interval, accounting for
            # the time the sampler itself took.
            elapsed = time.perf_counter() - last
            if elapsed < self.interval:
                thread_sleep(self.interval - elapsed)

            # Re-read the clock after sleeping for the next iteration.
            last = time.perf_counter()


class GeventScheduler(Scheduler):
    """
    This scheduler is based on the thread scheduler but adapted to work with
    gevent. When using gevent, it may monkey patch the threading modules
    (`threading` and `_thread`). This results in the use of greenlets instead
    of native threads.

    This is an issue because the sampler CANNOT run in a greenlet because
    1. Other greenlets doing sync work will prevent the sampler from running
    2. The greenlet runs in the same thread as other greenlets so when taking
       a sample, other greenlets will have been evicted from the thread. This
       results in a sample containing only the sampler's code.
    """

    mode = "gevent"  # type: ProfilerMode
    name = "sentry.profiler.GeventScheduler"

    def __init__(self, frequency):
        # type: (int) -> None

        if ThreadPool is None:
            raise ValueError("Profiler mode: {} is not available".format(self.mode))

        super().__init__(frequency=frequency)

        # `running` signals the sampler thread to keep going / stop.
        self.running = False
        self.thread = None  # type: Optional[_ThreadPool]
        self.pid = None  # type: Optional[int]

        # Intentionally the gevent-patched threading.Lock: the sampler thread
        # is spawned from greenlets, which must be able to take this lock.
        self.lock = threading.Lock()

    def setup(self):
        # type: () -> None
        pass

    def teardown(self):
        # type: () -> None
        if self.running:
            self.running = False
            if self.thread is not None:
                self.thread.join()

    def ensure_running(self):
        # type: () -> None
        pid = os.getpid()

        # Already running in this process (a fork changes the pid).
        if self.running and self.pid == pid:
            return

        with self.lock:
            # Another thread may have raced us here; re-check under the lock
            # so we don't start a second sampler thread.
            if self.running and self.pid == pid:
                return

            self.pid = pid
            self.running = True

            self.thread = ThreadPool(1)  # type: ignore[misc]
            try:
                self.thread.spawn(self.run)
            except RuntimeError:
                # The interpreter no longer allows spawning threads
                # (e.g. during shutdown); bail out.
                self.running = False
                self.thread = None
                return

    def run(self):
        # type: () -> None
        last = time.perf_counter()

        while self.running:
            self.sampler()

            # Sleep only for the remainder of the interval, accounting for
            # the time the sampler itself took.
            elapsed = time.perf_counter() - last
            if elapsed < self.interval:
                thread_sleep(self.interval - elapsed)

            # Re-read the clock after sleeping for the next iteration.
            last = time.perf_counter()
# Sampling at 101 Hz rather than a round 100 mitigates lockstep sampling.
DEFAULT_SAMPLING_FREQUENCY = 101


# Cap on captured stack depth so individual samples stay small.
MAX_STACK_DEPTH = 128


if PY311:

    def get_frame_name(frame):
        # type: (FrameType) -> str
        # Python 3.11+ exposes the qualified name (includes the class).
        return frame.f_code.co_qualname

else:

    def get_frame_name(frame):
        # type: (FrameType) -> str
        """Best-effort reconstruction of a qualified frame name on <3.11."""

        f_code = frame.f_code
        co_varnames = f_code.co_varnames

        # co_name is only the bare function name; for methods the class name
        # is NOT included, so we try to recover it below.
        name = f_code.co_name

        # Instance method: the first positional argument should be `self`,
        # whose type's MRO tells us which class defines this method.
        try:
            if co_varnames and co_varnames[0] == "self" and "self" in frame.f_locals:
                for klass in type(frame.f_locals["self"]).__mro__:
                    if name in klass.__dict__:
                        return "{}.{}".format(klass.__name__, name)
        except (AttributeError, ValueError):
            pass

        # Class method (`@classmethod`): same idea with the `cls` argument.
        try:
            if co_varnames and co_varnames[0] == "cls" and "cls" in frame.f_locals:
                for klass in frame.f_locals["cls"].__mro__:
                    if name in klass.__dict__:
                        return "{}.{}".format(klass.__name__, name)
        except (AttributeError, ValueError):
            pass

        # Static methods (and plain functions) keep the bare name.
        return name


def frame_id(raw_frame):
    # type: (FrameType) -> FrameId
    """Identify a frame by (file, line, function name)."""
    return (raw_frame.f_code.co_filename, raw_frame.f_lineno, get_frame_name(raw_frame))


def extract_frame(fid, raw_frame, cwd):
    # type: (FrameId, FrameType, str) -> ProcessedFrame
    """Convert a raw interpreter frame into a processed frame dict."""
    abs_path = raw_frame.f_code.co_filename

    try:
        module = raw_frame.f_globals["__name__"]
    except Exception:
        module = None

    # A plain dict is used instead of a namedtuple: namedtuple construction
    # and attribute access are many times slower on this hot path.
    return {
        # This was originally `os.path.abspath(abs_path)` but that carried a
        # large performance overhead. Per the docs it is equivalent to
        # `os.path.normpath(os.path.join(os.getcwd(), path))`; `os.getcwd()`
        # is slow, so the caller precomputes it, and since the joined path is
        # already normalized we skip `os.path.normpath` entirely.
        "abs_path": os.path.join(cwd, abs_path),
        "module": module,
        "filename": filename_for_module(module, abs_path) or None,
        "function": fid[2],
        "lineno": raw_frame.f_lineno,
    }


def extract_stack(
    raw_frame,  # type: Optional[FrameType]
    cache,  # type: LRUCache
    cwd,  # type: str
    max_stack_depth=MAX_STACK_DEPTH,  # type: int
):
    # type: (...) -> ExtractedStack
    """
    Extracts the stack starting the specified frame. The extracted stack
    assumes the specified frame is the top of the stack, and works back
    to the bottom of the stack.

    In the event that the stack is more than `MAX_STACK_DEPTH` frames deep,
    only the first `MAX_STACK_DEPTH` frames will be returned.
    """
    # The bounded deque keeps only the `max_stack_depth` frames nearest the
    # top of the stack as we walk toward the bottom.
    raw_frames = deque(maxlen=max_stack_depth)  # type: Deque[FrameType]

    while raw_frame is not None:
        f_back = raw_frame.f_back
        raw_frames.append(raw_frame)
        raw_frame = f_back

    frame_ids = tuple(frame_id(raw_frame) for raw_frame in raw_frames)

    frames = []
    for i, fid in enumerate(frame_ids):
        processed = cache.get(fid)
        if processed is None:
            processed = extract_frame(fid, raw_frames[i], cwd)
            cache.set(fid, processed)
        frames.append(processed)

    # Pre-hash the frame-id tuple instead of using the (potentially large)
    # stack as a dict key directly — the key is needed several times and
    # hashing once is cheaper. The stack depth is included in the id to
    # reduce the likelihood of collisions: only stacks of equal depth can
    # collide.
    stack_id = len(raw_frames), hash(frame_ids)

    return stack_id, frame_ids, frames
sentry_sdk_alpha._types import ( + Breadcrumb, + BreadcrumbHint, + ErrorProcessor, + Event, + EventProcessor, + ExcInfo, + Hint, + LogLevelStr, + Type, + ) + + import sentry_sdk_alpha + + P = ParamSpec("P") + R = TypeVar("R") + + F = TypeVar("F", bound=Callable[..., Any]) + T = TypeVar("T") + + +# Holds data that will be added to **all** events sent by this process. +# In case this is a http server (think web framework) with multiple users +# the data will be added to events of all users. +# Typically this is used for process wide data such as the release. +_global_scope = None # type: Optional[Scope] + +# Holds data for the active request. +# This is used to isolate data for different requests or users. +# The isolation scope is usually created by integrations, but may also +# be created manually +_isolation_scope = ContextVar("isolation_scope", default=None) + +# Holds data for the active span. +# This can be used to manually add additional data to a span. +_current_scope = ContextVar("current_scope", default=None) + +global_event_processors = [] # type: List[EventProcessor] + + +class ScopeType(Enum): + CURRENT = "current" + ISOLATION = "isolation" + GLOBAL = "global" + MERGED = "merged" + + +def add_global_event_processor(processor): + # type: (EventProcessor) -> None + global_event_processors.append(processor) + + +def _attr_setter(fn): + # type: (Any) -> Any + return property(fset=fn, doc=fn.__doc__) + + +def _disable_capture(fn): + # type: (F) -> F + @wraps(fn) + def wrapper(self, *args, **kwargs): + # type: (Any, *Dict[str, Any], **Any) -> Any + if not self._should_capture: + return + try: + self._should_capture = False + return fn(self, *args, **kwargs) + finally: + self._should_capture = True + + return wrapper # type: ignore + + +class Scope: + """The scope holds extra information that should be sent with all + events that belong to it. + """ + + # NOTE: Even though it should not happen, the scope needs to not crash when + # accessed by multiple threads. 
It's fine if it's full of races, but those + # races should never make the user application crash. + # + # The same needs to hold for any accesses of the scope the SDK makes. + + __slots__ = ( + "_level", + "_name", + "_fingerprint", + # note that for legacy reasons, _transaction is the transaction *name*, + # not a Transaction object (the object is stored in _span) + "_transaction", + "_transaction_info", + "_user", + "_tags", + "_contexts", + "_extras", + "_breadcrumbs", + "_n_breadcrumbs_truncated", + "_event_processors", + "_error_processors", + "_should_capture", + "_span", + "_session", + "_attachments", + "_force_auto_session_tracking", + "_profile", + "_propagation_context", + "client", + "_type", + "_last_event_id", + "_flags", + ) + + def __init__(self, ty=None, client=None): + # type: (Optional[ScopeType], Optional[sentry_sdk.Client]) -> None + self._type = ty + + self._event_processors = [] # type: List[EventProcessor] + self._error_processors = [] # type: List[ErrorProcessor] + + self._name = None # type: Optional[str] + self._propagation_context = None # type: Optional[PropagationContext] + self._n_breadcrumbs_truncated = 0 # type: int + + self.client = NonRecordingClient() # type: sentry_sdk.client.BaseClient + + if client is not None: + self.set_client(client) + + self.clear() + + incoming_trace_information = self._load_trace_data_from_env() + self.generate_propagation_context(incoming_data=incoming_trace_information) + + def __copy__(self): + # type: () -> Self + """ + Returns a copy of this scope. + This also creates a copy of all referenced data structures. 
+ """ + rv = object.__new__(self.__class__) # type: Self + + rv._type = self._type + rv.client = self.client + rv._level = self._level + rv._name = self._name + rv._fingerprint = self._fingerprint + rv._transaction = self._transaction + rv._transaction_info = dict(self._transaction_info) + rv._user = self._user + + rv._tags = dict(self._tags) + rv._contexts = dict(self._contexts) + rv._extras = dict(self._extras) + + rv._breadcrumbs = copy(self._breadcrumbs) + rv._n_breadcrumbs_truncated = copy(self._n_breadcrumbs_truncated) + rv._event_processors = list(self._event_processors) + rv._error_processors = list(self._error_processors) + rv._propagation_context = self._propagation_context + + rv._should_capture = self._should_capture + rv._span = self._span + rv._session = self._session + rv._force_auto_session_tracking = self._force_auto_session_tracking + rv._attachments = list(self._attachments) + + rv._profile = self._profile + + rv._last_event_id = self._last_event_id + + rv._flags = deepcopy(self._flags) + + return rv + + @classmethod + def get_current_scope(cls): + # type: () -> Scope + """ + .. versionadded:: 2.0.0 + + Returns the current scope. + """ + current_scope = cls._get_current_scope() + if current_scope is None: + current_scope = Scope(ty=ScopeType.CURRENT) + _current_scope.set(current_scope) + + return current_scope + + @classmethod + def _get_current_scope(cls): + # type: () -> Optional[Scope] + """ + Returns the current scope without creating a new one. Internal use only. + """ + return _current_scope.get() + + @classmethod + def set_current_scope(cls, new_current_scope): + # type: (Scope) -> None + """ + .. versionadded:: 2.0.0 + + Sets the given scope as the new current scope overwriting the existing current scope. + :param new_current_scope: The scope to set as the new current scope. + """ + _current_scope.set(new_current_scope) + + @classmethod + def get_isolation_scope(cls): + # type: () -> Scope + """ + .. 
    @classmethod
    def get_isolation_scope(cls):
        # type: () -> Scope
        """
        .. versionadded:: 2.0.0

        Returns the isolation scope.
        """
        # Lazily create the contextvar-backed isolation scope on first access.
        isolation_scope = cls._get_isolation_scope()
        if isolation_scope is None:
            isolation_scope = Scope(ty=ScopeType.ISOLATION)
            _isolation_scope.set(isolation_scope)

        return isolation_scope

    @classmethod
    def _get_isolation_scope(cls):
        # type: () -> Optional[Scope]
        """
        Returns the isolation scope without creating a new one. Internal use only.
        """
        return _isolation_scope.get()

    @classmethod
    def set_isolation_scope(cls, new_isolation_scope):
        # type: (Scope) -> None
        """
        .. versionadded:: 2.0.0

        Sets the given scope as the new isolation scope overwriting the existing isolation scope.
        :param new_isolation_scope: The scope to set as the new isolation scope.
        """
        _isolation_scope.set(new_isolation_scope)

    @classmethod
    def get_global_scope(cls):
        # type: () -> Scope
        """
        .. versionadded:: 2.0.0

        Returns the global scope.
        """
        # The global scope is a process-wide singleton, not a contextvar.
        global _global_scope
        if _global_scope is None:
            _global_scope = Scope(ty=ScopeType.GLOBAL)

        return _global_scope

    @classmethod
    def last_event_id(cls):
        # type: () -> Optional[str]
        """
        .. versionadded:: 2.2.0

        Returns event ID of the event most recently captured by the isolation scope, or None if no event
        has been captured. We do not consider events that are dropped, e.g. by a before_send hook.
        Transactions also are not considered events in this context.

        The event corresponding to the returned event ID is NOT guaranteed to actually be sent to Sentry;
        whether the event is sent depends on the transport. The event could be sent later or not at all.
        Even a sent event could fail to arrive in Sentry due to network issues, exhausted quotas, or
        various other reasons.
        """
        return cls.get_isolation_scope()._last_event_id

    def _merge_scopes(self, additional_scope=None, additional_scope_kwargs=None):
        # type: (Optional[Scope], Optional[Dict[str, Any]]) -> Self
        """
        Merges global, isolation and current scope into a new scope and
        adds the given additional scope or additional scope kwargs to it.
        """
        if additional_scope and additional_scope_kwargs:
            raise TypeError("cannot provide scope and kwargs")

        final_scope = self.__class__()
        final_scope._type = ScopeType.MERGED

        # Merge order matters: global < isolation < current < self < additional.
        global_scope = self.get_global_scope()
        final_scope.update_from_scope(global_scope)

        isolation_scope = self.get_isolation_scope()
        final_scope.update_from_scope(self.get_isolation_scope())

        current_scope = self.get_current_scope()
        final_scope.update_from_scope(current_scope)

        # Avoid merging self twice when it already is one of the above scopes.
        if self != current_scope and self != isolation_scope:
            final_scope.update_from_scope(self)

        if additional_scope is not None:
            # additional_scope may be a callable that mutates the final scope.
            if callable(additional_scope):
                additional_scope(final_scope)
            else:
                final_scope.update_from_scope(additional_scope)

        elif additional_scope_kwargs:
            final_scope.update_from_kwargs(**additional_scope_kwargs)

        return final_scope
    @classmethod
    def get_client(cls):
        # type: () -> sentry_sdk.client.BaseClient
        """
        .. versionadded:: 2.0.0

        Returns the currently used :py:class:`sentry_sdk.Client`.
        This checks the current scope, the isolation scope and the global scope for a client.
        If no client is available a :py:class:`sentry_sdk.client.NonRecordingClient` is returned.
        """
        # Check scopes in order: current -> isolation -> global. The
        # AttributeError guards also cover _global_scope still being None.
        current_scope = cls.get_current_scope()
        try:
            client = current_scope.client
        except AttributeError:
            client = None

        if client is not None and client.is_active():
            return client

        isolation_scope = cls.get_isolation_scope()
        try:
            client = isolation_scope.client
        except AttributeError:
            client = None

        if client is not None and client.is_active():
            return client

        try:
            client = _global_scope.client  # type: ignore
        except AttributeError:
            client = None

        if client is not None and client.is_active():
            return client

        return NonRecordingClient()

    def set_client(self, client=None):
        # type: (Optional[sentry_sdk.client.BaseClient]) -> None
        """
        .. versionadded:: 2.0.0

        Sets the client for this scope.

        :param client: The client to use in this scope.
            If `None` the client of the scope will be replaced by a :py:class:`sentry_sdk.NonRecordingClient`.

        """
        self.client = client if client is not None else NonRecordingClient()

    def fork(self):
        # type: () -> Self
        """
        .. versionadded:: 2.0.0

        Returns a fork of this scope.
        """
        # fork() is a shallow-ish copy implemented via __copy__.
        forked_scope = copy(self)
        return forked_scope

    def _load_trace_data_from_env(self):
        # type: () -> Optional[Dict[str, str]]
        """
        Load Sentry trace id and baggage from environment variables.
        Can be disabled by setting SENTRY_USE_ENVIRONMENT to "false".
        """
        incoming_trace_information = None

        sentry_use_environment = (
            os.environ.get("SENTRY_USE_ENVIRONMENT") or ""
        ).lower()
        use_environment = sentry_use_environment not in FALSE_VALUES
        if use_environment:
            incoming_trace_information = {}

            if os.environ.get("SENTRY_TRACE"):
                incoming_trace_information[SENTRY_TRACE_HEADER_NAME] = (
                    os.environ.get("SENTRY_TRACE") or ""
                )

            if os.environ.get("SENTRY_BAGGAGE"):
                incoming_trace_information[BAGGAGE_HEADER_NAME] = (
                    os.environ.get("SENTRY_BAGGAGE") or ""
                )

        # Return None rather than an empty dict when nothing was found.
        return incoming_trace_information or None

    def set_new_propagation_context(self):
        # type: () -> None
        """
        Creates a new propagation context and sets it as `_propagation_context`. Overwriting existing one.
        """
        self._propagation_context = PropagationContext()

    def generate_propagation_context(self, incoming_data=None):
        # type: (Optional[Dict[str, str]]) -> None
        """
        Makes sure the propagation context is set on the scope.
        If there is `incoming_data` overwrite existing propagation context.
        If there is no `incoming_data` create new propagation context, but do NOT overwrite if already existing.
        """
        if incoming_data:
            propagation_context = PropagationContext.from_incoming_data(incoming_data)
            if propagation_context is not None:
                self._propagation_context = propagation_context

        # Only non-current scopes auto-create a fresh propagation context.
        if self._type != ScopeType.CURRENT:
            if self._propagation_context is None:
                self.set_new_propagation_context()

    def get_dynamic_sampling_context(self):
        # type: () -> Optional[Dict[str, str]]
        """
        Returns the Dynamic Sampling Context from the baggage or populates one.
        """
        baggage = self.get_baggage()
        return baggage.dynamic_sampling_context() if baggage else None
    def get_traceparent(self, *args, **kwargs):
        # type: (Any, Any) -> Optional[str]
        """
        Returns the Sentry "sentry-trace" header (aka the traceparent) from the
        currently active span or the scopes Propagation Context.
        """
        client = self.get_client()

        # If we have an active span, return traceparent from there
        if (
            has_tracing_enabled(client.options)
            and self.span is not None
            and self.span.is_valid
        ):
            return self.span.to_traceparent()

        # If this scope has a propagation context, return traceparent from there
        if self._propagation_context is not None:
            return self._propagation_context.to_traceparent()

        # Fall back to isolation scope's traceparent. It always has one
        return self.get_isolation_scope().get_traceparent()

    def get_baggage(self, *args, **kwargs):
        # type: (Any, Any) -> Optional[Baggage]
        """
        Returns the Sentry "baggage" header containing trace information from the
        currently active span or the scopes Propagation Context.
        If not existing, creates a new one.
        """
        client = self.get_client()

        # If we have an active span, return baggage from there
        if (
            has_tracing_enabled(client.options)
            and self.span is not None
            and self.span.is_valid
        ):
            return self.span.to_baggage()

        # If this scope has a propagation context, return baggage from there
        # populate a fresh one if it doesn't exist
        if self._propagation_context is not None:
            if self._propagation_context.baggage is None:
                self._propagation_context.baggage = Baggage.from_options(self)
            return self._propagation_context.baggage

        # Fall back to isolation scope's baggage. It always has one
        return self.get_isolation_scope().get_baggage()

    def get_trace_context(self):
        # type: () -> Any
        """
        Returns the Sentry "trace" context from the Propagation Context.
        """
        if self._propagation_context is None:
            return None

        trace_context = {
            "trace_id": self._propagation_context.trace_id,
            "span_id": self._propagation_context.span_id,
            "parent_span_id": self._propagation_context.parent_span_id,
            "dynamic_sampling_context": self.get_dynamic_sampling_context(),
        }  # type: Dict[str, Any]

        return trace_context

    def trace_propagation_meta(self, *args, **kwargs):
        # type: (*Any, **Any) -> str
        """
        Return meta tags which should be injected into HTML templates
        to allow propagation of trace information.
        """
        meta = ""

        sentry_trace = self.get_traceparent()
        if sentry_trace is not None:
            # NOTE(review): the meta-tag literal was mangled by extraction in
            # this view; reconstructed as the standard Sentry meta tag — confirm
            # against upstream sentry_sdk.
            meta += '<meta name="%s" content="%s">' % (
                SENTRY_TRACE_HEADER_NAME,
                sentry_trace,
            )

        baggage = self.get_baggage()
        if baggage is not None:
            # NOTE(review): literal reconstructed, see note above in this method.
            meta += '<meta name="%s" content="%s">' % (
                BAGGAGE_HEADER_NAME,
                baggage.serialize(),
            )

        return meta

    def iter_headers(self):
        # type: () -> Iterator[Tuple[str, str]]
        """
        Creates a generator which returns the `sentry-trace` and `baggage` headers from the Propagation Context.
        """
        if self._propagation_context is not None:
            traceparent = self.get_traceparent()
            if traceparent is not None:
                yield SENTRY_TRACE_HEADER_NAME, traceparent

            baggage = self.get_baggage()
            if baggage is not None:
                yield BAGGAGE_HEADER_NAME, baggage.serialize()
    def iter_trace_propagation_headers(self, *args, **kwargs):
        # type: (Any, Any) -> Generator[Tuple[str, str], None, None]
        """
        Return HTTP headers which allow propagation of trace data.

        If a span is given, the trace data will taken from the span.
        If no span is given, the trace data is taken from the scope.
        """
        client = self.get_client()

        span = kwargs.pop("span", None)
        span = span or self.span

        if has_tracing_enabled(client.options) and span is not None and span.is_valid:
            for header in span.iter_headers():
                yield header
        else:
            # If this scope has a propagation context, return headers from there
            # (it could be that self is not the current scope nor the isolation scope)
            if self._propagation_context is not None:
                for header in self.iter_headers():
                    yield header
            else:
                # otherwise try headers from current scope
                current_scope = self.get_current_scope()
                if current_scope._propagation_context is not None:
                    for header in current_scope.iter_headers():
                        yield header
                else:
                    # otherwise fall back to headers from isolation scope
                    isolation_scope = self.get_isolation_scope()
                    if isolation_scope._propagation_context is not None:
                        for header in isolation_scope.iter_headers():
                            yield header

    def get_active_propagation_context(self):
        # type: () -> Optional[PropagationContext]
        """Return the first propagation context found on self, the current
        scope, or the isolation scope (in that order), or None."""
        if self._propagation_context is not None:
            return self._propagation_context

        current_scope = self.get_current_scope()
        if current_scope._propagation_context is not None:
            return current_scope._propagation_context

        isolation_scope = self.get_isolation_scope()
        if isolation_scope._propagation_context is not None:
            return isolation_scope._propagation_context

        return None

    def clear(self):
        # type: () -> None
        """Clears the entire scope."""
        self._level = None  # type: Optional[LogLevelStr]
        self._fingerprint = None  # type: Optional[List[str]]
        self._transaction = None  # type: Optional[str]
        self._transaction_info = {}  # type: MutableMapping[str, str]
        self._user = None  # type: Optional[Dict[str, Any]]

        self._tags = {}  # type: Dict[str, Any]
        self._contexts = {}  # type: Dict[str, Dict[str, Any]]
        self._extras = {}  # type: MutableMapping[str, Any]
        self._attachments = []  # type: List[Attachment]

        self.clear_breadcrumbs()

        self._should_capture = True  # type: bool

        self._span = None  # type: Optional[Span]
        self._session = None  # type: Optional[Session]
        self._force_auto_session_tracking = None  # type: Optional[bool]

        self._profile = None  # type: Optional[Profile]

        self._propagation_context = None

        # self._last_event_id is only applicable to isolation scopes
        self._last_event_id = None  # type: Optional[str]
        self._flags = None  # type: Optional[FlagBuffer]

    def set_level(self, value):
        # type: (LogLevelStr) -> None
        """
        Sets the level for the scope.

        :param value: The level to set.
        """
        self._level = value

    @_attr_setter
    def fingerprint(self, value):
        # type: (Optional[List[str]]) -> None
        """When set this overrides the default fingerprint."""
        self._fingerprint = value

    @property
    def root_span(self):
        # type: () -> Optional[Span]
        """Return the root span in the scope, if any."""
        if self._span is None:
            return None

        return self._span.root_span

    def set_transaction_name(self, name, source=None):
        # type: (str, Optional[str]) -> None
        """Set the transaction name and optionally the transaction source."""
        self._transaction = name

        # Keep the root span's name/source in sync with the scope.
        if self._span and self._span.root_span:
            self._span.root_span.name = name
            if source:
                self._span.root_span.source = source

        if source:
            self._transaction_info["source"] = source

    @property
    def transaction_name(self):
        # type: () -> Optional[str]
        return self._transaction

    @property
    def transaction_source(self):
        # type: () -> Optional[str]
        return self._transaction_info.get("source")

    def set_user(self, value):
        # type: (Optional[Dict[str, Any]]) -> None
        """Sets a user for the scope."""
        self._user = value
        # Sessions live on the isolation scope; propagate the user there.
        session = self.get_isolation_scope()._session
        if session is not None:
            session.update(user=value)

    @property
    def span(self):
        # type: () -> Optional[Span]
        """Get current tracing span."""
        return self._span

    @span.setter
    def span(self, span):
        # type: (Optional[Span]) -> None
        """Set current tracing span."""
        self._span = span
    @property
    def profile(self):
        # type: () -> Optional[Profile]
        return self._profile

    @profile.setter
    def profile(self, profile):
        # type: (Optional[Profile]) -> None

        self._profile = profile

    def set_tag(self, key, value):
        # type: (str, Any) -> None
        """
        Sets a tag for a key to a specific value.

        :param key: Key of the tag to set.

        :param value: Value of the tag to set.
        """
        self._tags[key] = value

    def set_tags(self, tags):
        # type: (Mapping[str, object]) -> None
        """Sets multiple tags at once.

        This method updates multiple tags at once. The tags are passed as a dictionary
        or other mapping type.

        Calling this method is equivalent to calling `set_tag` on each key-value pair
        in the mapping. If a tag key already exists in the scope, its value will be
        updated. If the tag key does not exist in the scope, the key-value pair will
        be added to the scope.

        This method only modifies tag keys in the `tags` mapping passed to the method.
        `scope.set_tags({})` is, therefore, a no-op.

        :param tags: A mapping of tag keys to tag values to set.
        """
        self._tags.update(tags)

    def remove_tag(self, key):
        # type: (str) -> None
        """
        Removes a specific tag.

        :param key: Key of the tag to remove.
        """
        self._tags.pop(key, None)

    def set_context(
        self,
        key,  # type: str
        value,  # type: Dict[str, Any]
    ):
        # type: (...) -> None
        """
        Binds a context at a certain key to a specific value.
        """
        self._contexts[key] = value

    def remove_context(
        self, key  # type: str
    ):
        # type: (...) -> None
        """Removes a context."""
        self._contexts.pop(key, None)

    def set_extra(
        self,
        key,  # type: str
        value,  # type: Any
    ):
        # type: (...) -> None
        """Sets an extra key to a specific value."""
        self._extras[key] = value

    def remove_extra(
        self, key  # type: str
    ):
        # type: (...) -> None
        """Removes a specific extra key."""
        self._extras.pop(key, None)

    def clear_breadcrumbs(self):
        # type: () -> None
        """Clears breadcrumb buffer."""
        self._breadcrumbs = deque()  # type: Deque[Breadcrumb]
        self._n_breadcrumbs_truncated = 0

    def add_attachment(
        self,
        bytes=None,  # type: Union[None, bytes, Callable[[], bytes]]
        filename=None,  # type: Optional[str]
        path=None,  # type: Optional[str]
        content_type=None,  # type: Optional[str]
        add_to_transactions=False,  # type: bool
    ):
        # type: (...) -> None
        """Adds an attachment to future events sent from this scope.

        The parameters are the same as for the :py:class:`sentry_sdk.attachments.Attachment` constructor.
        """
        # NOTE: parameter name `bytes` shadows the builtin; kept for public
        # API compatibility.
        self._attachments.append(
            Attachment(
                bytes=bytes,
                path=path,
                filename=filename,
                content_type=content_type,
                add_to_transactions=add_to_transactions,
            )
        )
+ """ + client = self.get_client() + + if not client.is_active(): + logger.info("Dropped breadcrumb because no client bound") + return + + before_breadcrumb = client.options.get("before_breadcrumb") + max_breadcrumbs = client.options.get("max_breadcrumbs", DEFAULT_MAX_BREADCRUMBS) + + crumb = dict(crumb or ()) # type: Breadcrumb + crumb.update(kwargs) + if not crumb: + return + + hint = dict(hint or ()) # type: Hint + + if crumb.get("timestamp") is None: + crumb["timestamp"] = datetime.now(timezone.utc) + if crumb.get("type") is None: + crumb["type"] = "default" + + if before_breadcrumb is not None: + new_crumb = before_breadcrumb(crumb, hint) + else: + new_crumb = crumb + + if new_crumb is not None: + self._breadcrumbs.append(new_crumb) + else: + logger.info("before breadcrumb dropped breadcrumb (%s)", crumb) + + while len(self._breadcrumbs) > max_breadcrumbs: + self._breadcrumbs.popleft() + self._n_breadcrumbs_truncated += 1 + + def start_transaction(self, **kwargs): + # type: (Any) -> Union[NoOpSpan, Span] + """ + .. deprecated:: 3.0.0 + This function is deprecated and will be removed in a future release. + Use :py:meth:`sentry_sdk.start_span` instead. + """ + warnings.warn( + "The `start_transaction` method is deprecated, please use `sentry_sdk.start_span instead.`", + DeprecationWarning, + stacklevel=2, + ) + return NoOpSpan(**kwargs) + + def start_span(self, **kwargs): + # type: (Any) -> Union[NoOpSpan, Span] + """ + Start a span whose parent is the currently active span, if any. + + The return value is a :py:class:`sentry_sdk.tracing.Span` instance, + typically used as a context manager to start and stop timing in a `with` + block. + + For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`. + """ + return NoOpSpan(**kwargs) + + @contextmanager + def continue_trace(self, environ_or_headers): + # type: (Dict[str, Any]) -> Generator[None, None, None] + """ + Sets the propagation context from environment or headers to continue an incoming trace. 
    @contextmanager
    def continue_trace(self, environ_or_headers):
        # type: (Dict[str, Any]) -> Generator[None, None, None]
        """
        Sets the propagation context from environment or headers to continue an incoming trace.
        """
        self.generate_propagation_context(environ_or_headers)
        yield

    def capture_event(self, event, hint=None, scope=None, **scope_kwargs):
        # type: (Event, Optional[Hint], Optional[Scope], Any) -> Optional[str]
        """
        Captures an event.

        Merges given scope data and calls :py:meth:`sentry_sdk.client._Client.capture_event`.

        :param event: A ready-made event that can be directly sent to Sentry.

        :param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or a HTTP request object.

        :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
            The `scope` and `scope_kwargs` parameters are mutually exclusive.

        :param scope_kwargs: Optional data to apply to event.
            For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
            The `scope` and `scope_kwargs` parameters are mutually exclusive.

        :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`).
        """
        if disable_capture_event.get(False):
            return None

        scope = self._merge_scopes(scope, scope_kwargs)

        event_id = self.get_client().capture_event(event=event, hint=hint, scope=scope)

        # last_event_id only tracks error events, never transactions.
        if event_id is not None and event.get("type") != "transaction":
            self.get_isolation_scope()._last_event_id = event_id

        return event_id

    def capture_message(self, message, level=None, scope=None, **scope_kwargs):
        # type: (str, Optional[LogLevelStr], Optional[Scope], Any) -> Optional[str]
        """
        Captures a message.

        :param message: The string to send as the message.

        :param level: If no level is provided, the default level is `info`.

        :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
            The `scope` and `scope_kwargs` parameters are mutually exclusive.

        :param scope_kwargs: Optional data to apply to event.
            For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
            The `scope` and `scope_kwargs` parameters are mutually exclusive.

        :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`).
        """
        if disable_capture_event.get(False):
            return None

        if level is None:
            level = "info"

        event = {
            "message": message,
            "level": level,
        }  # type: Event

        return self.capture_event(event, scope=scope, **scope_kwargs)
+ """ + if disable_capture_event.get(False): + return None + + if error is not None: + exc_info = exc_info_from_error(error) + else: + exc_info = sys.exc_info() + + event, hint = event_from_exception( + exc_info, client_options=self.get_client().options + ) + + try: + return self.capture_event(event, hint=hint, scope=scope, **scope_kwargs) + except Exception: + capture_internal_exception(sys.exc_info()) + + return None + + def start_session(self, *args, **kwargs): + # type: (*Any, **Any) -> None + """Starts a new session.""" + session_mode = kwargs.pop("session_mode", "application") + + self.end_session() + + client = self.get_client() + self._session = Session( + release=client.options.get("release"), + environment=client.options.get("environment"), + user=self._user, + session_mode=session_mode, + ) + + def end_session(self, *args, **kwargs): + # type: (*Any, **Any) -> None + """Ends the current session if there is one.""" + session = self._session + self._session = None + + if session is not None: + session.close() + self.get_client().capture_session(session) + + def stop_auto_session_tracking(self, *args, **kwargs): + # type: (*Any, **Any) -> None + """Stops automatic session tracking. + + This temporarily session tracking for the current scope when called. + To resume session tracking call `resume_auto_session_tracking`. + """ + self.end_session() + self._force_auto_session_tracking = False + + def resume_auto_session_tracking(self): + # type: (...) -> None + """Resumes automatic session tracking for the current scope if + disabled earlier. This requires that generally automatic session + tracking is enabled. + """ + self._force_auto_session_tracking = None + + def add_event_processor( + self, func # type: EventProcessor + ): + # type: (...) -> None + """Register a scope local event processor on the scope. 
    def add_event_processor(
        self, func  # type: EventProcessor
    ):
        # type: (...) -> None
        """Register a scope local event processor on the scope.

        :param func: This function behaves like `before_send.`
        """
        # Safety valve: a runaway integration re-registering processors would
        # otherwise grow this list (and its closures) without bound.
        if len(self._event_processors) > 20:
            logger.warning(
                "Too many event processors on scope! Clearing list to free up some memory: %r",
                self._event_processors,
            )
            del self._event_processors[:]

        self._event_processors.append(func)

    def add_error_processor(
        self,
        func,  # type: ErrorProcessor
        cls=None,  # type: Optional[Type[BaseException]]
    ):
        # type: (...) -> None
        """Register a scope local error processor on the scope.

        :param func: A callback that works similar to an event processor but is invoked with the original exception info triple as second argument.

        :param cls: Optionally, only process exceptions of this type.
        """
        if cls is not None:
            # Wrap the processor so it only fires for the requested type.
            cls_ = cls  # For mypy.
            real_func = func

            def func(event, exc_info):
                # type: (Event, ExcInfo) -> Optional[Event]
                try:
                    is_inst = isinstance(exc_info[1], cls_)
                except Exception:
                    is_inst = False
                if is_inst:
                    return real_func(event, exc_info)
                return event

        self._error_processors.append(func)

    def _apply_level_to_event(self, event, hint, options):
        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
        # Scope level overrides whatever the event carries.
        if self._level is not None:
            event["level"] = self._level

    def _apply_breadcrumbs_to_event(self, event, hint, options):
        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
        # Append scope breadcrumbs to those already on the event.
        event.setdefault("breadcrumbs", {})

        # This check is just for mypy -
        if not isinstance(event["breadcrumbs"], AnnotatedValue):
            event["breadcrumbs"].setdefault("values", [])
            event["breadcrumbs"]["values"].extend(self._breadcrumbs)

        # Attempt to sort timestamps
        try:
            if not isinstance(event["breadcrumbs"], AnnotatedValue):
                for crumb in event["breadcrumbs"]["values"]:
                    if isinstance(crumb["timestamp"], str):
                        crumb["timestamp"] = datetime_from_isoformat(crumb["timestamp"])

                event["breadcrumbs"]["values"].sort(
                    key=lambda crumb: crumb["timestamp"]
                )
        except Exception as err:
            # Best-effort: a bad timestamp must not drop the event.
            logger.debug("Error when sorting breadcrumbs", exc_info=err)
            pass

    def _apply_user_to_event(self, event, hint, options):
        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
        # Event-level user wins over the scope user.
        if event.get("user") is None and self._user is not None:
            event["user"] = self._user

    def _apply_transaction_name_to_event(self, event, hint, options):
        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
        if event.get("transaction") is None and self._transaction is not None:
            event["transaction"] = self._transaction

    def _apply_transaction_info_to_event(self, event, hint, options):
        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
        if event.get("transaction_info") is None and self._transaction_info is not None:
            event["transaction_info"] = self._transaction_info

    def _apply_fingerprint_to_event(self, event, hint, options):
        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
        if event.get("fingerprint") is None and self._fingerprint is not None:
            event["fingerprint"] = self._fingerprint

    def _apply_extra_to_event(self, event, hint, options):
        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
        if self._extras:
            event.setdefault("extra", {}).update(self._extras)

    def _apply_tags_to_event(self, event, hint, options):
        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
        if self._tags:
            event.setdefault("tags", {}).update(self._tags)

    def _apply_contexts_to_event(self, event, hint, options):
        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
        if self._contexts:
            event.setdefault("contexts", {}).update(self._contexts)

        contexts = event.setdefault("contexts", {})

        # Add "trace" context
        if contexts.get("trace") is None:
            if (
                has_tracing_enabled(options)
                and self._span is not None
                and self._span.is_valid
            ):
                contexts["trace"] = self._span.get_trace_context()
            else:
                contexts["trace"] = self.get_trace_context()
    def _apply_flags_to_event(self, event, hint, options):
        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
        # Attach buffered feature flags, if any, under contexts.flags.values.
        flags = self.flags.get()
        if len(flags) > 0:
            event.setdefault("contexts", {}).setdefault("flags", {}).update(
                {"values": flags}
            )

    def _drop(self, cause, ty):
        # type: (Any, str) -> Optional[Any]
        # Log and signal (via None) that a processor dropped the event.
        logger.info("%s (%s) dropped event", ty, cause)
        return None

    def run_error_processors(self, event, hint):
        # type: (Event, Hint) -> Optional[Event]
        """
        Runs the error processors on the event and returns the modified event.
        """
        exc_info = hint.get("exc_info")
        if exc_info is not None:
            # Global -> isolation -> current processor order.
            error_processors = chain(
                self.get_global_scope()._error_processors,
                self.get_isolation_scope()._error_processors,
                self.get_current_scope()._error_processors,
            )

            for error_processor in error_processors:
                new_event = error_processor(event, exc_info)
                if new_event is None:
                    return self._drop(error_processor, "error processor")

                event = new_event

        return event

    def run_event_processors(self, event, hint):
        # type: (Event, Hint) -> Optional[Event]
        """
        Runs the event processors on the event and returns the modified event.
        """
        ty = event.get("type")
        is_check_in = ty == "check_in"

        if not is_check_in:
            # Get scopes without creating them to prevent infinite recursion
            isolation_scope = self._get_isolation_scope()
            current_scope = self._get_current_scope()

            event_processors = chain(
                global_event_processors,
                _global_scope and _global_scope._event_processors or [],
                isolation_scope and isolation_scope._event_processors or [],
                current_scope and current_scope._event_processors or [],
            )

            for event_processor in event_processors:
                new_event = event  # type: Optional[Event]
                with capture_internal_exceptions():
                    new_event = event_processor(event, hint)
                if new_event is None:
                    return self._drop(event_processor, "event processor")
                event = new_event

        return event
    @_disable_capture
    def apply_to_event(
        self,
        event,  # type: Event
        hint,  # type: Hint
        options=None,  # type: Optional[Dict[str, Any]]
    ):
        # type: (...) -> Optional[Event]
        """Applies the information contained on the scope to the given event."""
        ty = event.get("type")
        is_transaction = ty == "transaction"
        is_check_in = ty == "check_in"

        # put all attachments into the hint. This lets callbacks play around
        # with attachments. We also later pull this out of the hint when we
        # create the envelope.
        attachments_to_send = hint.get("attachments") or []
        for attachment in self._attachments:
            if not is_transaction or attachment.add_to_transactions:
                attachments_to_send.append(attachment)
        hint["attachments"] = attachments_to_send

        self._apply_contexts_to_event(event, hint, options)

        if is_check_in:
            # Check-ins only support the trace context, strip all others
            event["contexts"] = {
                "trace": event.setdefault("contexts", {}).get("trace", {})
            }

        if not is_check_in:
            self._apply_level_to_event(event, hint, options)
            self._apply_fingerprint_to_event(event, hint, options)
            self._apply_user_to_event(event, hint, options)
            self._apply_transaction_name_to_event(event, hint, options)
            self._apply_transaction_info_to_event(event, hint, options)
            self._apply_tags_to_event(event, hint, options)
            self._apply_extra_to_event(event, hint, options)

        if not is_transaction and not is_check_in:
            self._apply_breadcrumbs_to_event(event, hint, options)
            self._apply_flags_to_event(event, hint, options)

        event = self.run_error_processors(event, hint)
        if event is None:
            return None

        event = self.run_event_processors(event, hint)
        if event is None:
            return None

        return event

    def update_from_scope(self, scope):
        # type: (Scope) -> None
        """Update the scope with another scope's data."""
        # Scalars are overwritten only when the other scope has a value;
        # collections are merged/extended.
        if scope._level is not None:
            self._level = scope._level
        if scope._fingerprint is not None:
            self._fingerprint = scope._fingerprint
        if scope._transaction is not None:
            self._transaction = scope._transaction
        if scope._transaction_info is not None:
            self._transaction_info.update(scope._transaction_info)
        if scope._user is not None:
            self._user = scope._user
        if scope._tags:
            self._tags.update(scope._tags)
        if scope._contexts:
            self._contexts.update(scope._contexts)
        if scope._extras:
            self._extras.update(scope._extras)
        if scope._breadcrumbs:
            self._breadcrumbs.extend(scope._breadcrumbs)
        if scope._n_breadcrumbs_truncated:
            self._n_breadcrumbs_truncated = (
                self._n_breadcrumbs_truncated + scope._n_breadcrumbs_truncated
            )
        if scope._span:
            self._span = scope._span
        if scope._attachments:
            self._attachments.extend(scope._attachments)
        if scope._profile:
            self._profile = scope._profile
        if scope._propagation_context:
            self._propagation_context = scope._propagation_context
        if scope._session:
            self._session = scope._session
        if scope._flags:
            if not self._flags:
                self._flags = deepcopy(scope._flags)
            else:
                for flag in scope._flags.get():
                    self._flags.set(flag["flag"], flag["result"])

    def update_from_kwargs(
        self,
        user=None,  # type: Optional[Any]
        level=None,  # type: Optional[LogLevelStr]
        extras=None,  # type: Optional[Dict[str, Any]]
        contexts=None,  # type: Optional[Dict[str, Dict[str, Any]]]
        tags=None,  # type: Optional[Dict[str, str]]
        fingerprint=None,  # type: Optional[List[str]]
    ):
        # type: (...) -> None
        """Update the scope's attributes."""
        if level is not None:
            self._level = level
        if user is not None:
            self._user = user
        if extras is not None:
            self._extras.update(extras)
        if contexts is not None:
            self._contexts.update(contexts)
        if tags is not None:
            self._tags.update(tags)
        if fingerprint is not None:
            self._fingerprint = fingerprint

    def __repr__(self):
        # type: () -> str
        return "<%s id=%s name=%s type=%s>" % (
            self.__class__.__name__,
            hex(id(self)),
            self._name,
            self._type,
        )

    @property
    def flags(self):
        # type: () -> FlagBuffer
        # Lazily create the flag buffer, sized by the experimental option.
        if self._flags is None:
            max_flags = (
                self.get_client().options["_experiments"].get("max_flags")
                or DEFAULT_FLAG_CAPACITY
            )
            self._flags = FlagBuffer(capacity=max_flags)
        return self._flags
+ + Example Usage: + + .. code-block:: python + + import sentry_sdk + + with sentry_sdk.new_scope() as scope: + scope.set_tag("color", "green") + sentry_sdk.capture_message("hello") # will include `color` tag. + + sentry_sdk.capture_message("hello, again") # will NOT include `color` tag. + + """ + # fork current scope + current_scope = Scope.get_current_scope() + new_scope = current_scope.fork() + token = _current_scope.set(new_scope) + + try: + yield new_scope + + finally: + # restore original scope + _current_scope.reset(token) + + +@contextmanager +def use_scope(scope): + # type: (Scope) -> Generator[Scope, None, None] + """ + .. versionadded:: 2.0.0 + + Context manager that uses the given `scope` and runs the wrapped code in it. + After the wrapped code is executed, the original scope is restored. + + Example Usage: + Suppose the variable `scope` contains a `Scope` object, which is not currently + the active scope. + + .. code-block:: python + + import sentry_sdk + + with sentry_sdk.use_scope(scope): + scope.set_tag("color", "green") + sentry_sdk.capture_message("hello") # will include `color` tag. + + sentry_sdk.capture_message("hello, again") # will NOT include `color` tag. + + """ + # set given scope as current scope + token = _current_scope.set(scope) + + try: + yield scope + + finally: + # restore original scope + _current_scope.reset(token) + + +@contextmanager +def isolation_scope(): + # type: () -> Generator[Scope, None, None] + """ + .. versionadded:: 2.0.0 + + Context manager that forks the current isolation scope and runs the wrapped code in it. + The current scope is also forked to not bleed data into the existing current scope. + After the wrapped code is executed, the original scopes are restored. + + Example Usage: + + .. code-block:: python + + import sentry_sdk + + with sentry_sdk.isolation_scope() as scope: + scope.set_tag("color", "green") + sentry_sdk.capture_message("hello") # will include `color` tag. 
+ + sentry_sdk.capture_message("hello, again") # will NOT include `color` tag. + + """ + # fork current scope + current_scope = Scope.get_current_scope() + forked_current_scope = current_scope.fork() + current_token = _current_scope.set(forked_current_scope) + + # fork isolation scope + isolation_scope = Scope.get_isolation_scope() + new_isolation_scope = isolation_scope.fork() + isolation_token = _isolation_scope.set(new_isolation_scope) + + try: + yield new_isolation_scope + + finally: + # restore original scopes + _current_scope.reset(current_token) + _isolation_scope.reset(isolation_token) + + +@contextmanager +def use_isolation_scope(isolation_scope): + # type: (Scope) -> Generator[Scope, None, None] + """ + .. versionadded:: 2.0.0 + + Context manager that uses the given `isolation_scope` and runs the wrapped code in it. + The current scope is also forked to not bleed data into the existing current scope. + After the wrapped code is executed, the original scopes are restored. + + Example Usage: + + .. code-block:: python + + import sentry_sdk + + with sentry_sdk.isolation_scope() as scope: + scope.set_tag("color", "green") + sentry_sdk.capture_message("hello") # will include `color` tag. + + sentry_sdk.capture_message("hello, again") # will NOT include `color` tag. 
+ + """ + # fork current scope + current_scope = Scope.get_current_scope() + forked_current_scope = current_scope.fork() + current_token = _current_scope.set(forked_current_scope) + + # set given scope as isolation scope + isolation_token = _isolation_scope.set(isolation_scope) + + try: + yield isolation_scope + + finally: + # restore original scopes + _current_scope.reset(current_token) + _isolation_scope.reset(isolation_token) + + +def should_send_default_pii(): + # type: () -> bool + """Shortcut for `Scope.get_client().should_send_default_pii()`.""" + return Scope.get_client().should_send_default_pii() + + +# Circular imports +from sentry_sdk_alpha.client import NonRecordingClient + +if TYPE_CHECKING: + import sentry_sdk_alpha.client diff --git a/src/sentry_sdk_alpha/scrubber.py b/src/sentry_sdk_alpha/scrubber.py new file mode 100644 index 00000000000000..77f41ee0a7cd6b --- /dev/null +++ b/src/sentry_sdk_alpha/scrubber.py @@ -0,0 +1,177 @@ +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + AnnotatedValue, + iter_event_frames, +) + +from typing import TYPE_CHECKING, cast, List, Dict + +if TYPE_CHECKING: + from sentry_sdk_alpha._types import Event + from typing import Optional + + +DEFAULT_DENYLIST = [ + # stolen from relay + "password", + "passwd", + "secret", + "api_key", + "apikey", + "auth", + "credentials", + "mysql_pwd", + "privatekey", + "private_key", + "token", + "session", + # django + "csrftoken", + "sessionid", + # wsgi + "x_csrftoken", + "x_forwarded_for", + "set_cookie", + "cookie", + "authorization", + "x_api_key", + # other common names used in the wild + "aiohttp_session", # aiohttp + "connect.sid", # Express + "csrf_token", # Pyramid + "csrf", # (this is a cookie name used in accepted answers on stack overflow) + "_csrf", # Express + "_csrf_token", # Bottle + "PHPSESSID", # PHP + "_session", # Sanic + "symfony", # Symfony + "user_session", # Vue + "_xsrf", # Tornado + "XSRF-TOKEN", # Angular, Laravel +] + +DEFAULT_PII_DENYLIST 
= [ + "x_forwarded_for", + "x_real_ip", + "ip_address", + "remote_addr", +] + + +class EventScrubber: + def __init__( + self, denylist=None, recursive=False, send_default_pii=False, pii_denylist=None + ): + # type: (Optional[List[str]], bool, bool, Optional[List[str]]) -> None + """ + A scrubber that goes through the event payload and removes sensitive data configured through denylists. + + :param denylist: A security denylist that is always scrubbed, defaults to DEFAULT_DENYLIST. + :param recursive: Whether to scrub the event payload recursively, default False. + :param send_default_pii: Whether sending of PII is enabled; if so, PII fields are not scrubbed. + :param pii_denylist: The denylist to use for scrubbing when pii is not sent, defaults to DEFAULT_PII_DENYLIST. + """ + self.denylist = DEFAULT_DENYLIST.copy() if denylist is None else denylist + + if not send_default_pii: + pii_denylist = ( + DEFAULT_PII_DENYLIST.copy() if pii_denylist is None else pii_denylist + ) + self.denylist += pii_denylist + + self.denylist = [x.lower() for x in self.denylist] + self.recursive = recursive + + def scrub_list(self, lst): + # type: (object) -> None + """ + If a list is passed to this method, the method recursively searches the list and any + nested lists for any dictionaries. The method calls scrub_dict on all dictionaries + it finds. + If the parameter passed to this method is not a list, the method does nothing. + """ + if not isinstance(lst, list): + return + + for v in lst: + self.scrub_dict(v) # no-op unless v is a dict + self.scrub_list(v) # no-op unless v is a list + + def scrub_dict(self, d): + # type: (object) -> None + """ + If a dictionary is passed to this method, the method scrubs the dictionary of any + sensitive data. The method calls itself recursively on any nested dictionaries ( + including dictionaries nested in lists) if self.recursive is True. + This method does nothing if the parameter passed to it is not a dictionary. 
+ """ + if not isinstance(d, dict): + return + + for k, v in d.items(): + # The cast is needed because mypy is not smart enough to figure out that k must be a + # string after the isinstance check. + if isinstance(k, str) and k.lower() in self.denylist: + d[k] = AnnotatedValue.substituted_because_contains_sensitive_data() + elif self.recursive: + self.scrub_dict(v) # no-op unless v is a dict + self.scrub_list(v) # no-op unless v is a list + + def scrub_request(self, event): + # type: (Event) -> None + with capture_internal_exceptions(): + if "request" in event: + if "headers" in event["request"]: + self.scrub_dict(event["request"]["headers"]) + if "cookies" in event["request"]: + self.scrub_dict(event["request"]["cookies"]) + if "data" in event["request"]: + self.scrub_dict(event["request"]["data"]) + + def scrub_extra(self, event): + # type: (Event) -> None + with capture_internal_exceptions(): + if "extra" in event: + self.scrub_dict(event["extra"]) + + def scrub_user(self, event): + # type: (Event) -> None + with capture_internal_exceptions(): + if "user" in event: + self.scrub_dict(event["user"]) + + def scrub_breadcrumbs(self, event): + # type: (Event) -> None + with capture_internal_exceptions(): + if "breadcrumbs" in event: + if ( + not isinstance(event["breadcrumbs"], AnnotatedValue) + and "values" in event["breadcrumbs"] + ): + for value in event["breadcrumbs"]["values"]: + if "data" in value: + self.scrub_dict(value["data"]) + + def scrub_frames(self, event): + # type: (Event) -> None + with capture_internal_exceptions(): + for frame in iter_event_frames(event): + if "vars" in frame: + self.scrub_dict(frame["vars"]) + + def scrub_spans(self, event): + # type: (Event) -> None + with capture_internal_exceptions(): + if "spans" in event: + for span in cast(List[Dict[str, object]], event["spans"]): + if "data" in span: + self.scrub_dict(span["data"]) + + def scrub_event(self, event): + # type: (Event) -> None + self.scrub_request(event) + 
self.scrub_extra(event) + self.scrub_user(event) + self.scrub_breadcrumbs(event) + self.scrub_frames(event) + self.scrub_spans(event) diff --git a/src/sentry_sdk_alpha/serializer.py b/src/sentry_sdk_alpha/serializer.py new file mode 100644 index 00000000000000..aaf5dc0a931652 --- /dev/null +++ b/src/sentry_sdk_alpha/serializer.py @@ -0,0 +1,388 @@ +import sys +import math +from collections.abc import Mapping, Sequence, Set +from datetime import datetime + +from sentry_sdk_alpha.utils import ( + AnnotatedValue, + capture_internal_exception, + disable_capture_event, + format_timestamp, + safe_repr, + strip_string, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from types import TracebackType + + from typing import Any + from typing import Callable + from typing import ContextManager + from typing import Dict + from typing import List + from typing import Optional + from typing import Type + from typing import Union + + from sentry_sdk_alpha._types import NotImplementedType + + Span = Dict[str, Any] + + ReprProcessor = Callable[[Any, Dict[str, Any]], Union[NotImplementedType, str]] + Segment = Union[str, int] + + +# Bytes are technically not strings in Python 3, but we can serialize them +serializable_str_types = (str, bytes, bytearray, memoryview) + + +# Maximum length of JSON-serialized event payloads that can be safely sent +# before the server may reject the event due to its size. This is not intended +# to reflect actual values defined server-side, but rather only be an upper +# bound for events sent by the SDK. +# +# Can be overwritten if wanting to send more bytes, e.g. with a custom server. +# When changing this, keep in mind that events may be a little bit larger than +# this value due to attached metadata, so keep the number conservative. +MAX_EVENT_BYTES = 10**6 + +# Maximum depth and breadth of databags. Excess data will be trimmed. If +# max_request_body_size is "always", request bodies won't be trimmed. 
+MAX_DATABAG_DEPTH = 5 +MAX_DATABAG_BREADTH = 10 +CYCLE_MARKER = "" + + +global_repr_processors = [] # type: List[ReprProcessor] + + +def add_global_repr_processor(processor): + # type: (ReprProcessor) -> None + global_repr_processors.append(processor) + + +class Memo: + __slots__ = ("_ids", "_objs") + + def __init__(self): + # type: () -> None + self._ids = {} # type: Dict[int, Any] + self._objs = [] # type: List[Any] + + def memoize(self, obj): + # type: (Any) -> ContextManager[bool] + self._objs.append(obj) + return self + + def __enter__(self): + # type: () -> bool + obj = self._objs[-1] + if id(obj) in self._ids: + return True + else: + self._ids[id(obj)] = obj + return False + + def __exit__( + self, + ty, # type: Optional[Type[BaseException]] + value, # type: Optional[BaseException] + tb, # type: Optional[TracebackType] + ): + # type: (...) -> None + self._ids.pop(id(self._objs.pop()), None) + + +def serialize(event, **kwargs): + # type: (Dict[str, Any], **Any) -> Dict[str, Any] + """ + A very smart serializer that takes a dict and emits a json-friendly dict. + Currently used for serializing the final Event and also prematurely while fetching the stack + local variables for each frame in a stacktrace. + + It works internally with 'databags' which are arbitrary data structures like Mapping, Sequence and Set. + The algorithm itself is a recursive graph walk down the data structures it encounters. + + It has the following responsibilities: + * Trimming databags and keeping them within MAX_DATABAG_BREADTH and MAX_DATABAG_DEPTH. + * Calling safe_repr() on objects appropriately to keep them informative and readable in the final payload. + * Annotating the payload with the _meta field whenever trimming happens. + + :param max_request_body_size: If set to "always", will never trim request bodies. 
+ :param max_value_length: The max length to strip strings to, defaults to sentry_sdk.consts.DEFAULT_MAX_VALUE_LENGTH + :param is_vars: If we're serializing vars early, we want to repr() things that are JSON-serializable to make their type more apparent. For example, it's useful to see the difference between a unicode-string and a bytestring when viewing a stacktrace. + :param custom_repr: A custom repr function that runs before safe_repr on the object to be serialized. If it returns None or throws internally, we will fallback to safe_repr. + + """ + memo = Memo() + path = [] # type: List[Segment] + meta_stack = [] # type: List[Dict[str, Any]] + + keep_request_bodies = ( + kwargs.pop("max_request_body_size", None) == "always" + ) # type: bool + max_value_length = kwargs.pop("max_value_length", None) # type: Optional[int] + is_vars = kwargs.pop("is_vars", False) + custom_repr = kwargs.pop("custom_repr", None) # type: Callable[..., Optional[str]] + + def _safe_repr_wrapper(value): + # type: (Any) -> str + try: + repr_value = None + if custom_repr is not None: + repr_value = custom_repr(value) + return repr_value or safe_repr(value) + except Exception: + return safe_repr(value) + + def _annotate(**meta): + # type: (**Any) -> None + while len(meta_stack) <= len(path): + try: + segment = path[len(meta_stack) - 1] + node = meta_stack[-1].setdefault(str(segment), {}) + except IndexError: + node = {} + + meta_stack.append(node) + + meta_stack[-1].setdefault("", {}).update(meta) + + def _is_databag(): + # type: () -> Optional[bool] + """ + A databag is any value that we need to trim. + True for stuff like vars, request bodies, breadcrumbs and extra. + + :returns: `True` for "yes", `False` for :"no", `None` for "maybe soon". 
+ """ + try: + if is_vars: + return True + + is_request_body = _is_request_body() + if is_request_body in (True, None): + return is_request_body + + p0 = path[0] + if p0 == "breadcrumbs" and path[1] == "values": + path[2] + return True + + if p0 == "extra": + return True + + except IndexError: + return None + + return False + + def _is_request_body(): + # type: () -> Optional[bool] + try: + if path[0] == "request" and path[1] == "data": + return True + except IndexError: + return None + + return False + + def _serialize_node( + obj, # type: Any + is_databag=None, # type: Optional[bool] + is_request_body=None, # type: Optional[bool] + should_repr_strings=None, # type: Optional[bool] + segment=None, # type: Optional[Segment] + remaining_breadth=None, # type: Optional[Union[int, float]] + remaining_depth=None, # type: Optional[Union[int, float]] + ): + # type: (...) -> Any + if segment is not None: + path.append(segment) + + try: + with memo.memoize(obj) as result: + if result: + return CYCLE_MARKER + + return _serialize_node_impl( + obj, + is_databag=is_databag, + is_request_body=is_request_body, + should_repr_strings=should_repr_strings, + remaining_depth=remaining_depth, + remaining_breadth=remaining_breadth, + ) + except BaseException: + capture_internal_exception(sys.exc_info()) + + if is_databag: + return "" + + return None + finally: + if segment is not None: + path.pop() + del meta_stack[len(path) + 1 :] + + def _flatten_annotated(obj): + # type: (Any) -> Any + if isinstance(obj, AnnotatedValue): + _annotate(**obj.metadata) + obj = obj.value + return obj + + def _serialize_node_impl( + obj, + is_databag, + is_request_body, + should_repr_strings, + remaining_depth, + remaining_breadth, + ): + # type: (Any, Optional[bool], Optional[bool], Optional[bool], Optional[Union[float, int]], Optional[Union[float, int]]) -> Any + if isinstance(obj, AnnotatedValue): + should_repr_strings = False + if should_repr_strings is None: + should_repr_strings = is_vars + + if 
is_databag is None: + is_databag = _is_databag() + + if is_request_body is None: + is_request_body = _is_request_body() + + if is_databag: + if is_request_body and keep_request_bodies: + remaining_depth = float("inf") + remaining_breadth = float("inf") + else: + if remaining_depth is None: + remaining_depth = MAX_DATABAG_DEPTH + if remaining_breadth is None: + remaining_breadth = MAX_DATABAG_BREADTH + + obj = _flatten_annotated(obj) + + if remaining_depth is not None and remaining_depth <= 0: + _annotate(rem=[["!limit", "x"]]) + if is_databag: + return _flatten_annotated( + strip_string(_safe_repr_wrapper(obj), max_length=max_value_length) + ) + return None + + if is_databag and global_repr_processors: + hints = {"memo": memo, "remaining_depth": remaining_depth} + for processor in global_repr_processors: + result = processor(obj, hints) + if result is not NotImplemented: + return _flatten_annotated(result) + + sentry_repr = getattr(type(obj), "__sentry_repr__", None) + + if obj is None or isinstance(obj, (bool, int, float)): + if should_repr_strings or ( + isinstance(obj, float) and (math.isinf(obj) or math.isnan(obj)) + ): + return _safe_repr_wrapper(obj) + else: + return obj + + elif callable(sentry_repr): + return sentry_repr(obj) + + elif isinstance(obj, datetime): + return ( + str(format_timestamp(obj)) + if not should_repr_strings + else _safe_repr_wrapper(obj) + ) + + elif isinstance(obj, Mapping): + # Create temporary copy here to avoid calling too much code that + # might mutate our dictionary while we're still iterating over it. 
+ obj = dict(obj.items()) + + rv_dict = {} # type: Dict[str, Any] + i = 0 + + for k, v in obj.items(): + if remaining_breadth is not None and i >= remaining_breadth: + _annotate(len=len(obj)) + break + + str_k = str(k) + v = _serialize_node( + v, + segment=str_k, + should_repr_strings=should_repr_strings, + is_databag=is_databag, + is_request_body=is_request_body, + remaining_depth=( + remaining_depth - 1 if remaining_depth is not None else None + ), + remaining_breadth=remaining_breadth, + ) + rv_dict[str_k] = v + i += 1 + + return rv_dict + + elif not isinstance(obj, serializable_str_types) and isinstance( + obj, (Set, Sequence) + ): + rv_list = [] + + for i, v in enumerate(obj): + if remaining_breadth is not None and i >= remaining_breadth: + _annotate(len=len(obj)) + break + + rv_list.append( + _serialize_node( + v, + segment=i, + should_repr_strings=should_repr_strings, + is_databag=is_databag, + is_request_body=is_request_body, + remaining_depth=( + remaining_depth - 1 if remaining_depth is not None else None + ), + remaining_breadth=remaining_breadth, + ) + ) + + return rv_list + + if should_repr_strings: + obj = _safe_repr_wrapper(obj) + else: + if isinstance(obj, bytes) or isinstance(obj, bytearray): + obj = obj.decode("utf-8", "replace") + + if not isinstance(obj, str): + obj = _safe_repr_wrapper(obj) + + is_span_description = ( + len(path) == 3 and path[0] == "spans" and path[-1] == "description" + ) + if is_span_description: + return obj + + return _flatten_annotated(strip_string(obj, max_length=max_value_length)) + + # + # Start of serialize() function + # + disable_capture_event.set(True) + try: + serialized_event = _serialize_node(event, **kwargs) + if not is_vars and meta_stack and isinstance(serialized_event, dict): + serialized_event["_meta"] = meta_stack[0] + + return serialized_event + finally: + disable_capture_event.set(False) diff --git a/src/sentry_sdk_alpha/session.py b/src/sentry_sdk_alpha/session.py new file mode 100644 index 
00000000000000..c72a77f4f54c86 --- /dev/null +++ b/src/sentry_sdk_alpha/session.py @@ -0,0 +1,175 @@ +import uuid +from datetime import datetime, timezone + +from sentry_sdk_alpha.utils import format_timestamp + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Optional + from typing import Union + from typing import Any + from typing import Dict + + from sentry_sdk_alpha._types import SessionStatus + + +def _minute_trunc(ts): + # type: (datetime) -> datetime + return ts.replace(second=0, microsecond=0) + + +def _make_uuid( + val, # type: Union[str, uuid.UUID] +): + # type: (...) -> uuid.UUID + if isinstance(val, uuid.UUID): + return val + return uuid.UUID(val) + + +class Session: + def __init__( + self, + sid=None, # type: Optional[Union[str, uuid.UUID]] + did=None, # type: Optional[str] + timestamp=None, # type: Optional[datetime] + started=None, # type: Optional[datetime] + duration=None, # type: Optional[float] + status=None, # type: Optional[SessionStatus] + release=None, # type: Optional[str] + environment=None, # type: Optional[str] + user_agent=None, # type: Optional[str] + ip_address=None, # type: Optional[str] + errors=None, # type: Optional[int] + user=None, # type: Optional[Any] + session_mode="application", # type: str + ): + # type: (...) 
-> None + if sid is None: + sid = uuid.uuid4() + if started is None: + started = datetime.now(timezone.utc) + if status is None: + status = "ok" + self.status = status + self.did = None # type: Optional[str] + self.started = started + self.release = None # type: Optional[str] + self.environment = None # type: Optional[str] + self.duration = None # type: Optional[float] + self.user_agent = None # type: Optional[str] + self.ip_address = None # type: Optional[str] + self.session_mode = session_mode # type: str + self.errors = 0 + + self.update( + sid=sid, + did=did, + timestamp=timestamp, + duration=duration, + release=release, + environment=environment, + user_agent=user_agent, + ip_address=ip_address, + errors=errors, + user=user, + ) + + @property + def truncated_started(self): + # type: (...) -> datetime + return _minute_trunc(self.started) + + def update( + self, + sid=None, # type: Optional[Union[str, uuid.UUID]] + did=None, # type: Optional[str] + timestamp=None, # type: Optional[datetime] + started=None, # type: Optional[datetime] + duration=None, # type: Optional[float] + status=None, # type: Optional[SessionStatus] + release=None, # type: Optional[str] + environment=None, # type: Optional[str] + user_agent=None, # type: Optional[str] + ip_address=None, # type: Optional[str] + errors=None, # type: Optional[int] + user=None, # type: Optional[Any] + ): + # type: (...) 
-> None + # If a user is supplied we pull some data from it + if user: + if ip_address is None: + ip_address = user.get("ip_address") + if did is None: + did = user.get("id") or user.get("email") or user.get("username") + + if sid is not None: + self.sid = _make_uuid(sid) + if did is not None: + self.did = str(did) + if timestamp is None: + timestamp = datetime.now(timezone.utc) + self.timestamp = timestamp + if started is not None: + self.started = started + if duration is not None: + self.duration = duration + if release is not None: + self.release = release + if environment is not None: + self.environment = environment + if ip_address is not None: + self.ip_address = ip_address + if user_agent is not None: + self.user_agent = user_agent + if errors is not None: + self.errors = errors + + if status is not None: + self.status = status + + def close( + self, status=None # type: Optional[SessionStatus] + ): + # type: (...) -> Any + if status is None and self.status == "ok": + status = "exited" + if status is not None: + self.update(status=status) + + def get_json_attrs( + self, with_user_info=True # type: Optional[bool] + ): + # type: (...) -> Any + attrs = {} + if self.release is not None: + attrs["release"] = self.release + if self.environment is not None: + attrs["environment"] = self.environment + if with_user_info: + if self.ip_address is not None: + attrs["ip_address"] = self.ip_address + if self.user_agent is not None: + attrs["user_agent"] = self.user_agent + return attrs + + def to_json(self): + # type: (...) 
-> Any + rv = { + "sid": str(self.sid), + "init": True, + "started": format_timestamp(self.started), + "timestamp": format_timestamp(self.timestamp), + "status": self.status, + } # type: Dict[str, Any] + if self.errors: + rv["errors"] = self.errors + if self.did is not None: + rv["did"] = self.did + if self.duration is not None: + rv["duration"] = self.duration + attrs = self.get_json_attrs() + if attrs: + rv["attrs"] = attrs + return rv diff --git a/src/sentry_sdk_alpha/sessions.py b/src/sentry_sdk_alpha/sessions.py new file mode 100644 index 00000000000000..a28bb601a3d282 --- /dev/null +++ b/src/sentry_sdk_alpha/sessions.py @@ -0,0 +1,191 @@ +import os +import time +from threading import Thread, Lock +from contextlib import contextmanager + +import sentry_sdk_alpha +from sentry_sdk_alpha.envelope import Envelope +from sentry_sdk_alpha.session import Session +from sentry_sdk_alpha.utils import format_timestamp + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import Callable + from typing import Dict + from typing import Generator + from typing import List + from typing import Optional + + +def _is_auto_session_tracking_enabled(scope): + # type: (sentry_sdk.Scope) -> bool + """ + Utility function to find out if session tracking is enabled. + """ + + should_track = scope._force_auto_session_tracking + if should_track is None: + client_options = sentry_sdk_alpha.get_client().options + should_track = client_options.get("auto_session_tracking", False) + + return should_track + + +@contextmanager +def track_session(scope, session_mode="application"): + # type: (sentry_sdk.Scope, str) -> Generator[None, None, None] + """ + Start a new session in the provided scope, assuming session tracking is enabled. + This is a no-op context manager if session tracking is not enabled. 
+ """ + + should_track = _is_auto_session_tracking_enabled(scope) + if should_track: + scope.start_session(session_mode=session_mode) + try: + yield + finally: + if should_track: + scope.end_session() + + +TERMINAL_SESSION_STATES = ("exited", "abnormal", "crashed") +MAX_ENVELOPE_ITEMS = 100 + + +def make_aggregate_envelope(aggregate_states, attrs): + # type: (Any, Any) -> Any + return {"attrs": dict(attrs), "aggregates": list(aggregate_states.values())} + + +class SessionFlusher: + def __init__( + self, + capture_func, # type: Callable[[Envelope], None] + flush_interval=60, # type: int + ): + # type: (...) -> None + self.capture_func = capture_func + self.flush_interval = flush_interval + self.pending_sessions = [] # type: List[Any] + self.pending_aggregates = {} # type: Dict[Any, Any] + self._thread = None # type: Optional[Thread] + self._thread_lock = Lock() + self._aggregate_lock = Lock() + self._thread_for_pid = None # type: Optional[int] + self._running = True + + def flush(self): + # type: (...) -> None + pending_sessions = self.pending_sessions + self.pending_sessions = [] + + with self._aggregate_lock: + pending_aggregates = self.pending_aggregates + self.pending_aggregates = {} + + envelope = Envelope() + for session in pending_sessions: + if len(envelope.items) == MAX_ENVELOPE_ITEMS: + self.capture_func(envelope) + envelope = Envelope() + + envelope.add_session(session) + + for attrs, states in pending_aggregates.items(): + if len(envelope.items) == MAX_ENVELOPE_ITEMS: + self.capture_func(envelope) + envelope = Envelope() + + envelope.add_sessions(make_aggregate_envelope(states, attrs)) + + if len(envelope.items) > 0: + self.capture_func(envelope) + + def _ensure_running(self): + # type: (...) -> None + """ + Check that we have an active thread to run in, or create one if not. + + Note that this might fail (e.g. in Python 3.12 it's not possible to + spawn new threads at interpreter shutdown). 
In that case self._running + will be False after running this function. + """ + if self._thread_for_pid == os.getpid() and self._thread is not None: + return None + with self._thread_lock: + if self._thread_for_pid == os.getpid() and self._thread is not None: + return None + + def _thread(): + # type: (...) -> None + while self._running: + time.sleep(self.flush_interval) + if self._running: + self.flush() + + thread = Thread(target=_thread) + thread.daemon = True + try: + thread.start() + except RuntimeError: + # Unfortunately at this point the interpreter is in a state that no + # longer allows us to spawn a thread and we have to bail. + self._running = False + return None + + self._thread = thread + self._thread_for_pid = os.getpid() + + return None + + def add_aggregate_session( + self, session # type: Session + ): + # type: (...) -> None + # NOTE on `session.did`: + # the protocol can deal with buckets that have a distinct-id, however + # in practice we expect the python SDK to have an extremely high cardinality + # here, effectively making aggregation useless, therefore we do not + # aggregate per-did. 
+ + # For this part we can get away with using the global interpreter lock + with self._aggregate_lock: + attrs = session.get_json_attrs(with_user_info=False) + primary_key = tuple(sorted(attrs.items())) + secondary_key = session.truncated_started # (, session.did) + states = self.pending_aggregates.setdefault(primary_key, {}) + state = states.setdefault(secondary_key, {}) + + if "started" not in state: + state["started"] = format_timestamp(session.truncated_started) + # if session.did is not None: + # state["did"] = session.did + if session.status == "crashed": + state["crashed"] = state.get("crashed", 0) + 1 + elif session.status == "abnormal": + state["abnormal"] = state.get("abnormal", 0) + 1 + elif session.errors > 0: + state["errored"] = state.get("errored", 0) + 1 + else: + state["exited"] = state.get("exited", 0) + 1 + + def add_session( + self, session # type: Session + ): + # type: (...) -> None + if session.session_mode == "request": + self.add_aggregate_session(session) + else: + self.pending_sessions.append(session.to_json()) + self._ensure_running() + + def kill(self): + # type: (...) -> None + self._running = False + + def __del__(self): + # type: (...) 
import io
import logging
import os
import urllib.parse
import urllib.request
import urllib.error
import urllib3
import sys

from itertools import chain, product

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from typing import Any
    from typing import Callable
    from typing import Dict
    from typing import Optional
    from typing import Self

from sentry_sdk_alpha.utils import (
    logger as sentry_logger,
    env_to_bool,
    capture_internal_exceptions,
)
from sentry_sdk_alpha.envelope import Envelope


logger = logging.getLogger("spotlight")


DEFAULT_SPOTLIGHT_URL = "http://localhost:8969/stream"
# FIX: this dotted path is injected into Django's MIDDLEWARE setting and is
# imported by Django at request time.  In the alpha fork it must point into
# *this* package; the previous value ("sentry_sdk.spotlight....") would have
# made Django import the non-alpha sentry_sdk package instead.
DJANGO_SPOTLIGHT_MIDDLEWARE_PATH = "sentry_sdk_alpha.spotlight.SpotlightMiddleware"


class SpotlightClient:
    """Posts Sentry envelopes to a locally running Spotlight sidecar."""

    def __init__(self, url):
        # type: (str) -> None
        self.url = url
        self.http = urllib3.PoolManager()
        # Consecutive-failure counter used to stop log spam once the
        # sidecar looks unreachable.
        self.fails = 0

    def capture_envelope(self, envelope):
        # type: (Envelope) -> None
        """Serialize `envelope` and POST it to the sidecar (best effort)."""
        body = io.BytesIO()
        envelope.serialize_into(body)
        try:
            req = self.http.request(
                url=self.url,
                body=body.getvalue(),
                method="POST",
                headers={
                    "Content-Type": "application/x-sentry-envelope",
                },
            )
            req.close()
            self.fails = 0
        except Exception as e:
            if self.fails < 2:
                sentry_logger.warning(str(e))
                self.fails += 1
            elif self.fails == 2:
                self.fails += 1
                sentry_logger.warning(
                    "Looks like Spotlight is not running, will keep trying to send events but will not log errors."
                )
            # omitting self.fails += 1 in the `else:` case intentionally
            # to avoid overflowing the variable if Spotlight never becomes reachable


try:
    from django.utils.deprecation import MiddlewareMixin
    from django.http import HttpResponseServerError, HttpResponse, HttpRequest
    from django.conf import settings

    SPOTLIGHT_JS_ENTRY_PATH = "/assets/main.js"
    # NOTE(review): the two HTML snippets below were garbled (markup
    # stripped) in the patch; reconstructed from the upstream
    # sentry-python spotlight module — confirm against upstream.
    SPOTLIGHT_JS_SNIPPET_PATTERN = (
        "<script>window.__spotlight = {{ initOptions: {{ sidecarUrl: '{spotlight_url}', fullPage: false }} }};</script>\n"
        '<script type="module" crossorigin src="{spotlight_js_url}"></script>\n'
    )
    SPOTLIGHT_ERROR_PAGE_SNIPPET = (
        '<html><base href="{spotlight_url}">\n'
        '<script>window.__spotlight = {{ initOptions: {{ startFrom: "/errors/{event_id}" }} }};</script>\n'
    )
    CHARSET_PREFIX = "charset="
    BODY_TAG_NAME = "body"
    # All case variants of "</body>" so we can inject right before the
    # closing tag regardless of the page's tag casing.
    BODY_CLOSE_TAG_POSSIBILITIES = tuple(
        "</{}>".format("".join(chars))
        for chars in product(*zip(BODY_TAG_NAME.upper(), BODY_TAG_NAME.lower()))
    )

    class SpotlightMiddleware(MiddlewareMixin):  # type: ignore[misc]
        """Django middleware that injects the Spotlight UI into responses.

        On HTML responses it appends the Spotlight JS snippet before the
        closing </body> tag; on unhandled exceptions in DEBUG mode it
        serves the Spotlight error page instead of Django's.
        """

        _spotlight_script = None  # type: Optional[str]
        _spotlight_url = None  # type: Optional[str]

        def __init__(self, get_response):
            # type: (Self, Callable[..., HttpResponse]) -> None
            super().__init__(get_response)

            import sentry_sdk_alpha.api

            self.sentry_sdk = sentry_sdk_alpha.api

            spotlight_client = self.sentry_sdk.get_client().spotlight
            if spotlight_client is None:
                sentry_logger.warning(
                    "Cannot find Spotlight client from SpotlightMiddleware, disabling the middleware."
                )
                return None
            # Spotlight URL has a trailing `/stream` part at the end so split it off
            self._spotlight_url = urllib.parse.urljoin(spotlight_client.url, "../")

        @property
        def spotlight_script(self):
            # type: (Self) -> Optional[str]
            """Lazily build (and cache) the JS injection snippet.

            Returns None when the sidecar's JS bundle is unreachable.
            """
            if self._spotlight_url is not None and self._spotlight_script is None:
                try:
                    spotlight_js_url = urllib.parse.urljoin(
                        self._spotlight_url, SPOTLIGHT_JS_ENTRY_PATH
                    )
                    # HEAD request just to verify the bundle is being served.
                    req = urllib.request.Request(
                        spotlight_js_url,
                        method="HEAD",
                    )
                    urllib.request.urlopen(req)
                    self._spotlight_script = SPOTLIGHT_JS_SNIPPET_PATTERN.format(
                        spotlight_url=self._spotlight_url,
                        spotlight_js_url=spotlight_js_url,
                    )
                except urllib.error.URLError as err:
                    sentry_logger.debug(
                        "Cannot get Spotlight JS to inject at %s. SpotlightMiddleware will not be very useful.",
                        spotlight_js_url,
                        exc_info=err,
                    )

            return self._spotlight_script

        def process_response(self, _request, response):
            # type: (Self, HttpRequest, HttpResponse) -> Optional[HttpResponse]
            content_type_header = tuple(
                p.strip()
                for p in response.headers.get("Content-Type", "").lower().split(";")
            )
            content_type = content_type_header[0]
            if len(content_type_header) > 1 and content_type_header[1].startswith(
                CHARSET_PREFIX
            ):
                encoding = content_type_header[1][len(CHARSET_PREFIX) :]
            else:
                encoding = "utf-8"

            if (
                self.spotlight_script is not None
                and not response.streaming
                and content_type == "text/html"
            ):
                content_length = len(response.content)
                injection = self.spotlight_script.encode(encoding)
                # Inject before the last closing </body> tag (any casing);
                # fall back to appending at the end of the document.
                injection_site = next(
                    (
                        idx
                        for idx in (
                            response.content.rfind(body_variant.encode(encoding))
                            for body_variant in BODY_CLOSE_TAG_POSSIBILITIES
                        )
                        if idx > -1
                    ),
                    content_length,
                )

                # This approach works even when we don't have a `</body>` tag
                response.content = (
                    response.content[:injection_site]
                    + injection
                    + response.content[injection_site:]
                )

                if response.has_header("Content-Length"):
                    response.headers["Content-Length"] = content_length + len(injection)

            return response

        def process_exception(self, _request, exception):
            # type: (Self, HttpRequest, Exception) -> Optional[HttpResponseServerError]
            if not settings.DEBUG or not self._spotlight_url:
                return None

            try:
                spotlight = (
                    urllib.request.urlopen(self._spotlight_url).read().decode("utf-8")
                )
            except urllib.error.URLError:
                return None
            else:
                event_id = self.sentry_sdk.capture_exception(exception)
                return HttpResponseServerError(
                    spotlight.replace(
                        "<html>",
                        SPOTLIGHT_ERROR_PAGE_SNIPPET.format(
                            spotlight_url=self._spotlight_url, event_id=event_id
                        ),
                    )
                )

except ImportError:
    # Django not installed: the middleware is unavailable; `settings` acts
    # as the sentinel checked in setup_spotlight().
    settings = None


def setup_spotlight(options):
    # type: (Dict[str, Any]) -> Optional[SpotlightClient]
    """Create a SpotlightClient from SDK `options`, wiring up Django if present.

    Returns None when Spotlight is disabled or misconfigured.
    """
    _handler = logging.StreamHandler(sys.stderr)
    _handler.setFormatter(logging.Formatter(" [spotlight] %(levelname)s: %(message)s"))
    logger.addHandler(_handler)
    logger.setLevel(logging.INFO)

    url = options.get("spotlight")

    if url is True:
        url = DEFAULT_SPOTLIGHT_URL

    if not isinstance(url, str):
        return None

    with capture_internal_exceptions():
        if (
            settings is not None
            and settings.DEBUG
            and env_to_bool(os.environ.get("SENTRY_SPOTLIGHT_ON_ERROR", "1"))
            and env_to_bool(os.environ.get("SENTRY_SPOTLIGHT_MIDDLEWARE", "1"))
        ):
            middleware = settings.MIDDLEWARE
            if DJANGO_SPOTLIGHT_MIDDLEWARE_PATH not in middleware:
                # Preserve the container type of settings.MIDDLEWARE
                # (list or tuple) while appending our middleware.
                settings.MIDDLEWARE = type(middleware)(
                    chain(middleware, (DJANGO_SPOTLIGHT_MIDDLEWARE_PATH,))
                )
            logger.info("Enabled Spotlight integration for Django")

    client = SpotlightClient(url)
    logger.info("Enabled Spotlight using sidecar at %s", url)

    return client
class NoOpSpan:
    """Inert stand-in for `Span` used when a real span cannot be created.

    Every mutator is a no-op and every accessor yields an empty or neutral
    value, so callers may use it interchangeably with a real span.
    """

    def __init__(self, **kwargs):
        # type: (Any) -> None
        pass

    def __repr__(self):
        # type: () -> str
        return f"<{type(self).__name__}>"

    @property
    def root_span(self):
        # type: () -> Optional[Span]
        return None

    def start_child(self, **kwargs):
        # type: (**Any) -> NoOpSpan
        # Children of a no-op span are themselves no-ops.
        return NoOpSpan()

    def to_traceparent(self):
        # type: () -> str
        return ""

    def to_baggage(self):
        # type: () -> Optional[Baggage]
        return None

    def get_baggage(self):
        # type: () -> Optional[Baggage]
        return None

    def iter_headers(self):
        # type: () -> Iterator[Tuple[str, str]]
        # An exhausted iterator: no headers to propagate.
        return iter(())

    def set_tag(self, key, value):
        # type: (str, Any) -> None
        pass

    def set_data(self, key, value):
        # type: (str, Any) -> None
        pass

    def set_status(self, value):
        # type: (str) -> None
        pass

    def set_http_status(self, http_status):
        # type: (int) -> None
        pass

    def is_success(self):
        # type: () -> bool
        return True

    def to_json(self):
        # type: () -> Dict[str, Any]
        return {}

    def get_trace_context(self):
        # type: () -> Any
        return {}

    def get_profile_context(self):
        # type: () -> Any
        return {}

    def finish(
        self,
        end_timestamp=None,  # type: Optional[Union[float, datetime]]
    ):
        # type: (...) -> None
        pass

    def set_context(self, key, value):
        # type: (str, dict[str, Any]) -> None
        pass

    def init_span_recorder(self, maxlen):
        # type: (int) -> None
        pass

    def _set_initial_sampling_decision(self, sampling_context):
        # type: (SamplingContext) -> None
        pass
+ """ + + def __init__( + self, + *, + op=None, # type: Optional[str] + description=None, # type: Optional[str] + status=None, # type: Optional[str] + sampled=None, # type: Optional[bool] + start_timestamp=None, # type: Optional[Union[datetime, float]] + origin=None, # type: Optional[str] + name=None, # type: Optional[str] + source=TransactionSource.CUSTOM, # type: str + attributes=None, # type: Optional[dict[str, Any]] + only_if_parent=False, # type: bool + parent_span=None, # type: Optional[Span] + otel_span=None, # type: Optional[OtelSpan] + span=None, # type: Optional[Span] + ): + # type: (...) -> None + """ + If otel_span is passed explicitly, just acts as a proxy. + + If span is passed explicitly, use it. The only purpose of this param + if backwards compatibility with start_transaction(transaction=...). + + If only_if_parent is True, just return an INVALID_SPAN + and avoid instrumentation if there's no active parent span. + """ + if otel_span is not None: + self._otel_span = otel_span + elif span is not None: + self._otel_span = span._otel_span + else: + skip_span = False + if only_if_parent and parent_span is None: + parent_span_context = get_current_span().get_span_context() + skip_span = ( + not parent_span_context.is_valid or parent_span_context.is_remote + ) + + if skip_span: + self._otel_span = INVALID_SPAN + else: + + if start_timestamp is not None: + # OTel timestamps have nanosecond precision + start_timestamp = convert_to_otel_timestamp(start_timestamp) + + span_name = name or description or op or DEFAULT_SPAN_NAME + + # Prepopulate some attrs so that they're accessible in traces_sampler + attributes = attributes or {} + if op is not None: + attributes[SentrySpanAttribute.OP] = op + if source is not None: + attributes[SentrySpanAttribute.SOURCE] = source + if description is not None: + attributes[SentrySpanAttribute.DESCRIPTION] = description + if sampled is not None: + attributes[SentrySpanAttribute.CUSTOM_SAMPLED] = sampled + + parent_context = 
None + if parent_span is not None: + parent_context = otel_trace.set_span_in_context( + parent_span._otel_span + ) + + self._otel_span = tracer.start_span( + span_name, + context=parent_context, + start_time=start_timestamp, + attributes=attributes, + ) + + self.origin = origin or DEFAULT_SPAN_ORIGIN + self.description = description + self.name = span_name + + if status is not None: + self.set_status(status) + + self.update_active_thread() + + def __eq__(self, other): + # type: (object) -> bool + if not isinstance(other, Span): + return False + return self._otel_span == other._otel_span + + def __repr__(self): + # type: () -> str + return ( + "<%s(op=%r, name:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, origin=%r)>" + % ( + self.__class__.__name__, + self.op, + self.name, + self.trace_id, + self.span_id, + self.parent_span_id, + self.sampled, + self.origin, + ) + ) + + def __enter__(self): + # type: () -> Span + # XXX use_span? https://github.com/open-telemetry/opentelemetry-python/blob/3836da8543ce9751051e38a110c0468724042e62/opentelemetry-api/src/opentelemetry/trace/__init__.py#L547 + # + # create a Context object with parent set as current span + ctx = otel_trace.set_span_in_context(self._otel_span) + # set as the implicit current context + self._ctx_token = context.attach(ctx) + + # get the new scope that was forked on context.attach + self.scope = sentry_sdk_alpha.get_current_scope() + self.scope.span = self + + return self + + def __exit__(self, ty, value, tb): + # type: (Optional[Any], Optional[Any], Optional[Any]) -> None + if value is not None and should_be_treated_as_error(ty, value): + self.set_status(SPANSTATUS.INTERNAL_ERROR) + else: + status_unset = ( + hasattr(self._otel_span, "status") + and self._otel_span.status.status_code == StatusCode.UNSET + ) + if status_unset: + self.set_status(SPANSTATUS.OK) + + self.finish() + context.detach(self._ctx_token) + del self._ctx_token + + @property + def description(self): + # type: () -> 
Optional[str] + return self.get_attribute(SentrySpanAttribute.DESCRIPTION) + + @description.setter + def description(self, value): + # type: (Optional[str]) -> None + self.set_attribute(SentrySpanAttribute.DESCRIPTION, value) + + @property + def origin(self): + # type: () -> Optional[str] + return self.get_attribute(SentrySpanAttribute.ORIGIN) + + @origin.setter + def origin(self, value): + # type: (Optional[str]) -> None + self.set_attribute(SentrySpanAttribute.ORIGIN, value) + + @property + def root_span(self): + # type: () -> Optional[Span] + root_otel_span = cast( + "Optional[OtelSpan]", get_sentry_meta(self._otel_span, "root_span") + ) + return Span(otel_span=root_otel_span) if root_otel_span else None + + @property + def is_root_span(self): + # type: () -> bool + return self.root_span == self + + @property + def parent_span_id(self): + # type: () -> Optional[str] + if ( + not isinstance(self._otel_span, ReadableSpan) + or self._otel_span.parent is None + ): + return None + return format_span_id(self._otel_span.parent.span_id) + + @property + def trace_id(self): + # type: () -> str + return format_trace_id(self._otel_span.get_span_context().trace_id) + + @property + def span_id(self): + # type: () -> str + return format_span_id(self._otel_span.get_span_context().span_id) + + @property + def is_valid(self): + # type: () -> bool + return self._otel_span.get_span_context().is_valid and isinstance( + self._otel_span, ReadableSpan + ) + + @property + def sampled(self): + # type: () -> Optional[bool] + return self._otel_span.get_span_context().trace_flags.sampled + + @property + def sample_rate(self): + # type: () -> Optional[float] + sample_rate = self._otel_span.get_span_context().trace_state.get( + TRACESTATE_SAMPLE_RATE_KEY + ) + return float(sample_rate) if sample_rate is not None else None + + @property + def op(self): + # type: () -> Optional[str] + return self.get_attribute(SentrySpanAttribute.OP) + + @op.setter + def op(self, value): + # type: 
(Optional[str]) -> None + self.set_attribute(SentrySpanAttribute.OP, value) + + @property + def name(self): + # type: () -> Optional[str] + return self.get_attribute(SentrySpanAttribute.NAME) + + @name.setter + def name(self, value): + # type: (Optional[str]) -> None + self.set_attribute(SentrySpanAttribute.NAME, value) + + @property + def source(self): + # type: () -> str + return ( + self.get_attribute(SentrySpanAttribute.SOURCE) or TransactionSource.CUSTOM + ) + + @source.setter + def source(self, value): + # type: (str) -> None + self.set_attribute(SentrySpanAttribute.SOURCE, value) + + @property + def start_timestamp(self): + # type: () -> Optional[datetime] + if not isinstance(self._otel_span, ReadableSpan): + return None + + start_time = self._otel_span.start_time + if start_time is None: + return None + + return convert_from_otel_timestamp(start_time) + + @property + def timestamp(self): + # type: () -> Optional[datetime] + if not isinstance(self._otel_span, ReadableSpan): + return None + + end_time = self._otel_span.end_time + if end_time is None: + return None + + return convert_from_otel_timestamp(end_time) + + def start_child(self, **kwargs): + # type: (**Any) -> Span + return Span(parent_span=self, **kwargs) + + def iter_headers(self): + # type: () -> Iterator[Tuple[str, str]] + yield SENTRY_TRACE_HEADER_NAME, self.to_traceparent() + yield BAGGAGE_HEADER_NAME, serialize_trace_state(self.trace_state) + + def to_traceparent(self): + # type: () -> str + if self.sampled is True: + sampled = "1" + elif self.sampled is False: + sampled = "0" + else: + sampled = None + + traceparent = "%s-%s" % (self.trace_id, self.span_id) + if sampled is not None: + traceparent += "-%s" % (sampled,) + + return traceparent + + @property + def trace_state(self): + # type: () -> TraceState + return get_trace_state(self._otel_span) + + def to_baggage(self): + # type: () -> Baggage + return self.get_baggage() + + def get_baggage(self): + # type: () -> Baggage + return 
baggage_from_trace_state(self.trace_state) + + def set_tag(self, key, value): + # type: (str, Any) -> None + self.set_attribute(f"{SentrySpanAttribute.TAG}.{key}", value) + + def set_data(self, key, value): + # type: (str, Any) -> None + warnings.warn( + "`Span.set_data` is deprecated. Please use `Span.set_attribute` instead.", + DeprecationWarning, + stacklevel=2, + ) + + # TODO-neel-potel we cannot add dicts here + self.set_attribute(key, value) + + def get_attribute(self, name): + # type: (str) -> Optional[Any] + if ( + not isinstance(self._otel_span, ReadableSpan) + or not self._otel_span.attributes + ): + return None + return self._otel_span.attributes.get(name) + + def set_attribute(self, key, value): + # type: (str, Any) -> None + # otel doesn't support None as values, preferring to not set the key + # at all instead + if value is None: + return + serialized_value = _serialize_span_attribute(value) + if serialized_value is None: + return + + self._otel_span.set_attribute(key, serialized_value) + + @property + def status(self): + # type: () -> Optional[str] + """ + Return the Sentry `SPANSTATUS` corresponding to the underlying OTel status. + Because differences in possible values in OTel `StatusCode` and + Sentry `SPANSTATUS` it can not be guaranteed that the status + set in `set_status()` will be the same as the one returned here. 
+ """ + if not isinstance(self._otel_span, ReadableSpan): + return None + + if self._otel_span.status.status_code == StatusCode.UNSET: + return None + elif self._otel_span.status.status_code == StatusCode.OK: + return SPANSTATUS.OK + else: + return SPANSTATUS.UNKNOWN_ERROR + + def set_status(self, status): + # type: (str) -> None + if status == SPANSTATUS.OK: + otel_status = StatusCode.OK + otel_description = None + else: + otel_status = StatusCode.ERROR + otel_description = status + + if _OTEL_VERSION is None or _OTEL_VERSION >= (1, 12, 0): + self._otel_span.set_status(otel_status, otel_description) + else: + self._otel_span.set_status(Status(otel_status, otel_description)) + + def set_thread(self, thread_id, thread_name): + # type: (Optional[int], Optional[str]) -> None + if thread_id is not None: + self.set_attribute(SPANDATA.THREAD_ID, str(thread_id)) + + if thread_name is not None: + self.set_attribute(SPANDATA.THREAD_NAME, thread_name) + + def update_active_thread(self): + # type: () -> None + thread_id, thread_name = get_current_thread_meta() + self.set_thread(thread_id, thread_name) + + def set_http_status(self, http_status): + # type: (int) -> None + self.set_attribute(SPANDATA.HTTP_STATUS_CODE, http_status) + self.set_status(get_span_status_from_http_code(http_status)) + + def is_success(self): + # type: () -> bool + return self.status == SPANSTATUS.OK + + def finish(self, end_timestamp=None): + # type: (Optional[Union[float, datetime]]) -> None + if end_timestamp is not None: + self._otel_span.end(convert_to_otel_timestamp(end_timestamp)) + else: + self._otel_span.end() + + def to_json(self): + # type: () -> dict[str, Any] + """ + Only meant for testing. Not used internally anymore. 
+ """ + if not isinstance(self._otel_span, ReadableSpan): + return {} + return json.loads(self._otel_span.to_json()) + + def get_trace_context(self): + # type: () -> dict[str, Any] + if not isinstance(self._otel_span, ReadableSpan): + return {} + + return get_trace_context(self._otel_span) + + def set_context(self, key, value): + # type: (str, Any) -> None + # TODO-neel-potel we cannot add dicts here + + self.set_attribute(f"{SentrySpanAttribute.CONTEXT}.{key}", value) + + def set_flag(self, flag, value): + # type: (str, bool) -> None + flag_count = self.get_attribute("_flag.count") or 0 + if flag_count < _FLAGS_CAPACITY: + self.set_attribute(f"flag.evaluation.{flag}", value) + self.set_attribute("_flag.count", flag_count + 1) + + +# TODO-neel-potel add deprecation +Transaction = Span + + +if TYPE_CHECKING: + + @overload + def trace(func=None): + # type: (None) -> Callable[[Callable[P, R]], Callable[P, R]] + pass + + @overload + def trace(func): + # type: (Callable[P, R]) -> Callable[P, R] + pass + + +def trace(func=None): + # type: (Optional[Callable[P, R]]) -> Union[Callable[P, R], Callable[[Callable[P, R]], Callable[P, R]]] + """ + Decorator to start a child span under the existing current transaction. + If there is no current transaction, then nothing will be traced. + + .. code-block:: + :caption: Usage + + import sentry_sdk + + @sentry_sdk.trace + def my_function(): + ... + + @sentry_sdk.trace + async def my_async_function(): + ... + """ + from sentry_sdk_alpha.tracing_utils import start_child_span_decorator + + # This patterns allows usage of both @sentry_traced and @sentry_traced(...) 
+ # See https://stackoverflow.com/questions/52126071/decorator-with-arguments-avoid-parenthesis-when-no-arguments/52126278 + if func: + return start_child_span_decorator(func) + else: + return start_child_span_decorator diff --git a/src/sentry_sdk_alpha/tracing_utils.py b/src/sentry_sdk_alpha/tracing_utils.py new file mode 100644 index 00000000000000..6841490be97682 --- /dev/null +++ b/src/sentry_sdk_alpha/tracing_utils.py @@ -0,0 +1,869 @@ +import contextlib +import decimal +import inspect +import os +import re +import sys +import uuid +from collections.abc import Mapping +from datetime import datetime, timedelta, timezone +from decimal import ROUND_DOWN, Decimal, DefaultContext, localcontext +from functools import wraps +from random import Random +from urllib.parse import quote, unquote + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import ( + OP, + SPANDATA, + SPANSTATUS, + BAGGAGE_HEADER_NAME, + SENTRY_TRACE_HEADER_NAME, +) +from sentry_sdk_alpha.utils import ( + capture_internal_exceptions, + filename_for_module, + Dsn, + logger, + match_regex_list, + qualname_from_function, + to_string, + is_sentry_url, + _is_external_source, + _is_in_project_root, + _module_in_list, +) + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Any + from typing import Dict + from typing import Generator + from typing import Optional + from typing import Union + from types import FrameType + + +SENTRY_TRACE_REGEX = re.compile( + "^[ \t]*" # whitespace + "([0-9a-f]{32})?" # trace_id + "-?([0-9a-f]{16})?" # span_id + "-?([01])?" 
class EnvironHeaders(Mapping):  # type: ignore
    """Read-only, case-insensitive mapping view over WSGI-style headers.

    Lookups translate header names ("x-foo") into the environ convention
    (prefix + upper snake case, e.g. "HTTP_X_FOO"); iteration yields the
    de-prefixed environ keys.
    """

    def __init__(
        self,
        environ,  # type: Mapping[str, str]
        prefix="HTTP_",  # type: str
    ):
        # type: (...) -> None
        self.environ = environ
        self.prefix = prefix

    def __getitem__(self, key):
        # type: (str) -> Optional[Any]
        normalized = key.replace("-", "_").upper()
        return self.environ[self.prefix + normalized]

    def __len__(self):
        # type: () -> int
        return sum(1 for _ in self)

    def __iter__(self):
        # type: () -> Generator[str, None, None]
        for raw_key in self.environ:
            # Non-string keys can appear in exotic environs; skip them.
            if not isinstance(raw_key, str):
                continue

            candidate = raw_key.replace("-", "_").upper()
            if candidate.startswith(self.prefix):
                yield candidate[len(self.prefix) :]


def has_tracing_enabled(options):
    # type: (Optional[Dict[str, Any]]) -> bool
    """
    Returns True if either traces_sample_rate or traces_sampler is
    defined.
    """
    if options is None:
        return False

    rate_is_set = options.get("traces_sample_rate") is not None
    sampler_is_set = options.get("traces_sampler") is not None
    return rate_is_set or sampler_is_set
-> Generator[sentry_sdk.tracing.Span, None, None] + + # TODO: Bring back capturing of params by default + if sentry_sdk_alpha.get_client().options["_experiments"].get("record_sql_params", False): + if not params_list or params_list == [None]: + params_list = None + + if paramstyle == "pyformat": + paramstyle = "format" + else: + params_list = None + paramstyle = None + + query = _format_sql(cursor, query) + + data = {} + if params_list is not None: + data["db.params"] = params_list + if paramstyle is not None: + data["db.paramstyle"] = paramstyle + if executemany: + data["db.executemany"] = True + if record_cursor_repr and cursor is not None: + data["db.cursor"] = cursor + + with capture_internal_exceptions(): + sentry_sdk_alpha.add_breadcrumb(message=query, category="query", data=data) + + with sentry_sdk_alpha.start_span( + op=OP.DB, + name=query, + origin=span_origin, + only_if_parent=True, + ) as span: + for k, v in data.items(): + span.set_attribute(k, v) + yield span + + +def _get_frame_module_abs_path(frame): + # type: (FrameType) -> Optional[str] + try: + return frame.f_code.co_filename + except Exception: + return None + + +def _should_be_included( + is_sentry_sdk_frame, # type: bool + namespace, # type: Optional[str] + in_app_include, # type: Optional[list[str]] + in_app_exclude, # type: Optional[list[str]] + abs_path, # type: Optional[str] + project_root, # type: Optional[str] +): + # type: (...) 
-> bool + # in_app_include takes precedence over in_app_exclude + should_be_included = _module_in_list(namespace, in_app_include) + should_be_excluded = _is_external_source(abs_path) or _module_in_list( + namespace, in_app_exclude + ) + return not is_sentry_sdk_frame and ( + should_be_included + or (_is_in_project_root(abs_path, project_root) and not should_be_excluded) + ) + + +def add_query_source(span): + # type: (sentry_sdk.tracing.Span) -> None + """ + Adds OTel compatible source code information to the span + """ + client = sentry_sdk_alpha.get_client() + if not client.is_active(): + return + + if span.start_timestamp is None: + return + + should_add_query_source = client.options.get("enable_db_query_source", True) + if not should_add_query_source: + return + + # We assume here that the query is just ending now. We can't use + # the actual end timestamp of the span because in OTel the span + # can't be finished in order to set any attributes on it. + duration = datetime.now(tz=timezone.utc) - span.start_timestamp + threshold = client.options.get("db_query_source_threshold_ms", 0) + slow_query = duration / timedelta(milliseconds=1) > threshold + + if not slow_query: + return + + project_root = client.options["project_root"] + in_app_include = client.options.get("in_app_include") + in_app_exclude = client.options.get("in_app_exclude") + + # Find the correct frame + frame = sys._getframe() # type: Union[FrameType, None] + while frame is not None: + abs_path = _get_frame_module_abs_path(frame) + + try: + namespace = frame.f_globals.get("__name__") # type: Optional[str] + except Exception: + namespace = None + + is_sentry_sdk_frame = namespace is not None and namespace.startswith( + "sentry_sdk." 
def extract_sentrytrace_data(header):
    # type: (Optional[str]) -> Optional[Dict[str, Union[str, bool, None]]]
    """
    Given a `sentry-trace` header string, return a dictionary of data.

    Returns None for an empty or malformed header.
    """
    if not header:
        return None

    # Tolerate W3C-traceparent-style wrapping ("00-<payload>-00").
    if header.startswith("00-") and header.endswith("-00"):
        header = header[3:-3]

    match = SENTRY_TRACE_REGEX.match(header)
    if match is None:
        return None

    trace_id, parent_span_id, sampled_str = match.groups()

    # Normalize ids to fixed-width lowercase hex.
    if trace_id:
        trace_id = "{:032x}".format(int(trace_id, 16))
    if parent_span_id:
        parent_span_id = "{:016x}".format(int(parent_span_id, 16))

    parent_sampled = (sampled_str != "0") if sampled_str else None

    return {
        "trace_id": trace_id,
        "parent_span_id": parent_span_id,
        "parent_sampled": parent_sampled,
    }


def _format_sql(cursor, sql):
    # type: (Any, str) -> Optional[str]
    """Render the SQL actually sent to the server, falling back to `sql`.

    If we're using psycopg2, it could be that we're looking at a query that
    uses Composed objects. Use psycopg2's mogrify function to format the
    query. We lose per-parameter trimming but gain accuracy in formatting.
    """
    real_sql = None

    try:
        if hasattr(cursor, "mogrify"):
            real_sql = cursor.mogrify(sql)
            if isinstance(real_sql, bytes):
                real_sql = real_sql.decode(cursor.connection.encoding)
    except Exception:
        # Any driver-level failure falls back to stringifying the raw query.
        real_sql = None

    return real_sql or to_string(sql)
+ The parent span could also be a span in an upstream service.""" + + self.parent_sampled = parent_sampled + """Boolean indicator if the parent span was sampled. + Important when the parent span originated in an upstream service, + because we want to sample the whole trace, or nothing from the trace.""" + + self.baggage = baggage + """Baggage object used for dynamic sampling decisions.""" + + @property + def dynamic_sampling_context(self): + # type: () -> Optional[Dict[str, str]] + return self.baggage.dynamic_sampling_context() if self.baggage else None + + @classmethod + def from_incoming_data(cls, incoming_data): + # type: (Dict[str, Any]) -> Optional[PropagationContext] + propagation_context = None + + normalized_data = normalize_incoming_data(incoming_data) + baggage_header = normalized_data.get(BAGGAGE_HEADER_NAME) + if baggage_header: + propagation_context = PropagationContext() + propagation_context.baggage = Baggage.from_incoming_header(baggage_header) + + sentry_trace_header = normalized_data.get(SENTRY_TRACE_HEADER_NAME) + if sentry_trace_header: + sentrytrace_data = extract_sentrytrace_data(sentry_trace_header) + if sentrytrace_data is not None: + if propagation_context is None: + propagation_context = PropagationContext() + propagation_context.update(sentrytrace_data) + + if propagation_context is not None: + propagation_context._fill_sample_rand() + + return propagation_context + + @property + def trace_id(self): + # type: () -> str + """The trace id of the Sentry trace.""" + if not self._trace_id: + self._trace_id = uuid.uuid4().hex + + return self._trace_id + + @trace_id.setter + def trace_id(self, value): + # type: (str) -> None + self._trace_id = value + + @property + def span_id(self): + # type: () -> str + """The span id of the currently executed span.""" + if not self._span_id: + self._span_id = uuid.uuid4().hex[16:] + + return self._span_id + + @span_id.setter + def span_id(self, value): + # type: (str) -> None + self._span_id = value + + def 
to_traceparent(self): + # type: () -> str + if self.parent_sampled is True: + sampled = "1" + elif self.parent_sampled is False: + sampled = "0" + else: + sampled = None + + traceparent = "%s-%s" % (self.trace_id, self.span_id) + if sampled is not None: + traceparent += "-%s" % (sampled,) + + return traceparent + + def update(self, other_dict): + # type: (Dict[str, Any]) -> None + """ + Updates the PropagationContext with data from the given dictionary. + """ + for key, value in other_dict.items(): + try: + setattr(self, key, value) + except AttributeError: + pass + + def _fill_sample_rand(self): + # type: () -> None + """ + Ensure that there is a valid sample_rand value in the baggage. + + If there is a valid sample_rand value in the baggage, we keep it. + Otherwise, we generate a sample_rand value according to the following: + + - If we have a parent_sampled value and a sample_rate in the DSC, we compute + a sample_rand value randomly in the range: + - [0, sample_rate) if parent_sampled is True, + - or, in the range [sample_rate, 1) if parent_sampled is False. + + - If either parent_sampled or sample_rate is missing, we generate a random + value in the range [0, 1). + + The sample_rand is deterministically generated from the trace_id, if present. + + This function does nothing if there is no dynamic_sampling_context. 
+ """ + if self.dynamic_sampling_context is None or self.baggage is None: + return + + sentry_baggage = self.baggage.sentry_items + + sample_rand = None + if sentry_baggage.get("sample_rand"): + try: + sample_rand = Decimal(sentry_baggage["sample_rand"]) + except Exception: + logger.debug( + f"Failed to convert incoming sample_rand to Decimal: {sample_rand}" + ) + + if sample_rand is not None and 0 <= sample_rand < 1: + # sample_rand is present and valid, so don't overwrite it + return + + sample_rate = None + if sentry_baggage.get("sample_rate"): + try: + sample_rate = float(sentry_baggage["sample_rate"]) + except Exception: + logger.debug( + f"Failed to convert incoming sample_rate to float: {sample_rate}" + ) + + lower, upper = _sample_rand_range(self.parent_sampled, sample_rate) + + try: + sample_rand = _generate_sample_rand(self.trace_id, interval=(lower, upper)) + except ValueError: + # ValueError is raised if the interval is invalid, i.e. lower >= upper. + # lower >= upper might happen if the incoming trace's sampled flag + # and sample_rate are inconsistent, e.g. sample_rate=0.0 but sampled=True. + # We cannot generate a sensible sample_rand value in this case. + logger.debug( + f"Could not backfill sample_rand, since parent_sampled={self.parent_sampled} " + f"and sample_rate={sample_rate}." + ) + return + + self.baggage.sentry_items["sample_rand"] = f"{sample_rand:.6f}" # noqa: E231 + + def _sample_rand(self): + # type: () -> Optional[str] + """Convenience method to get the sample_rand value from the baggage.""" + if self.baggage is None: + return None + + return self.baggage.sentry_items.get("sample_rand") + + def __repr__(self): + # type: (...) -> str + return "".format( + self._trace_id, + self._span_id, + self.parent_span_id, + self.parent_sampled, + self.baggage, + self.dynamic_sampling_context, + ) + + +class Baggage: + """ + The W3C Baggage header information (see https://www.w3.org/TR/baggage/). 
+ + Before mutating a `Baggage` object, calling code must check that `mutable` is `True`. + Mutating a `Baggage` object that has `mutable` set to `False` is not allowed, but + it is the caller's responsibility to enforce this restriction. + """ + + __slots__ = ("sentry_items", "third_party_items", "mutable") + + SENTRY_PREFIX = "sentry-" + SENTRY_PREFIX_REGEX = re.compile("^sentry-") + + def __init__( + self, + sentry_items, # type: Dict[str, str] + third_party_items="", # type: str + mutable=True, # type: bool + ): + self.sentry_items = sentry_items + self.third_party_items = third_party_items + self.mutable = mutable + + @classmethod + def from_incoming_header( + cls, + header, # type: Optional[str] + ): + # type: (...) -> Baggage + """ + freeze if incoming header already has sentry baggage + """ + sentry_items = {} + third_party_items = "" + mutable = True + + if header: + for item in header.split(","): + if "=" not in item: + continue + + with capture_internal_exceptions(): + item = item.strip() + key, val = item.split("=") + if Baggage.SENTRY_PREFIX_REGEX.match(key): + baggage_key = unquote(key.split("-")[1]) + sentry_items[baggage_key] = unquote(val) + mutable = False + else: + third_party_items += ("," if third_party_items else "") + item + + return Baggage(sentry_items, third_party_items, mutable) + + @classmethod + def from_options(cls, scope): + # type: (sentry_sdk.scope.Scope) -> Optional[Baggage] + + sentry_items = {} # type: Dict[str, str] + third_party_items = "" + mutable = False + + client = sentry_sdk_alpha.get_client() + + if not client.is_active() or scope._propagation_context is None: + return Baggage(sentry_items) + + options = client.options + propagation_context = scope._propagation_context + + if propagation_context is not None: + sentry_items["trace_id"] = propagation_context.trace_id + + if options.get("environment"): + sentry_items["environment"] = options["environment"] + + if options.get("release"): + sentry_items["release"] = 
options["release"] + + if options.get("dsn"): + sentry_items["public_key"] = Dsn(options["dsn"]).public_key + + if options.get("traces_sample_rate"): + sentry_items["sample_rate"] = str(options["traces_sample_rate"]) + + return Baggage(sentry_items, third_party_items, mutable) + + def freeze(self): + # type: () -> None + self.mutable = False + + def dynamic_sampling_context(self): + # type: () -> Dict[str, str] + header = {} + + for key, item in self.sentry_items.items(): + header[key] = item + + return header + + def serialize(self, include_third_party=False): + # type: (bool) -> str + items = [] + + for key, val in self.sentry_items.items(): + with capture_internal_exceptions(): + item = Baggage.SENTRY_PREFIX + quote(key) + "=" + quote(str(val)) + items.append(item) + + if include_third_party: + items.append(self.third_party_items) + + return ",".join(items) + + @staticmethod + def strip_sentry_baggage(header): + # type: (str) -> str + """Remove Sentry baggage from the given header. + + Given a Baggage header, return a new Baggage header with all Sentry baggage items removed. + """ + return ",".join( + ( + item + for item in header.split(",") + if not Baggage.SENTRY_PREFIX_REGEX.match(item.strip()) + ) + ) + + def __repr__(self): + # type: () -> str + return f'' + + +def should_propagate_trace(client, url): + # type: (sentry_sdk.client.BaseClient, str) -> bool + """ + Returns True if url matches trace_propagation_targets configured in the given client. Otherwise, returns False. + """ + trace_propagation_targets = client.options["trace_propagation_targets"] + + if is_sentry_url(client, url): + return False + + return match_regex_list(url, trace_propagation_targets, substring_matching=True) + + +def normalize_incoming_data(incoming_data): + # type: (Dict[str, Any]) -> Dict[str, Any] + """ + Normalizes incoming data so the keys are all lowercase with dashes instead of underscores and stripped from known prefixes. 
+ """ + data = {} + for key, value in incoming_data.items(): + if key.startswith("HTTP_"): + key = key[5:] + + key = key.replace("_", "-").lower() + data[key] = value + + return data + + +def start_child_span_decorator(func): + # type: (Any) -> Any + """ + Decorator to add child spans for functions. + + See also ``sentry_sdk.tracing.trace()``. + """ + # Asynchronous case + if inspect.iscoroutinefunction(func): + + @wraps(func) + async def func_with_tracing(*args, **kwargs): + # type: (*Any, **Any) -> Any + + span = get_current_span() + + if span is None: + logger.debug( + "Cannot create a child span for %s. " + "Please start a Sentry transaction before calling this function.", + qualname_from_function(func), + ) + return await func(*args, **kwargs) + + with span.start_child( + op=OP.FUNCTION, + name=qualname_from_function(func), + ): + return await func(*args, **kwargs) + + try: + func_with_tracing.__signature__ = inspect.signature(func) # type: ignore[attr-defined] + except Exception: + pass + + # Synchronous case + else: + + @wraps(func) + def func_with_tracing(*args, **kwargs): + # type: (*Any, **Any) -> Any + + span = get_current_span() + + if span is None: + logger.debug( + "Cannot create a child span for %s. 
" + "Please start a Sentry transaction before calling this function.", + qualname_from_function(func), + ) + return func(*args, **kwargs) + + with span.start_child( + op=OP.FUNCTION, + name=qualname_from_function(func), + ): + return func(*args, **kwargs) + + try: + func_with_tracing.__signature__ = inspect.signature(func) # type: ignore[attr-defined] + except Exception: + pass + + return func_with_tracing + + +def get_current_span(scope=None): + # type: (Optional[sentry_sdk.Scope]) -> Optional[sentry_sdk.tracing.Span] + """ + Returns the currently active span if there is one running, otherwise `None` + """ + scope = scope or sentry_sdk_alpha.get_current_scope() + current_span = scope.span + return current_span + + +def _generate_sample_rand( + trace_id, # type: Optional[str] + interval=(0.0, 1.0), # type: tuple[float, float] +): + # type: (...) -> Optional[decimal.Decimal] + """Generate a sample_rand value from a trace ID. + + The generated value will be pseudorandomly chosen from the provided + interval. Specifically, given (lower, upper) = interval, the generated + value will be in the range [lower, upper). The value has 6-digit precision, + so when printing with .6f, the value will never be rounded up. + + The pseudorandom number generator is seeded with the trace ID. + """ + lower, upper = interval + if not lower < upper: # using `if lower >= upper` would handle NaNs incorrectly + raise ValueError("Invalid interval: lower must be less than upper") + + rng = Random(trace_id) + sample_rand = upper + while sample_rand >= upper: + sample_rand = rng.uniform(lower, upper) + + # Round down to exactly six decimal-digit precision. + # Setting the context is needed to avoid an InvalidOperation exception + # in case the user has changed the default precision or set traps. 
+ with localcontext(DefaultContext) as ctx: + ctx.prec = 6 + return Decimal(sample_rand).quantize( + Decimal("0.000001"), + rounding=ROUND_DOWN, + ) + + +def _sample_rand_range(parent_sampled, sample_rate): + # type: (Optional[bool], Optional[float]) -> tuple[float, float] + """ + Compute the lower (inclusive) and upper (exclusive) bounds of the range of values + that a generated sample_rand value must fall into, given the parent_sampled and + sample_rate values. + """ + if parent_sampled is None or sample_rate is None: + return 0.0, 1.0 + elif parent_sampled is True: + return 0.0, sample_rate + else: # parent_sampled is False + return sample_rate, 1.0 + + +def get_span_status_from_http_code(http_status_code): + # type: (int) -> str + """ + Returns the Sentry status corresponding to the given HTTP status code. + + See: https://develop.sentry.dev/sdk/event-payloads/contexts/#trace-context + """ + if http_status_code < 400: + return SPANSTATUS.OK + + elif 400 <= http_status_code < 500: + if http_status_code == 403: + return SPANSTATUS.PERMISSION_DENIED + elif http_status_code == 404: + return SPANSTATUS.NOT_FOUND + elif http_status_code == 429: + return SPANSTATUS.RESOURCE_EXHAUSTED + elif http_status_code == 413: + return SPANSTATUS.FAILED_PRECONDITION + elif http_status_code == 401: + return SPANSTATUS.UNAUTHENTICATED + elif http_status_code == 409: + return SPANSTATUS.ALREADY_EXISTS + else: + return SPANSTATUS.INVALID_ARGUMENT + + elif 500 <= http_status_code < 600: + if http_status_code == 504: + return SPANSTATUS.DEADLINE_EXCEEDED + elif http_status_code == 501: + return SPANSTATUS.UNIMPLEMENTED + elif http_status_code == 503: + return SPANSTATUS.UNAVAILABLE + else: + return SPANSTATUS.INTERNAL_ERROR + + return SPANSTATUS.UNKNOWN_ERROR diff --git a/src/sentry_sdk_alpha/transport.py b/src/sentry_sdk_alpha/transport.py new file mode 100644 index 00000000000000..06d62269e872b3 --- /dev/null +++ b/src/sentry_sdk_alpha/transport.py @@ -0,0 +1,823 @@ +from abc import 
ABC, abstractmethod +import io +import os +import gzip +import socket +import ssl +import time +from datetime import datetime, timedelta, timezone +from collections import defaultdict +from urllib.request import getproxies + +try: + import brotli # type: ignore +except ImportError: + brotli = None + +import urllib3 +import certifi + +from sentry_sdk_alpha.consts import EndpointType +from sentry_sdk_alpha.utils import Dsn, logger, capture_internal_exceptions +from sentry_sdk_alpha.worker import BackgroundWorker +from sentry_sdk_alpha.envelope import Envelope, Item, PayloadRef + +from typing import TYPE_CHECKING, cast, List, Dict + +if TYPE_CHECKING: + from typing import Any + from typing import Callable + from typing import DefaultDict + from typing import Iterable + from typing import Mapping + from typing import Optional + from typing import Self + from typing import Tuple + from typing import Type + from typing import Union + + from urllib3.poolmanager import PoolManager + from urllib3.poolmanager import ProxyManager + + from sentry_sdk_alpha._types import EventDataCategory + +KEEP_ALIVE_SOCKET_OPTIONS = [] +for option in [ + (socket.SOL_SOCKET, lambda: getattr(socket, "SO_KEEPALIVE"), 1), # noqa: B009 + (socket.SOL_TCP, lambda: getattr(socket, "TCP_KEEPIDLE"), 45), # noqa: B009 + (socket.SOL_TCP, lambda: getattr(socket, "TCP_KEEPINTVL"), 10), # noqa: B009 + (socket.SOL_TCP, lambda: getattr(socket, "TCP_KEEPCNT"), 6), # noqa: B009 +]: + try: + KEEP_ALIVE_SOCKET_OPTIONS.append((option[0], option[1](), option[2])) + except AttributeError: + # a specific option might not be available on specific systems, + # e.g. TCP_KEEPIDLE doesn't exist on macOS + pass + + +class Transport(ABC): + """Baseclass for all transports. + + A transport is used to send an event to sentry. 
+ """ + + parsed_dsn = None # type: Optional[Dsn] + + def __init__(self, options=None): + # type: (Self, Optional[Dict[str, Any]]) -> None + self.options = options + if options and options["dsn"] is not None and options["dsn"]: + self.parsed_dsn = Dsn(options["dsn"]) + else: + self.parsed_dsn = None + + @abstractmethod + def capture_envelope(self, envelope): + # type: (Self, Envelope) -> None + """ + Send an envelope to Sentry. + + Envelopes are a data container format that can hold any type of data + submitted to Sentry. We use it to send all event data (including errors, + transactions, crons check-ins, etc.) to Sentry. + """ + pass + + def flush( + self, + timeout, + callback=None, + ): + # type: (Self, float, Optional[Any]) -> None + """ + Wait `timeout` seconds for the current events to be sent out. + + The default implementation is a no-op, since this method may only be relevant to some transports. + Subclasses should override this method if necessary. + """ + return None + + def kill(self): + # type: (Self) -> None + """ + Forcefully kills the transport. + + The default implementation is a no-op, since this method may only be relevant to some transports. + Subclasses should override this method if necessary. + """ + return None + + def record_lost_event( + self, + reason, # type: str + data_category=None, # type: Optional[EventDataCategory] + item=None, # type: Optional[Item] + *, + quantity=1, # type: int + ): + # type: (...) -> None + """This increments a counter for event loss by reason and + data category by the given positive-int quantity (default 1). + + If an item is provided, the data category and quantity are + extracted from the item, and the values passed for + data_category and quantity are ignored. + + When recording a lost transaction via data_category="transaction", + the calling code should also record the lost spans via this method. 
+ When recording lost spans, `quantity` should be set to the number + of contained spans, plus one for the transaction itself. When + passing an Item containing a transaction via the `item` parameter, + this method automatically records the lost spans. + """ + return None + + def is_healthy(self): + # type: (Self) -> bool + return True + + def __del__(self): + # type: (Self) -> None + try: + self.kill() + except Exception: + pass + + +def _parse_rate_limits(header, now=None): + # type: (str, Optional[datetime]) -> Iterable[Tuple[Optional[EventDataCategory], datetime]] + if now is None: + now = datetime.now(timezone.utc) + + for limit in header.split(","): + try: + parameters = limit.strip().split(":") + retry_after_val, categories = parameters[:2] + + retry_after = now + timedelta(seconds=int(retry_after_val)) + for category in categories and categories.split(";") or (None,): + category = cast("Optional[EventDataCategory]", category) + yield category, retry_after + except (LookupError, ValueError): + continue + + +class BaseHttpTransport(Transport): + """The base HTTP transport.""" + + TIMEOUT = 30 # seconds + + def __init__(self, options): + # type: (Self, Dict[str, Any]) -> None + from sentry_sdk_alpha.consts import VERSION + + Transport.__init__(self, options) + assert self.parsed_dsn is not None + self.options = options # type: Dict[str, Any] + self._worker = BackgroundWorker(queue_size=options["transport_queue_size"]) + self._auth = self.parsed_dsn.to_auth("sentry.python/%s" % VERSION) + self._disabled_until = {} # type: Dict[Optional[EventDataCategory], datetime] + # We only use this Retry() class for the `get_retry_after` method it exposes + self._retry = urllib3.util.Retry() + self._discarded_events = defaultdict( + int + ) # type: DefaultDict[Tuple[EventDataCategory, str], int] + self._last_client_report_sent = time.time() + + self._pool = self._make_pool() + + experiments = options.get("_experiments", {}) + compression_level = experiments.get( + 
"transport_compression_level", + experiments.get("transport_zlib_compression_level"), + ) + compression_algo = experiments.get( + "transport_compression_algo", + ( + "gzip" + # if only compression level is set, assume gzip for backwards compatibility + # if we don't have brotli available, fallback to gzip + if compression_level is not None or brotli is None + else "br" + ), + ) + + if compression_algo == "br" and brotli is None: + logger.warning( + "You asked for brotli compression without the Brotli module, falling back to gzip -9" + ) + compression_algo = "gzip" + compression_level = None + + if compression_algo not in ("br", "gzip"): + logger.warning( + "Unknown compression algo %s, disabling compression", compression_algo + ) + self._compression_level = 0 + self._compression_algo = None + else: + self._compression_algo = compression_algo + + if compression_level is not None: + self._compression_level = compression_level + elif self._compression_algo == "gzip": + self._compression_level = 9 + elif self._compression_algo == "br": + self._compression_level = 4 + + def record_lost_event( + self, + reason, # type: str + data_category=None, # type: Optional[EventDataCategory] + item=None, # type: Optional[Item] + *, + quantity=1, # type: int + ): + # type: (...) -> None + if not self.options["send_client_reports"]: + return + + if item is not None: + data_category = item.data_category + quantity = 1 # If an item is provided, we always count it as 1 (except for attachments, handled below). + + if data_category == "transaction": + # Also record the lost spans + event = item.get_transaction_event() or {} + + # +1 for the transaction itself + span_count = ( + len(cast(List[Dict[str, object]], event.get("spans") or [])) + 1 + ) + self.record_lost_event(reason, "span", quantity=span_count) + + elif data_category == "attachment": + # quantity of 0 is actually 1 as we do not want to count + # empty attachments as actually empty. 
+ quantity = len(item.get_bytes()) or 1 + + elif data_category is None: + raise TypeError("data category not provided") + + self._discarded_events[data_category, reason] += quantity + + def _get_header_value(self, response, header): + # type: (Self, Any, str) -> Optional[str] + return response.headers.get(header) + + def _update_rate_limits(self, response): + # type: (Self, Union[urllib3.BaseHTTPResponse, httpcore.Response]) -> None + + # new sentries with more rate limit insights. We honor this header + # no matter of the status code to update our internal rate limits. + header = self._get_header_value(response, "x-sentry-rate-limits") + if header: + logger.warning("Rate-limited via x-sentry-rate-limits") + self._disabled_until.update(_parse_rate_limits(header)) + + # old sentries only communicate global rate limit hits via the + # retry-after header on 429. This header can also be emitted on new + # sentries if a proxy in front wants to globally slow things down. + elif response.status == 429: + logger.warning("Rate-limited via 429") + retry_after_value = self._get_header_value(response, "Retry-After") + retry_after = ( + self._retry.parse_retry_after(retry_after_value) + if retry_after_value is not None + else None + ) or 60 + self._disabled_until[None] = datetime.now(timezone.utc) + timedelta( + seconds=retry_after + ) + + def _send_request( + self, + body, + headers, + endpoint_type=EndpointType.ENVELOPE, + envelope=None, + ): + # type: (Self, bytes, Dict[str, str], EndpointType, Optional[Envelope]) -> None + + def record_loss(reason): + # type: (str) -> None + if envelope is None: + self.record_lost_event(reason, data_category="error") + else: + for item in envelope.items: + self.record_lost_event(reason, item=item) + + headers.update( + { + "User-Agent": str(self._auth.client), + "X-Sentry-Auth": str(self._auth.to_header()), + } + ) + try: + response = self._request( + "POST", + endpoint_type, + body, + headers, + ) + except Exception: + 
self.on_dropped_event("network") + record_loss("network_error") + raise + + try: + self._update_rate_limits(response) + + if response.status == 429: + # if we hit a 429. Something was rate limited but we already + # acted on this in `self._update_rate_limits`. Note that we + # do not want to record event loss here as we will have recorded + # an outcome in relay already. + self.on_dropped_event("status_429") + pass + + elif response.status >= 300 or response.status < 200: + logger.error( + "Unexpected status code: %s (body: %s)", + response.status, + getattr(response, "data", getattr(response, "content", None)), + ) + self.on_dropped_event("status_{}".format(response.status)) + record_loss("network_error") + finally: + response.close() + + def on_dropped_event(self, _reason): + # type: (Self, str) -> None + return None + + def _fetch_pending_client_report(self, force=False, interval=60): + # type: (Self, bool, int) -> Optional[Item] + if not self.options["send_client_reports"]: + return None + + if not (force or self._last_client_report_sent < time.time() - interval): + return None + + discarded_events = self._discarded_events + self._discarded_events = defaultdict(int) + self._last_client_report_sent = time.time() + + if not discarded_events: + return None + + return Item( + PayloadRef( + json={ + "timestamp": time.time(), + "discarded_events": [ + {"reason": reason, "category": category, "quantity": quantity} + for ( + (category, reason), + quantity, + ) in discarded_events.items() + ], + } + ), + type="client_report", + ) + + def _flush_client_reports(self, force=False): + # type: (Self, bool) -> None + client_report = self._fetch_pending_client_report(force=force, interval=60) + if client_report is not None: + self.capture_envelope(Envelope(items=[client_report])) + + def _check_disabled(self, category): + # type: (str) -> bool + def _disabled(bucket): + # type: (Any) -> bool + ts = self._disabled_until.get(bucket) + return ts is not None and ts > 
datetime.now(timezone.utc) + + return _disabled(category) or _disabled(None) + + def _is_rate_limited(self): + # type: (Self) -> bool + return any( + ts > datetime.now(timezone.utc) for ts in self._disabled_until.values() + ) + + def _is_worker_full(self): + # type: (Self) -> bool + return self._worker.full() + + def is_healthy(self): + # type: (Self) -> bool + return not (self._is_worker_full() or self._is_rate_limited()) + + def _send_envelope(self, envelope): + # type: (Self, Envelope) -> None + + # remove all items from the envelope which are over quota + new_items = [] + for item in envelope.items: + if self._check_disabled(item.data_category): + if item.data_category in ("transaction", "error", "default"): + self.on_dropped_event("self_rate_limits") + self.record_lost_event("ratelimit_backoff", item=item) + else: + new_items.append(item) + + # Since we're modifying the envelope here make a copy so that others + # that hold references do not see their envelope modified. + envelope = Envelope(headers=envelope.headers, items=new_items) + + if not envelope.items: + return None + + # since we're already in the business of sending out an envelope here + # check if we have one pending for the stats session envelopes so we + # can attach it to this enveloped scheduled for sending. This will + # currently typically attach the client report to the most recent + # session update. 
+ client_report_item = self._fetch_pending_client_report(interval=30) + if client_report_item is not None: + envelope.items.append(client_report_item) + + content_encoding, body = self._serialize_envelope(envelope) + + assert self.parsed_dsn is not None + logger.debug( + "Sending envelope [%s] project:%s host:%s", + envelope.description, + self.parsed_dsn.project_id, + self.parsed_dsn.host, + ) + + headers = { + "Content-Type": "application/x-sentry-envelope", + } + if content_encoding: + headers["Content-Encoding"] = content_encoding + + self._send_request( + body.getvalue(), + headers=headers, + endpoint_type=EndpointType.ENVELOPE, + envelope=envelope, + ) + return None + + def _serialize_envelope(self, envelope): + # type: (Self, Envelope) -> tuple[Optional[str], io.BytesIO] + content_encoding = None + body = io.BytesIO() + if self._compression_level == 0 or self._compression_algo is None: + envelope.serialize_into(body) + else: + content_encoding = self._compression_algo + if self._compression_algo == "br" and brotli is not None: + body.write( + brotli.compress( + envelope.serialize(), quality=self._compression_level + ) + ) + else: # assume gzip as we sanitize the algo value in init + with gzip.GzipFile( + fileobj=body, mode="w", compresslevel=self._compression_level + ) as f: + envelope.serialize_into(f) + + return content_encoding, body + + def _get_pool_options(self): + # type: (Self) -> Dict[str, Any] + raise NotImplementedError() + + def _in_no_proxy(self, parsed_dsn): + # type: (Self, Dsn) -> bool + no_proxy = getproxies().get("no") + if not no_proxy: + return False + for host in no_proxy.split(","): + host = host.strip() + if parsed_dsn.host.endswith(host) or parsed_dsn.netloc.endswith(host): + return True + return False + + def _make_pool(self): + # type: (Self) -> Union[PoolManager, ProxyManager, httpcore.SOCKSProxy, httpcore.HTTPProxy, httpcore.ConnectionPool] + raise NotImplementedError() + + def _request( + self, + method, + endpoint_type, + body, 
+ headers, + ): + # type: (Self, str, EndpointType, Any, Mapping[str, str]) -> Union[urllib3.BaseHTTPResponse, httpcore.Response] + raise NotImplementedError() + + def capture_envelope( + self, envelope # type: Envelope + ): + # type: (...) -> None + def send_envelope_wrapper(): + # type: () -> None + with capture_internal_exceptions(): + self._send_envelope(envelope) + self._flush_client_reports() + + if not self._worker.submit(send_envelope_wrapper): + self.on_dropped_event("full_queue") + for item in envelope.items: + self.record_lost_event("queue_overflow", item=item) + + def flush( + self, + timeout, + callback=None, + ): + # type: (Self, float, Optional[Callable[[int, float], None]]) -> None + logger.debug("Flushing HTTP transport") + + if timeout > 0: + self._worker.submit(lambda: self._flush_client_reports(force=True)) + self._worker.flush(timeout, callback) + + def kill(self): + # type: (Self) -> None + logger.debug("Killing HTTP transport") + self._worker.kill() + + +class HttpTransport(BaseHttpTransport): + if TYPE_CHECKING: + _pool: Union[PoolManager, ProxyManager] + + def _get_pool_options(self): + # type: (Self) -> Dict[str, Any] + + num_pools = self.options.get("_experiments", {}).get("transport_num_pools") + options = { + "num_pools": 2 if num_pools is None else int(num_pools), + "cert_reqs": "CERT_REQUIRED", + "timeout": urllib3.Timeout(total=self.TIMEOUT), + } + + socket_options = None # type: Optional[List[Tuple[int, int, int | bytes]]] + + if self.options["socket_options"] is not None: + socket_options = self.options["socket_options"] + + if self.options["keep_alive"]: + if socket_options is None: + socket_options = [] + + used_options = {(o[0], o[1]) for o in socket_options} + for default_option in KEEP_ALIVE_SOCKET_OPTIONS: + if (default_option[0], default_option[1]) not in used_options: + socket_options.append(default_option) + + if socket_options is not None: + options["socket_options"] = socket_options + + options["ca_certs"] = ( + 
self.options["ca_certs"] # User-provided bundle from the SDK init + or os.environ.get("SSL_CERT_FILE") + or os.environ.get("REQUESTS_CA_BUNDLE") + or certifi.where() + ) + + options["cert_file"] = self.options["cert_file"] or os.environ.get( + "CLIENT_CERT_FILE" + ) + options["key_file"] = self.options["key_file"] or os.environ.get( + "CLIENT_KEY_FILE" + ) + + return options + + def _make_pool(self): + # type: (Self) -> Union[PoolManager, ProxyManager] + if self.parsed_dsn is None: + raise ValueError("Cannot create HTTP-based transport without valid DSN") + + proxy = None + no_proxy = self._in_no_proxy(self.parsed_dsn) + + # try HTTPS first + https_proxy = self.options["https_proxy"] + if self.parsed_dsn.scheme == "https" and (https_proxy != ""): + proxy = https_proxy or (not no_proxy and getproxies().get("https")) + + # maybe fallback to HTTP proxy + http_proxy = self.options["http_proxy"] + if not proxy and (http_proxy != ""): + proxy = http_proxy or (not no_proxy and getproxies().get("http")) + + opts = self._get_pool_options() + + if proxy: + proxy_headers = self.options["proxy_headers"] + if proxy_headers: + opts["proxy_headers"] = proxy_headers + + if proxy.startswith("socks"): + use_socks_proxy = True + try: + # Check if PySocks dependency is available + from urllib3.contrib.socks import SOCKSProxyManager + except ImportError: + use_socks_proxy = False + logger.warning( + "You have configured a SOCKS proxy (%s) but support for SOCKS proxies is not installed. Disabling proxy support. 
Please add `PySocks` (or `urllib3` with the `[socks]` extra) to your dependencies.", + proxy, + ) + + if use_socks_proxy: + return SOCKSProxyManager(proxy, **opts) + else: + return urllib3.PoolManager(**opts) + else: + return urllib3.ProxyManager(proxy, **opts) + else: + return urllib3.PoolManager(**opts) + + def _request( + self, + method, + endpoint_type, + body, + headers, + ): + # type: (Self, str, EndpointType, Any, Mapping[str, str]) -> urllib3.BaseHTTPResponse + return self._pool.request( + method, + self._auth.get_api_url(endpoint_type), + body=body, + headers=headers, + ) + + +try: + import httpcore + import h2 # noqa: F401 +except ImportError: + # Sorry, no Http2Transport for you + class Http2Transport(HttpTransport): + def __init__(self, options): + # type: (Self, Dict[str, Any]) -> None + super().__init__(options) + logger.warning( + "You tried to use HTTP2Transport but don't have httpcore[http2] installed. Falling back to HTTPTransport." + ) + +else: + + class Http2Transport(BaseHttpTransport): # type: ignore + """The HTTP2 transport based on httpcore.""" + + TIMEOUT = 15 + + if TYPE_CHECKING: + _pool: Union[ + httpcore.SOCKSProxy, httpcore.HTTPProxy, httpcore.ConnectionPool + ] + + def _get_header_value(self, response, header): + # type: (Self, httpcore.Response, str) -> Optional[str] + return next( + ( + val.decode("ascii") + for key, val in response.headers + if key.decode("ascii").lower() == header + ), + None, + ) + + def _request( + self, + method, + endpoint_type, + body, + headers, + ): + # type: (Self, str, EndpointType, Any, Mapping[str, str]) -> httpcore.Response + response = self._pool.request( + method, + self._auth.get_api_url(endpoint_type), + content=body, + headers=headers, # type: ignore + extensions={ + "timeout": { + "pool": self.TIMEOUT, + "connect": self.TIMEOUT, + "write": self.TIMEOUT, + "read": self.TIMEOUT, + } + }, + ) + return response + + def _get_pool_options(self): + # type: (Self) -> Dict[str, Any] + options = { + 
"http2": self.parsed_dsn is not None + and self.parsed_dsn.scheme == "https", + "retries": 3, + } # type: Dict[str, Any] + + socket_options = ( + self.options["socket_options"] + if self.options["socket_options"] is not None + else [] + ) + + used_options = {(o[0], o[1]) for o in socket_options} + for default_option in KEEP_ALIVE_SOCKET_OPTIONS: + if (default_option[0], default_option[1]) not in used_options: + socket_options.append(default_option) + + options["socket_options"] = socket_options + + ssl_context = ssl.create_default_context() + ssl_context.load_verify_locations( + self.options["ca_certs"] # User-provided bundle from the SDK init + or os.environ.get("SSL_CERT_FILE") + or os.environ.get("REQUESTS_CA_BUNDLE") + or certifi.where() + ) + cert_file = self.options["cert_file"] or os.environ.get("CLIENT_CERT_FILE") + key_file = self.options["key_file"] or os.environ.get("CLIENT_KEY_FILE") + if cert_file is not None: + ssl_context.load_cert_chain(cert_file, key_file) + + options["ssl_context"] = ssl_context + + return options + + def _make_pool(self): + # type: (Self) -> Union[httpcore.SOCKSProxy, httpcore.HTTPProxy, httpcore.ConnectionPool] + if self.parsed_dsn is None: + raise ValueError("Cannot create HTTP-based transport without valid DSN") + proxy = None + no_proxy = self._in_no_proxy(self.parsed_dsn) + + # try HTTPS first + https_proxy = self.options["https_proxy"] + if self.parsed_dsn.scheme == "https" and (https_proxy != ""): + proxy = https_proxy or (not no_proxy and getproxies().get("https")) + + # maybe fallback to HTTP proxy + http_proxy = self.options["http_proxy"] + if not proxy and (http_proxy != ""): + proxy = http_proxy or (not no_proxy and getproxies().get("http")) + + opts = self._get_pool_options() + + if proxy: + proxy_headers = self.options["proxy_headers"] + if proxy_headers: + opts["proxy_headers"] = proxy_headers + + if proxy.startswith("socks"): + try: + if "socket_options" in opts: + socket_options = opts.pop("socket_options") + if 
socket_options: + logger.warning( + "You have defined socket_options but using a SOCKS proxy which doesn't support these. We'll ignore socket_options." + ) + return httpcore.SOCKSProxy(proxy_url=proxy, **opts) + except RuntimeError: + logger.warning( + "You have configured a SOCKS proxy (%s) but support for SOCKS proxies is not installed. Disabling proxy support.", + proxy, + ) + else: + return httpcore.HTTPProxy(proxy_url=proxy, **opts) + + return httpcore.ConnectionPool(**opts) + + +def make_transport(options): + # type: (Dict[str, Any]) -> Optional[Transport] + ref_transport = options["transport"] + + use_http2_transport = options.get("_experiments", {}).get("transport_http2", False) + + # By default, we use the http transport class + transport_cls = ( + Http2Transport if use_http2_transport else HttpTransport + ) # type: Type[Transport] + + if isinstance(ref_transport, Transport): + return ref_transport + elif isinstance(ref_transport, type) and issubclass(ref_transport, Transport): + transport_cls = ref_transport + + # if a transport class is given only instantiate it if the dsn is not + # empty or None + if options["dsn"]: + return transport_cls(options) + + return None diff --git a/src/sentry_sdk_alpha/types.py b/src/sentry_sdk_alpha/types.py new file mode 100644 index 00000000000000..7f3eb912c885ea --- /dev/null +++ b/src/sentry_sdk_alpha/types.py @@ -0,0 +1,49 @@ +""" +This module contains type definitions for the Sentry SDK's public API. +The types are re-exported from the internal module `sentry_sdk._types`. + +Disclaimer: Since types are a form of documentation, type definitions +may change in minor releases. Removing a type would be considered a +breaking change, and so we will only remove type definitions in major +releases. 
+""" + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + # Re-export types to make them available in the public API + from sentry_sdk_alpha._types import ( + Breadcrumb, + BreadcrumbHint, + Event, + EventDataCategory, + Hint, + Log, + MonitorConfig, + SamplingContext, + ) +else: + from typing import Any + + # The lines below allow the types to be imported from outside `if TYPE_CHECKING` + # guards. The types in this module are only intended to be used for type hints. + Breadcrumb = Any + BreadcrumbHint = Any + Event = Any + EventDataCategory = Any + Hint = Any + Log = Any + MonitorConfig = Any + SamplingContext = Any + + +__all__ = ( + "Breadcrumb", + "BreadcrumbHint", + "Event", + "EventDataCategory", + "Hint", + "Log", + "MonitorConfig", + "SamplingContext", +) diff --git a/src/sentry_sdk_alpha/utils.py b/src/sentry_sdk_alpha/utils.py new file mode 100644 index 00000000000000..7b3e13b7b6f0ea --- /dev/null +++ b/src/sentry_sdk_alpha/utils.py @@ -0,0 +1,1952 @@ +import base64 +import json +import linecache +import logging +import math +import os +import random +import re +import subprocess +import sys +import threading +import time +from collections import namedtuple +from datetime import datetime, timezone +from decimal import Decimal +from functools import partial, partialmethod, wraps +from numbers import Real +from urllib.parse import parse_qs, unquote, urlencode, urlsplit, urlunsplit + +try: + # Python 3.11 + from builtins import BaseExceptionGroup +except ImportError: + # Python 3.10 and below + BaseExceptionGroup = None # type: ignore + +import sentry_sdk_alpha +from sentry_sdk_alpha.consts import ( + DEFAULT_ADD_FULL_STACK, + DEFAULT_MAX_STACK_FRAMES, + DEFAULT_MAX_VALUE_LENGTH, + SPANDATA, + EndpointType, +) +from sentry_sdk_alpha._types import Annotated, AnnotatedValue, SENSITIVE_DATA_SUBSTITUTE + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from types import FrameType, TracebackType + from typing import ( + Any, + Callable, + cast, + 
ContextManager, + Dict, + Iterator, + List, + NoReturn, + Optional, + overload, + ParamSpec, + Set, + Tuple, + Type, + TypeVar, + Union, + ) + + from gevent.hub import Hub as GeventHub + from opentelemetry.util.types import AttributeValue + + from sentry_sdk_alpha._types import Event, ExcInfo + + P = ParamSpec("P") + R = TypeVar("R") + + +epoch = datetime(1970, 1, 1) + +# The logger is created here but initialized in the debug support module +logger = logging.getLogger("sentry_sdk.errors") + +_installed_modules = None + +BASE64_ALPHABET = re.compile(r"^[a-zA-Z0-9/+=]*$") + +FALSY_ENV_VALUES = frozenset(("false", "f", "n", "no", "off", "0")) +TRUTHY_ENV_VALUES = frozenset(("true", "t", "y", "yes", "on", "1")) + +MAX_STACK_FRAMES = 2000 +"""Maximum number of stack frames to send to Sentry. + +If we have more than this number of stack frames, we will stop processing +the stacktrace to avoid getting stuck in a long-lasting loop. This value +exceeds the default sys.getrecursionlimit() of 1000, so users will only +be affected by this limit if they have a custom recursion limit. +""" + +MAX_EXCEPTIONS = 25 +"""Maximum number of exceptions in a chain or group to send to Sentry. + +This is a sanity limit to avoid ending in an infinite loop of exceptions when the same exception is in the root and a leave +of the exception tree. +""" + + +def env_to_bool(value, *, strict=False): + # type: (Any, Optional[bool]) -> bool | None + """Casts an ENV variable value to boolean using the constants defined above. + In strict mode, it may return None if the value doesn't match any of the predefined values. 
+ """ + normalized = str(value).lower() if value is not None else None + + if normalized in FALSY_ENV_VALUES: + return False + + if normalized in TRUTHY_ENV_VALUES: + return True + + return None if strict else bool(value) + + +def json_dumps(data): + # type: (Any) -> bytes + """Serialize data into a compact JSON representation encoded as UTF-8.""" + return json.dumps(data, allow_nan=False, separators=(",", ":")).encode("utf-8") + + +def get_git_revision(): + # type: () -> Optional[str] + try: + with open(os.path.devnull, "w+") as null: + # prevent command prompt windows from popping up on windows + startupinfo = None + if sys.platform == "win32" or sys.platform == "cygwin": + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + + revision = ( + subprocess.Popen( + ["git", "rev-parse", "HEAD"], + startupinfo=startupinfo, + stdout=subprocess.PIPE, + stderr=null, + stdin=null, + ) + .communicate()[0] + .strip() + .decode("utf-8") + ) + except (OSError, IOError, FileNotFoundError): + return None + + return revision + + +def get_default_release(): + # type: () -> Optional[str] + """Try to guess a default release.""" + release = os.environ.get("SENTRY_RELEASE") + if release: + return release + + release = get_git_revision() + if release: + return release + + for var in ( + "HEROKU_SLUG_COMMIT", + "SOURCE_VERSION", + "CODEBUILD_RESOLVED_SOURCE_VERSION", + "CIRCLE_SHA1", + "GAE_DEPLOYMENT_ID", + ): + release = os.environ.get(var) + if release: + return release + return None + + +def get_sdk_name(installed_integrations): + # type: (List[str]) -> str + """Return the SDK name including the name of the used web framework.""" + + # Note: I can not use for example sentry_sdk.integrations.django.DjangoIntegration.identifier + # here because if django is not installed the integration is not accessible. 
+ framework_integrations = [ + "django", + "flask", + "fastapi", + "bottle", + "falcon", + "quart", + "sanic", + "starlette", + "litestar", + "starlite", + "chalice", + "serverless", + "pyramid", + "tornado", + "aiohttp", + "aws_lambda", + "gcp", + "beam", + "asgi", + "wsgi", + ] + + for integration in framework_integrations: + if integration in installed_integrations: + return "sentry.python.{}".format(integration) + + return "sentry.python" + + +class CaptureInternalException: + __slots__ = () + + def __enter__(self): + # type: () -> ContextManager[Any] + return self + + def __exit__(self, ty, value, tb): + # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]) -> bool + if ty is not None and value is not None: + capture_internal_exception((ty, value, tb)) + + return True + + +_CAPTURE_INTERNAL_EXCEPTION = CaptureInternalException() + + +def capture_internal_exceptions(): + # type: () -> ContextManager[Any] + return _CAPTURE_INTERNAL_EXCEPTION + + +def capture_internal_exception(exc_info): + # type: (ExcInfo) -> None + """ + Capture an exception that is likely caused by a bug in the SDK + itself. + + These exceptions do not end up in Sentry and are just logged instead. + """ + if sentry_sdk_alpha.get_client().is_active(): + logger.error("Internal error in sentry_sdk", exc_info=exc_info) + + +def to_timestamp(value): + # type: (datetime) -> float + return (value - epoch).total_seconds() + + +def format_timestamp(value): + # type: (datetime) -> str + """Formats a timestamp in RFC 3339 format. + + Any datetime objects with a non-UTC timezone are converted to UTC, so that all timestamps are formatted in UTC. + """ + utctime = value.astimezone(timezone.utc) + + # We use this custom formatting rather than isoformat for backwards compatibility (we have used this format for + # several years now), and isoformat is slightly different. 
+ return utctime.strftime("%Y-%m-%dT%H:%M:%S.%fZ") + + +def event_hint_with_exc_info(exc_info=None): + # type: (Optional[ExcInfo]) -> Dict[str, Optional[ExcInfo]] + """Creates a hint with the exc info filled in.""" + if exc_info is None: + exc_info = sys.exc_info() + else: + exc_info = exc_info_from_error(exc_info) + if exc_info[0] is None: + exc_info = None + return {"exc_info": exc_info} + + +class BadDsn(ValueError): + """Raised on invalid DSNs.""" + + +class Dsn: + """Represents a DSN.""" + + def __init__(self, value): + # type: (Union[Dsn, str]) -> None + if isinstance(value, Dsn): + self.__dict__ = dict(value.__dict__) + return + parts = urlsplit(str(value)) + + if parts.scheme not in ("http", "https"): + raise BadDsn("Unsupported scheme %r" % parts.scheme) + self.scheme = parts.scheme + + if parts.hostname is None: + raise BadDsn("Missing hostname") + + self.host = parts.hostname + + if parts.port is None: + self.port = self.scheme == "https" and 443 or 80 # type: int + else: + self.port = parts.port + + if not parts.username: + raise BadDsn("Missing public key") + + self.public_key = parts.username + self.secret_key = parts.password + + path = parts.path.rsplit("/", 1) + + try: + self.project_id = str(int(path.pop())) + except (ValueError, TypeError): + raise BadDsn("Invalid project in DSN (%r)" % (parts.path or "")[1:]) + + self.path = "/".join(path) + "/" + + @property + def netloc(self): + # type: () -> str + """The netloc part of a DSN.""" + rv = self.host + if (self.scheme, self.port) not in (("http", 80), ("https", 443)): + rv = "%s:%s" % (rv, self.port) + return rv + + def to_auth(self, client=None): + # type: (Optional[Any]) -> Auth + """Returns the auth info object for this dsn.""" + return Auth( + scheme=self.scheme, + host=self.netloc, + path=self.path, + project_id=self.project_id, + public_key=self.public_key, + secret_key=self.secret_key, + client=client, + ) + + def __str__(self): + # type: () -> str + return "%s://%s%s@%s%s%s" % ( + 
self.scheme, + self.public_key, + self.secret_key and "@" + self.secret_key or "", + self.netloc, + self.path, + self.project_id, + ) + + +class Auth: + """Helper object that represents the auth info.""" + + def __init__( + self, + scheme, + host, + project_id, + public_key, + secret_key=None, + version=7, + client=None, + path="/", + ): + # type: (str, str, str, str, Optional[str], int, Optional[Any], str) -> None + self.scheme = scheme + self.host = host + self.path = path + self.project_id = project_id + self.public_key = public_key + self.secret_key = secret_key + self.version = version + self.client = client + + def get_api_url( + self, type=EndpointType.ENVELOPE # type: EndpointType + ): + # type: (...) -> str + """Returns the API url for storing events.""" + return "%s://%s%sapi/%s/%s/" % ( + self.scheme, + self.host, + self.path, + self.project_id, + type.value, + ) + + def to_header(self): + # type: () -> str + """Returns the auth header a string.""" + rv = [("sentry_key", self.public_key), ("sentry_version", self.version)] + if self.client is not None: + rv.append(("sentry_client", self.client)) + if self.secret_key is not None: + rv.append(("sentry_secret", self.secret_key)) + return "Sentry " + ", ".join("%s=%s" % (key, value) for key, value in rv) + + +def get_type_name(cls): + # type: (Optional[type]) -> Optional[str] + return getattr(cls, "__qualname__", None) or getattr(cls, "__name__", None) + + +def get_type_module(cls): + # type: (Optional[type]) -> Optional[str] + mod = getattr(cls, "__module__", None) + if mod not in (None, "builtins", "__builtins__"): + return mod + return None + + +def should_hide_frame(frame): + # type: (FrameType) -> bool + try: + mod = frame.f_globals["__name__"] + if mod.startswith("sentry_sdk."): + return True + except (AttributeError, KeyError): + pass + + for flag_name in "__traceback_hide__", "__tracebackhide__": + try: + if frame.f_locals[flag_name]: + return True + except Exception: + pass + + return False + + +def 
iter_stacks(tb): + # type: (Optional[TracebackType]) -> Iterator[TracebackType] + tb_ = tb # type: Optional[TracebackType] + while tb_ is not None: + if not should_hide_frame(tb_.tb_frame): + yield tb_ + tb_ = tb_.tb_next + + +def get_lines_from_file( + filename, # type: str + lineno, # type: int + max_length=None, # type: Optional[int] + loader=None, # type: Optional[Any] + module=None, # type: Optional[str] +): + # type: (...) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]] + context_lines = 5 + source = None + if loader is not None and hasattr(loader, "get_source"): + try: + source_str = loader.get_source(module) # type: Optional[str] + except (ImportError, IOError): + source_str = None + if source_str is not None: + source = source_str.splitlines() + + if source is None: + try: + source = linecache.getlines(filename) + except (OSError, IOError): + return [], None, [] + + if not source: + return [], None, [] + + lower_bound = max(0, lineno - context_lines) + upper_bound = min(lineno + 1 + context_lines, len(source)) + + try: + pre_context = [ + strip_string(line.strip("\r\n"), max_length=max_length) + for line in source[lower_bound:lineno] + ] + context_line = strip_string(source[lineno].strip("\r\n"), max_length=max_length) + post_context = [ + strip_string(line.strip("\r\n"), max_length=max_length) + for line in source[(lineno + 1) : upper_bound] + ] + return pre_context, context_line, post_context + except IndexError: + # the file may have changed since it was loaded into memory + return [], None, [] + + +def get_source_context( + frame, # type: FrameType + tb_lineno, # type: Optional[int] + max_value_length=None, # type: Optional[int] +): + # type: (...) 
-> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]] + try: + abs_path = frame.f_code.co_filename # type: Optional[str] + except Exception: + abs_path = None + try: + module = frame.f_globals["__name__"] + except Exception: + return [], None, [] + try: + loader = frame.f_globals["__loader__"] + except Exception: + loader = None + + if tb_lineno is not None and abs_path: + lineno = tb_lineno - 1 + return get_lines_from_file( + abs_path, lineno, max_value_length, loader=loader, module=module + ) + + return [], None, [] + + +def safe_str(value): + # type: (Any) -> str + try: + return str(value) + except Exception: + return safe_repr(value) + + +def safe_repr(value): + # type: (Any) -> str + try: + return repr(value) + except Exception: + return "" + + +def filename_for_module(module, abs_path): + # type: (Optional[str], Optional[str]) -> Optional[str] + if not abs_path or not module: + return abs_path + + try: + if abs_path.endswith(".pyc"): + abs_path = abs_path[:-1] + + base_module = module.split(".", 1)[0] + if base_module == module: + return os.path.basename(abs_path) + + base_module_path = sys.modules[base_module].__file__ + if not base_module_path: + return abs_path + + return abs_path.split(base_module_path.rsplit(os.sep, 2)[0], 1)[-1].lstrip( + os.sep + ) + except Exception: + return abs_path + + +def serialize_frame( + frame, + tb_lineno=None, + include_local_variables=True, + include_source_context=True, + max_value_length=None, + custom_repr=None, +): + # type: (FrameType, Optional[int], bool, bool, Optional[int], Optional[Callable[..., Optional[str]]]) -> Dict[str, Any] + f_code = getattr(frame, "f_code", None) + if not f_code: + abs_path = None + function = None + else: + abs_path = frame.f_code.co_filename + function = frame.f_code.co_name + try: + module = frame.f_globals["__name__"] + except Exception: + module = None + + if tb_lineno is None: + tb_lineno = frame.f_lineno + + rv = { + "filename": filename_for_module(module, 
abs_path) or None, + "abs_path": os.path.abspath(abs_path) if abs_path else None, + "function": function or "", + "module": module, + "lineno": tb_lineno, + } # type: Dict[str, Any] + + if include_source_context: + rv["pre_context"], rv["context_line"], rv["post_context"] = get_source_context( + frame, tb_lineno, max_value_length + ) + + if include_local_variables: + from sentry_sdk_alpha.serializer import serialize + + rv["vars"] = serialize( + dict(frame.f_locals), is_vars=True, custom_repr=custom_repr + ) + + return rv + + +def current_stacktrace( + include_local_variables=True, # type: bool + include_source_context=True, # type: bool + max_value_length=None, # type: Optional[int] +): + # type: (...) -> Dict[str, Any] + __tracebackhide__ = True + frames = [] + + f = sys._getframe() # type: Optional[FrameType] + while f is not None: + if not should_hide_frame(f): + frames.append( + serialize_frame( + f, + include_local_variables=include_local_variables, + include_source_context=include_source_context, + max_value_length=max_value_length, + ) + ) + f = f.f_back + + frames.reverse() + + return {"frames": frames} + + +def get_errno(exc_value): + # type: (BaseException) -> Optional[Any] + return getattr(exc_value, "errno", None) + + +def get_error_message(exc_value): + # type: (Optional[BaseException]) -> str + message = ( + getattr(exc_value, "message", "") + or getattr(exc_value, "detail", "") + or safe_str(exc_value) + ) # type: str + + # __notes__ should be a list of strings when notes are added + # via add_note, but can be anything else if __notes__ is set + # directly. We only support strings in __notes__, since that + # is the correct use. 
+ notes = getattr(exc_value, "__notes__", None) # type: object + if isinstance(notes, list) and len(notes) > 0: + message += "\n" + "\n".join(note for note in notes if isinstance(note, str)) + + return message + + +def single_exception_from_error_tuple( + exc_type, # type: Optional[type] + exc_value, # type: Optional[BaseException] + tb, # type: Optional[TracebackType] + client_options=None, # type: Optional[Dict[str, Any]] + mechanism=None, # type: Optional[Dict[str, Any]] + exception_id=None, # type: Optional[int] + parent_id=None, # type: Optional[int] + source=None, # type: Optional[str] + full_stack=None, # type: Optional[list[dict[str, Any]]] +): + # type: (...) -> Dict[str, Any] + """ + Creates a dict that goes into the events `exception.values` list and is ingestible by Sentry. + + See the Exception Interface documentation for more details: + https://develop.sentry.dev/sdk/event-payloads/exception/ + """ + exception_value = {} # type: Dict[str, Any] + exception_value["mechanism"] = ( + mechanism.copy() if mechanism else {"type": "generic", "handled": True} + ) + if exception_id is not None: + exception_value["mechanism"]["exception_id"] = exception_id + + if exc_value is not None: + errno = get_errno(exc_value) + else: + errno = None + + if errno is not None: + exception_value["mechanism"].setdefault("meta", {}).setdefault( + "errno", {} + ).setdefault("number", errno) + + if source is not None: + exception_value["mechanism"]["source"] = source + + is_root_exception = exception_id == 0 + if not is_root_exception and parent_id is not None: + exception_value["mechanism"]["parent_id"] = parent_id + exception_value["mechanism"]["type"] = "chained" + + if is_root_exception and "type" not in exception_value["mechanism"]: + exception_value["mechanism"]["type"] = "generic" + + is_exception_group = BaseExceptionGroup is not None and isinstance( + exc_value, BaseExceptionGroup + ) + if is_exception_group: + exception_value["mechanism"]["is_exception_group"] = True + 
+ exception_value["module"] = get_type_module(exc_type) + exception_value["type"] = get_type_name(exc_type) + exception_value["value"] = get_error_message(exc_value) + + if client_options is None: + include_local_variables = True + include_source_context = True + max_value_length = DEFAULT_MAX_VALUE_LENGTH # fallback + custom_repr = None + else: + include_local_variables = client_options["include_local_variables"] + include_source_context = client_options["include_source_context"] + max_value_length = client_options["max_value_length"] + custom_repr = client_options.get("custom_repr") + + frames = [ + serialize_frame( + tb.tb_frame, + tb_lineno=tb.tb_lineno, + include_local_variables=include_local_variables, + include_source_context=include_source_context, + max_value_length=max_value_length, + custom_repr=custom_repr, + ) + # Process at most MAX_STACK_FRAMES + 1 frames, to avoid hanging on + # processing a super-long stacktrace. + for tb, _ in zip(iter_stacks(tb), range(MAX_STACK_FRAMES + 1)) + ] # type: List[Dict[str, Any]] + + if len(frames) > MAX_STACK_FRAMES: + # If we have more frames than the limit, we remove the stacktrace completely. + # We don't trim the stacktrace here because we have not processed the whole + # thing (see above, we stop at MAX_STACK_FRAMES + 1). Normally, Relay would + # intelligently trim by removing frames in the middle of the stacktrace, but + # since we don't have the whole stacktrace, we can't do that. Instead, we + # drop the entire stacktrace. 
+ exception_value["stacktrace"] = AnnotatedValue.removed_because_over_size_limit( + value=None + ) + + elif frames: + if not full_stack: + new_frames = frames + else: + new_frames = merge_stack_frames(frames, full_stack, client_options) + + exception_value["stacktrace"] = {"frames": new_frames} + + return exception_value + + +HAS_CHAINED_EXCEPTIONS = hasattr(Exception, "__suppress_context__") + +if HAS_CHAINED_EXCEPTIONS: + + def walk_exception_chain(exc_info): + # type: (ExcInfo) -> Iterator[ExcInfo] + exc_type, exc_value, tb = exc_info + + seen_exceptions = [] + seen_exception_ids = set() # type: Set[int] + + while ( + exc_type is not None + and exc_value is not None + and id(exc_value) not in seen_exception_ids + ): + yield exc_type, exc_value, tb + + # Avoid hashing random types we don't know anything + # about. Use the list to keep a ref so that the `id` is + # not used for another object. + seen_exceptions.append(exc_value) + seen_exception_ids.add(id(exc_value)) + + if exc_value.__suppress_context__: + cause = exc_value.__cause__ + else: + cause = exc_value.__context__ + if cause is None: + break + exc_type = type(cause) + exc_value = cause + tb = getattr(cause, "__traceback__", None) + +else: + + def walk_exception_chain(exc_info): + # type: (ExcInfo) -> Iterator[ExcInfo] + yield exc_info + + +def exceptions_from_error( + exc_type, # type: Optional[type] + exc_value, # type: Optional[BaseException] + tb, # type: Optional[TracebackType] + client_options=None, # type: Optional[Dict[str, Any]] + mechanism=None, # type: Optional[Dict[str, Any]] + exception_id=0, # type: int + parent_id=0, # type: int + source=None, # type: Optional[str] + full_stack=None, # type: Optional[list[dict[str, Any]]] +): + # type: (...) -> Tuple[int, List[Dict[str, Any]]] + """ + Converts the given exception information into the Sentry structured "exception" format. 
+ This will return a list of exceptions (a flattened tree of exceptions) in the + format of the Exception Interface documentation: + https://develop.sentry.dev/sdk/data-model/event-payloads/exception/ + + This function can handle: + - simple exceptions + - chained exceptions (raise .. from ..) + - exception groups + """ + base_exception = single_exception_from_error_tuple( + exc_type=exc_type, + exc_value=exc_value, + tb=tb, + client_options=client_options, + mechanism=mechanism, + exception_id=exception_id, + parent_id=parent_id, + source=source, + full_stack=full_stack, + ) + exceptions = [base_exception] + + parent_id = exception_id + exception_id += 1 + + if exception_id > MAX_EXCEPTIONS - 1: + return (exception_id, exceptions) + + causing_exception = None + exception_source = None + + # Add any causing exceptions, if present. + should_suppress_context = hasattr(exc_value, "__suppress_context__") and exc_value.__suppress_context__ # type: ignore + # Note: __suppress_context__ is True if the exception is raised with the `from` keyword. + if should_suppress_context: + # Explicitly chained exceptions (Like: raise NewException() from OriginalException()) + # The field `__cause__` is set to OriginalException + has_explicit_causing_exception = ( + exc_value + and hasattr(exc_value, "__cause__") + and exc_value.__cause__ is not None + ) + if has_explicit_causing_exception: + exception_source = "__cause__" + causing_exception = exc_value.__cause__ # type: ignore + else: + # Implicitly chained exceptions (when an exception occurs while handling another exception) + # The field `__context__` is set in the exception that occurs while handling another exception, + # to the other exception. 
+ has_implicit_causing_exception = ( + exc_value + and hasattr(exc_value, "__context__") + and exc_value.__context__ is not None + ) + if has_implicit_causing_exception: + exception_source = "__context__" + causing_exception = exc_value.__context__ # type: ignore + + if causing_exception: + # Some frameworks (e.g. FastAPI) wrap the causing exception in an + # ExceptionGroup that only contain one exception: the causing exception. + # This would lead to an infinite loop, so we skip the causing exception + # in this case. (because it is the same as the base_exception above) + if ( + BaseExceptionGroup is not None + and isinstance(causing_exception, BaseExceptionGroup) + and len(causing_exception.exceptions) == 1 + and causing_exception.exceptions[0] == exc_value + ): + causing_exception = None + + if causing_exception: + (exception_id, child_exceptions) = exceptions_from_error( + exc_type=type(causing_exception), + exc_value=causing_exception, + tb=getattr(causing_exception, "__traceback__", None), + client_options=client_options, + mechanism=mechanism, + exception_id=exception_id, + parent_id=parent_id, + source=exception_source, + full_stack=full_stack, + ) + exceptions.extend(child_exceptions) + + # Add child exceptions from an ExceptionGroup. 
+ is_exception_group = exc_value and hasattr(exc_value, "exceptions") + if is_exception_group: + for idx, causing_exception in enumerate(exc_value.exceptions): # type: ignore + (exception_id, child_exceptions) = exceptions_from_error( + exc_type=type(causing_exception), + exc_value=causing_exception, + tb=getattr(causing_exception, "__traceback__", None), + client_options=client_options, + mechanism=mechanism, + exception_id=exception_id, + parent_id=parent_id, + source="exceptions[%s]" % idx, + full_stack=full_stack, + ) + exceptions.extend(child_exceptions) + + return (exception_id, exceptions) + + +def exceptions_from_error_tuple( + exc_info, # type: ExcInfo + client_options=None, # type: Optional[Dict[str, Any]] + mechanism=None, # type: Optional[Dict[str, Any]] + full_stack=None, # type: Optional[list[dict[str, Any]]] +): + # type: (...) -> List[Dict[str, Any]] + """ + Convert Python's exception information into Sentry's structured "exception" format in the event. + See https://develop.sentry.dev/sdk/data-model/event-payloads/exception/ + This is the entry point for the exception handling. 
+ """ + # unpack the exception info tuple + exc_type, exc_value, tb = exc_info + + # let exceptions_from_error do the actual work + _, exceptions = exceptions_from_error( + exc_type=exc_type, + exc_value=exc_value, + tb=tb, + client_options=client_options, + mechanism=mechanism, + exception_id=0, + parent_id=0, + full_stack=full_stack, + ) + + # make sure the exceptions are sorted + # from the innermost (oldest) + # to the outermost (newest) exception + exceptions.reverse() + + return exceptions + + +def to_string(value): + # type: (str) -> str + try: + return str(value) + except UnicodeDecodeError: + return repr(value)[1:-1] + + +def iter_event_stacktraces(event): + # type: (Event) -> Iterator[Annotated[Dict[str, Any]]] + if "stacktrace" in event: + yield event["stacktrace"] + if "threads" in event: + for thread in event["threads"].get("values") or (): + if "stacktrace" in thread: + yield thread["stacktrace"] + if "exception" in event: + for exception in event["exception"].get("values") or (): + if isinstance(exception, dict) and "stacktrace" in exception: + yield exception["stacktrace"] + + +def iter_event_frames(event): + # type: (Event) -> Iterator[Dict[str, Any]] + for stacktrace in iter_event_stacktraces(event): + if isinstance(stacktrace, AnnotatedValue): + stacktrace = stacktrace.value or {} + + for frame in stacktrace.get("frames") or (): + yield frame + + +def handle_in_app(event, in_app_exclude=None, in_app_include=None, project_root=None): + # type: (Event, Optional[List[str]], Optional[List[str]], Optional[str]) -> Event + for stacktrace in iter_event_stacktraces(event): + if isinstance(stacktrace, AnnotatedValue): + stacktrace = stacktrace.value or {} + + set_in_app_in_frames( + stacktrace.get("frames"), + in_app_exclude=in_app_exclude, + in_app_include=in_app_include, + project_root=project_root, + ) + + return event + + +def set_in_app_in_frames(frames, in_app_exclude, in_app_include, project_root=None): + # type: (Any, Optional[List[str]], 
Optional[List[str]], Optional[str]) -> Optional[Any] + if not frames: + return None + + for frame in frames: + # if frame has already been marked as in_app, skip it + current_in_app = frame.get("in_app") + if current_in_app is not None: + continue + + module = frame.get("module") + + # check if module in frame is in the list of modules to include + if _module_in_list(module, in_app_include): + frame["in_app"] = True + continue + + # check if module in frame is in the list of modules to exclude + if _module_in_list(module, in_app_exclude): + frame["in_app"] = False + continue + + # if frame has no abs_path, skip further checks + abs_path = frame.get("abs_path") + if abs_path is None: + continue + + if _is_external_source(abs_path): + frame["in_app"] = False + continue + + if _is_in_project_root(abs_path, project_root): + frame["in_app"] = True + continue + + return frames + + +def exc_info_from_error(error): + # type: (Union[BaseException, ExcInfo]) -> ExcInfo + if isinstance(error, tuple) and len(error) == 3: + exc_type, exc_value, tb = error + elif isinstance(error, BaseException): + tb = getattr(error, "__traceback__", None) + if tb is not None: + exc_type = type(error) + exc_value = error + else: + exc_type, exc_value, tb = sys.exc_info() + if exc_value is not error: + tb = None + exc_value = error + exc_type = type(error) + + else: + raise ValueError("Expected Exception object to report, got %s!" % type(error)) + + exc_info = (exc_type, exc_value, tb) + + if TYPE_CHECKING: + # This cast is safe because exc_type and exc_value are either both + # None or both not None. + exc_info = cast(ExcInfo, exc_info) + + return exc_info + + +def merge_stack_frames(frames, full_stack, client_options): + # type: (List[Dict[str, Any]], List[Dict[str, Any]], Optional[Dict[str, Any]]) -> List[Dict[str, Any]] + """ + Add the missing frames from full_stack to frames and return the merged list. 
+ """ + frame_ids = { + ( + frame["abs_path"], + frame["context_line"], + frame["lineno"], + frame["function"], + ) + for frame in frames + } + + new_frames = [ + stackframe + for stackframe in full_stack + if ( + stackframe["abs_path"], + stackframe["context_line"], + stackframe["lineno"], + stackframe["function"], + ) + not in frame_ids + ] + new_frames.extend(frames) + + # Limit the number of frames + max_stack_frames = ( + client_options.get("max_stack_frames", DEFAULT_MAX_STACK_FRAMES) + if client_options + else None + ) + if max_stack_frames is not None: + new_frames = new_frames[len(new_frames) - max_stack_frames :] + + return new_frames + + +def event_from_exception( + exc_info, # type: Union[BaseException, ExcInfo] + client_options=None, # type: Optional[Dict[str, Any]] + mechanism=None, # type: Optional[Dict[str, Any]] +): + # type: (...) -> Tuple[Event, Dict[str, Any]] + exc_info = exc_info_from_error(exc_info) + hint = event_hint_with_exc_info(exc_info) + + if client_options and client_options.get("add_full_stack", DEFAULT_ADD_FULL_STACK): + full_stack = current_stacktrace( + include_local_variables=client_options["include_local_variables"], + max_value_length=client_options["max_value_length"], + )["frames"] + else: + full_stack = None + + return ( + { + "level": "error", + "exception": { + "values": exceptions_from_error_tuple( + exc_info, client_options, mechanism, full_stack + ) + }, + }, + hint, + ) + + +def _module_in_list(name, items): + # type: (Optional[str], Optional[List[str]]) -> bool + if name is None: + return False + + if not items: + return False + + for item in items: + if item == name or name.startswith(item + "."): + return True + + return False + + +def _is_external_source(abs_path): + # type: (Optional[str]) -> bool + # check if frame is in 'site-packages' or 'dist-packages' + if abs_path is None: + return False + + external_source = ( + re.search(r"[\\/](?:dist|site)-packages[\\/]", abs_path) is not None + ) + return 
external_source + + +def _is_in_project_root(abs_path, project_root): + # type: (Optional[str], Optional[str]) -> bool + if abs_path is None or project_root is None: + return False + + # check if path is in the project root + if abs_path.startswith(project_root): + return True + + return False + + +def _truncate_by_bytes(string, max_bytes): + # type: (str, int) -> str + """ + Truncate a UTF-8-encodable string to the last full codepoint so that it fits in max_bytes. + """ + truncated = string.encode("utf-8")[: max_bytes - 3].decode("utf-8", errors="ignore") + + return truncated + "..." + + +def _get_size_in_bytes(value): + # type: (str) -> Optional[int] + try: + return len(value.encode("utf-8")) + except (UnicodeEncodeError, UnicodeDecodeError): + return None + + +def strip_string(value, max_length=None): + # type: (str, Optional[int]) -> Union[AnnotatedValue, str] + if not value: + return value + + if max_length is None: + max_length = DEFAULT_MAX_VALUE_LENGTH + + byte_size = _get_size_in_bytes(value) + text_size = len(value) + + if byte_size is not None and byte_size > max_length: + # truncate to max_length bytes, preserving code points + truncated_value = _truncate_by_bytes(value, max_length) + elif text_size is not None and text_size > max_length: + # fallback to truncating by string length + truncated_value = value[: max_length - 3] + "..." + else: + return value + + return AnnotatedValue( + value=truncated_value, + metadata={ + "len": byte_size or text_size, + "rem": [["!limit", "x", max_length - 3, max_length]], + }, + ) + + +def parse_version(version): + # type: (str) -> Optional[Tuple[int, ...]] + """ + Parses a version string into a tuple of integers. + This uses the parsing loging from PEP 440: + https://peps.python.org/pep-0440/#appendix-b-parsing-version-strings-with-regular-expressions + """ + VERSION_PATTERN = r""" # noqa: N806 + v? + (?: + (?:(?P[0-9]+)!)? # epoch + (?P[0-9]+(?:\.[0-9]+)*) # release segment + (?P
                                          # pre-release
+                [-_\.]?
+                (?P(a|b|c|rc|alpha|beta|pre|preview))
+                [-_\.]?
+                (?P[0-9]+)?
+            )?
+            (?P                                         # post release
+                (?:-(?P[0-9]+))
+                |
+                (?:
+                    [-_\.]?
+                    (?Ppost|rev|r)
+                    [-_\.]?
+                    (?P[0-9]+)?
+                )
+            )?
+            (?P                                          # dev release
+                [-_\.]?
+                (?Pdev)
+                [-_\.]?
+                (?P[0-9]+)?
+            )?
+        )
+        (?:\+(?P[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+    """
+
+    pattern = re.compile(
+        r"^\s*" + VERSION_PATTERN + r"\s*$",
+        re.VERBOSE | re.IGNORECASE,
+    )
+
+    try:
+        release = pattern.match(version).groupdict()["release"]  # type: ignore
+        release_tuple = tuple(map(int, release.split(".")[:3]))  # type: Tuple[int, ...]
+    except (TypeError, ValueError, AttributeError):
+        return None
+
+    return release_tuple
+
+
def _is_contextvars_broken():
    # type: () -> bool
    """
    Returns whether gevent/eventlet have patched the stdlib in a way where thread locals are now more "correct" than contextvars.
    """
    try:
        import gevent
        from gevent.monkey import is_object_patched

        # Get the MAJOR and MINOR version numbers of Gevent
        version_tuple = tuple(
            [int(part) for part in re.split(r"a|b|rc|\.", gevent.__version__)[:2]]
        )
        if is_object_patched("threading", "local"):
            # Gevent 20.9.0 depends on Greenlet 0.4.17 which natively handles switching
            # context vars when greenlets are switched, so, Gevent 20.9.0+ is all fine.
            # Ref: https://github.com/gevent/gevent/blob/83c9e2ae5b0834b8f84233760aabe82c3ba065b4/src/gevent/monkey.py#L604-L609
            # Gevent 20.5, that doesn't depend on Greenlet 0.4.17 with native support
            # for contextvars, is able to patch both thread locals and contextvars, in
            # that case, check if contextvars are effectively patched.
            if (
                # Gevent 20.9.0+
                (sys.version_info >= (3, 7) and version_tuple >= (20, 9))
                # Gevent 20.5.0+ or Python < 3.7
                or (is_object_patched("contextvars", "ContextVar"))
            ):
                return False

            # Thread locals are patched but contextvars are not: contextvars
            # would leak state across greenlets, so report them as broken.
            return True
    except ImportError:
        pass

    try:
        import greenlet
        from eventlet.patcher import is_monkey_patched  # type: ignore

        greenlet_version = parse_version(greenlet.__version__)

        if greenlet_version is None:
            logger.error(
                "Internal error in Sentry SDK: Could not parse Greenlet version from greenlet.__version__."
            )
            return False

        # eventlet together with old greenlet (< 0.5) does not switch
        # contextvars with greenlets, so contextvars are unreliable there.
        if is_monkey_patched("thread") and greenlet_version < (0, 5):
            return True
    except ImportError:
        pass

    return False
+
+
def _make_threadlocal_contextvars(local):
    # type: (type) -> type
    """
    Build a minimal ContextVar look-alike backed by the given thread-local
    class. Used as a fallback when real contextvars are unusable (see
    _is_contextvars_broken); only get/set/reset are supported.
    """

    class ContextVar:
        # Super-limited impl of ContextVar

        def __init__(self, name, default=None):
            # type: (str, Any) -> None
            self._name = name
            self._default = default
            # current value lives on this thread-local
            self._local = local()
            # saved previous values, keyed by the token returned from set()
            self._original_local = local()

        def get(self, default=None):
            # type: (Any) -> Any
            # NOTE(review): a falsy per-call default (0, "", False) falls
            # through to self._default here — confirm that is intended.
            return getattr(self._local, "value", default or self._default)

        def set(self, value):
            # type: (Any) -> Any
            # remember the old value under a random token so reset() can restore it
            token = str(random.getrandbits(64))
            original_value = self.get()
            setattr(self._original_local, token, original_value)
            self._local.value = value
            return token

        def reset(self, token):
            # type: (Any) -> None
            self._local.value = getattr(self._original_local, token)
            # delete the original value (this way it works in Python 3.6+)
            del self._original_local.__dict__[token]

    return ContextVar
+
+
def _get_contextvars():
    # type: () -> Tuple[bool, type]
    """
    Figure out the "right" contextvars installation to use. Returns a
    `contextvars.ContextVar`-like class with a limited API.

    See https://docs.sentry.io/platforms/python/contextvars/ for more information.
    """
    if not _is_contextvars_broken():
        # On Python 3.7+ contextvars are functional.
        try:
            from contextvars import ContextVar

            return True, ContextVar
        except ImportError:
            pass

    # Fall back to basic thread-local usage.
    # (Either contextvars are broken by gevent/eventlet monkey-patching, or
    # the module is unavailable.)

    from threading import local

    return False, _make_threadlocal_contextvars(local)
+
+
# Resolve once at import time which ContextVar implementation this process
# should use; HAS_REAL_CONTEXTVARS tells callers whether it is the stdlib one.
HAS_REAL_CONTEXTVARS, ContextVar = _get_contextvars()

# Shown to users when an asyncio/ASGI app runs without working contextvars.
CONTEXTVARS_ERROR_MESSAGE = """

With asyncio/ASGI applications, the Sentry SDK requires a functional
installation of `contextvars` to avoid leaking scope/context data across
requests.

Please refer to https://docs.sentry.io/platforms/python/contextvars/ for more information.
"""
+
+
def qualname_from_function(func):
    # type: (Callable[..., Any]) -> Optional[str]
    """Return the qualified name of func. Works with regular function, lambda, partial and partialmethod."""
    func_qualname = None  # type: Optional[str]

    # Python 2
    try:
        return "%s.%s.%s" % (
            func.im_class.__module__,  # type: ignore
            func.im_class.__name__,  # type: ignore
            func.__name__,
        )
    except Exception:
        pass

    prefix, suffix = "", ""

    if isinstance(func, partial) and hasattr(func.func, "__name__"):
        # Fix: this previously read `prefix, suffix = "partial()"`, which
        # raises ValueError (a 9-char string cannot unpack into two names).
        # Restore the two-element tuple so partials render as
        # "partial(<function module.name>)".
        prefix, suffix = "partial(<function ", ">)"
        func = func.func
    else:
        # The _partialmethod attribute of methods wrapped with partialmethod() was renamed to __partialmethod__ in CPython 3.13:
        # https://github.com/python/cpython/pull/16600
        partial_method = getattr(func, "_partialmethod", None) or getattr(
            func, "__partialmethod__", None
        )
        if isinstance(partial_method, partialmethod):
            # Same fix as above for the partialmethod case.
            prefix, suffix = "partialmethod(<function ", ">)"
            func = partial_method.func

    if hasattr(func, "__qualname__"):
        func_qualname = func.__qualname__
    elif hasattr(func, "__name__"):  # Python 2.7 has no __qualname__
        func_qualname = func.__name__

    # Python 3: methods, functions, classes
    if func_qualname is not None:
        if hasattr(func, "__module__") and isinstance(func.__module__, str):
            func_qualname = func.__module__ + "." + func_qualname
        func_qualname = prefix + func_qualname + suffix

    return func_qualname
+
+
def transaction_from_function(func):
    # type: (Callable[..., Any]) -> Optional[str]
    # Transactions are currently named after the function's qualified name.
    return qualname_from_function(func)
+
+
+disable_capture_event = ContextVar("disable_capture_event")
+
+
class ServerlessTimeoutWarning(Exception):  # noqa: N818  (public name; renaming would break existing callers)
    """Raised when a serverless method is about to reach its timeout."""

    pass
+
+
class TimeoutThread(threading.Thread):
    """Creates a Thread which runs (sleeps) for a time duration equal to
    waiting_time and raises a custom ServerlessTimeout exception.
    """

    def __init__(self, waiting_time, configured_timeout):
        # type: (float, int) -> None
        threading.Thread.__init__(self)
        # seconds to wait before raising the warning
        self.waiting_time = waiting_time
        # the platform-configured timeout, used only in the message text
        self.configured_timeout = configured_timeout
        self._stop_event = threading.Event()

    def stop(self):
        # type: () -> None
        # Cancel the pending timeout warning (wakes run() immediately).
        self._stop_event.set()

    def run(self):
        # type: () -> None

        # Sleep until either the waiting time elapses or stop() is called.
        self._stop_event.wait(self.waiting_time)

        if self._stop_event.is_set():
            return

        integer_configured_timeout = int(self.configured_timeout)

        # Setting up the exact integer value of configured time(in seconds)
        if integer_configured_timeout < self.configured_timeout:
            integer_configured_timeout = integer_configured_timeout + 1

        # Raising Exception after timeout duration is reached
        # NOTE(review): this raises inside this watchdog thread, not in the
        # monitored thread — presumably surfaced via the SDK's thread
        # integration/excepthook; confirm.
        raise ServerlessTimeoutWarning(
            "WARNING : Function is expected to get timed out. Configured timeout duration = {} seconds.".format(
                integer_configured_timeout
            )
        )
+
+
def to_base64(original):
    # type: (str) -> Optional[str]
    """
    Convert a string to base64, via UTF-8. Returns None on invalid input.
    """
    base64_string = None

    try:
        utf8_bytes = original.encode("UTF-8")
        base64_bytes = base64.b64encode(utf8_bytes)
        base64_string = base64_bytes.decode("UTF-8")
    except Exception as err:
        # Fix: pass values as lazy %-style args so logging can interpolate
        # them. The old call appended `err` with no placeholder in the
        # message, which caused a formatting error inside logging and lost
        # the exception detail.
        logger.warning("Unable to encode %r to base64: %s", original, err)

    return base64_string
+
+
def from_base64(base64_string):
    # type: (str) -> Optional[str]
    """
    Convert a string from base64, via UTF-8. Returns None on invalid input.
    """
    utf8_string = None

    try:
        # Reject strings with non-base64 characters up front (b64decode
        # would silently skip them otherwise). Fix: use an explicit raise
        # instead of `assert`, which is stripped under `python -O` and would
        # silently change validation behavior.
        only_valid_chars = BASE64_ALPHABET.match(base64_string)
        if not only_valid_chars:
            raise ValueError("not a valid base64 string")

        base64_bytes = base64_string.encode("UTF-8")
        utf8_bytes = base64.b64decode(base64_bytes)
        utf8_string = utf8_bytes.decode("UTF-8")
    except Exception as err:
        # Fix: lazy %-style args so logging interpolates them; the old call
        # passed `err` with no placeholder, breaking record formatting.
        logger.warning("Unable to decode %r from base64: %s", base64_string, err)

    return utf8_string
+
+
+Components = namedtuple("Components", ["scheme", "netloc", "path", "query", "fragment"])
+
+
def sanitize_url(url, remove_authority=True, remove_query_values=True, split=False):
    # type: (str, bool, bool, bool) -> Union[str, Components]
    """
    Removes the authority and query parameter values from a given URL.

    :param remove_authority: replace a ``user:password@`` netloc prefix with
        the sensitive-data substitute (host/port are kept).
    :param remove_query_values: replace every query value; keys are kept.
    :param split: return the Components tuple instead of a reassembled URL.
    """
    parsed_url = urlsplit(url)
    query_params = parse_qs(parsed_url.query, keep_blank_values=True)

    # strip username:password (netloc can be usr:pwd@example.com)
    if remove_authority:
        netloc_parts = parsed_url.netloc.split("@")
        if len(netloc_parts) > 1:
            netloc = "%s:%s@%s" % (
                SENSITIVE_DATA_SUBSTITUTE,
                SENSITIVE_DATA_SUBSTITUTE,
                netloc_parts[-1],
            )
        else:
            netloc = parsed_url.netloc
    else:
        netloc = parsed_url.netloc

    # strip values from query string
    if remove_query_values:
        # unquote afterwards so the substitute placeholder stays readable
        query_string = unquote(
            urlencode({key: SENSITIVE_DATA_SUBSTITUTE for key in query_params})
        )
    else:
        query_string = parsed_url.query

    components = Components(
        scheme=parsed_url.scheme,
        netloc=netloc,
        query=query_string,
        path=parsed_url.path,
        fragment=parsed_url.fragment,
    )

    if split:
        return components
    else:
        return urlunsplit(components)
+
+
+ParsedUrl = namedtuple("ParsedUrl", ["url", "query", "fragment"])
+
+
def parse_url(url, sanitize=True):
    # type: (str, bool) -> ParsedUrl
    """
    Splits a URL into a url (including path), query and fragment. If sanitize is True, the query
    parameters will be sanitized to remove sensitive data. The autority (username and password)
    in the URL will always be removed.
    """
    parsed_url = sanitize_url(
        url, remove_authority=True, remove_query_values=sanitize, split=True
    )

    # rebuild scheme://netloc/path; query and fragment are returned separately
    base_url = urlunsplit(
        Components(
            scheme=parsed_url.scheme,  # type: ignore
            netloc=parsed_url.netloc,  # type: ignore
            query="",
            path=parsed_url.path,  # type: ignore
            fragment="",
        )
    )

    return ParsedUrl(
        url=base_url,
        query=parsed_url.query,  # type: ignore
        fragment=parsed_url.fragment,  # type: ignore
    )
+
+
def is_valid_sample_rate(rate, source):
    # type: (Any, str) -> bool
    """
    Check that ``rate`` is a usable sample rate: a boolean or a number in the
    inclusive range [0, 1]. Logs a warning and returns False otherwise.
    """
    # bool is a subclass of Real and NaN is a Real too, so the Real check
    # covers booleans while NaN needs an explicit test; Decimal is not a
    # Real, so it is accepted separately.
    has_valid_type = isinstance(rate, (Real, Decimal)) and not math.isnan(rate)
    if not has_valid_type:
        logger.warning(
            "{source} Given sample rate is invalid. Sample rate must be a boolean or a number between 0 and 1. Got {rate} of type {type}.".format(
                source=source, rate=rate, type=type(rate)
            )
        )
        return False

    # booleans collapse to 1.0 / 0.0 at this point
    rate = float(rate)
    if not (0 <= rate <= 1):
        logger.warning(
            "{source} Given sample rate is invalid. Sample rate must be between 0 and 1. Got {rate}.".format(
                source=source, rate=rate
            )
        )
        return False

    return True
+
+
def match_regex_list(item, regex_list=None, substring_matching=False):
    # type: (str, Optional[List[str]], bool) -> bool
    """
    Return True if ``item`` matches any pattern in ``regex_list``.

    Unless ``substring_matching`` is set, patterns that do not already end
    with ``$`` are anchored at the end before searching.
    """
    for pattern in regex_list or ():
        if not substring_matching and pattern[-1] != "$":
            pattern += "$"

        if re.search(pattern, item):
            return True

    return False
+
+
def is_sentry_url(client, url):
    # type: (sentry_sdk.client.BaseClient, str) -> bool
    """
    Determines whether the given URL matches the Sentry DSN.
    """
    if client is None:
        return False

    transport = client.transport
    if transport is None:
        return False

    parsed_dsn = transport.parsed_dsn
    if parsed_dsn is None:
        return False

    return parsed_dsn.netloc in url
+
+
def _generate_installed_modules():
    # type: () -> Iterator[Tuple[str, str]]
    """Yield (normalized_name, version) for each installed distribution, deduplicated by name."""
    try:
        from importlib import metadata

        yielded = set()
        for dist in metadata.distributions():
            name = dist.metadata.get("Name", None)  # type: ignore[attr-defined]
            # `metadata` values may be `None`, see:
            # https://github.com/python/cpython/issues/91216
            # and
            # https://github.com/python/importlib_metadata/issues/371
            if name is not None:
                normalized_name = _normalize_module_name(name)
                if dist.version is not None and normalized_name not in yielded:
                    yield normalized_name, dist.version
                    yielded.add(normalized_name)

    except ImportError:
        # < py3.8
        try:
            import pkg_resources
        except ImportError:
            # no importlib.metadata and no pkg_resources: nothing to report
            return

        for info in pkg_resources.working_set:
            yield _normalize_module_name(info.key), info.version
+
+
def _normalize_module_name(name):
    # type: (str) -> str
    # Distribution names are case-insensitive; lowercase for deduplication.
    return name.lower()
+
+
def _get_installed_modules():
    # type: () -> Dict[str, str]
    """Return {normalized_name: version} of installed distributions, computed once and cached in a module global."""
    global _installed_modules
    if _installed_modules is None:
        _installed_modules = dict(_generate_installed_modules())
    return _installed_modules
+
+
def package_version(package):
    # type: (str) -> Optional[Tuple[int, ...]]
    """Return the installed version of `package` as an int tuple, or None if missing or unparsable."""
    installed_packages = _get_installed_modules()
    version = installed_packages.get(package)
    if version is None:
        return None

    return parse_version(version)
+
+
def reraise(tp, value, tb=None):
    # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[Any]) -> NoReturn
    """Re-raise `value`, attaching traceback `tb` when it is not already the exception's own."""
    assert value is not None
    if value.__traceback__ is not tb:
        raise value.with_traceback(tb)
    raise value
+
+
def _no_op(*_a, **_k):
    # type: (*Any, **Any) -> None
    """No-op function for ensure_integration_enabled."""
    # accepts any call signature and ignores it
    pass
+
+
if TYPE_CHECKING:
    # Typing-only overloads: with an explicit original function the decorator
    # preserves R; without one, the _no_op default only fits functions that
    # return None.

    @overload
    def ensure_integration_enabled(
        integration,  # type: type[sentry_sdk.integrations.Integration]
        original_function,  # type: Callable[P, R]
    ):
        # type: (...) -> Callable[[Callable[P, R]], Callable[P, R]]
        ...

    @overload
    def ensure_integration_enabled(
        integration,  # type: type[sentry_sdk.integrations.Integration]
    ):
        # type: (...) -> Callable[[Callable[P, None]], Callable[P, None]]
        ...
+
+
def ensure_integration_enabled(
    integration,  # type: type[sentry_sdk.integrations.Integration]
    original_function=_no_op,  # type: Union[Callable[P, R], Callable[P, None]]
):
    # type: (...) -> Callable[[Callable[P, R]], Callable[P, R]]
    """
    Ensures a given integration is enabled prior to calling a Sentry-patched function.

    The function takes as its parameters the integration that must be enabled and the original
    function that the SDK is patching. The function returns a function that takes the
    decorated (Sentry-patched) function as its parameter, and returns a function that, when
    called, checks whether the given integration is enabled. If the integration is enabled, the
    function calls the decorated, Sentry-patched function. If the integration is not enabled,
    the original function is called.

    The function also takes care of preserving the original function's signature and docstring.

    Example usage:

    ```python
    @ensure_integration_enabled(MyIntegration, my_function)
    def patch_my_function():
        with sentry_sdk.start_span(...):
            return my_function()
    ```
    """
    if TYPE_CHECKING:
        # Type hint to ensure the default function has the right typing. The overloads
        # ensure the default _no_op function is only used when R is None.
        original_function = cast(Callable[P, R], original_function)

    def patcher(sentry_patched_function):
        # type: (Callable[P, R]) -> Callable[P, R]
        def runner(*args: "P.args", **kwargs: "P.kwargs"):
            # type: (...) -> R
            # integration disabled -> fall back to the unpatched behavior
            if sentry_sdk_alpha.get_client().get_integration(integration) is None:
                return original_function(*args, **kwargs)

            return sentry_patched_function(*args, **kwargs)

        # Preserve metadata of the wrapped callable; when no original was
        # supplied, copy it from the patched function instead of _no_op.
        if original_function is _no_op:
            return wraps(sentry_patched_function)(runner)

        return wraps(original_function)(runner)

    return patcher
+
+
def now():
    # type: () -> float
    # High-resolution monotonic clock for measuring durations; not wall-clock time.
    return time.perf_counter()
+
+
try:
    from gevent import get_hub as get_gevent_hub
    from gevent.monkey import is_module_patched
except ImportError:
    # gevent not installed: provide stand-ins so callers need no import guards

    # it's not great that the signatures are different, get_hub can't return None
    # consider adding an if TYPE_CHECKING to change the signature to Optional[GeventHub]
    def get_gevent_hub():  # type: ignore[misc]
        # type: () -> Optional[GeventHub]
        return None

    def is_module_patched(mod_name):
        # type: (str) -> bool
        # unable to import from gevent means no modules have been patched
        return False
+
+
def is_gevent():
    # type: () -> bool
    """Return True if gevent has monkey-patched either of Python's threading modules."""
    return any(
        is_module_patched(module_name) for module_name in ("threading", "_thread")
    )
+
+
def get_current_thread_meta(thread=None):
    # type: (Optional[threading.Thread]) -> Tuple[Optional[int], Optional[str]]
    """
    Try to get the id of the current thread, with various fall backs.
    """
    # an explicitly provided thread takes priority over everything else
    if thread is not None:
        try:
            if thread.ident is not None:
                return thread.ident, thread.name
        except AttributeError:
            pass

    # under gevent, the hub's identifier differs from what the threading
    # module reports, so prefer the hub when the app is monkey-patched
    if is_gevent():
        gevent_hub = get_gevent_hub()
        if gevent_hub is not None:
            try:
                # this is undocumented, so wrap it in try except to be safe
                return gevent_hub.thread_ident, None
            except AttributeError:
                pass

    # fall back to the current thread, then to the main thread
    for get_candidate in (threading.current_thread, threading.main_thread):
        try:
            candidate = get_candidate()
            if candidate.ident is not None:
                return candidate.ident, candidate.name
        except AttributeError:
            pass

    # we've tried everything, time to give up
    return None, None
+
+
+def _serialize_span_attribute(value):
+    # type: (Any) -> Optional[AttributeValue]
+    """Serialize an object so that it's OTel-compatible and displays nicely in Sentry."""
+    # check for allowed primitives
+    if isinstance(value, (int, str, float, bool)):
+        return value
+
+    # lists are allowed too, as long as they don't mix types
+    if isinstance(value, (list, tuple)):
+        for type_ in (int, str, float, bool):
+            if all(isinstance(item, type_) for item in value):
+                return list(value)
+
+    # if this is anything else, just try to coerce to string
+    # we prefer json.dumps since this makes things like dictionaries display
+    # nicely in the UI
+    try:
+        return json.dumps(value)
+    except TypeError:
+        try:
+            return str(value)
+        except Exception:
+            return None
+
+
# Characters that can introduce a UTC-offset suffix in an ISO 8601 timestamp.
ISO_TZ_SEPARATORS = frozenset(("+", "-"))


def datetime_from_isoformat(value):
    # type: (str) -> datetime
    """Parse an ISO 8601 timestamp into an aware UTC datetime (manual fallback for Python 3.6)."""
    try:
        result = datetime.fromisoformat(value)
    except (AttributeError, ValueError):
        # py 3.6
        timestamp_format = (
            "%Y-%m-%dT%H:%M:%S.%f" if "." in value else "%Y-%m-%dT%H:%M:%S"
        )
        if value.endswith("Z"):
            # normalize trailing Z to a numeric offset strptime understands
            value = value[:-1] + "+0000"

        if value[-6] in ISO_TZ_SEPARATORS:
            timestamp_format += "%z"
            # drop the colon in "+HH:MM" so %z accepts the offset
            value = value[:-3] + value[-2:]
        elif value[-5] in ISO_TZ_SEPARATORS:
            timestamp_format += "%z"

        result = datetime.strptime(value, timestamp_format)
    return result.astimezone(timezone.utc)
+
+
def should_be_treated_as_error(ty, value):
    # type: (Any, Any) -> bool
    """
    Decide whether a raised exception should be reported as an error.

    A ``SystemExit`` carrying a success code (0 or None) signals a normal
    shutdown, so it is not treated as an error; anything else is.
    https://docs.python.org/3/library/exceptions.html#SystemExit
    """
    is_clean_exit = (
        ty == SystemExit and hasattr(value, "code") and value.code in (0, None)
    )
    return not is_clean_exit
+
+
def http_client_status_to_breadcrumb_level(status_code):
    # type: (Optional[int]) -> str
    """Map an HTTP status to a breadcrumb level: 5xx -> error, 4xx -> warning, anything else -> info."""
    if status_code is None:
        return "info"

    if 500 <= status_code <= 599:
        return "error"

    if 400 <= status_code <= 499:
        return "warning"

    return "info"
+
+
def set_thread_info_from_span(data, span):
    # type: (Dict[str, Any], sentry_sdk.tracing.Span) -> None
    """Copy thread id/name attributes from `span` into `data` (mutates `data` in place)."""
    if span.get_attribute(SPANDATA.THREAD_ID) is not None:
        data[SPANDATA.THREAD_ID] = span.get_attribute(SPANDATA.THREAD_ID)
        # a thread name is only recorded alongside a thread id
        if span.get_attribute(SPANDATA.THREAD_NAME) is not None:
            data[SPANDATA.THREAD_NAME] = span.get_attribute(SPANDATA.THREAD_NAME)
diff --git a/src/sentry_sdk_alpha/worker.py b/src/sentry_sdk_alpha/worker.py
new file mode 100644
index 00000000000000..4030ddf6c8042e
--- /dev/null
+++ b/src/sentry_sdk_alpha/worker.py
@@ -0,0 +1,141 @@
+import os
+import threading
+
+from time import sleep, time
+from sentry_sdk_alpha._queue import Queue, FullError
+from sentry_sdk_alpha.utils import logger
+from sentry_sdk_alpha.consts import DEFAULT_QUEUE_SIZE
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Optional
+    from typing import Callable
+
+
+# Sentinel placed on the queue to tell the worker thread to shut down.
+_TERMINATOR = object()
+
+
+class BackgroundWorker:
+    """A single daemon thread draining a bounded queue of callables.
+
+    Jobs are submitted with :meth:`submit` and executed one at a time on
+    the worker thread; when the queue is full new jobs are dropped rather
+    than blocking the caller.
+    """
+
+    def __init__(self, queue_size=DEFAULT_QUEUE_SIZE):
+        # type: (int) -> None
+        self._queue = Queue(queue_size)  # type: Queue
+        self._lock = threading.Lock()
+        # Worker thread, lazily (re)started on first submit().
+        self._thread = None  # type: Optional[threading.Thread]
+        # PID the thread was started in; lets is_alive detect that the
+        # process forked (threads do not survive fork in the child).
+        self._thread_for_pid = None  # type: Optional[int]
+
+    @property
+    def is_alive(self):
+        # type: () -> bool
+        """True if the worker thread is running in the current process."""
+        if self._thread_for_pid != os.getpid():
+            return False
+        if not self._thread:
+            return False
+        return self._thread.is_alive()
+
+    def _ensure_thread(self):
+        # type: () -> None
+        # Start (or restart after fork / thread death) the worker on demand.
+        if not self.is_alive:
+            self.start()
+
+    def _timed_queue_join(self, timeout):
+        # type: (float) -> bool
+        """Wait up to ``timeout`` seconds for all queued tasks to finish.
+
+        Returns True if the queue fully drained, False on timeout.
+        Reimplements ``Queue.join`` with a deadline, which the stdlib
+        version does not support.
+        """
+        deadline = time() + timeout
+        queue = self._queue
+
+        queue.all_tasks_done.acquire()
+
+        try:
+            while queue.unfinished_tasks:
+                delay = deadline - time()
+                if delay <= 0:
+                    return False
+                queue.all_tasks_done.wait(timeout=delay)
+
+            return True
+        finally:
+            queue.all_tasks_done.release()
+
+    def start(self):
+        # type: () -> None
+        """Start the worker thread if it is not already running."""
+        with self._lock:
+            if not self.is_alive:
+                self._thread = threading.Thread(
+                    target=self._target, name="sentry-sdk.BackgroundWorker"
+                )
+                # Daemonize so a forgotten worker never blocks interpreter exit.
+                self._thread.daemon = True
+                try:
+                    self._thread.start()
+                    self._thread_for_pid = os.getpid()
+                except RuntimeError:
+                    # At this point we can no longer start because the interpreter
+                    # is already shutting down.  Sadly at this point we can no longer
+                    # send out events.
+                    self._thread = None
+
+    def kill(self):
+        # type: () -> None
+        """
+        Kill worker thread. Returns immediately. Not useful for
+        waiting on shutdown for events, use `flush` for that.
+        """
+        logger.debug("background worker got kill request")
+        with self._lock:
+            if self._thread:
+                try:
+                    # The terminator goes to the back of the queue, so jobs
+                    # already queued are still processed before shutdown.
+                    self._queue.put_nowait(_TERMINATOR)
+                except FullError:
+                    logger.debug("background worker queue full, kill failed")
+
+                # Drop our reference without joining; the thread exits on
+                # its own once it dequeues the terminator.
+                self._thread = None
+                self._thread_for_pid = None
+
+    def flush(self, timeout, callback=None):
+        # type: (float, Optional[Any]) -> None
+        """Block up to ``timeout`` seconds until queued jobs are processed.
+
+        ``callback`` (if given) is invoked with (pending_count, timeout)
+        when the initial short wait did not drain the queue.
+        """
+        logger.debug("background worker got flush request")
+        with self._lock:
+            if self.is_alive and timeout > 0.0:
+                self._wait_flush(timeout, callback)
+        logger.debug("background worker flushed")
+
+    def full(self):
+        # type: () -> bool
+        """True if the queue is at capacity (further submits would drop)."""
+        return self._queue.full()
+
+    def _wait_flush(self, timeout, callback):
+        # type: (float, Optional[Any]) -> None
+        # First try a short join so the common (near-empty) case returns
+        # quickly; only then report pending work and wait the full budget.
+        initial_timeout = min(0.1, timeout)
+        if not self._timed_queue_join(initial_timeout):
+            # NOTE(review): +1 presumably accounts for the job currently
+            # being executed, which qsize() no longer counts -- confirm.
+            pending = self._queue.qsize() + 1
+            logger.debug("%d event(s) pending on flush", pending)
+            if callback is not None:
+                callback(pending, timeout)
+
+            if not self._timed_queue_join(timeout - initial_timeout):
+                pending = self._queue.qsize() + 1
+                logger.error("flush timed out, dropped %s events", pending)
+
+    def submit(self, callback):
+        # type: (Callable[[], None]) -> bool
+        """Enqueue ``callback`` for execution; False if dropped (queue full)."""
+        self._ensure_thread()
+        try:
+            self._queue.put_nowait(callback)
+            return True
+        except FullError:
+            return False
+
+    def _target(self):
+        # type: () -> None
+        # Worker thread main loop: run jobs until the terminator arrives.
+        while True:
+            callback = self._queue.get()
+            try:
+                if callback is _TERMINATOR:
+                    break
+                try:
+                    callback()
+                except Exception:
+                    # A failing job must never take down the worker thread.
+                    logger.error("Failed processing job", exc_info=True)
+            finally:
+                # Always mark the task done so flush()/join() can make progress.
+                self._queue.task_done()
+            # Yield the GIL between jobs to avoid starving other threads.
+            sleep(0)

From a5a8885e7a5dd68f300589f4f14fe89ed9b19503 Mon Sep 17 00:00:00 2001
From: "getsantry[bot]" <66042841+getsantry[bot]@users.noreply.github.com>
Date: Thu, 15 May 2025 08:21:14 +0000
Subject: [PATCH 09/22] :hammer_and_wrench: apply pre-commit fixes

---
 src/sentry_sdk_alpha/__init__.py              |   8 +-
 src/sentry_sdk_alpha/_compat.py               |   4 +-
 src/sentry_sdk_alpha/_log_batcher.py          |  16 +-
 src/sentry_sdk_alpha/_queue.py                |   2 -
 src/sentry_sdk_alpha/_types.py                | 171 +++++++-----------
 src/sentry_sdk_alpha/_werkzeug.py             |   5 +-
 src/sentry_sdk_alpha/ai/monitoring.py         |  26 +--
 src/sentry_sdk_alpha/api.py                   |  37 ++--
 src/sentry_sdk_alpha/attachments.py           |  10 +-
 src/sentry_sdk_alpha/client.py                | 124 +++++--------
 src/sentry_sdk_alpha/consts.py                |  53 ++----
 src/sentry_sdk_alpha/crons/__init__.py        |   1 -
 src/sentry_sdk_alpha/crons/api.py             |   4 +-
 src/sentry_sdk_alpha/crons/decorator.py       |  15 +-
 src/sentry_sdk_alpha/debug.py                 |   4 +-
 src/sentry_sdk_alpha/envelope.py              |  21 +--
 src/sentry_sdk_alpha/feature_flags.py         |  18 +-
 src/sentry_sdk_alpha/integrations/__init__.py |  36 +---
 .../integrations/_asgi_common.py              |  17 +-
 .../integrations/_wsgi_common.py              |  21 +--
 src/sentry_sdk_alpha/integrations/aiohttp.py  |  59 +++---
 .../integrations/anthropic.py                 |  22 +--
 src/sentry_sdk_alpha/integrations/argv.py     |   3 +-
 src/sentry_sdk_alpha/integrations/ariadne.py  |  19 +-
 src/sentry_sdk_alpha/integrations/arq.py      |  32 ++--
 src/sentry_sdk_alpha/integrations/asgi.py     |  31 +---
 src/sentry_sdk_alpha/integrations/asyncio.py  |  14 +-
 src/sentry_sdk_alpha/integrations/asyncpg.py  |  22 +--
 src/sentry_sdk_alpha/integrations/atexit.py   |   9 +-
 .../integrations/aws_lambda.py                |  72 +++-----
 src/sentry_sdk_alpha/integrations/beam.py     |   9 +-
 src/sentry_sdk_alpha/integrations/boto3.py    |  14 +-
 src/sentry_sdk_alpha/integrations/bottle.py   |  46 ++---
 .../integrations/celery/__init__.py           |  50 ++---
 .../integrations/celery/beat.py               |  40 ++--
 .../integrations/celery/utils.py              |   1 +
 src/sentry_sdk_alpha/integrations/chalice.py  |  24 +--
 .../integrations/clickhouse_driver.py         |  23 +--
 .../integrations/cloud_resource_context.py    |  40 ++--
 src/sentry_sdk_alpha/integrations/cohere.py   |  28 +--
 src/sentry_sdk_alpha/integrations/dedupe.py   |   6 +-
 .../integrations/django/__init__.py           |  69 +++----
 .../integrations/django/asgi.py               |  17 +-
 .../integrations/django/caching.py            |  27 +--
 .../integrations/django/middleware.py         |  17 +-
 .../integrations/django/signals_handlers.py   |  13 +-
 .../integrations/django/templates.py          |  17 +-
 .../integrations/django/transactions.py       |   1 -
 .../integrations/django/views.py              |   4 +-
 src/sentry_sdk_alpha/integrations/dramatiq.py |  21 +--
 .../integrations/excepthook.py                |  20 +-
 .../integrations/executing.py                 |   8 +-
 src/sentry_sdk_alpha/integrations/falcon.py   |  23 +--
 src/sentry_sdk_alpha/integrations/fastapi.py  |  20 +-
 src/sentry_sdk_alpha/integrations/flask.py    |  18 +-
 src/sentry_sdk_alpha/integrations/gcp.py      |  26 +--
 .../integrations/gnu_backtrace.py             |   4 +-
 src/sentry_sdk_alpha/integrations/gql.py      |  21 +--
 src/sentry_sdk_alpha/integrations/graphene.py |   4 +-
 .../integrations/grpc/__init__.py             |  23 +--
 .../integrations/grpc/aio/__init__.py         |   2 +-
 .../integrations/grpc/aio/client.py           |  17 +-
 .../integrations/grpc/aio/server.py           |   4 +-
 .../integrations/grpc/client.py               |  19 +-
 .../integrations/grpc/server.py               |  10 +-
 src/sentry_sdk_alpha/integrations/httpx.py    |  16 +-
 src/sentry_sdk_alpha/integrations/huey.py     |  28 +--
 .../integrations/huggingface_hub.py           |  33 +---
 .../integrations/langchain.py                 |  34 ++--
 .../integrations/launchdarkly.py              |   6 +-
 src/sentry_sdk_alpha/integrations/litestar.py |  30 ++-
 src/sentry_sdk_alpha/integrations/logging.py  |  43 ++---
 src/sentry_sdk_alpha/integrations/loguru.py   |  17 +-
 src/sentry_sdk_alpha/integrations/modules.py  |   5 +-
 src/sentry_sdk_alpha/integrations/openai.py   |  58 ++----
 .../integrations/pure_eval.py                 |  13 +-
 src/sentry_sdk_alpha/integrations/pymongo.py  |  10 +-
 src/sentry_sdk_alpha/integrations/pyramid.py  |  17 +-
 src/sentry_sdk_alpha/integrations/quart.py    |  13 +-
 src/sentry_sdk_alpha/integrations/ray.py      |   4 +-
 .../integrations/redis/__init__.py            |   6 +-
 .../integrations/redis/_async_common.py       |  17 +-
 .../integrations/redis/_sync_common.py        |   8 +-
 .../integrations/redis/modules/caches.py      |   7 +-
 .../integrations/redis/modules/queries.py     |   8 +-
 .../integrations/redis/redis.py               |   7 +-
 .../integrations/redis/redis_cluster.py       |  12 +-
 .../integrations/redis/utils.py               |   8 +-
 src/sentry_sdk_alpha/integrations/rq.py       |  25 +--
 .../integrations/rust_tracing.py              |  35 ++--
 src/sentry_sdk_alpha/integrations/sanic.py    |  42 ++---
 .../integrations/serverless.py                |  11 +-
 src/sentry_sdk_alpha/integrations/socket.py   |   4 +-
 .../integrations/spark/spark_driver.py        |  22 +--
 .../integrations/spark/spark_worker.py        |  18 +-
 .../integrations/sqlalchemy.py                |  14 +-
 .../integrations/starlette.py                 |  52 ++----
 src/sentry_sdk_alpha/integrations/starlite.py |  25 +--
 src/sentry_sdk_alpha/integrations/statsig.py  |   4 +-
 src/sentry_sdk_alpha/integrations/stdlib.py   |  23 +--
 .../integrations/strawberry.py                |  35 ++--
 src/sentry_sdk_alpha/integrations/sys_exit.py |   4 +-
 .../integrations/threading.py                 |  18 +-
 src/sentry_sdk_alpha/integrations/tornado.py  |  33 ++--
 src/sentry_sdk_alpha/integrations/trytond.py  |   9 +-
 src/sentry_sdk_alpha/integrations/typer.py    |  20 +-
 src/sentry_sdk_alpha/integrations/unleash.py  |   2 +-
 src/sentry_sdk_alpha/integrations/wsgi.py     |  37 ++--
 src/sentry_sdk_alpha/monitor.py               |   9 +-
 src/sentry_sdk_alpha/opentelemetry/consts.py  |   2 +-
 .../opentelemetry/contextvars_context.py      |  21 +--
 .../opentelemetry/propagator.py               |  25 +--
 src/sentry_sdk_alpha/opentelemetry/sampler.py |  37 ++--
 src/sentry_sdk_alpha/opentelemetry/scope.py   |  39 ++--
 .../opentelemetry/span_processor.py           |  51 ++----
 src/sentry_sdk_alpha/opentelemetry/tracing.py |   8 +-
 src/sentry_sdk_alpha/opentelemetry/utils.py   |  61 +++----
 src/sentry_sdk_alpha/profiler/__init__.py     |   5 +-
 .../profiler/continuous_profiler.py           |  67 ++-----
 .../profiler/transaction_profiler.py          |  95 +++-------
 src/sentry_sdk_alpha/profiler/utils.py        |  58 +++---
 src/sentry_sdk_alpha/scope.py                 | 103 +++--------
 src/sentry_sdk_alpha/scrubber.py              |  21 +--
 src/sentry_sdk_alpha/serializer.py            |  40 ++--
 src/sentry_sdk_alpha/session.py               |   8 +-
 src/sentry_sdk_alpha/sessions.py              |  13 +-
 src/sentry_sdk_alpha/spotlight.py             |  34 ++--
 src/sentry_sdk_alpha/tracing.py               |  84 +++------
 src/sentry_sdk_alpha/tracing_utils.py         |  59 +++---
 src/sentry_sdk_alpha/transport.py             |  80 +++-----
 src/sentry_sdk_alpha/utils.py                 |  96 ++++------
 src/sentry_sdk_alpha/worker.py                |  14 +-
 132 files changed, 1223 insertions(+), 2202 deletions(-)

diff --git a/src/sentry_sdk_alpha/__init__.py b/src/sentry_sdk_alpha/__init__.py
index 3862499cc9ceea..8c908ce9184d15 100644
--- a/src/sentry_sdk_alpha/__init__.py
+++ b/src/sentry_sdk_alpha/__init__.py
@@ -1,12 +1,10 @@
 # TODO-neel scope switch
 # TODO-neel avoid duplication between api and __init__
-from sentry_sdk_alpha.opentelemetry.scope import PotelScope as Scope
-from sentry_sdk_alpha.transport import Transport, HttpTransport
-from sentry_sdk_alpha.client import Client
-
 from sentry_sdk_alpha.api import *  # noqa
-
+from sentry_sdk_alpha.client import Client
 from sentry_sdk_alpha.consts import VERSION  # noqa
+from sentry_sdk_alpha.opentelemetry.scope import PotelScope as Scope
+from sentry_sdk_alpha.transport import HttpTransport, Transport
 
 __all__ = [  # noqa
     "Scope",
diff --git a/src/sentry_sdk_alpha/_compat.py b/src/sentry_sdk_alpha/_compat.py
index 87e7844cda31a2..f43ea04f35b9c3 100644
--- a/src/sentry_sdk_alpha/_compat.py
+++ b/src/sentry_sdk_alpha/_compat.py
@@ -1,10 +1,8 @@
 import sys
-
 from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import TypeVar
+    from typing import Any, TypeVar
 
     T = TypeVar("T")
 
diff --git a/src/sentry_sdk_alpha/_log_batcher.py b/src/sentry_sdk_alpha/_log_batcher.py
index aa121e22f16325..49f40a882eb9e3 100644
--- a/src/sentry_sdk_alpha/_log_batcher.py
+++ b/src/sentry_sdk_alpha/_log_batcher.py
@@ -1,11 +1,12 @@
 import os
 import random
 import threading
+from collections.abc import Callable
 from datetime import datetime, timezone
-from typing import Optional, List, Callable, TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, List, Optional
 
-from sentry_sdk_alpha.utils import format_timestamp, safe_repr
 from sentry_sdk_alpha.envelope import Envelope, Item, PayloadRef
+from sentry_sdk_alpha.utils import format_timestamp, safe_repr
 
 if TYPE_CHECKING:
     from sentry_sdk_alpha._types import Log
@@ -121,9 +122,7 @@ def format_attribute(val):
             "trace_id": log.get("trace_id", "00000000-0000-0000-0000-000000000000"),
             "level": str(log["severity_text"]),
             "body": str(log["body"]),
-            "attributes": {
-                k: format_attribute(v) for (k, v) in log["attributes"].items()
-            },
+            "attributes": {k: format_attribute(v) for (k, v) in log["attributes"].items()},
         }
 
         return res
@@ -131,9 +130,7 @@ def format_attribute(val):
     def _flush(self):
         # type: (...) -> Optional[Envelope]
 
-        envelope = Envelope(
-            headers={"sent_at": format_timestamp(datetime.now(timezone.utc))}
-        )
+        envelope = Envelope(headers={"sent_at": format_timestamp(datetime.now(timezone.utc))})
         with self._lock:
             if len(self._log_buffer) == 0:
                 return None
@@ -148,8 +145,7 @@ def _flush(self):
                     payload=PayloadRef(
                         json={
                             "items": [
-                                self._log_to_transport_format(log)
-                                for log in self._log_buffer
+                                self._log_to_transport_format(log) for log in self._log_buffer
                             ]
                         }
                     ),
diff --git a/src/sentry_sdk_alpha/_queue.py b/src/sentry_sdk_alpha/_queue.py
index a21c86ec0aeb7f..99b3916bb65396 100644
--- a/src/sentry_sdk_alpha/_queue.py
+++ b/src/sentry_sdk_alpha/_queue.py
@@ -72,10 +72,8 @@
 """
 
 import threading
-
 from collections import deque
 from time import time
-
 from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
diff --git a/src/sentry_sdk_alpha/_types.py b/src/sentry_sdk_alpha/_types.py
index 79260e3431f3cf..efd482ab2d19b3 100644
--- a/src/sentry_sdk_alpha/_types.py
+++ b/src/sentry_sdk_alpha/_types.py
@@ -1,6 +1,5 @@
 from typing import TYPE_CHECKING, TypeVar, Union
 
-
 # Re-exported for compat, since code out there in the wild might use this variable.
 MYPY = TYPE_CHECKING
 
@@ -98,18 +97,10 @@ def substituted_because_contains_sensitive_data(cls):
 
 
 if TYPE_CHECKING:
-    from collections.abc import Container, MutableMapping, Sequence
-
+    from collections.abc import Callable, Container, Mapping, MutableMapping, Sequence
     from datetime import datetime
-
     from types import TracebackType
-    from typing import Any
-    from typing import Callable
-    from typing import Dict
-    from typing import Mapping
-    from typing import Optional
-    from typing import Type
-    from typing_extensions import Literal, TypedDict
+    from typing import Any, Dict, Literal, Optional, Type, TypedDict
 
     class SDKInfo(TypedDict):
         name: str
@@ -119,87 +110,73 @@ class SDKInfo(TypedDict):
     # "critical" is an alias of "fatal" recognized by Relay
     LogLevelStr = Literal["fatal", "critical", "error", "warning", "info", "debug"]
 
-    Event = TypedDict(
-        "Event",
-        {
-            "breadcrumbs": Annotated[
-                dict[Literal["values"], list[dict[str, Any]]]
-            ],  # TODO: We can expand on this type
-            "check_in_id": str,
-            "contexts": dict[str, dict[str, object]],
-            "dist": str,
-            "duration": Optional[float],
-            "environment": str,
-            "errors": list[dict[str, Any]],  # TODO: We can expand on this type
-            "event_id": str,
-            "exception": dict[
-                Literal["values"], list[dict[str, Any]]
-            ],  # TODO: We can expand on this type
-            "extra": MutableMapping[str, object],
-            "fingerprint": list[str],
-            "level": LogLevelStr,
-            "logentry": Mapping[str, object],
-            "logger": str,
-            "message": str,
-            "modules": dict[str, str],
-            "monitor_config": Mapping[str, object],
-            "monitor_slug": Optional[str],
-            "platform": Literal["python"],
-            "profile": object,  # Should be sentry_sdk.profiler.Profile, but we can't import that here due to circular imports
-            "release": str,
-            "request": dict[str, object],
-            "sdk": Mapping[str, object],
-            "server_name": str,
-            "spans": Annotated[list[dict[str, object]]],
-            "stacktrace": dict[
-                str, object
-            ],  # We access this key in the code, but I am unsure whether we ever set it
-            "start_timestamp": datetime,
-            "status": Optional[str],
-            "tags": MutableMapping[
-                str, str
-            ],  # Tags must be less than 200 characters each
-            "threads": dict[
-                Literal["values"], list[dict[str, Any]]
-            ],  # TODO: We can expand on this type
-            "timestamp": Optional[datetime],  # Must be set before sending the event
-            "transaction": str,
-            "transaction_info": Mapping[str, Any],  # TODO: We can expand on this type
-            "type": Literal["check_in", "transaction"],
-            "user": dict[str, object],
-            "_dropped_spans": int,
-        },
-        total=False,
-    )
+    class Event(TypedDict, total=False):
+        breadcrumbs: Annotated[dict[Literal["values"], list[dict[str, Any]]]]
+        # TODO: We can expand on this type
+        check_in_id: str
+        contexts: dict[str, dict[str, object]]
+        dist: str
+        duration: Optional[float]
+        environment: str
+        errors: list[dict[str, Any]]  # TODO: We can expand on this type
+        event_id: str
+        exception: dict[Literal["values"], list[dict[str, Any]]]
+        # TODO: We can expand on this type
+        extra: MutableMapping[str, object]
+        fingerprint: list[str]
+        level: LogLevelStr
+        logentry: Mapping[str, object]
+        logger: str
+        message: str
+        modules: dict[str, str]
+        monitor_config: Mapping[str, object]
+        monitor_slug: Optional[str]
+        platform: Literal["python"]
+        profile: object  # Should be sentry_sdk.profiler.Profile, but we can't import that here due to circular imports
+        release: str
+        request: dict[str, object]
+        sdk: Mapping[str, object]
+        server_name: str
+        spans: Annotated[list[dict[str, object]]]
+        stacktrace: dict[str, object]
+        # We access this key in the code, but I am unsure whether we ever set it
+        start_timestamp: datetime
+        status: Optional[str]
+        tags: MutableMapping[str, str]
+        # Tags must be less than 200 characters each
+        threads: dict[Literal["values"], list[dict[str, Any]]]
+        # TODO: We can expand on this type
+        timestamp: Optional[datetime]  # Must be set before sending the event
+        transaction: str
+        transaction_info: Mapping[str, Any]  # TODO: We can expand on this type
+        type: Literal["check_in", "transaction"]
+        user: dict[str, object]
+        _dropped_spans: int
 
     ExcInfo = Union[
-        tuple[Type[BaseException], BaseException, Optional[TracebackType]],
+        tuple[type[BaseException], BaseException, Optional[TracebackType]],
         tuple[None, None, None],
     ]
 
     # TODO: Make a proper type definition for this (PRs welcome!)
-    Hint = Dict[str, Any]
-
-    Log = TypedDict(
-        "Log",
-        {
-            "severity_text": str,
-            "severity_number": int,
-            "body": str,
-            "attributes": dict[str, str | bool | float | int],
-            "time_unix_nano": int,
-            "trace_id": Optional[str],
-        },
-    )
+    Hint = dict[str, Any]
+
+    class Log(TypedDict):
+        severity_text: str
+        severity_number: int
+        body: str
+        attributes: dict[str, str | bool | float | int]
+        time_unix_nano: int
+        trace_id: Optional[str]
 
     # TODO: Make a proper type definition for this (PRs welcome!)
-    Breadcrumb = Dict[str, Any]
+    Breadcrumb = dict[str, Any]
 
     # TODO: Make a proper type definition for this (PRs welcome!)
-    BreadcrumbHint = Dict[str, Any]
+    BreadcrumbHint = dict[str, Any]
 
     # TODO: Make a proper type definition for this (PRs welcome!)
-    SamplingContext = Dict[str, Any]
+    SamplingContext = dict[str, Any]
 
     EventProcessor = Callable[[Event, Hint], Optional[Event]]
     ErrorProcessor = Callable[[Event, ExcInfo], Optional[Event]]
@@ -243,28 +220,18 @@ class SDKInfo(TypedDict):
         "second",  # not supported in Sentry and will result in a warning
     ]
 
-    MonitorConfigSchedule = TypedDict(
-        "MonitorConfigSchedule",
-        {
-            "type": MonitorConfigScheduleType,
-            "value": Union[int, str],
-            "unit": MonitorConfigScheduleUnit,
-        },
-        total=False,
-    )
-
-    MonitorConfig = TypedDict(
-        "MonitorConfig",
-        {
-            "schedule": MonitorConfigSchedule,
-            "timezone": str,
-            "checkin_margin": int,
-            "max_runtime": int,
-            "failure_issue_threshold": int,
-            "recovery_threshold": int,
-        },
-        total=False,
-    )
+    class MonitorConfigSchedule(TypedDict, total=False):
+        type: MonitorConfigScheduleType
+        value: Union[int, str]
+        unit: MonitorConfigScheduleUnit
+
+    class MonitorConfig(TypedDict, total=False):
+        schedule: MonitorConfigSchedule
+        timezone: str
+        checkin_margin: int
+        max_runtime: int
+        failure_issue_threshold: int
+        recovery_threshold: int
 
     HttpStatusCodeRange = Union[int, Container[int]]
 
diff --git a/src/sentry_sdk_alpha/_werkzeug.py b/src/sentry_sdk_alpha/_werkzeug.py
index 0fa3d611f154b4..79dda2f0529f3e 100644
--- a/src/sentry_sdk_alpha/_werkzeug.py
+++ b/src/sentry_sdk_alpha/_werkzeug.py
@@ -35,9 +35,8 @@
 from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
-    from typing import Dict
-    from typing import Iterator
-    from typing import Tuple
+    from collections.abc import Iterator
+    from typing import Dict, Tuple
 
 
 #
diff --git a/src/sentry_sdk_alpha/ai/monitoring.py b/src/sentry_sdk_alpha/ai/monitoring.py
index 7122b45d998129..1f824d9bc3b0ae 100644
--- a/src/sentry_sdk_alpha/ai/monitoring.py
+++ b/src/sentry_sdk_alpha/ai/monitoring.py
@@ -1,16 +1,16 @@
 import inspect
 from functools import wraps
+from typing import TYPE_CHECKING
 
-from sentry_sdk_alpha.consts import SPANDATA
 import sentry_sdk_alpha.utils
 from sentry_sdk_alpha import start_span
+from sentry_sdk_alpha.consts import SPANDATA
 from sentry_sdk_alpha.tracing import Span
 from sentry_sdk_alpha.utils import ContextVar
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Optional, Callable, Any
+    from collections.abc import Callable
+    from typing import Any, Optional
 
 _ai_pipeline_name = ContextVar("ai_pipeline_name", default=None)
 
@@ -34,9 +34,7 @@ def sync_wrapped(*args, **kwargs):
             curr_pipeline = _ai_pipeline_name.get()
             op = span_kwargs.get("op", "ai.run" if curr_pipeline else "ai.pipeline")
 
-            with start_span(
-                name=description, op=op, only_if_parent=True, **span_kwargs
-            ) as span:
+            with start_span(name=description, op=op, only_if_parent=True, **span_kwargs) as span:
                 for k, v in kwargs.pop("sentry_tags", {}).items():
                     span.set_tag(k, v)
                 for k, v in kwargs.pop("sentry_data", {}).items():
@@ -65,9 +63,7 @@ async def async_wrapped(*args, **kwargs):
             curr_pipeline = _ai_pipeline_name.get()
             op = span_kwargs.get("op", "ai.run" if curr_pipeline else "ai.pipeline")
 
-            with start_span(
-                name=description, op=op, only_if_parent=True, **span_kwargs
-            ) as span:
+            with start_span(name=description, op=op, only_if_parent=True, **span_kwargs) as span:
                 for k, v in kwargs.pop("sentry_tags", {}).items():
                     span.set_tag(k, v)
                 for k, v in kwargs.pop("sentry_data", {}).items():
@@ -99,9 +95,7 @@ async def async_wrapped(*args, **kwargs):
     return decorator
 
 
-def record_token_usage(
-    span, prompt_tokens=None, completion_tokens=None, total_tokens=None
-):
+def record_token_usage(span, prompt_tokens=None, completion_tokens=None, total_tokens=None):
     # type: (Span, Optional[int], Optional[int], Optional[int]) -> None
     ai_pipeline_name = get_ai_pipeline_name()
     if ai_pipeline_name:
@@ -110,11 +104,7 @@ def record_token_usage(
         span.set_attribute(SPANDATA.AI_PROMPT_TOKENS_USED, prompt_tokens)
     if completion_tokens is not None:
         span.set_attribute(SPANDATA.AI_COMPLETION_TOKENS_USED, completion_tokens)
-    if (
-        total_tokens is None
-        and prompt_tokens is not None
-        and completion_tokens is not None
-    ):
+    if total_tokens is None and prompt_tokens is not None and completion_tokens is not None:
         total_tokens = prompt_tokens + completion_tokens
     if total_tokens is not None:
         span.set_attribute(SPANDATA.AI_TOTAL_TOKENS_USED, total_tokens)
diff --git a/src/sentry_sdk_alpha/api.py b/src/sentry_sdk_alpha/api.py
index deb4649ab5a8b7..78eaf237951796 100644
--- a/src/sentry_sdk_alpha/api.py
+++ b/src/sentry_sdk_alpha/api.py
@@ -1,34 +1,25 @@
 import inspect
 from contextlib import contextmanager
+from typing import TYPE_CHECKING
 
-from sentry_sdk_alpha import tracing_utils, Client
+from sentry_sdk_alpha import Client, tracing_utils
 from sentry_sdk_alpha._init_implementation import init
-from sentry_sdk_alpha.tracing import trace
 from sentry_sdk_alpha.crons import monitor
-
-# TODO-neel-potel make 2 scope strategies/impls and switch
-from sentry_sdk_alpha.scope import Scope as BaseScope
+from sentry_sdk_alpha.opentelemetry.scope import PotelScope as Scope
 from sentry_sdk_alpha.opentelemetry.scope import (
-    PotelScope as Scope,
-    new_scope,
     isolation_scope,
-    use_scope,
+    new_scope,
     use_isolation_scope,
+    use_scope,
 )
 
-
-from typing import TYPE_CHECKING
+# TODO-neel-potel make 2 scope strategies/impls and switch
+from sentry_sdk_alpha.scope import Scope as BaseScope
+from sentry_sdk_alpha.tracing import trace
 
 if TYPE_CHECKING:
-    from collections.abc import Mapping
-
-    from typing import Any
-    from typing import Dict
-    from typing import Optional
-    from typing import Callable
-    from typing import TypeVar
-    from typing import Union
-    from typing import Generator
+    from collections.abc import Callable, Generator, Mapping
+    from typing import Any, Dict, Optional, TypeVar, Union
 
     import sentry_sdk_alpha
 
@@ -74,7 +65,7 @@
 
 def scopemethod(f):
     # type: (F) -> F
-    f.__doc__ = "%s\n\n%s" % (
+    f.__doc__ = "{}\n\n{}".format(
         "Alias for :py:meth:`sentry_sdk.Scope.%s`" % f.__name__,
         inspect.getdoc(getattr(Scope, f.__name__)),
     )
@@ -83,7 +74,7 @@ def scopemethod(f):
 
 def clientmethod(f):
     # type: (F) -> F
-    f.__doc__ = "%s\n\n%s" % (
+    f.__doc__ = "{}\n\n{}".format(
         "Alias for :py:meth:`sentry_sdk.Client.%s`" % f.__name__,
         inspect.getdoc(getattr(Client, f.__name__)),
     )
@@ -157,9 +148,7 @@ def capture_message(
     **scope_kwargs,  # type: Any
 ):
     # type: (...) -> Optional[str]
-    return get_current_scope().capture_message(
-        message, level, scope=scope, **scope_kwargs
-    )
+    return get_current_scope().capture_message(message, level, scope=scope, **scope_kwargs)
 
 
 @scopemethod
diff --git a/src/sentry_sdk_alpha/attachments.py b/src/sentry_sdk_alpha/attachments.py
index 2e0b2ee89432d4..92b379713d70cd 100644
--- a/src/sentry_sdk_alpha/attachments.py
+++ b/src/sentry_sdk_alpha/attachments.py
@@ -1,12 +1,12 @@
-import os
 import mimetypes
+import os
+from typing import TYPE_CHECKING
 
 from sentry_sdk_alpha.envelope import Item, PayloadRef
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Optional, Union, Callable
+    from collections.abc import Callable
+    from typing import Optional, Union
 
 
 class Attachment:
@@ -72,4 +72,4 @@ def to_envelope_item(self):
 
     def __repr__(self):
         # type: () -> str
-        return "" % (self.filename,)
+        return "".format(self.filename)
diff --git a/src/sentry_sdk_alpha/client.py b/src/sentry_sdk_alpha/client.py
index a261a374df26cb..d0667566411882 100644
--- a/src/sentry_sdk_alpha/client.py
+++ b/src/sentry_sdk_alpha/client.py
@@ -1,67 +1,61 @@
 import os
-import uuid
 import random
 import socket
+import uuid
 from collections.abc import Mapping
 from datetime import datetime, timezone
 from importlib import import_module
-from typing import TYPE_CHECKING, List, Dict, cast, overload
+from typing import TYPE_CHECKING, Dict, List, cast, overload
 
 from sentry_sdk_alpha._compat import check_uwsgi_thread_support
-from sentry_sdk_alpha.utils import (
-    AnnotatedValue,
-    ContextVar,
-    capture_internal_exceptions,
-    current_stacktrace,
-    env_to_bool,
-    format_timestamp,
-    get_sdk_name,
-    get_type_name,
-    get_default_release,
-    handle_in_app,
-    logger,
-)
-from sentry_sdk_alpha.serializer import serialize
-from sentry_sdk_alpha.tracing import trace
-from sentry_sdk_alpha.transport import BaseHttpTransport, make_transport
 from sentry_sdk_alpha.consts import (
-    SPANDATA,
     DEFAULT_MAX_VALUE_LENGTH,
     DEFAULT_OPTIONS,
+    SPANDATA,
     VERSION,
     ClientConstructor,
 )
+from sentry_sdk_alpha.envelope import Envelope
 from sentry_sdk_alpha.integrations import setup_integrations
 from sentry_sdk_alpha.integrations.dedupe import DedupeIntegration
-from sentry_sdk_alpha.sessions import SessionFlusher
-from sentry_sdk_alpha.envelope import Envelope
-
+from sentry_sdk_alpha.monitor import Monitor
 from sentry_sdk_alpha.profiler.continuous_profiler import setup_continuous_profiler
 from sentry_sdk_alpha.profiler.transaction_profiler import (
-    has_profiling_enabled,
     Profile,
+    has_profiling_enabled,
     setup_profiler,
 )
 from sentry_sdk_alpha.scrubber import EventScrubber
-from sentry_sdk_alpha.monitor import Monitor
+from sentry_sdk_alpha.serializer import serialize
+from sentry_sdk_alpha.sessions import SessionFlusher
 from sentry_sdk_alpha.spotlight import setup_spotlight
+from sentry_sdk_alpha.tracing import trace
+from sentry_sdk_alpha.transport import BaseHttpTransport, make_transport
+from sentry_sdk_alpha.utils import (
+    AnnotatedValue,
+    ContextVar,
+    capture_internal_exceptions,
+    current_stacktrace,
+    env_to_bool,
+    format_timestamp,
+    get_default_release,
+    get_sdk_name,
+    get_type_name,
+    handle_in_app,
+    logger,
+)
 
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Callable
-    from typing import Optional
-    from typing import Sequence
-    from typing import Type
-    from typing import Union
-    from typing import TypeVar
-
-    from sentry_sdk_alpha._types import Event, Hint, SDKInfo, Log
+    from collections.abc import Callable, Sequence
+    from typing import Any, Optional, Type, TypeVar, Union
+
+    from sentry_sdk_alpha._log_batcher import LogBatcher
+    from sentry_sdk_alpha._types import Event, Hint, Log, SDKInfo
     from sentry_sdk_alpha.integrations import Integration
     from sentry_sdk_alpha.scope import Scope
     from sentry_sdk_alpha.session import Session
     from sentry_sdk_alpha.spotlight import SpotlightClient
     from sentry_sdk_alpha.transport import Transport
-    from sentry_sdk_alpha._log_batcher import LogBatcher
 
     I = TypeVar("I", bound=Integration)  # noqa: E741
 
@@ -93,7 +87,7 @@ def _get_options(*args, **kwargs):
 
     for key, value in options.items():
         if key not in rv:
-            raise TypeError("Unknown option %r" % (key,))
+            raise TypeError("Unknown option {!r}".format(key))
 
         rv[key] = value
 
@@ -122,9 +116,7 @@ def _get_options(*args, **kwargs):
 
     if rv["event_scrubber"] is None:
         rv["event_scrubber"] = EventScrubber(
-            send_default_pii=(
-                False if rv["send_default_pii"] is None else rv["send_default_pii"]
-            )
+            send_default_pii=(False if rv["send_default_pii"] is None else rv["send_default_pii"])
         )
 
     if rv["socket_options"] and not isinstance(rv["socket_options"], list):
@@ -147,9 +139,7 @@ class BaseClient:
 
     def __init__(self, options=None):
         # type: (Optional[Dict[str, Any]]) -> None
-        self.options = (
-            options if options is not None else DEFAULT_OPTIONS
-        )  # type: Dict[str, Any]
+        self.options = options if options is not None else DEFAULT_OPTIONS  # type: Dict[str, Any]
 
         self.transport = None  # type: Optional[Transport]
         self.monitor = None  # type: Optional[Monitor]
@@ -248,7 +238,7 @@ class _Client(BaseClient):
 
     def __init__(self, *args, **kwargs):
         # type: (*Any, **Any) -> None
-        super(_Client, self).__init__(options=get_options(*args, **kwargs))
+        super().__init__(options=get_options(*args, **kwargs))
         self._init_impl()
 
     def __getstate__(self):
@@ -349,9 +339,7 @@ def _capture_envelope(envelope):
             self.integrations = setup_integrations(
                 self.options["integrations"],
                 with_defaults=self.options["default_integrations"],
-                with_auto_enabling_integrations=self.options[
-                    "auto_enabling_integrations"
-                ],
+                with_auto_enabling_integrations=self.options["auto_enabling_integrations"],
                 disabled_integrations=self.options["disabled_integrations"],
             )
 
@@ -360,9 +348,7 @@ def _capture_envelope(envelope):
                 spotlight_env_value = os.environ["SENTRY_SPOTLIGHT"]
                 spotlight_config = env_to_bool(spotlight_env_value, strict=True)
                 self.options["spotlight"] = (
-                    spotlight_config
-                    if spotlight_config is not None
-                    else spotlight_env_value
+                    spotlight_config if spotlight_config is not None else spotlight_env_value
                 )
 
             if self.options.get("spotlight"):
@@ -455,7 +441,7 @@ def _prepare_event(
 
         if scope is not None:
             is_transaction = event.get("type") == "transaction"
-            spans_before = len(cast(List[Dict[str, object]], event.get("spans", [])))
+            spans_before = len(cast(list[dict[str, object]], event.get("spans", [])))
             event_ = scope.apply_to_event(event, hint, self.options)
 
             # one of the event/error processors returned None
@@ -474,9 +460,7 @@ def _prepare_event(
                 return None
 
             event = event_  # type: Optional[Event]  # type: ignore[no-redef]
-            spans_delta = spans_before - len(
-                cast(List[Dict[str, object]], event.get("spans", []))
-            )
+            spans_delta = spans_before - len(cast(list[dict[str, object]], event.get("spans", [])))
             if is_transaction and spans_delta > 0 and self.transport is not None:
                 self.transport.record_lost_event(
                     "event_processor", data_category="span", quantity=spans_delta
@@ -492,9 +476,7 @@ def _prepare_event(
                     if not isinstance(breadcrumbs, AnnotatedValue)
                     else []
                 )
-                previous_total_breadcrumbs = (
-                    len(values) + scope._n_breadcrumbs_truncated
-                )
+                previous_total_breadcrumbs = len(values) + scope._n_breadcrumbs_truncated
 
         if (
             self.options["attach_stacktrace"]
@@ -544,9 +526,7 @@ def _prepare_event(
                 event_scrubber.scrub_event(event)
 
         if previous_total_spans is not None:
-            event["spans"] = AnnotatedValue(
-                event.get("spans", []), {"len": previous_total_spans}
-            )
+            event["spans"] = AnnotatedValue(event.get("spans", []), {"len": previous_total_spans})
         if previous_total_breadcrumbs is not None:
             event["breadcrumbs"] = AnnotatedValue(
                 event.get("breadcrumbs", []), {"len": previous_total_breadcrumbs}
@@ -565,20 +545,14 @@ def _prepare_event(
             )
 
         before_send = self.options["before_send"]
-        if (
-            before_send is not None
-            and event is not None
-            and event.get("type") != "transaction"
-        ):
+        if before_send is not None and event is not None and event.get("type") != "transaction":
             new_event = None  # type: Optional[Event]
             with capture_internal_exceptions():
                 new_event = before_send(event, hint or {})
             if new_event is None:
                 logger.info("before send dropped event")
                 if self.transport:
-                    self.transport.record_lost_event(
-                        "before_send", data_category="error"
-                    )
+                    self.transport.record_lost_event("before_send", data_category="error")
 
                 # If this is an exception, reset the DedupeIntegration. It still
                 # remembers the dropped exception as the last exception, meaning
@@ -596,7 +570,7 @@ def _prepare_event(
             and event.get("type") == "transaction"
         ):
             new_event = None
-            spans_before = len(cast(List[Dict[str, object]], event.get("spans", [])))
+            spans_before = len(cast(list[dict[str, object]], event.get("spans", [])))
             with capture_internal_exceptions():
                 new_event = before_send_transaction(event, hint or {})
             if new_event is None:
@@ -612,7 +586,7 @@ def _prepare_event(
                     )
             else:
                 spans_delta = spans_before - len(
-                    cast(List[Dict[str, object]], new_event.get("spans", []))
+                    cast(list[dict[str, object]], new_event.get("spans", []))
                 )
                 if spans_delta > 0 and self.transport is not None:
                     self.transport.record_lost_event(
@@ -631,7 +605,7 @@ def _is_ignored_error(self, event, hint):
 
         error = exc_info[0]
         error_type_name = get_type_name(exc_info[0])
-        error_full_name = "%s.%s" % (exc_info[0].__module__, error_type_name)
+        error_full_name = "{}.{}".format(exc_info[0].__module__, error_type_name)
 
         for ignored_error in self.options["ignore_errors"]:
             # String types are matched against the type name in the
@@ -794,11 +768,7 @@ def capture_event(
         is_transaction = event_opt.get("type") == "transaction"
         is_checkin = event_opt.get("type") == "check_in"
 
-        if (
-            not is_transaction
-            and not is_checkin
-            and not self._should_sample_error(event, hint)
-        ):
+        if not is_transaction and not is_checkin and not self._should_sample_error(event, hint):
             return None
 
         attachments = hint.get("attachments")
@@ -876,9 +846,7 @@ def _capture_experimental_log(self, current_scope, log):
         # If debug is enabled, log the log to the console
         debug = self.options.get("debug", False)
         if debug:
-            logger.debug(
-                f'[Sentry Logs] [{log.get("severity_text")}] {log.get("body")}'
-            )
+            logger.debug(f'[Sentry Logs] [{log.get("severity_text")}] {log.get("body")}')
 
         before_send_log = self.options["_experiments"].get("before_send_log")
         if before_send_log is not None:
@@ -991,7 +959,7 @@ def __exit__(self, exc_type, exc_value, tb):
     # Use `ClientConstructor` to define the argument types of `init` and
     # `Dict[str, Any]` to tell static analyzers about the return type.
 
-    class get_options(ClientConstructor, Dict[str, Any]):  # noqa: N801
+    class get_options(ClientConstructor, dict[str, Any]):  # noqa: N801
         pass
 
     class Client(ClientConstructor, _Client):
diff --git a/src/sentry_sdk_alpha/consts.py b/src/sentry_sdk_alpha/consts.py
index 80b46dd4a796a5..fe76a2adcd8387 100644
--- a/src/sentry_sdk_alpha/consts.py
+++ b/src/sentry_sdk_alpha/consts.py
@@ -1,5 +1,4 @@
 import itertools
-
 from enum import Enum
 from typing import TYPE_CHECKING
 
@@ -27,20 +26,10 @@ class CompressionAlgo(Enum):
 
 
 if TYPE_CHECKING:
-    import sentry_sdk_alpha
-
-    from typing import Optional
-    from typing import Callable
-    from typing import Union
-    from typing import List
-    from typing import Type
-    from typing import Dict
-    from typing import Any
-    from typing import Sequence
-    from typing import Tuple
-    from typing_extensions import Literal
-    from typing_extensions import TypedDict
+    from collections.abc import Callable, Sequence
+    from typing import Any, Dict, List, Literal, Optional, Tuple, Type, TypedDict, Union
 
+    import sentry_sdk_alpha
     from sentry_sdk_alpha._types import (
         BreadcrumbProcessor,
         ContinuousProfilerMode,
@@ -56,24 +45,20 @@ class CompressionAlgo(Enum):
     # functionality. Changing them from the defaults (`None`) in production
     # code is highly discouraged. They are not subject to any stability
     # guarantees such as the ones from semantic versioning.
-    Experiments = TypedDict(
-        "Experiments",
-        {
-            "max_spans": Optional[int],
-            "max_flags": Optional[int],
-            "record_sql_params": Optional[bool],
-            "continuous_profiling_auto_start": Optional[bool],
-            "continuous_profiling_mode": Optional[ContinuousProfilerMode],
-            "otel_powered_performance": Optional[bool],
-            "transport_zlib_compression_level": Optional[int],
-            "transport_compression_level": Optional[int],
-            "transport_compression_algo": Optional[CompressionAlgo],
-            "transport_num_pools": Optional[int],
-            "transport_http2": Optional[bool],
-            "enable_logs": Optional[bool],
-        },
-        total=False,
-    )
+    class Experiments(TypedDict, total=False):
+        max_spans: Optional[int]
+        max_flags: Optional[int]
+        record_sql_params: Optional[bool]
+        continuous_profiling_auto_start: Optional[bool]
+        continuous_profiling_mode: Optional[ContinuousProfilerMode]
+        otel_powered_performance: Optional[bool]
+        transport_zlib_compression_level: Optional[int]
+        transport_compression_level: Optional[int]
+        transport_compression_algo: Optional[CompressionAlgo]
+        transport_num_pools: Optional[int]
+        transport_http2: Optional[bool]
+        enable_logs: Optional[bool]
+
 
 DEFAULT_QUEUE_SIZE = 100
 DEFAULT_MAX_BREADCRUMBS = 100
@@ -525,9 +510,7 @@ class OP:
     MIDDLEWARE_STARLITE_SEND = "middleware.starlite.send"
     OPENAI_CHAT_COMPLETIONS_CREATE = "ai.chat_completions.create.openai"
     OPENAI_EMBEDDINGS_CREATE = "ai.embeddings.create.openai"
-    HUGGINGFACE_HUB_CHAT_COMPLETIONS_CREATE = (
-        "ai.chat_completions.create.huggingface_hub"
-    )
+    HUGGINGFACE_HUB_CHAT_COMPLETIONS_CREATE = "ai.chat_completions.create.huggingface_hub"
     LANGCHAIN_PIPELINE = "ai.pipeline.langchain"
     LANGCHAIN_RUN = "ai.run.langchain"
     LANGCHAIN_TOOL = "ai.tool.langchain"
diff --git a/src/sentry_sdk_alpha/crons/__init__.py b/src/sentry_sdk_alpha/crons/__init__.py
index 24509e63c6d926..067105ca922273 100644
--- a/src/sentry_sdk_alpha/crons/__init__.py
+++ b/src/sentry_sdk_alpha/crons/__init__.py
@@ -2,7 +2,6 @@
 from sentry_sdk_alpha.crons.consts import MonitorStatus
 from sentry_sdk_alpha.crons.decorator import monitor
 
-
 __all__ = [
     "capture_checkin",
     "MonitorStatus",
diff --git a/src/sentry_sdk_alpha/crons/api.py b/src/sentry_sdk_alpha/crons/api.py
index 51427fbbee1862..0ac8d81baf2b14 100644
--- a/src/sentry_sdk_alpha/crons/api.py
+++ b/src/sentry_sdk_alpha/crons/api.py
@@ -1,11 +1,11 @@
 import uuid
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
     from typing import Optional
+
     from sentry_sdk_alpha._types import Event, MonitorConfig
 
 
diff --git a/src/sentry_sdk_alpha/crons/decorator.py b/src/sentry_sdk_alpha/crons/decorator.py
index 2c433b32079b14..986fad2f23a1b9 100644
--- a/src/sentry_sdk_alpha/crons/decorator.py
+++ b/src/sentry_sdk_alpha/crons/decorator.py
@@ -1,25 +1,16 @@
 from functools import wraps
 from inspect import iscoroutinefunction
+from typing import TYPE_CHECKING
 
 from sentry_sdk_alpha.crons import capture_checkin
 from sentry_sdk_alpha.crons.consts import MonitorStatus
 from sentry_sdk_alpha.utils import now
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
     from collections.abc import Awaitable, Callable
     from types import TracebackType
-    from typing import (
-        Any,
-        Optional,
-        ParamSpec,
-        Type,
-        TypeVar,
-        Union,
-        cast,
-        overload,
-    )
+    from typing import Any, Optional, ParamSpec, Type, TypeVar, Union, cast, overload
+
     from sentry_sdk_alpha._types import MonitorConfig
 
     P = ParamSpec("P")
diff --git a/src/sentry_sdk_alpha/debug.py b/src/sentry_sdk_alpha/debug.py
index 0bf0cfe2e4d109..8f24cb91e2667f 100644
--- a/src/sentry_sdk_alpha/debug.py
+++ b/src/sentry_sdk_alpha/debug.py
@@ -1,10 +1,10 @@
-import sys
 import logging
+import sys
+from logging import LogRecord
 
 from sentry_sdk_alpha import get_client
 from sentry_sdk_alpha.client import _client_init_debug
 from sentry_sdk_alpha.utils import logger
-from logging import LogRecord
 
 
 class _DebugFilter(logging.Filter):
diff --git a/src/sentry_sdk_alpha/envelope.py b/src/sentry_sdk_alpha/envelope.py
index e928c18da42ccf..71669671fd9034 100644
--- a/src/sentry_sdk_alpha/envelope.py
+++ b/src/sentry_sdk_alpha/envelope.py
@@ -1,19 +1,14 @@
 import io
 import json
 import mimetypes
+from typing import TYPE_CHECKING
 
 from sentry_sdk_alpha.session import Session
-from sentry_sdk_alpha.utils import json_dumps, capture_internal_exceptions
-
-from typing import TYPE_CHECKING
+from sentry_sdk_alpha.utils import capture_internal_exceptions, json_dumps
 
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Optional
-    from typing import Union
-    from typing import Dict
-    from typing import List
-    from typing import Iterator
+    from collections.abc import Iterator
+    from typing import Any, Dict, List, Optional, Union
 
     from sentry_sdk_alpha._types import Event, EventDataCategory
 
@@ -51,7 +46,7 @@ def __init__(
     @property
     def description(self):
         # type: (...) -> str
-        return "envelope with %s items (%s)" % (
+        return "envelope with {} items ({})".format(
             len(self.items),
             ", ".join(x.data_category for x in self.items),
         )
@@ -170,7 +165,7 @@ def deserialize(
 
     def __repr__(self):
         # type: (...) -> str
-        return "<Envelope headers=%r items=%r>" % (self.headers, self.items)
+        return "<Envelope headers={!r} items={!r}>".format(self.headers, self.items)
 
 
 class PayloadRef:
@@ -212,7 +207,7 @@ def inferred_content_type(self):
 
     def __repr__(self):
         # type: (...) -> str
-        return "<Payload %r>" % (self.inferred_content_type,)
+        return "<Payload {!r}>".format(self.inferred_content_type)
 
 
 class Item:
@@ -249,7 +244,7 @@ def __init__(
 
     def __repr__(self):
         # type: (...) -> str
-        return "<Item headers=%r payload=%r data_category=%r>" % (
+        return "<Item headers={!r} payload={!r} data_category={!r}>".format(
             self.headers,
             self.payload,
             self.data_category,
diff --git a/src/sentry_sdk_alpha/feature_flags.py b/src/sentry_sdk_alpha/feature_flags.py
index 9ba7d82c53d21a..a54d1fd841391f 100644
--- a/src/sentry_sdk_alpha/feature_flags.py
+++ b/src/sentry_sdk_alpha/feature_flags.py
@@ -1,14 +1,16 @@
 import copy
-import sentry_sdk_alpha
-from sentry_sdk_alpha._lru_cache import LRUCache
 from threading import Lock
-
 from typing import TYPE_CHECKING, Any
 
+import sentry_sdk_alpha
+from sentry_sdk_alpha._lru_cache import LRUCache
+
 if TYPE_CHECKING:
     from typing import TypedDict
 
-    FlagData = TypedDict("FlagData", {"flag": str, "result": bool})
+    class FlagData(TypedDict):
+        flag: str
+        result: bool
 
 
 DEFAULT_FLAG_CAPACITY = 100
@@ -39,9 +41,7 @@ def __deepcopy__(self, memo):
     def get(self):
         # type: () -> list[FlagData]
         with self.lock:
-            return [
-                {"flag": key, "result": value} for key, value in self.__buffer.get_all()
-            ]
+            return [{"flag": key, "result": value} for key, value in self.__buffer.get_all()]
 
     def set(self, flag, result):
         # type: (str, bool) -> None
@@ -50,9 +50,7 @@ def set(self, flag, result):
             # on the lock. This is of course a deadlock. However, this is far outside the expected
             # usage of this class. We guard against it here for completeness and to document this
             # expected failure mode.
-            raise ValueError(
-                "FlagBuffer instances can not be inserted into the dictionary."
-            )
+            raise ValueError("FlagBuffer instances can not be inserted into the dictionary.")
 
         with self.lock:
             self.__buffer.set(flag, result)
diff --git a/src/sentry_sdk_alpha/integrations/__init__.py b/src/sentry_sdk_alpha/integrations/__init__.py
index 30daec17778668..6e06c85b994271 100644
--- a/src/sentry_sdk_alpha/integrations/__init__.py
+++ b/src/sentry_sdk_alpha/integrations/__init__.py
@@ -1,20 +1,12 @@
 from abc import ABC, abstractmethod
 from threading import Lock
+from typing import TYPE_CHECKING
 
 from sentry_sdk_alpha.utils import logger
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from collections.abc import Sequence
-    from typing import Callable
-    from typing import Dict
-    from typing import Iterator
-    from typing import List
-    from typing import Optional
-    from typing import Set
-    from typing import Type
-    from typing import Union
+    from collections.abc import Callable, Iterator, Sequence
+    from typing import Dict, List, Optional, Set, Type, Union
 
 
 _DEFAULT_FAILED_REQUEST_STATUS_CODES = frozenset(range(500, 600))
@@ -50,13 +42,11 @@ def iter_default_integrations(with_auto_enabling_integrations):
                 module, cls = import_string.rsplit(".", 1)
                 yield getattr(import_module(module), cls)
             except (DidNotEnable, SyntaxError) as e:
-                logger.debug(
-                    "Did not import default integration %s: %s", import_string, e
-                )
+                logger.debug("Did not import default integration %s: %s", import_string, e)
 
     if isinstance(iter_default_integrations.__doc__, str):
         for import_string in integrations:
-            iter_default_integrations.__doc__ += "\n- `{}`".format(import_string)
+            iter_default_integrations.__doc__ += f"\n- `{import_string}`"
 
     return iter_default_integrations
 
@@ -180,9 +170,7 @@ def setup_integrations(
     `disabled_integrations` takes precedence over `with_defaults` and
     `with_auto_enabling_integrations`.
     """
-    integrations = dict(
-        (integration.identifier, integration) for integration in integrations or ()
-    )
+    integrations = {integration.identifier: integration for integration in integrations or ()}
 
     logger.debug("Setting up integrations (with default = %s)", with_defaults)
 
@@ -196,9 +184,7 @@ def setup_integrations(
     used_as_default_integration = set()
 
     if with_defaults:
-        for integration_cls in iter_default_integrations(
-            with_auto_enabling_integrations
-        ):
+        for integration_cls in iter_default_integrations(with_auto_enabling_integrations):
             if integration_cls.identifier not in integrations:
                 instance = integration_cls()
                 integrations[instance.identifier] = instance
@@ -210,18 +196,14 @@ def setup_integrations(
                 if type(integration) in disabled_integrations:
                     logger.debug("Ignoring integration %s", identifier)
                 else:
-                    logger.debug(
-                        "Setting up previously not enabled integration %s", identifier
-                    )
+                    logger.debug("Setting up previously not enabled integration %s", identifier)
                     try:
                         type(integration).setup_once()
                     except DidNotEnable as e:
                         if identifier not in used_as_default_integration:
                             raise
 
-                        logger.debug(
-                            "Did not enable default integration %s: %s", identifier, e
-                        )
+                        logger.debug("Did not enable default integration %s: %s", identifier, e)
                     else:
                         _installed_integrations.add(identifier)
 
diff --git a/src/sentry_sdk_alpha/integrations/_asgi_common.py b/src/sentry_sdk_alpha/integrations/_asgi_common.py
index 11dd43b07420fc..8b2b7893e47033 100644
--- a/src/sentry_sdk_alpha/integrations/_asgi_common.py
+++ b/src/sentry_sdk_alpha/integrations/_asgi_common.py
@@ -1,16 +1,11 @@
 import urllib
+from typing import TYPE_CHECKING
 
-from sentry_sdk_alpha.scope import should_send_default_pii
 from sentry_sdk_alpha.integrations._wsgi_common import _filter_headers
-
-from typing import TYPE_CHECKING
+from sentry_sdk_alpha.scope import should_send_default_pii
 
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Dict
-    from typing import Optional
-    from typing import Union
-    from typing_extensions import Literal
+    from typing import Any, Dict, Literal, Optional, Union
 
     from sentry_sdk_alpha.utils import AnnotatedValue
 
@@ -43,14 +38,14 @@ def _get_url(asgi_scope, default_scheme=None, host=None):
     path = asgi_scope.get("root_path", "") + asgi_scope.get("path", "")
 
     if host:
-        return "%s://%s%s" % (scheme, host, path)
+        return "{}://{}{}".format(scheme, host, path)
 
     if server is not None:
         host, port = server
         default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}.get(scheme)
         if port != default_port:
-            return "%s://%s:%s%s" % (scheme, host, port, path)
-        return "%s://%s%s" % (scheme, host, path)
+            return "{}://{}:{}{}".format(scheme, host, port, path)
+        return "{}://{}{}".format(scheme, host, path)
     return path
 
 
diff --git a/src/sentry_sdk_alpha/integrations/_wsgi_common.py b/src/sentry_sdk_alpha/integrations/_wsgi_common.py
index 372b23f515b04b..d01ea6aa9b0935 100644
--- a/src/sentry_sdk_alpha/integrations/_wsgi_common.py
+++ b/src/sentry_sdk_alpha/integrations/_wsgi_common.py
@@ -3,7 +3,7 @@
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import AnnotatedValue, SENSITIVE_DATA_SUBSTITUTE
+from sentry_sdk_alpha.utils import SENSITIVE_DATA_SUBSTITUTE, AnnotatedValue
 
 try:
     from django.http.request import RawPostDataException
@@ -13,12 +13,9 @@
 from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Dict
-    from typing import Mapping
-    from typing import MutableMapping
-    from typing import Optional
-    from typing import Union
+    from collections.abc import Mapping, MutableMapping
+    from typing import Any, Dict, Optional, Union
+
     from sentry_sdk_alpha._types import Event
 
 
@@ -33,9 +30,7 @@
     "HTTP_X_REAL_IP",
 )
 
-SENSITIVE_HEADERS = tuple(
-    x[len("HTTP_") :] for x in SENSITIVE_ENV_KEYS if x.startswith("HTTP_")
-)
+SENSITIVE_HEADERS = tuple(x[len("HTTP_") :] for x in SENSITIVE_ENV_KEYS if x.startswith("HTTP_"))
 
 DEFAULT_HTTP_METHODS_TO_CAPTURE = (
     "CONNECT",
@@ -206,11 +201,7 @@ def env(self):
 def _is_json_content_type(ct):
     # type: (Optional[str]) -> bool
     mt = (ct or "").split(";", 1)[0]
-    return (
-        mt == "application/json"
-        or (mt.startswith("application/"))
-        and mt.endswith("+json")
-    )
+    return mt == "application/json" or (mt.startswith("application/")) and mt.endswith("+json")
 
 
 def _filter_headers(headers):
diff --git a/src/sentry_sdk_alpha/integrations/aiohttp.py b/src/sentry_sdk_alpha/integrations/aiohttp.py
index e12704d803d385..75121b80eac38b 100644
--- a/src/sentry_sdk_alpha/integrations/aiohttp.py
+++ b/src/sentry_sdk_alpha/integrations/aiohttp.py
@@ -4,28 +4,32 @@
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import (
-    OP,
-    SPANSTATUS,
-    SPANDATA,
     BAGGAGE_HEADER_NAME,
+    OP,
     SOURCE_FOR_STYLE,
+    SPANDATA,
+    SPANSTATUS,
     TransactionSource,
 )
 from sentry_sdk_alpha.integrations import (
     _DEFAULT_FAILED_REQUEST_STATUS_CODES,
-    _check_minimum_version,
-    Integration,
     DidNotEnable,
+    Integration,
+    _check_minimum_version,
 )
-from sentry_sdk_alpha.integrations.logging import ignore_logger
-from sentry_sdk_alpha.sessions import track_session
 from sentry_sdk_alpha.integrations._wsgi_common import (
     _filter_headers,
     _request_headers_to_span_attributes,
     request_body_within_bounds,
 )
+from sentry_sdk_alpha.integrations.logging import ignore_logger
+from sentry_sdk_alpha.sessions import track_session
 from sentry_sdk_alpha.tracing_utils import should_propagate_trace
 from sentry_sdk_alpha.utils import (
+    CONTEXTVARS_ERROR_MESSAGE,
+    HAS_REAL_CONTEXTVARS,
+    SENSITIVE_DATA_SUBSTITUTE,
+    AnnotatedValue,
     capture_internal_exceptions,
     ensure_integration_enabled,
     event_from_exception,
@@ -36,17 +40,13 @@
     reraise,
     set_thread_info_from_span,
     transaction_from_function,
-    HAS_REAL_CONTEXTVARS,
-    CONTEXTVARS_ERROR_MESSAGE,
-    SENSITIVE_DATA_SUBSTITUTE,
-    AnnotatedValue,
 )
 
 try:
     import asyncio
 
-    from aiohttp import __version__ as AIOHTTP_VERSION
     from aiohttp import ClientSession, TraceConfig
+    from aiohttp import __version__ as AIOHTTP_VERSION
     from aiohttp.web import Application, HTTPException, UrlDispatcher
 except ImportError:
     raise DidNotEnable("AIOHTTP not installed")
@@ -54,19 +54,16 @@
 from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
-    from aiohttp.web_request import Request
-    from aiohttp.web_urldispatcher import UrlMappingMatchInfo
-    from aiohttp import TraceRequestStartParams, TraceRequestEndParams
-
     from collections.abc import Set
     from types import SimpleNamespace
-    from typing import Any
-    from typing import Optional
-    from typing import Tuple
-    from typing import Union
+    from typing import Any, Optional, Tuple, Union
+
+    from aiohttp import TraceRequestEndParams, TraceRequestStartParams
+    from aiohttp.web_request import Request
+    from aiohttp.web_urldispatcher import UrlMappingMatchInfo
 
-    from sentry_sdk_alpha.utils import ExcInfo
     from sentry_sdk_alpha._types import Event, EventProcessor
+    from sentry_sdk_alpha.utils import ExcInfo
 
 
 TRANSACTION_STYLE_VALUES = ("handler_name", "method_and_path_pattern")
@@ -149,10 +146,7 @@ async def sentry_app_handle(self, request, *args, **kwargs):
                             except HTTPException as e:
                                 span.set_http_status(e.status_code)
 
-                                if (
-                                    e.status_code
-                                    in integration._failed_request_status_codes
-                                ):
+                                if e.status_code in integration._failed_request_status_codes:
                                     _capture_exception()
 
                                 raise
@@ -188,7 +182,7 @@ async def sentry_urldispatcher_resolve(self, request):
                 elif integration.transaction_style == "method_and_path_pattern":
                     route_info = rv.get_info()
                     pattern = route_info.get("path") or route_info.get("formatter")
-                    name = "{} {}".format(request.method, pattern)
+                    name = f"{request.method} {pattern}"
             except Exception:
                 pass
 
@@ -233,8 +227,7 @@ async def on_request_start(session, trace_config_ctx, params):
 
         span = sentry_sdk_alpha.start_span(
             op=OP.HTTP_CLIENT,
-            name="%s %s"
-            % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE),
+            name="%s %s" % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE),
             origin=AioHttpIntegration.origin,
             only_if_parent=True,
         )
@@ -258,17 +251,13 @@ async def on_request_start(session, trace_config_ctx, params):
             for (
                 key,
                 value,
-            ) in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers(
-                span=span
-            ):
+            ) in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers(span=span):
                 logger.debug(
                     "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
                         key=key, value=value, url=params.url
                     )
                 )
-                if key == BAGGAGE_HEADER_NAME and params.headers.get(
-                    BAGGAGE_HEADER_NAME
-                ):
+                if key == BAGGAGE_HEADER_NAME and params.headers.get(BAGGAGE_HEADER_NAME):
                     # do not overwrite any existing baggage, just append to it
                     params.headers[key] += "," + value
                 else:
@@ -321,7 +310,7 @@ def aiohttp_processor(
         with capture_internal_exceptions():
             request_info = event.setdefault("request", {})
 
-            request_info["url"] = "%s://%s%s" % (
+            request_info["url"] = "{}://{}{}".format(
                 request.scheme,
                 request.host,
                 request.path,
diff --git a/src/sentry_sdk_alpha/integrations/anthropic.py b/src/sentry_sdk_alpha/integrations/anthropic.py
index ce374cefca5cc4..c95807d509dd0b 100644
--- a/src/sentry_sdk_alpha/integrations/anthropic.py
+++ b/src/sentry_sdk_alpha/integrations/anthropic.py
@@ -4,7 +4,7 @@
 import sentry_sdk_alpha
 from sentry_sdk_alpha.ai.monitoring import record_token_usage
 from sentry_sdk_alpha.consts import OP, SPANDATA
-from sentry_sdk_alpha.integrations import _check_minimum_version, DidNotEnable, Integration
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
 from sentry_sdk_alpha.scope import should_send_default_pii
 from sentry_sdk_alpha.utils import (
     capture_internal_exceptions,
@@ -21,7 +21,9 @@
     raise DidNotEnable("Anthropic not installed")
 
 if TYPE_CHECKING:
-    from typing import Any, AsyncIterator, Iterator
+    from collections.abc import AsyncIterator, Iterator
+    from typing import Any
+
     from sentry_sdk_alpha.tracing import Span
 
 
@@ -111,9 +113,7 @@ def _collect_ai_data(event, input_tokens, output_tokens, content_blocks):
     return input_tokens, output_tokens, content_blocks
 
 
-def _add_ai_data_to_span(
-    span, integration, input_tokens, output_tokens, content_blocks
-):
+def _add_ai_data_to_span(span, integration, input_tokens, output_tokens, content_blocks):
     # type: (Span, AnthropicIntegration, int, int, list[str]) -> None
     """
     Add token usage and content blocks from the AI streaming response to the span.
@@ -167,9 +167,7 @@ def _sentry_patched_create_common(f, *args, **kwargs):
 
         if hasattr(result, "content"):
             if should_send_default_pii() and integration.include_prompts:
-                span.set_attribute(
-                    SPANDATA.AI_RESPONSES, _get_responses(result.content)
-                )
+                span.set_attribute(SPANDATA.AI_RESPONSES, _get_responses(result.content))
             _calculate_token_usage(result, span)
             span.__exit__(None, None, None)
 
@@ -189,9 +187,7 @@ def new_iterator():
                     )
                     yield event
 
-                _add_ai_data_to_span(
-                    span, integration, input_tokens, output_tokens, content_blocks
-                )
+                _add_ai_data_to_span(span, integration, input_tokens, output_tokens, content_blocks)
                 span.__exit__(None, None, None)
 
             async def new_iterator_async():
@@ -206,9 +202,7 @@ async def new_iterator_async():
                     )
                     yield event
 
-                _add_ai_data_to_span(
-                    span, integration, input_tokens, output_tokens, content_blocks
-                )
+                _add_ai_data_to_span(span, integration, input_tokens, output_tokens, content_blocks)
                 span.__exit__(None, None, None)
 
             if str(type(result._iterator)) == "":
diff --git a/src/sentry_sdk_alpha/integrations/argv.py b/src/sentry_sdk_alpha/integrations/argv.py
index 15c9518b73f73a..c505e05e24500c 100644
--- a/src/sentry_sdk_alpha/integrations/argv.py
+++ b/src/sentry_sdk_alpha/integrations/argv.py
@@ -1,11 +1,10 @@
 import sys
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.integrations import Integration
 from sentry_sdk_alpha.scope import add_global_event_processor
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
     from typing import Optional
 
diff --git a/src/sentry_sdk_alpha/integrations/ariadne.py b/src/sentry_sdk_alpha/integrations/ariadne.py
index ab27e065887605..3ee8db08b90088 100644
--- a/src/sentry_sdk_alpha/integrations/ariadne.py
+++ b/src/sentry_sdk_alpha/integrations/ariadne.py
@@ -1,10 +1,10 @@
 from importlib import import_module
 
 import sentry_sdk_alpha
-from sentry_sdk_alpha import get_client, capture_event
-from sentry_sdk_alpha.integrations import _check_minimum_version, DidNotEnable, Integration
-from sentry_sdk_alpha.integrations.logging import ignore_logger
+from sentry_sdk_alpha import capture_event, get_client
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
 from sentry_sdk_alpha.integrations._wsgi_common import request_body_within_bounds
+from sentry_sdk_alpha.integrations.logging import ignore_logger
 from sentry_sdk_alpha.scope import should_send_default_pii
 from sentry_sdk_alpha.utils import (
     capture_internal_exceptions,
@@ -24,8 +24,15 @@
 
 if TYPE_CHECKING:
     from typing import Any, Dict, List, Optional
-    from ariadne.types import GraphQLError, GraphQLResult, GraphQLSchema, QueryParser  # type: ignore
+
+    from ariadne.types import (  # type: ignore
+        GraphQLError,
+        GraphQLResult,
+        GraphQLSchema,
+        QueryParser,
+    )
     from graphql.language.ast import DocumentNode
+
     from sentry_sdk_alpha._types import Event, EventProcessor
 
 
@@ -122,9 +129,7 @@ def inner(event, hint):
 
         with capture_internal_exceptions():
             try:
-                content_length = int(
-                    (data.get("headers") or {}).get("Content-Length", 0)
-                )
+                content_length = int((data.get("headers") or {}).get("Content-Length", 0))
             except (TypeError, ValueError):
                 return event
 
diff --git a/src/sentry_sdk_alpha/integrations/arq.py b/src/sentry_sdk_alpha/integrations/arq.py
index 7dac247c650464..6cbc85a53a63e5 100644
--- a/src/sentry_sdk_alpha/integrations/arq.py
+++ b/src/sentry_sdk_alpha/integrations/arq.py
@@ -2,23 +2,23 @@
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP, SPANSTATUS
-from sentry_sdk_alpha.integrations import _check_minimum_version, DidNotEnable, Integration
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
 from sentry_sdk_alpha.integrations.logging import ignore_logger
 from sentry_sdk_alpha.scope import should_send_default_pii
 from sentry_sdk_alpha.tracing import TransactionSource
 from sentry_sdk_alpha.utils import (
+    SENSITIVE_DATA_SUBSTITUTE,
     capture_internal_exceptions,
     ensure_integration_enabled,
     event_from_exception,
-    SENSITIVE_DATA_SUBSTITUTE,
     parse_version,
     reraise,
 )
 
 try:
     import arq.worker
-    from arq.version import VERSION as ARQ_VERSION
     from arq.connections import ArqRedis
+    from arq.version import VERSION as ARQ_VERSION
     from arq.worker import JobExecutionFailed, Retry, RetryJob, Worker
 except ImportError:
     raise DidNotEnable("Arq is not installed")
@@ -28,13 +28,13 @@
 if TYPE_CHECKING:
     from typing import Any, Dict, Optional, Union
 
-    from sentry_sdk_alpha._types import EventProcessor, Event, ExcInfo, Hint
-
     from arq.cron import CronJob
     from arq.jobs import Job
     from arq.typing import WorkerCoroutine
     from arq.worker import Function
 
+    from sentry_sdk_alpha._types import Event, EventProcessor, ExcInfo, Hint
+
 ARQ_CONTROL_FLOW_EXCEPTIONS = (JobExecutionFailed, Retry, RetryJob)
 
 DEFAULT_TRANSACTION_NAME = "unknown arq task"
@@ -159,12 +159,8 @@ def event_processor(event, hint):
             extra = event.setdefault("extra", {})
             extra["arq-job"] = {
                 "task": ctx["job_name"],
-                "args": (
-                    args if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE
-                ),
-                "kwargs": (
-                    kwargs if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE
-                ),
+                "args": (args if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE),
+                "kwargs": (kwargs if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE),
                 "retry": ctx["job_try"],
             }
 
@@ -210,28 +206,22 @@ def _sentry_create_worker(*args, **kwargs):
         if isinstance(settings_cls, dict):
             if "functions" in settings_cls:
                 settings_cls["functions"] = [
-                    _get_arq_function(func)
-                    for func in settings_cls.get("functions", [])
+                    _get_arq_function(func) for func in settings_cls.get("functions", [])
                 ]
             if "cron_jobs" in settings_cls:
                 settings_cls["cron_jobs"] = [
-                    _get_arq_cron_job(cron_job)
-                    for cron_job in settings_cls.get("cron_jobs", [])
+                    _get_arq_cron_job(cron_job) for cron_job in settings_cls.get("cron_jobs", [])
                 ]
 
         if hasattr(settings_cls, "functions"):
-            settings_cls.functions = [
-                _get_arq_function(func) for func in settings_cls.functions
-            ]
+            settings_cls.functions = [_get_arq_function(func) for func in settings_cls.functions]
         if hasattr(settings_cls, "cron_jobs"):
             settings_cls.cron_jobs = [
                 _get_arq_cron_job(cron_job) for cron_job in settings_cls.cron_jobs
             ]
 
         if "functions" in kwargs:
-            kwargs["functions"] = [
-                _get_arq_function(func) for func in kwargs.get("functions", [])
-            ]
+            kwargs["functions"] = [_get_arq_function(func) for func in kwargs.get("functions", [])]
         if "cron_jobs" in kwargs:
             kwargs["cron_jobs"] = [
                 _get_arq_cron_job(cron_job) for cron_job in kwargs.get("cron_jobs", [])
diff --git a/src/sentry_sdk_alpha/integrations/asgi.py b/src/sentry_sdk_alpha/integrations/asgi.py
index aa696992d9b607..537e81e070bc11 100644
--- a/src/sentry_sdk_alpha/integrations/asgi.py
+++ b/src/sentry_sdk_alpha/integrations/asgi.py
@@ -8,10 +8,10 @@
 import inspect
 from copy import deepcopy
 from functools import partial
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP, SOURCE_FOR_STYLE, TransactionSource
-
 from sentry_sdk_alpha.integrations._asgi_common import (
     _get_headers,
     _get_query,
@@ -24,24 +24,19 @@
 )
 from sentry_sdk_alpha.sessions import track_session
 from sentry_sdk_alpha.utils import (
+    CONTEXTVARS_ERROR_MESSAGE,
+    HAS_REAL_CONTEXTVARS,
     ContextVar,
+    _get_installed_modules,
     capture_internal_exceptions,
     event_from_exception,
-    HAS_REAL_CONTEXTVARS,
-    CONTEXTVARS_ERROR_MESSAGE,
     logger,
     transaction_from_function,
-    _get_installed_modules,
 )
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Callable
-    from typing import Dict
-    from typing import Optional
-    from typing import Tuple
+    from collections.abc import Callable
+    from typing import Any, Dict, Optional, Tuple
 
     from sentry_sdk_alpha._types import Event, Hint
 
@@ -208,17 +203,11 @@ async def _run_app(self, scope, receive, send, asgi_version):
                         ty == "http" and method in self.http_methods_to_capture
                     )
                     if not should_trace:
-                        return await self._run_original_app(
-                            scope, receive, send, asgi_version
-                        )
+                        return await self._run_original_app(scope, receive, send, asgi_version)
 
                     with sentry_sdk_alpha.continue_trace(_get_headers(scope)):
                         with sentry_sdk_alpha.start_span(
-                            op=(
-                                OP.WEBSOCKET_SERVER
-                                if ty == "websocket"
-                                else OP.HTTP_SERVER
-                            ),
+                            op=(OP.WEBSOCKET_SERVER if ty == "websocket" else OP.HTTP_SERVER),
                             name=transaction_name,
                             source=transaction_source,
                             origin=self.span_origin,
@@ -266,9 +255,7 @@ def event_processor(self, event, hint, asgi_scope):
             ]
         )
         if not already_set:
-            name, source = self._get_transaction_name_and_source(
-                self.transaction_style, asgi_scope
-            )
+            name, source = self._get_transaction_name_and_source(self.transaction_style, asgi_scope)
             event["transaction"] = name
             event["transaction_info"] = {"source": source}
 
diff --git a/src/sentry_sdk_alpha/integrations/asyncio.py b/src/sentry_sdk_alpha/integrations/asyncio.py
index 58e4ffe0c1cf0e..f7583136ca375f 100644
--- a/src/sentry_sdk_alpha/integrations/asyncio.py
+++ b/src/sentry_sdk_alpha/integrations/asyncio.py
@@ -2,7 +2,7 @@
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations import Integration, DidNotEnable
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration
 from sentry_sdk_alpha.utils import event_from_exception, logger, reraise
 
 try:
@@ -11,11 +11,11 @@
 except ImportError:
     raise DidNotEnable("asyncio not available")
 
-from typing import cast, TYPE_CHECKING
+from typing import TYPE_CHECKING, cast
 
 if TYPE_CHECKING:
-    from typing import Any
     from collections.abc import Coroutine
+    from typing import Any
 
     from sentry_sdk_alpha._types import ExcInfo
 
@@ -61,9 +61,7 @@ async def _task_with_sentry_span_creation():
 
             # Trying to use user set task factory (if there is one)
             if orig_task_factory:
-                task = orig_task_factory(
-                    loop, _task_with_sentry_span_creation(), **kwargs
-                )
+                task = orig_task_factory(loop, _task_with_sentry_span_creation(), **kwargs)
 
             if task is None:
                 # The default task factory in `asyncio` does not have its own function
@@ -79,9 +77,7 @@ async def _task_with_sentry_span_creation():
 
             # Set the task name to include the original coroutine's name
             try:
-                cast("asyncio.Task[Any]", task).set_name(
-                    f"{get_name(coro)} (Sentry-wrapped)"
-                )
+                cast("asyncio.Task[Any]", task).set_name(f"{get_name(coro)} (Sentry-wrapped)")
             except AttributeError:
                 # set_name might not be available in all Python versions
                 pass
diff --git a/src/sentry_sdk_alpha/integrations/asyncpg.py b/src/sentry_sdk_alpha/integrations/asyncpg.py
index 440e4ca029804f..f89956f0e2563b 100644
--- a/src/sentry_sdk_alpha/integrations/asyncpg.py
+++ b/src/sentry_sdk_alpha/integrations/asyncpg.py
@@ -1,17 +1,19 @@
 from __future__ import annotations
+
 import contextlib
-from typing import Any, TypeVar, Callable, Awaitable, Iterator, Optional
+from collections.abc import Awaitable, Callable, Iterator
+from typing import Any, Optional, TypeVar
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP, SPANDATA
-from sentry_sdk_alpha.integrations import _check_minimum_version, Integration, DidNotEnable
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
 from sentry_sdk_alpha.tracing import Span
 from sentry_sdk_alpha.tracing_utils import add_query_source, record_sql_queries
 from sentry_sdk_alpha.utils import (
     _serialize_span_attribute,
+    capture_internal_exceptions,
     ensure_integration_enabled,
     parse_version,
-    capture_internal_exceptions,
 )
 
 try:
@@ -39,9 +41,7 @@ def setup_once() -> None:
         asyncpg.Connection.execute = _wrap_execute(
             asyncpg.Connection.execute,
         )
-        asyncpg.Connection._execute = _wrap_connection_method(
-            asyncpg.Connection._execute
-        )
+        asyncpg.Connection._execute = _wrap_connection_method(asyncpg.Connection._execute)
         asyncpg.Connection._executemany = _wrap_connection_method(
             asyncpg.Connection._executemany, executemany=True
         )
@@ -176,9 +176,7 @@ async def _inner(*args: Any, **kwargs: Any) -> T:
             _set_on_span(span, data)
 
             with capture_internal_exceptions():
-                sentry_sdk_alpha.add_breadcrumb(
-                    message="connect", category="query", data=data
-                )
+                sentry_sdk_alpha.add_breadcrumb(message="connect", category="query", data=data)
 
             res = await f(*args, **kwargs)
 
@@ -189,9 +187,9 @@ async def _inner(*args: Any, **kwargs: Any) -> T:
 
 def _get_db_data(
     conn: Any = None,
-    addr: Optional[tuple[str, ...]] = None,
-    database: Optional[str] = None,
-    user: Optional[str] = None,
+    addr: tuple[str, ...] | None = None,
+    database: str | None = None,
+    user: str | None = None,
 ) -> dict[str, str]:
     if conn is not None:
         addr = conn._addr
diff --git a/src/sentry_sdk_alpha/integrations/atexit.py b/src/sentry_sdk_alpha/integrations/atexit.py
index 20fcc05bc13e4b..9afde4d7c75895 100644
--- a/src/sentry_sdk_alpha/integrations/atexit.py
+++ b/src/sentry_sdk_alpha/integrations/atexit.py
@@ -1,15 +1,14 @@
+import atexit
 import os
 import sys
-import atexit
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
-from sentry_sdk_alpha.utils import logger
 from sentry_sdk_alpha.integrations import Integration
-from typing import TYPE_CHECKING
+from sentry_sdk_alpha.utils import logger
 
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Optional
+    from typing import Any, Optional
 
 
 def default_callback(pending, timeout):
diff --git a/src/sentry_sdk_alpha/integrations/aws_lambda.py b/src/sentry_sdk_alpha/integrations/aws_lambda.py
index e77ff907f55297..5b587a348cfc18 100644
--- a/src/sentry_sdk_alpha/integrations/aws_lambda.py
+++ b/src/sentry_sdk_alpha/integrations/aws_lambda.py
@@ -5,36 +5,33 @@
 from copy import deepcopy
 from datetime import datetime, timedelta, timezone
 from os import environ
+from typing import TYPE_CHECKING
 from urllib.parse import urlencode
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP
+from sentry_sdk_alpha.integrations import Integration
+from sentry_sdk_alpha.integrations._wsgi_common import (
+    _filter_headers,
+    _request_headers_to_span_attributes,
+)
 from sentry_sdk_alpha.scope import should_send_default_pii
 from sentry_sdk_alpha.tracing import TransactionSource
 from sentry_sdk_alpha.utils import (
     AnnotatedValue,
+    TimeoutThread,
     capture_internal_exceptions,
     ensure_integration_enabled,
     event_from_exception,
     logger,
-    TimeoutThread,
     reraise,
 )
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.integrations._wsgi_common import (
-    _filter_headers,
-    _request_headers_to_span_attributes,
-)
-
-from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import TypeVar
-    from typing import Callable
-    from typing import Optional
+    from collections.abc import Callable
+    from typing import Any, Optional, TypeVar
 
-    from sentry_sdk_alpha._types import EventProcessor, Event, Hint
+    from sentry_sdk_alpha._types import Event, EventProcessor, Hint
 
     F = TypeVar("F", bound=Callable[..., Any])
 
@@ -131,26 +128,17 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs):
             with capture_internal_exceptions():
                 scope.clear_breadcrumbs()
                 scope.add_event_processor(
-                    _make_request_event_processor(
-                        request_data, aws_context, configured_time
-                    )
-                )
-                scope.set_tag(
-                    "aws_region", aws_context.invoked_function_arn.split(":")[3]
+                    _make_request_event_processor(request_data, aws_context, configured_time)
                 )
+                scope.set_tag("aws_region", aws_context.invoked_function_arn.split(":")[3])
                 if batch_size > 1:
                     scope.set_tag("batch_request", True)
                     scope.set_tag("batch_size", batch_size)
 
                 # Starting the Timeout thread only if the configured time is greater than Timeout warning
                 # buffer and timeout_warning parameter is set True.
-                if (
-                    integration.timeout_warning
-                    and configured_time > TIMEOUT_WARNING_BUFFER
-                ):
-                    waiting_time = (
-                        configured_time - TIMEOUT_WARNING_BUFFER
-                    ) / MILLIS_TO_SECONDS
+                if integration.timeout_warning and configured_time > TIMEOUT_WARNING_BUFFER:
+                    waiting_time = (configured_time - TIMEOUT_WARNING_BUFFER) / MILLIS_TO_SECONDS
 
                     timeout_thread = TimeoutThread(
                         waiting_time,
@@ -240,9 +228,7 @@ def sentry_handle_event_request(  # type: ignore
             lambda_runtime_client, request_handler, *args, **kwargs
         ):
             request_handler = _wrap_handler(request_handler)
-            return old_handle_event_request(
-                lambda_runtime_client, request_handler, *args, **kwargs
-            )
+            return old_handle_event_request(lambda_runtime_client, request_handler, *args, **kwargs)
 
         lambda_bootstrap.handle_event_request = sentry_handle_event_request
 
@@ -258,15 +244,11 @@ def inner(*args, **kwargs):
 
             return inner  # type: ignore
 
-        lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = (
-            _wrap_post_function(
-                lambda_bootstrap.LambdaRuntimeClient.post_invocation_result
-            )
+        lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = _wrap_post_function(
+            lambda_bootstrap.LambdaRuntimeClient.post_invocation_result
         )
-        lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = (
-            _wrap_post_function(
-                lambda_bootstrap.LambdaRuntimeClient.post_invocation_error
-            )
+        lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = _wrap_post_function(
+            lambda_bootstrap.LambdaRuntimeClient.post_invocation_error
         )
 
 
@@ -293,9 +275,7 @@ def get_lambda_bootstrap():
     elif "__main__" in sys.modules:
         module = sys.modules["__main__"]
         # python3.9 runtime
-        if hasattr(module, "awslambdaricmain") and hasattr(
-            module.awslambdaricmain, "bootstrap"
-        ):
+        if hasattr(module, "awslambdaricmain") and hasattr(module.awslambdaricmain, "bootstrap"):
             return module.awslambdaricmain.bootstrap
         elif hasattr(module, "bootstrap"):
             # awslambdaric python module in container builds
@@ -388,8 +368,8 @@ def _get_url(aws_event, aws_context):
     host = headers.get("Host", None)
     proto = headers.get("X-Forwarded-Proto", None)
     if proto and host and path:
-        return "{}://{}{}".format(proto, host, path)
-    return "awslambda:///{}".format(aws_context.function_name)
+        return f"{proto}://{host}{path}"
+    return f"awslambda:///{aws_context.function_name}"
 
 
 def _get_cloudwatch_logs_url(aws_context, start_time):
@@ -416,9 +396,7 @@ def _get_cloudwatch_logs_url(aws_context, start_time):
         log_group=aws_context.log_group_name,
         log_stream=aws_context.log_stream_name,
         start_time=(start_time - timedelta(seconds=1)).strftime(formatstring),
-        end_time=(datetime.now(timezone.utc) + timedelta(seconds=2)).strftime(
-            formatstring
-        ),
+        end_time=(datetime.now(timezone.utc) + timedelta(seconds=2)).strftime(formatstring),
     )
 
     return url
@@ -463,9 +441,7 @@ def _event_from_error_json(error_json):
                     "type": error_json.get("errorType"),
                     "value": error_json.get("errorMessage"),
                     "stacktrace": {
-                        "frames": _parse_formatted_traceback(
-                            error_json.get("stackTrace", [])
-                        ),
+                        "frames": _parse_formatted_traceback(error_json.get("stackTrace", [])),
                     },
                     "mechanism": {
                         "type": "aws_lambda",
diff --git a/src/sentry_sdk_alpha/integrations/beam.py b/src/sentry_sdk_alpha/integrations/beam.py
index d7ce271d9b5979..63929578c99947 100644
--- a/src/sentry_sdk_alpha/integrations/beam.py
+++ b/src/sentry_sdk_alpha/integrations/beam.py
@@ -1,6 +1,7 @@
 import sys
 import types
 from functools import wraps
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.integrations import Integration
@@ -12,13 +13,9 @@
     reraise,
 )
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Iterator
-    from typing import TypeVar
-    from typing import Callable
+    from collections.abc import Callable, Iterator
+    from typing import Any, TypeVar
 
     from sentry_sdk_alpha._types import ExcInfo
 
diff --git a/src/sentry_sdk_alpha/integrations/boto3.py b/src/sentry_sdk_alpha/integrations/boto3.py
index 2bc70689af92fa..466e0da371cc92 100644
--- a/src/sentry_sdk_alpha/integrations/boto3.py
+++ b/src/sentry_sdk_alpha/integrations/boto3.py
@@ -1,8 +1,9 @@
 from functools import partial
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP, SPANDATA
-from sentry_sdk_alpha.integrations import _check_minimum_version, Integration, DidNotEnable
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
 from sentry_sdk_alpha.utils import (
     capture_internal_exceptions,
     ensure_integration_enabled,
@@ -10,21 +11,16 @@
     parse_version,
 )
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Dict
-    from typing import Optional
-    from typing import Type
+    from typing import Any, Dict, Optional, Type
 
     from sentry_sdk_alpha.tracing import Span
 
 try:
     from botocore import __version__ as BOTOCORE_VERSION  # type: ignore
+    from botocore.awsrequest import AWSRequest  # type: ignore
     from botocore.client import BaseClient  # type: ignore
     from botocore.response import StreamingBody  # type: ignore
-    from botocore.awsrequest import AWSRequest  # type: ignore
 except ImportError:
     raise DidNotEnable("botocore is not installed")
 
@@ -59,7 +55,7 @@ def sentry_patched_init(self, *args, **kwargs):
 @ensure_integration_enabled(Boto3Integration)
 def _sentry_request_created(service_id, request, operation_name, **kwargs):
     # type: (str, AWSRequest, str, **Any) -> None
-    description = "aws.%s.%s" % (service_id, operation_name)
+    description = "aws.{}.{}".format(service_id, operation_name)
     span = sentry_sdk_alpha.start_span(
         op=OP.HTTP_CLIENT,
         name=description,
diff --git a/src/sentry_sdk_alpha/integrations/bottle.py b/src/sentry_sdk_alpha/integrations/bottle.py
index 47619bab09b945..1a949833d96c83 100644
--- a/src/sentry_sdk_alpha/integrations/bottle.py
+++ b/src/sentry_sdk_alpha/integrations/bottle.py
@@ -1,7 +1,16 @@
 import functools
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import SOURCE_FOR_STYLE
+from sentry_sdk_alpha.integrations import (
+    _DEFAULT_FAILED_REQUEST_STATUS_CODES,
+    DidNotEnable,
+    Integration,
+    _check_minimum_version,
+)
+from sentry_sdk_alpha.integrations._wsgi_common import RequestExtractor
+from sentry_sdk_alpha.integrations.wsgi import SentryWsgiMiddleware
 from sentry_sdk_alpha.utils import (
     capture_internal_exceptions,
     ensure_integration_enabled,
@@ -9,37 +18,20 @@
     parse_version,
     transaction_from_function,
 )
-from sentry_sdk_alpha.integrations import (
-    Integration,
-    DidNotEnable,
-    _DEFAULT_FAILED_REQUEST_STATUS_CODES,
-    _check_minimum_version,
-)
-from sentry_sdk_alpha.integrations.wsgi import SentryWsgiMiddleware
-from sentry_sdk_alpha.integrations._wsgi_common import RequestExtractor
-
-from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
-    from collections.abc import Set
+    from collections.abc import Callable, Set
+    from typing import Any, Dict, Optional
 
-    from sentry_sdk_alpha.integrations.wsgi import _ScopedResponse
-    from typing import Any
-    from typing import Dict
-    from typing import Callable
-    from typing import Optional
     from bottle import FileUpload, FormsDict, LocalRequest  # type: ignore
 
-    from sentry_sdk_alpha._types import EventProcessor, Event
+    from sentry_sdk_alpha._types import Event, EventProcessor
+    from sentry_sdk_alpha.integrations.wsgi import _ScopedResponse
 
 try:
-    from bottle import (
-        Bottle,
-        HTTPResponse,
-        Route,
-        request as bottle_request,
-        __version__ as BOTTLE_VERSION,
-    )
+    from bottle import Bottle, HTTPResponse, Route
+    from bottle import __version__ as BOTTLE_VERSION
+    from bottle import request as bottle_request
 except ImportError:
     raise DidNotEnable("Bottle not installed")
 
@@ -184,11 +176,7 @@ def _set_transaction_name_and_source(event, transaction_style, request):
 
     elif transaction_style == "endpoint":
         try:
-            name = (
-                request.route.name
-                or transaction_from_function(request.route.callback)
-                or ""
-            )
+            name = request.route.name or transaction_from_function(request.route.callback) or ""
         except RuntimeError:
             pass
 
diff --git a/src/sentry_sdk_alpha/integrations/celery/__init__.py b/src/sentry_sdk_alpha/integrations/celery/__init__.py
index e1d9a9d93a46e5..07c2faaeb298f5 100644
--- a/src/sentry_sdk_alpha/integrations/celery/__init__.py
+++ b/src/sentry_sdk_alpha/integrations/celery/__init__.py
@@ -1,11 +1,12 @@
 import sys
 from collections.abc import Mapping
 from functools import wraps
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha import isolation_scope
-from sentry_sdk_alpha.consts import OP, SPANSTATUS, SPANDATA, BAGGAGE_HEADER_NAME
-from sentry_sdk_alpha.integrations import _check_minimum_version, Integration, DidNotEnable
+from sentry_sdk_alpha.consts import BAGGAGE_HEADER_NAME, OP, SPANDATA, SPANSTATUS
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
 from sentry_sdk_alpha.integrations.celery.beat import (
     _patch_beat_apply_entry,
     _patch_redbeat_maybe_due,
@@ -22,17 +23,11 @@
     reraise,
 )
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Callable
-    from typing import List
-    from typing import Optional
-    from typing import TypeVar
-    from typing import Union
-
-    from sentry_sdk_alpha._types import EventProcessor, Event, Hint, ExcInfo
+    from collections.abc import Callable
+    from typing import Any, List, Optional, TypeVar, Union
+
+    from sentry_sdk_alpha._types import Event, EventProcessor, ExcInfo, Hint
     from sentry_sdk_alpha.tracing import Span
 
     F = TypeVar("F", bound=Callable[..., Any])
@@ -42,12 +37,7 @@
     from celery import VERSION as CELERY_VERSION  # type: ignore
     from celery.app.task import Task  # type: ignore
     from celery.app.trace import task_has_custom
-    from celery.exceptions import (  # type: ignore
-        Ignore,
-        Reject,
-        Retry,
-        SoftTimeLimitExceeded,
-    )
+    from celery.exceptions import Ignore, Reject, Retry, SoftTimeLimitExceeded  # type: ignore
     from kombu import Producer  # type: ignore
 except ImportError:
     raise DidNotEnable("Celery not installed")
@@ -175,16 +165,13 @@ def _update_celery_task_headers(original_headers, span, monitor_beat_tasks):
         if monitor_beat_tasks:
             headers.update(
                 {
-                    "sentry-monitor-start-timestamp-s": "%.9f"
-                    % _now_seconds_since_epoch(),
+                    "sentry-monitor-start-timestamp-s": "%.9f" % _now_seconds_since_epoch(),
                 }
             )
 
         # Add the time the task was enqueued to the headers
         # This is used in the consumer to calculate the latency
-        updated_headers.update(
-            {"sentry-task-enqueued-time": _now_seconds_since_epoch()}
-        )
+        updated_headers.update({"sentry-task-enqueued-time": _now_seconds_since_epoch()})
 
         if headers:
             existing_baggage = updated_headers.get(BAGGAGE_HEADER_NAME)
@@ -380,17 +367,13 @@ def _inner(*args, **kwargs):
                         )
 
                 if latency is not None:
-                    span.set_attribute(
-                        SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency
-                    )
+                    span.set_attribute(SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency)
 
                 with capture_internal_exceptions():
                     span.set_attribute(SPANDATA.MESSAGING_MESSAGE_ID, task.request.id)
 
                 with capture_internal_exceptions():
-                    span.set_attribute(
-                        SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, task.request.retries
-                    )
+                    span.set_attribute(SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, task.request.retries)
 
                 with capture_internal_exceptions():
                     span.set_attribute(
@@ -462,10 +445,7 @@ def sentry_workloop(*args, **kwargs):
             return original_workloop(*args, **kwargs)
         finally:
             with capture_internal_exceptions():
-                if (
-                    sentry_sdk_alpha.get_client().get_integration(CeleryIntegration)
-                    is not None
-                ):
+                if sentry_sdk_alpha.get_client().get_integration(CeleryIntegration) is not None:
                     sentry_sdk_alpha.flush()
 
     Worker.workloop = sentry_workloop
@@ -512,9 +492,7 @@ def sentry_publish(self, *args, **kwargs):
                 span.set_attribute(SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, retries)
 
             with capture_internal_exceptions():
-                span.set_attribute(
-                    SPANDATA.MESSAGING_SYSTEM, self.connection.transport.driver_type
-                )
+                span.set_attribute(SPANDATA.MESSAGING_SYSTEM, self.connection.transport.driver_type)
 
             return original_publish(self, *args, **kwargs)
 
diff --git a/src/sentry_sdk_alpha/integrations/celery/beat.py b/src/sentry_sdk_alpha/integrations/celery/beat.py
index 8fa67f448df9ae..cd95195ef9cf04 100644
--- a/src/sentry_sdk_alpha/integrations/celery/beat.py
+++ b/src/sentry_sdk_alpha/integrations/celery/beat.py
@@ -1,20 +1,18 @@
+from typing import TYPE_CHECKING
+
 import sentry_sdk_alpha
-from sentry_sdk_alpha.crons import capture_checkin, MonitorStatus
+from sentry_sdk_alpha.crons import MonitorStatus, capture_checkin
 from sentry_sdk_alpha.integrations import DidNotEnable
 from sentry_sdk_alpha.integrations.celery.utils import (
     _get_humanized_interval,
     _now_seconds_since_epoch,
 )
-from sentry_sdk_alpha.utils import (
-    logger,
-    match_regex_list,
-)
-
-from typing import TYPE_CHECKING
+from sentry_sdk_alpha.utils import logger, match_regex_list
 
 if TYPE_CHECKING:
     from collections.abc import Callable
     from typing import Any, Optional, TypeVar, Union
+
     from sentry_sdk_alpha._types import (
         MonitorConfig,
         MonitorConfigScheduleType,
@@ -25,14 +23,10 @@
 
 
 try:
-    from celery import Task, Celery  # type: ignore
+    from celery import Celery, Task  # type: ignore
     from celery.beat import Scheduler  # type: ignore
     from celery.schedules import crontab, schedule  # type: ignore
-    from celery.signals import (  # type: ignore
-        task_failure,
-        task_success,
-        task_retry,
-    )
+    from celery.signals import task_failure, task_retry, task_success  # type: ignore
 except ImportError:
     raise DidNotEnable("Celery not installed")
 
@@ -74,9 +68,7 @@ def _get_monitor_config(celery_schedule, app, monitor_name):
         )
     elif isinstance(celery_schedule, schedule):
         schedule_type = "interval"
-        (schedule_value, schedule_unit) = _get_humanized_interval(
-            celery_schedule.seconds
-        )
+        (schedule_value, schedule_unit) = _get_humanized_interval(celery_schedule.seconds)
 
         if schedule_unit == "second":
             logger.warning(
@@ -123,9 +115,7 @@ def _apply_crons_data_to_schedule_entry(scheduler, schedule_entry, integration):
 
     monitor_name = schedule_entry.name
 
-    task_should_be_excluded = match_regex_list(
-        monitor_name, integration.exclude_beat_tasks
-    )
+    task_should_be_excluded = match_regex_list(monitor_name, integration.exclude_beat_tasks)
     if task_should_be_excluded:
         return
 
@@ -235,9 +225,7 @@ def crons_task_success(sender, **kwargs):
         monitor_config=monitor_config,
         check_in_id=headers["sentry-monitor-check-in-id"],
         duration=(
-            _now_seconds_since_epoch() - float(start_timestamp_s)
-            if start_timestamp_s
-            else None
+            _now_seconds_since_epoch() - float(start_timestamp_s) if start_timestamp_s else None
         ),
         status=MonitorStatus.OK,
     )
@@ -260,9 +248,7 @@ def crons_task_failure(sender, **kwargs):
         monitor_config=monitor_config,
         check_in_id=headers["sentry-monitor-check-in-id"],
         duration=(
-            _now_seconds_since_epoch() - float(start_timestamp_s)
-            if start_timestamp_s
-            else None
+            _now_seconds_since_epoch() - float(start_timestamp_s) if start_timestamp_s else None
         ),
         status=MonitorStatus.ERROR,
     )
@@ -285,9 +271,7 @@ def crons_task_retry(sender, **kwargs):
         monitor_config=monitor_config,
         check_in_id=headers["sentry-monitor-check-in-id"],
         duration=(
-            _now_seconds_since_epoch() - float(start_timestamp_s)
-            if start_timestamp_s
-            else None
+            _now_seconds_since_epoch() - float(start_timestamp_s) if start_timestamp_s else None
         ),
         status=MonitorStatus.ERROR,
     )
diff --git a/src/sentry_sdk_alpha/integrations/celery/utils.py b/src/sentry_sdk_alpha/integrations/celery/utils.py
index 9da8d118abd418..47265db49511e3 100644
--- a/src/sentry_sdk_alpha/integrations/celery/utils.py
+++ b/src/sentry_sdk_alpha/integrations/celery/utils.py
@@ -3,6 +3,7 @@
 
 if TYPE_CHECKING:
     from typing import Any, Tuple
+
     from sentry_sdk_alpha._types import MonitorConfigScheduleUnit
 
 
diff --git a/src/sentry_sdk_alpha/integrations/chalice.py b/src/sentry_sdk_alpha/integrations/chalice.py
index 6f532e20f8c8a4..b3dd0f3b38f4ed 100644
--- a/src/sentry_sdk_alpha/integrations/chalice.py
+++ b/src/sentry_sdk_alpha/integrations/chalice.py
@@ -2,7 +2,7 @@
 from functools import wraps
 
 import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import Integration, DidNotEnable
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration
 from sentry_sdk_alpha.integrations.aws_lambda import _make_request_event_processor
 from sentry_sdk_alpha.tracing import TransactionSource
 from sentry_sdk_alpha.utils import (
@@ -14,8 +14,8 @@
 
 try:
     import chalice  # type: ignore
-    from chalice import __version__ as CHALICE_VERSION
     from chalice import Chalice, ChaliceViewError
+    from chalice import __version__ as CHALICE_VERSION
     from chalice.app import EventSourceHandler as ChaliceEventSourceHandler  # type: ignore
 except ImportError:
     raise DidNotEnable("Chalice is not installed")
@@ -23,10 +23,8 @@
 from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Dict
-    from typing import TypeVar
-    from typing import Callable
+    from collections.abc import Callable
+    from typing import Any, Dict, TypeVar
 
     F = TypeVar("F", bound=Callable[..., Any])
 
@@ -105,26 +103,20 @@ def setup_once():
         version = parse_version(CHALICE_VERSION)
 
         if version is None:
-            raise DidNotEnable("Unparsable Chalice version: {}".format(CHALICE_VERSION))
+            raise DidNotEnable(f"Unparsable Chalice version: {CHALICE_VERSION}")
 
         if version < (1, 20):
             old_get_view_function_response = Chalice._get_view_function_response
         else:
             from chalice.app import RestAPIEventHandler
 
-            old_get_view_function_response = (
-                RestAPIEventHandler._get_view_function_response
-            )
+            old_get_view_function_response = RestAPIEventHandler._get_view_function_response
 
         def sentry_event_response(app, view_function, function_args):
             # type: (Any, F, Dict[str, Any]) -> Any
-            wrapped_view_function = _get_view_function_response(
-                app, view_function, function_args
-            )
+            wrapped_view_function = _get_view_function_response(app, view_function, function_args)
 
-            return old_get_view_function_response(
-                app, wrapped_view_function, function_args
-            )
+            return old_get_view_function_response(app, wrapped_view_function, function_args)
 
         if version < (1, 20):
             Chalice._get_view_function_response = sentry_event_response
diff --git a/src/sentry_sdk_alpha/integrations/clickhouse_driver.py b/src/sentry_sdk_alpha/integrations/clickhouse_driver.py
index 76c5f42e2ff056..b5ffed4f14402d 100644
--- a/src/sentry_sdk_alpha/integrations/clickhouse_driver.py
+++ b/src/sentry_sdk_alpha/integrations/clickhouse_driver.py
@@ -1,21 +1,22 @@
+from typing import TYPE_CHECKING, Any, Dict, TypeVar, cast
+
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP, SPANDATA
-from sentry_sdk_alpha.integrations import _check_minimum_version, Integration, DidNotEnable
-from sentry_sdk_alpha.tracing import Span
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
 from sentry_sdk_alpha.scope import should_send_default_pii
+from sentry_sdk_alpha.tracing import Span
 from sentry_sdk_alpha.utils import (
     _serialize_span_attribute,
     capture_internal_exceptions,
     ensure_integration_enabled,
 )
 
-from typing import TYPE_CHECKING, cast, Any, Dict, TypeVar
-
 # Hack to get new Python features working in older versions
 # without introducing a hard dependency on `typing_extensions`
 # from: https://stackoverflow.com/a/71944042/300572
 if TYPE_CHECKING:
-    from typing import ParamSpec, Callable
+    from collections.abc import Callable
+    from typing import ParamSpec
 else:
     # Fake ParamSpec
     class ParamSpec:
@@ -131,9 +132,7 @@ def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T:
             with capture_internal_exceptions():
                 query = data.pop("db.query.text", None)
                 if query:
-                    sentry_sdk_alpha.add_breadcrumb(
-                        message=query, category="query", data=data
-                    )
+                    sentry_sdk_alpha.add_breadcrumb(message=query, category="query", data=data)
 
             span.finish()
 
@@ -160,9 +159,7 @@ def _inner_send_data(*args: P.args, **kwargs: P.kwargs) -> T:
             _set_on_span(span, data)
 
             if should_send_default_pii():
-                saved_db_data = getattr(
-                    connection, "_sentry_db_data", {}
-                )  # type: dict[str, Any]
+                saved_db_data = getattr(connection, "_sentry_db_data", {})  # type: dict[str, Any]
                 db_params = saved_db_data.get("db.params") or []  # type: list[Any]
                 db_params.extend(db_params_data)
                 saved_db_data["db.params"] = db_params
@@ -173,7 +170,7 @@ def _inner_send_data(*args: P.args, **kwargs: P.kwargs) -> T:
     return _inner_send_data
 
 
-def _get_db_data(connection: clickhouse_driver.connection.Connection) -> Dict[str, str]:
+def _get_db_data(connection: clickhouse_driver.connection.Connection) -> dict[str, str]:
     return {
         SPANDATA.DB_SYSTEM: "clickhouse",
         SPANDATA.SERVER_ADDRESS: connection.host,
@@ -183,6 +180,6 @@ def _get_db_data(connection: clickhouse_driver.connection.Connection) -> Dict[st
     }
 
 
-def _set_on_span(span: Span, data: Dict[str, Any]) -> None:
+def _set_on_span(span: Span, data: dict[str, Any]) -> None:
     for key, value in data.items():
         span.set_attribute(key, _serialize_span_attribute(value))
diff --git a/src/sentry_sdk_alpha/integrations/cloud_resource_context.py b/src/sentry_sdk_alpha/integrations/cloud_resource_context.py
index 1eb9452eb6fb8c..7d40ec80668b1f 100644
--- a/src/sentry_sdk_alpha/integrations/cloud_resource_context.py
+++ b/src/sentry_sdk_alpha/integrations/cloud_resource_context.py
@@ -1,12 +1,12 @@
 import json
+from typing import TYPE_CHECKING
+
 import urllib3
 
-from sentry_sdk_alpha.integrations import Integration
 from sentry_sdk_alpha.api import set_context
+from sentry_sdk_alpha.integrations import Integration
 from sentry_sdk_alpha.utils import logger
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
     from typing import Dict
 
@@ -16,15 +16,11 @@
 HTTP_TIMEOUT = 2.0
 
 AWS_METADATA_HOST = "169.254.169.254"
-AWS_TOKEN_URL = "http://{}/latest/api/token".format(AWS_METADATA_HOST)
-AWS_METADATA_URL = "http://{}/latest/dynamic/instance-identity/document".format(
-    AWS_METADATA_HOST
-)
+AWS_TOKEN_URL = f"http://{AWS_METADATA_HOST}/latest/api/token"
+AWS_METADATA_URL = "http://{}/latest/dynamic/instance-identity/document".format(AWS_METADATA_HOST)
 
 GCP_METADATA_HOST = "metadata.google.internal"
-GCP_METADATA_URL = "http://{}/computeMetadata/v1/?recursive=true".format(
-    GCP_METADATA_HOST
-)
+GCP_METADATA_URL = "http://{}/computeMetadata/v1/?recursive=true".format(GCP_METADATA_HOST)
 
 
 class CLOUD_PROVIDER:  # noqa: N801
@@ -86,9 +82,7 @@ def _is_aws(cls):
             return True
 
         except urllib3.exceptions.TimeoutError:
-            logger.debug(
-                "AWS metadata service timed out after %s seconds", HTTP_TIMEOUT
-            )
+            logger.debug("AWS metadata service timed out after %s seconds", HTTP_TIMEOUT)
             return False
         except Exception as e:
             logger.debug("Error checking AWS metadata service: %s", str(e))
@@ -140,9 +134,7 @@ def _get_aws_context(cls):
                 pass
 
         except urllib3.exceptions.TimeoutError:
-            logger.debug(
-                "AWS metadata service timed out after %s seconds", HTTP_TIMEOUT
-            )
+            logger.debug("AWS metadata service timed out after %s seconds", HTTP_TIMEOUT)
         except Exception as e:
             logger.debug("Error fetching AWS metadata: %s", str(e))
 
@@ -165,9 +157,7 @@ def _is_gcp(cls):
             return True
 
         except urllib3.exceptions.TimeoutError:
-            logger.debug(
-                "GCP metadata service timed out after %s seconds", HTTP_TIMEOUT
-            )
+            logger.debug("GCP metadata service timed out after %s seconds", HTTP_TIMEOUT)
             return False
         except Exception as e:
             logger.debug("Error checking GCP metadata service: %s", str(e))
@@ -200,17 +190,13 @@ def _get_gcp_context(cls):
                 pass
 
             try:
-                ctx["cloud.availability_zone"] = cls.gcp_metadata["instance"][
-                    "zone"
-                ].split("/")[-1]
+                ctx["cloud.availability_zone"] = cls.gcp_metadata["instance"]["zone"].split("/")[-1]
             except Exception:
                 pass
 
             try:
                 # only populated in google cloud run
-                ctx["cloud.region"] = cls.gcp_metadata["instance"]["region"].split("/")[
-                    -1
-                ]
+                ctx["cloud.region"] = cls.gcp_metadata["instance"]["region"].split("/")[-1]
             except Exception:
                 pass
 
@@ -220,9 +206,7 @@ def _get_gcp_context(cls):
                 pass
 
         except urllib3.exceptions.TimeoutError:
-            logger.debug(
-                "GCP metadata service timed out after %s seconds", HTTP_TIMEOUT
-            )
+            logger.debug("GCP metadata service timed out after %s seconds", HTTP_TIMEOUT)
         except Exception as e:
             logger.debug("Error fetching GCP metadata: %s", str(e))
 
diff --git a/src/sentry_sdk_alpha/integrations/cohere.py b/src/sentry_sdk_alpha/integrations/cohere.py
index aab650cb20e55c..bbe68150cf040f 100644
--- a/src/sentry_sdk_alpha/integrations/cohere.py
+++ b/src/sentry_sdk_alpha/integrations/cohere.py
@@ -1,28 +1,25 @@
 from functools import wraps
+from typing import TYPE_CHECKING
 
 from sentry_sdk_alpha import consts
 from sentry_sdk_alpha.ai.monitoring import record_token_usage
-from sentry_sdk_alpha.consts import SPANDATA
 from sentry_sdk_alpha.ai.utils import set_data_normalized
-
-from typing import TYPE_CHECKING
+from sentry_sdk_alpha.consts import SPANDATA
 
 if TYPE_CHECKING:
-    from typing import Any, Callable, Iterator
+    from typing import Any
+    from collections.abc import Callable, Iterator
     from sentry_sdk_alpha.tracing import Span
 
 import sentry_sdk_alpha
-from sentry_sdk_alpha.scope import should_send_default_pii
 from sentry_sdk_alpha.integrations import DidNotEnable, Integration
+from sentry_sdk_alpha.scope import should_send_default_pii
 from sentry_sdk_alpha.utils import capture_internal_exceptions, event_from_exception
 
 try:
-    from cohere.client import Client
+    from cohere import ChatStreamEndEvent, NonStreamedChatResponse
     from cohere.base_client import BaseCohere
-    from cohere import (
-        ChatStreamEndEvent,
-        NonStreamedChatResponse,
-    )
+    from cohere.client import Client
 
     if TYPE_CHECKING:
         from cohere import StreamedChatResponse
@@ -208,8 +205,7 @@ def new_iterator():
                 collect_chat_response_fields(
                     span,
                     res,
-                    include_pii=should_send_default_pii()
-                    and integration.include_prompts,
+                    include_pii=should_send_default_pii() and integration.include_prompts,
                 )
                 span.__exit__(None, None, None)
             else:
@@ -236,9 +232,7 @@ def new_embed(*args, **kwargs):
             origin=CohereIntegration.origin,
             only_if_parent=True,
         ) as span:
-            if "texts" in kwargs and (
-                should_send_default_pii() and integration.include_prompts
-            ):
+            if "texts" in kwargs and (should_send_default_pii() and integration.include_prompts):
                 if isinstance(kwargs["texts"], str):
                     set_data_normalized(span, SPANDATA.AI_TEXTS, [kwargs["texts"]])
                 elif (
@@ -246,9 +240,7 @@ def new_embed(*args, **kwargs):
                     and len(kwargs["texts"]) > 0
                     and isinstance(kwargs["texts"][0], str)
                 ):
-                    set_data_normalized(
-                        span, SPANDATA.AI_INPUT_MESSAGES, kwargs["texts"]
-                    )
+                    set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, kwargs["texts"])
 
             if "model" in kwargs:
                 set_data_normalized(span, SPANDATA.AI_MODEL_ID, kwargs["model"])
diff --git a/src/sentry_sdk_alpha/integrations/dedupe.py b/src/sentry_sdk_alpha/integrations/dedupe.py
index d47dc411e888c8..02d890475bed88 100644
--- a/src/sentry_sdk_alpha/integrations/dedupe.py
+++ b/src/sentry_sdk_alpha/integrations/dedupe.py
@@ -1,9 +1,9 @@
+from typing import TYPE_CHECKING
+
 import sentry_sdk_alpha
-from sentry_sdk_alpha.utils import ContextVar
 from sentry_sdk_alpha.integrations import Integration
 from sentry_sdk_alpha.scope import add_global_event_processor
-
-from typing import TYPE_CHECKING
+from sentry_sdk_alpha.utils import ContextVar
 
 if TYPE_CHECKING:
     from typing import Optional
diff --git a/src/sentry_sdk_alpha/integrations/django/__init__.py b/src/sentry_sdk_alpha/integrations/django/__init__.py
index 86658874986a71..e6c67879f033e0 100644
--- a/src/sentry_sdk_alpha/integrations/django/__init__.py
+++ b/src/sentry_sdk_alpha/integrations/django/__init__.py
@@ -6,35 +6,35 @@
 from importlib import import_module
 
 import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP, SPANDATA, SOURCE_FOR_STYLE, TransactionSource
+from sentry_sdk_alpha.consts import OP, SOURCE_FOR_STYLE, SPANDATA, TransactionSource
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
+from sentry_sdk_alpha.integrations._wsgi_common import (
+    DEFAULT_HTTP_METHODS_TO_CAPTURE,
+    RequestExtractor,
+)
+from sentry_sdk_alpha.integrations.logging import ignore_logger
+from sentry_sdk_alpha.integrations.wsgi import SentryWsgiMiddleware
 from sentry_sdk_alpha.scope import add_global_event_processor, should_send_default_pii
 from sentry_sdk_alpha.serializer import add_global_repr_processor
 from sentry_sdk_alpha.tracing_utils import add_query_source, record_sql_queries
 from sentry_sdk_alpha.utils import (
-    AnnotatedValue,
-    HAS_REAL_CONTEXTVARS,
     CONTEXTVARS_ERROR_MESSAGE,
+    HAS_REAL_CONTEXTVARS,
     SENSITIVE_DATA_SUBSTITUTE,
-    logger,
+    AnnotatedValue,
     capture_internal_exceptions,
     ensure_integration_enabled,
     event_from_exception,
+    logger,
     transaction_from_function,
     walk_exception_chain,
 )
-from sentry_sdk_alpha.integrations import _check_minimum_version, Integration, DidNotEnable
-from sentry_sdk_alpha.integrations.logging import ignore_logger
-from sentry_sdk_alpha.integrations.wsgi import SentryWsgiMiddleware
-from sentry_sdk_alpha.integrations._wsgi_common import (
-    DEFAULT_HTTP_METHODS_TO_CAPTURE,
-    RequestExtractor,
-)
 
 try:
     from django import VERSION as DJANGO_VERSION
+    from django.conf import settings
     from django.conf import settings as django_settings
     from django.core import signals
-    from django.conf import settings
 
     try:
         from django.urls import resolve
@@ -55,34 +55,30 @@
 except ImportError:
     raise DidNotEnable("Django not installed")
 
+from typing import TYPE_CHECKING
+
 from sentry_sdk_alpha.integrations.django.caching import patch_caching
-from sentry_sdk_alpha.integrations.django.transactions import LEGACY_RESOLVER
+from sentry_sdk_alpha.integrations.django.middleware import patch_django_middlewares
+from sentry_sdk_alpha.integrations.django.signals_handlers import patch_signals
 from sentry_sdk_alpha.integrations.django.templates import (
     get_template_frame_from_exception,
     patch_templates,
 )
-from sentry_sdk_alpha.integrations.django.middleware import patch_django_middlewares
-from sentry_sdk_alpha.integrations.django.signals_handlers import patch_signals
+from sentry_sdk_alpha.integrations.django.transactions import LEGACY_RESOLVER
 from sentry_sdk_alpha.integrations.django.views import patch_views
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Callable
-    from typing import Dict
-    from typing import Optional
-    from typing import Union
-    from typing import List
+    from collections.abc import Callable
+    from typing import Any, Dict, List, Optional, Union
 
     from django.core.handlers.wsgi import WSGIRequest
-    from django.http.response import HttpResponse
     from django.http.request import QueryDict
+    from django.http.response import HttpResponse
     from django.utils.datastructures import MultiValueDict
 
-    from sentry_sdk_alpha.tracing import Span
+    from sentry_sdk_alpha._types import Event, EventProcessor, Hint, NotImplementedType
     from sentry_sdk_alpha.integrations.wsgi import _ScopedResponse
-    from sentry_sdk_alpha._types import Event, Hint, EventProcessor, NotImplementedType
+    from sentry_sdk_alpha.tracing import Span
 
 
 TRANSACTION_STYLE_VALUES = ("function_name", "url")
@@ -241,7 +237,7 @@ def _django_queryset_repr(value, hint):
             if not isinstance(value, QuerySet) or value._result_cache:
                 return NotImplemented
 
-            return "<%s from %s at 0x%x>" % (
+            return "<{} from {} at 0x{:x}>".format(
                 value.__class__.__name__,
                 value.__module__,
                 id(value),
@@ -308,9 +304,7 @@ def _patch_drf():
                 def sentry_patched_drf_initial(self, request, *args, **kwargs):
                     # type: (APIView, Any, *Any, **Any) -> Any
                     with capture_internal_exceptions():
-                        request._request._sentry_drf_request_backref = weakref.ref(
-                            request
-                        )
+                        request._request._sentry_drf_request_backref = weakref.ref(request)
                         pass
                     return old_drf_initial(self, request, *args, **kwargs)
 
@@ -333,8 +327,7 @@ def _patch_channels():
         # workers in gunicorn+gevent and the websocket stuff in a separate
         # process.
         logger.warning(
-            "We detected that you are using Django channels 2.0."
-            + CONTEXTVARS_ERROR_MESSAGE
+            "We detected that you are using Django channels 2.0." + CONTEXTVARS_ERROR_MESSAGE
         )
 
     from sentry_sdk_alpha.integrations.django.asgi import patch_channels_asgi_handler_impl
@@ -355,9 +348,7 @@ def _patch_django_asgi_handler():
         #
         # We cannot hard-raise here because Django's ASGI stuff may not be used
         # at all.
-        logger.warning(
-            "We detected that you are using Django 3." + CONTEXTVARS_ERROR_MESSAGE
-        )
+        logger.warning("We detected that you are using Django 3." + CONTEXTVARS_ERROR_MESSAGE)
 
     from sentry_sdk_alpha.integrations.django.asgi import patch_django_asgi_handler_impl
 
@@ -399,9 +390,7 @@ def _set_transaction_name_and_source(scope, transaction_style, request):
             if isinstance(handler, str):
                 scope.set_transaction_name(handler)
             else:
-                name = transaction_from_function(
-                    getattr(handler, "view_class", handler)
-                )
+                name = transaction_from_function(getattr(handler, "view_class", handler))
                 if isinstance(name, str):
                     scope.set_transaction_name(name)
     except Exception:
@@ -420,9 +409,7 @@ def _before_get_response(request):
     # Rely on WSGI middleware to start a trace
     _set_transaction_name_and_source(scope, integration.transaction_style, request)
 
-    scope.add_event_processor(
-        _make_wsgi_request_event_processor(weakref.ref(request), integration)
-    )
+    scope.add_event_processor(_make_wsgi_request_event_processor(weakref.ref(request), integration))
 
 
 def _attempt_resolve_again(request, scope, transaction_style):
diff --git a/src/sentry_sdk_alpha/integrations/django/asgi.py b/src/sentry_sdk_alpha/integrations/django/asgi.py
index a56f956dbd71af..a0ada4f75607df 100644
--- a/src/sentry_sdk_alpha/integrations/django/asgi.py
+++ b/src/sentry_sdk_alpha/integrations/django/asgi.py
@@ -9,23 +9,19 @@
 import asyncio
 import functools
 import inspect
+from typing import TYPE_CHECKING
 
 from django.core.handlers.wsgi import WSGIRequest
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP
-
 from sentry_sdk_alpha.integrations.asgi import SentryAsgiMiddleware
 from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-)
-
-from typing import TYPE_CHECKING
+from sentry_sdk_alpha.utils import capture_internal_exceptions, ensure_integration_enabled
 
 if TYPE_CHECKING:
-    from typing import Any, Callable, Union, TypeVar
+    from collections.abc import Callable
+    from typing import Any, TypeVar, Union
 
     from django.core.handlers.asgi import ASGIRequest
     from django.http.response import HttpResponse
@@ -58,10 +54,7 @@ def asgi_request_event_processor(event, hint):
         # if the request is gone we are fine not logging the data from
         # it.  This might happen if the processor is pushed away to
         # another thread.
-        from sentry_sdk_alpha.integrations.django import (
-            DjangoRequestExtractor,
-            _set_user_info,
-        )
+        from sentry_sdk_alpha.integrations.django import DjangoRequestExtractor, _set_user_info
 
         if request is None:
             return event
diff --git a/src/sentry_sdk_alpha/integrations/django/caching.py b/src/sentry_sdk_alpha/integrations/django/caching.py
index 634418e4631d38..17963943d79f4f 100644
--- a/src/sentry_sdk_alpha/integrations/django/caching.py
+++ b/src/sentry_sdk_alpha/integrations/django/caching.py
@@ -1,23 +1,18 @@
 import functools
 from typing import TYPE_CHECKING
-from sentry_sdk_alpha.integrations.redis.utils import _get_safe_key, _key_as_string
-from urllib3.util import parse_url as urlparse
 
 from django import VERSION as DJANGO_VERSION
 from django.core.cache import CacheHandler
+from urllib3.util import parse_url as urlparse
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP, SPANDATA
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-)
-
+from sentry_sdk_alpha.integrations.redis.utils import _get_safe_key, _key_as_string
+from sentry_sdk_alpha.utils import capture_internal_exceptions, ensure_integration_enabled
 
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Callable
-    from typing import Optional
+    from collections.abc import Callable
+    from typing import Any, Optional
 
 
 METHODS_TO_INSTRUMENT = [
@@ -40,9 +35,7 @@ def _patch_cache_method(cache, method_name, address, port):
     original_method = getattr(cache, method_name)
 
     @ensure_integration_enabled(DjangoIntegration, original_method)
-    def _instrument_call(
-        cache, method_name, original_method, args, kwargs, address, port
-    ):
+    def _instrument_call(cache, method_name, original_method, args, kwargs, address, port):
         # type: (CacheHandler, str, Callable[..., Any], tuple[Any, ...], dict[str, Any], Optional[str], Optional[int]) -> Any
         is_set_operation = method_name.startswith("set")
         is_get_operation = not is_set_operation
@@ -93,9 +86,7 @@ def _instrument_call(
     @functools.wraps(original_method)
     def sentry_method(*args, **kwargs):
         # type: (*Any, **Any) -> Any
-        return _instrument_call(
-            cache, method_name, original_method, args, kwargs, address, port
-        )
+        return _instrument_call(cache, method_name, original_method, args, kwargs, address, port)
 
     setattr(cache, method_name, sentry_method)
 
@@ -151,9 +142,7 @@ def sentry_get_item(self, alias):
                 if integration is not None and integration.cache_spans:
                     from django.conf import settings
 
-                    address, port = _get_address_port(
-                        settings.CACHES[alias or "default"]
-                    )
+                    address, port = _get_address_port(settings.CACHES[alias or "default"])
 
                     _patch_cache(cache, address, port)
 
diff --git a/src/sentry_sdk_alpha/integrations/django/middleware.py b/src/sentry_sdk_alpha/integrations/django/middleware.py
index 22ad0cd9b81a30..af0835d8ed434c 100644
--- a/src/sentry_sdk_alpha/integrations/django/middleware.py
+++ b/src/sentry_sdk_alpha/integrations/django/middleware.py
@@ -3,6 +3,7 @@
 """
 
 from functools import wraps
+from typing import TYPE_CHECKING
 
 from django import VERSION as DJANGO_VERSION
 
@@ -10,25 +11,19 @@
 from sentry_sdk_alpha.consts import OP
 from sentry_sdk_alpha.utils import (
     ContextVar,
-    transaction_from_function,
     capture_internal_exceptions,
+    transaction_from_function,
 )
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Callable
-    from typing import Optional
-    from typing import TypeVar
+    from collections.abc import Callable
+    from typing import Any, Optional, TypeVar
 
     from sentry_sdk_alpha.tracing import Span
 
     F = TypeVar("F", bound=Callable[..., Any])
 
-_import_string_should_wrap_middleware = ContextVar(
-    "import_string_should_wrap_middleware"
-)
+_import_string_should_wrap_middleware = ContextVar("import_string_should_wrap_middleware")
 
 DJANGO_SUPPORTS_ASYNC_MIDDLEWARE = DJANGO_VERSION >= (3, 1)
 
@@ -83,7 +78,7 @@ def _check_middleware_span(old_method):
         description = middleware_name
         function_basename = getattr(old_method, "__name__", None)
         if function_basename:
-            description = "{}.{}".format(description, function_basename)
+            description = f"{description}.{function_basename}"
 
         middleware_span = sentry_sdk_alpha.start_span(
             op=OP.MIDDLEWARE_DJANGO,
diff --git a/src/sentry_sdk_alpha/integrations/django/signals_handlers.py b/src/sentry_sdk_alpha/integrations/django/signals_handlers.py
index aeecc673b3088d..8995d7fcb47f8c 100644
--- a/src/sentry_sdk_alpha/integrations/django/signals_handlers.py
+++ b/src/sentry_sdk_alpha/integrations/django/signals_handlers.py
@@ -1,4 +1,5 @@
 from functools import wraps
+from typing import TYPE_CHECKING
 
 from django.dispatch import Signal
 
@@ -6,8 +7,6 @@
 from sentry_sdk_alpha.consts import OP
 from sentry_sdk_alpha.integrations.django import DJANGO_VERSION
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
     from collections.abc import Callable
     from typing import Any, Union
@@ -21,9 +20,7 @@ def _get_receiver_name(receiver):
         name = receiver.__qualname__
     elif hasattr(receiver, "__name__"):  # Python 2.7 has no __qualname__
         name = receiver.__name__
-    elif hasattr(
-        receiver, "func"
-    ):  # certain functions (like partials) dont have a name
+    elif hasattr(receiver, "func"):  # certain functions (like partials) dont have a name
         if hasattr(receiver, "func") and hasattr(receiver.func, "__name__"):
             name = "partial()"
 
@@ -77,11 +74,7 @@ def wrapper(*args, **kwargs):
             return wrapper
 
         integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration)
-        if (
-            integration
-            and integration.signals_spans
-            and self not in integration.signals_denylist
-        ):
+        if integration and integration.signals_spans and self not in integration.signals_denylist:
             for idx, receiver in enumerate(sync_receivers):
                 sync_receivers[idx] = sentry_sync_receiver_wrapper(receiver)
 
diff --git a/src/sentry_sdk_alpha/integrations/django/templates.py b/src/sentry_sdk_alpha/integrations/django/templates.py
index 4bdeffb4facf4c..b42e4b57976f94 100644
--- a/src/sentry_sdk_alpha/integrations/django/templates.py
+++ b/src/sentry_sdk_alpha/integrations/django/templates.py
@@ -1,4 +1,5 @@
 import functools
+from typing import TYPE_CHECKING
 
 from django.template import TemplateSyntaxError
 from django.template.base import Origin
@@ -8,14 +9,9 @@
 from sentry_sdk_alpha.consts import OP
 from sentry_sdk_alpha.utils import ensure_integration_enabled
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Dict
-    from typing import Optional
-    from typing import Iterator
-    from typing import Tuple
+    from collections.abc import Iterator
+    from typing import Any, Dict, Optional, Tuple
 
 
 def get_template_frame_from_exception(exc_value):
@@ -29,9 +25,7 @@ def get_template_frame_from_exception(exc_value):
     # ``django_template_source`` attribute (rather than the legacy
     # ``TemplateSyntaxError.source`` check)
     if hasattr(exc_value, "django_template_source"):
-        return _get_template_frame_from_source(
-            exc_value.django_template_source  # type: ignore
-        )
+        return _get_template_frame_from_source(exc_value.django_template_source)  # type: ignore
 
     if isinstance(exc_value, TemplateSyntaxError) and hasattr(exc_value, "source"):
         source = exc_value.source
@@ -45,7 +39,7 @@ def _get_template_name_description(template_name):
     # type: (str) -> str
     if isinstance(template_name, (list, tuple)):
         if template_name:
-            return "[{}, ...]".format(template_name[0])
+            return f"[{template_name[0]}, ...]"
     else:
         return template_name
 
@@ -53,6 +47,7 @@ def _get_template_name_description(template_name):
 def patch_templates():
     # type: () -> None
     from django.template.response import SimpleTemplateResponse
+
     from sentry_sdk_alpha.integrations.django import DjangoIntegration
 
     real_rendered_content = SimpleTemplateResponse.rendered_content
diff --git a/src/sentry_sdk_alpha/integrations/django/transactions.py b/src/sentry_sdk_alpha/integrations/django/transactions.py
index 78b972bc375cce..fc2d58cd4dec90 100644
--- a/src/sentry_sdk_alpha/integrations/django/transactions.py
+++ b/src/sentry_sdk_alpha/integrations/django/transactions.py
@@ -6,7 +6,6 @@
 """
 
 import re
-
 from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
diff --git a/src/sentry_sdk_alpha/integrations/django/views.py b/src/sentry_sdk_alpha/integrations/django/views.py
index 4023f8eb90de89..c9f7874bfbe0c0 100644
--- a/src/sentry_sdk_alpha/integrations/django/views.py
+++ b/src/sentry_sdk_alpha/integrations/django/views.py
@@ -1,10 +1,9 @@
 import functools
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
     from typing import Any
 
@@ -26,6 +25,7 @@ def patch_views():
 
     from django.core.handlers.base import BaseHandler
     from django.template.response import SimpleTemplateResponse
+
     from sentry_sdk_alpha.integrations.django import DjangoIntegration
 
     old_make_view_atomic = BaseHandler.make_view_atomic
diff --git a/src/sentry_sdk_alpha/integrations/dramatiq.py b/src/sentry_sdk_alpha/integrations/dramatiq.py
index 6ee7f32fa537ae..19338f101e955a 100644
--- a/src/sentry_sdk_alpha/integrations/dramatiq.py
+++ b/src/sentry_sdk_alpha/integrations/dramatiq.py
@@ -1,23 +1,20 @@
 import json
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.integrations._wsgi_common import request_body_within_bounds
-from sentry_sdk_alpha.utils import (
-    AnnotatedValue,
-    capture_internal_exceptions,
-    event_from_exception,
-)
+from typing import TYPE_CHECKING
 
 from dramatiq.broker import Broker  # type: ignore
+from dramatiq.errors import Retry  # type: ignore
 from dramatiq.message import Message  # type: ignore
 from dramatiq.middleware import Middleware, default_middleware  # type: ignore
-from dramatiq.errors import Retry  # type: ignore
 
-from typing import TYPE_CHECKING
+import sentry_sdk_alpha
+from sentry_sdk_alpha.integrations import Integration
+from sentry_sdk_alpha.integrations._wsgi_common import request_body_within_bounds
+from sentry_sdk_alpha.utils import AnnotatedValue, capture_internal_exceptions, event_from_exception
 
 if TYPE_CHECKING:
-    from typing import Any, Callable, Dict, Optional, Union
+    from collections.abc import Callable
+    from typing import Any, Dict, Optional, Union
+
     from sentry_sdk_alpha._types import Event, Hint
 
 
diff --git a/src/sentry_sdk_alpha/integrations/excepthook.py b/src/sentry_sdk_alpha/integrations/excepthook.py
index 82f02a4ea0a9cb..ac33bbcf28fa38 100644
--- a/src/sentry_sdk_alpha/integrations/excepthook.py
+++ b/src/sentry_sdk_alpha/integrations/excepthook.py
@@ -1,24 +1,17 @@
 import sys
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    event_from_exception,
-)
 from sentry_sdk_alpha.integrations import Integration
-
-from typing import TYPE_CHECKING
+from sentry_sdk_alpha.utils import capture_internal_exceptions, event_from_exception
 
 if TYPE_CHECKING:
-    from typing import Callable
-    from typing import Any
-    from typing import Type
-    from typing import Optional
-
+    from collections.abc import Callable
     from types import TracebackType
+    from typing import Any, Optional, Type
 
     Excepthook = Callable[
-        [Type[BaseException], BaseException, Optional[TracebackType]],
+        [type[BaseException], BaseException, Optional[TracebackType]],
         Any,
     ]
 
@@ -33,8 +26,7 @@ def __init__(self, always_run=False):
 
         if not isinstance(always_run, bool):
             raise ValueError(
-                "Invalid value for always_run: %s (must be type boolean)"
-                % (always_run,)
+                "Invalid value for always_run: %s (must be type boolean)" % (always_run,)
             )
         self.always_run = always_run
 
diff --git a/src/sentry_sdk_alpha/integrations/executing.py b/src/sentry_sdk_alpha/integrations/executing.py
index 3208ddd03d27b6..f17193e308cbfc 100644
--- a/src/sentry_sdk_alpha/integrations/executing.py
+++ b/src/sentry_sdk_alpha/integrations/executing.py
@@ -1,9 +1,9 @@
+from typing import TYPE_CHECKING
+
 import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import Integration, DidNotEnable
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration
 from sentry_sdk_alpha.scope import add_global_event_processor
-from sentry_sdk_alpha.utils import walk_exception_chain, iter_stacks
-
-from typing import TYPE_CHECKING
+from sentry_sdk_alpha.utils import iter_stacks, walk_exception_chain
 
 if TYPE_CHECKING:
     from typing import Optional
diff --git a/src/sentry_sdk_alpha/integrations/falcon.py b/src/sentry_sdk_alpha/integrations/falcon.py
index 953b30fc41886e..7b5ef0ccd1a101 100644
--- a/src/sentry_sdk_alpha/integrations/falcon.py
+++ b/src/sentry_sdk_alpha/integrations/falcon.py
@@ -1,6 +1,8 @@
+from typing import TYPE_CHECKING
+
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import SOURCE_FOR_STYLE
-from sentry_sdk_alpha.integrations import _check_minimum_version, Integration, DidNotEnable
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
 from sentry_sdk_alpha.integrations._wsgi_common import RequestExtractor
 from sentry_sdk_alpha.integrations.wsgi import SentryWsgiMiddleware
 from sentry_sdk_alpha.utils import (
@@ -10,19 +12,14 @@
     parse_version,
 )
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Dict
-    from typing import Optional
+    from typing import Any, Dict, Optional
 
     from sentry_sdk_alpha._types import Event, EventProcessor
 
 
 try:
     import falcon  # type: ignore
-
     from falcon import __version__ as FALCON_VERSION
 except ImportError:
     raise DidNotEnable("Falcon not installed")
@@ -164,9 +161,7 @@ def sentry_patched_handle_exception(self, *args):
         ex = response = None
         with capture_internal_exceptions():
             ex = next(argument for argument in args if isinstance(argument, Exception))
-            response = next(
-                argument for argument in args if isinstance(argument, falcon.Response)
-            )
+            response = next(argument for argument in args if isinstance(argument, falcon.Response))
 
         was_handled = original_handle_exception(self, *args)
 
@@ -214,12 +209,8 @@ def sentry_patched_prepare_middleware(
 
 def _exception_leads_to_http_5xx(ex, response):
     # type: (Exception, falcon.Response) -> bool
-    is_server_error = isinstance(ex, falcon.HTTPError) and (ex.status or "").startswith(
-        "5"
-    )
-    is_unhandled_error = not isinstance(
-        ex, (falcon.HTTPError, falcon.http_status.HTTPStatus)
-    )
+    is_server_error = isinstance(ex, falcon.HTTPError) and (ex.status or "").startswith("5")
+    is_unhandled_error = not isinstance(ex, (falcon.HTTPError, falcon.http_status.HTTPStatus))
 
     return (is_server_error or is_unhandled_error) and _has_http_5xx_status(response)
 
diff --git a/src/sentry_sdk_alpha/integrations/fastapi.py b/src/sentry_sdk_alpha/integrations/fastapi.py
index 3dcfe22da725f4..9cdb3672418645 100644
--- a/src/sentry_sdk_alpha/integrations/fastapi.py
+++ b/src/sentry_sdk_alpha/integrations/fastapi.py
@@ -1,20 +1,18 @@
 import asyncio
 from copy import deepcopy
 from functools import wraps
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import SOURCE_FOR_STYLE, TransactionSource
 from sentry_sdk_alpha.integrations import DidNotEnable
 from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import (
-    transaction_from_function,
-    logger,
-)
-
-from typing import TYPE_CHECKING
+from sentry_sdk_alpha.utils import logger, transaction_from_function
 
 if TYPE_CHECKING:
-    from typing import Any, Callable, Dict
+    from collections.abc import Callable
+    from typing import Any, Dict
+
     from sentry_sdk_alpha._types import Event
 
 try:
@@ -66,9 +64,7 @@ def _set_transaction_name_and_source(scope, transaction_style, request):
         source = SOURCE_FOR_STYLE[transaction_style]
 
     scope.set_transaction_name(name, source=source)
-    logger.debug(
-        "[FastAPI] Set transaction name and source on scope: %s / %s", name, source
-    )
+    logger.debug("[FastAPI] Set transaction name and source on scope: %s / %s", name, source)
 
 
 def patch_get_request_handler():
@@ -136,9 +132,7 @@ def event_processor(event, hint):
                 return event_processor
 
             sentry_scope._name = FastApiIntegration.identifier
-            sentry_scope.add_event_processor(
-                _make_request_event_processor(request, integration)
-            )
+            sentry_scope.add_event_processor(_make_request_event_processor(request, integration))
 
             return await old_app(*args, **kwargs)
 
diff --git a/src/sentry_sdk_alpha/integrations/flask.py b/src/sentry_sdk_alpha/integrations/flask.py
index 05112b197bce77..746ca1dbbca080 100644
--- a/src/sentry_sdk_alpha/integrations/flask.py
+++ b/src/sentry_sdk_alpha/integrations/flask.py
@@ -1,6 +1,8 @@
+from typing import TYPE_CHECKING
+
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import SOURCE_FOR_STYLE
-from sentry_sdk_alpha.integrations import _check_minimum_version, DidNotEnable, Integration
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
 from sentry_sdk_alpha.integrations._wsgi_common import (
     DEFAULT_HTTP_METHODS_TO_CAPTURE,
     RequestExtractor,
@@ -14,14 +16,14 @@
     package_version,
 )
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any, Callable, Dict, Union
+    from collections.abc import Callable
+    from typing import Any, Dict, Union
+
+    from werkzeug.datastructures import FileStorage, ImmutableMultiDict
 
     from sentry_sdk_alpha._types import Event, EventProcessor
     from sentry_sdk_alpha.integrations.wsgi import _ScopedResponse
-    from werkzeug.datastructures import FileStorage, ImmutableMultiDict
 
 
 try:
@@ -32,11 +34,7 @@
 try:
     from flask import Flask, Request  # type: ignore
     from flask import request as flask_request
-    from flask.signals import (
-        before_render_template,
-        got_request_exception,
-        request_started,
-    )
+    from flask.signals import before_render_template, got_request_exception, request_started
     from markupsafe import Markup
 except ImportError:
     raise DidNotEnable("Flask is not installed")
diff --git a/src/sentry_sdk_alpha/integrations/gcp.py b/src/sentry_sdk_alpha/integrations/gcp.py
index 7accee3e78d7ba..911b67aeb0480c 100644
--- a/src/sentry_sdk_alpha/integrations/gcp.py
+++ b/src/sentry_sdk_alpha/integrations/gcp.py
@@ -3,6 +3,7 @@
 from copy import deepcopy
 from datetime import datetime, timedelta, timezone
 from os import environ
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP
@@ -15,26 +16,22 @@
 from sentry_sdk_alpha.tracing import TransactionSource
 from sentry_sdk_alpha.utils import (
     AnnotatedValue,
+    TimeoutThread,
     capture_internal_exceptions,
     event_from_exception,
     logger,
-    TimeoutThread,
     reraise,
 )
 
-from typing import TYPE_CHECKING
-
 # Constants
 TIMEOUT_WARNING_BUFFER = 1.5  # Buffer time required to send timeout warning to Sentry
 MILLIS_TO_SECONDS = 1000.0
 
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import TypeVar
-    from typing import Callable
-    from typing import Optional
+    from collections.abc import Callable
+    from typing import Any, Optional, TypeVar
 
-    from sentry_sdk_alpha._types import EventProcessor, Event, Hint
+    from sentry_sdk_alpha._types import Event, EventProcessor, Hint
 
     F = TypeVar("F", bound=Callable[..., Any])
 
@@ -65,16 +62,11 @@ def sentry_func(functionhandler, gcp_event, *args, **kwargs):
             with capture_internal_exceptions():
                 scope.clear_breadcrumbs()
                 scope.add_event_processor(
-                    _make_request_event_processor(
-                        gcp_event, configured_time, initial_time
-                    )
+                    _make_request_event_processor(gcp_event, configured_time, initial_time)
                 )
                 scope.set_tag("gcp_region", environ.get("FUNCTION_REGION"))
                 timeout_thread = None
-                if (
-                    integration.timeout_warning
-                    and configured_time > TIMEOUT_WARNING_BUFFER
-                ):
+                if integration.timeout_warning and configured_time > TIMEOUT_WARNING_BUFFER:
                     waiting_time = configured_time - TIMEOUT_WARNING_BUFFER
 
                     timeout_thread = TimeoutThread(waiting_time, configured_time)
@@ -128,9 +120,7 @@ def setup_once():
         import __main__ as gcp_functions
 
         if not hasattr(gcp_functions, "worker_v1"):
-            logger.warning(
-                "GcpIntegration currently supports only Python 3.7 runtime environment."
-            )
+            logger.warning("GcpIntegration currently supports only Python 3.7 runtime environment.")
             return
 
         worker1 = gcp_functions.worker_v1
diff --git a/src/sentry_sdk_alpha/integrations/gnu_backtrace.py b/src/sentry_sdk_alpha/integrations/gnu_backtrace.py
index cc6f8cb76ce275..36daacb4303e02 100644
--- a/src/sentry_sdk_alpha/integrations/gnu_backtrace.py
+++ b/src/sentry_sdk_alpha/integrations/gnu_backtrace.py
@@ -1,14 +1,14 @@
 import re
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.integrations import Integration
 from sentry_sdk_alpha.scope import add_global_event_processor
 from sentry_sdk_alpha.utils import capture_internal_exceptions
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
     from typing import Any
+
     from sentry_sdk_alpha._types import Event
 
 
diff --git a/src/sentry_sdk_alpha/integrations/gql.py b/src/sentry_sdk_alpha/integrations/gql.py
index dd00900e613174..67960c99c067e1 100644
--- a/src/sentry_sdk_alpha/integrations/gql.py
+++ b/src/sentry_sdk_alpha/integrations/gql.py
@@ -1,23 +1,13 @@
 import sentry_sdk_alpha
-from sentry_sdk_alpha.utils import (
-    event_from_exception,
-    ensure_integration_enabled,
-    parse_version,
-)
-
-from sentry_sdk_alpha.integrations import _check_minimum_version, DidNotEnable, Integration
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
 from sentry_sdk_alpha.scope import should_send_default_pii
+from sentry_sdk_alpha.utils import ensure_integration_enabled, event_from_exception, parse_version
 
 try:
     import gql  # type: ignore[import-not-found]
-    from graphql import (
-        print_ast,
-        get_operation_ast,
-        DocumentNode,
-        VariableDefinitionNode,
-    )
-    from gql.transport import Transport, AsyncTransport  # type: ignore[import-not-found]
+    from gql.transport import AsyncTransport, Transport  # type: ignore[import-not-found]
     from gql.transport.exceptions import TransportQueryError  # type: ignore[import-not-found]
+    from graphql import DocumentNode, VariableDefinitionNode, get_operation_ast, print_ast
 except ImportError:
     raise DidNotEnable("gql is not installed")
 
@@ -25,9 +15,10 @@
 
 if TYPE_CHECKING:
     from typing import Any, Dict, Tuple, Union
+
     from sentry_sdk_alpha._types import Event, EventProcessor
 
-    EventDataType = Dict[str, Union[str, Tuple[VariableDefinitionNode, ...]]]
+    EventDataType = dict[str, Union[str, tuple[VariableDefinitionNode, ...]]]
 
 
 class GQLIntegration(Integration):
diff --git a/src/sentry_sdk_alpha/integrations/graphene.py b/src/sentry_sdk_alpha/integrations/graphene.py
index 4bbb4005d718f9..af5ed6e88e2cc5 100644
--- a/src/sentry_sdk_alpha/integrations/graphene.py
+++ b/src/sentry_sdk_alpha/integrations/graphene.py
@@ -2,7 +2,7 @@
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations import _check_minimum_version, DidNotEnable, Integration
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
 from sentry_sdk_alpha.scope import should_send_default_pii
 from sentry_sdk_alpha.utils import (
     capture_internal_exceptions,
@@ -21,9 +21,11 @@
 if TYPE_CHECKING:
     from collections.abc import Generator
     from typing import Any, Dict, Union
+
     from graphene.language.source import Source  # type: ignore
     from graphql.execution import ExecutionResult
     from graphql.type import GraphQLSchema
+
     from sentry_sdk_alpha._types import Event
 
 
diff --git a/src/sentry_sdk_alpha/integrations/grpc/__init__.py b/src/sentry_sdk_alpha/integrations/grpc/__init__.py
index 583bf71f80219b..a5b00477bd6539 100644
--- a/src/sentry_sdk_alpha/integrations/grpc/__init__.py
+++ b/src/sentry_sdk_alpha/integrations/grpc/__init__.py
@@ -1,4 +1,6 @@
+from collections.abc import Sequence
 from functools import wraps
+from typing import TYPE_CHECKING, Any, Optional
 
 import grpc
 from grpc import Channel, Server, intercept_channel
@@ -7,23 +9,18 @@
 
 from sentry_sdk_alpha.integrations import Integration
 
+from .aio.client import SentryUnaryStreamClientInterceptor as AsyncUnaryStreamClientIntercetor
+from .aio.client import SentryUnaryUnaryClientInterceptor as AsyncUnaryUnaryClientInterceptor
+from .aio.server import ServerInterceptor as AsyncServerInterceptor
 from .client import ClientInterceptor
 from .server import ServerInterceptor
-from .aio.server import ServerInterceptor as AsyncServerInterceptor
-from .aio.client import (
-    SentryUnaryUnaryClientInterceptor as AsyncUnaryUnaryClientInterceptor,
-)
-from .aio.client import (
-    SentryUnaryStreamClientInterceptor as AsyncUnaryStreamClientIntercetor,
-)
-
-from typing import TYPE_CHECKING, Any, Optional, Sequence
 
 # Hack to get new Python features working in older versions
 # without introducing a hard dependency on `typing_extensions`
 # from: https://stackoverflow.com/a/71944042/300572
 if TYPE_CHECKING:
-    from typing import ParamSpec, Callable
+    from collections.abc import Callable
+    from typing import ParamSpec
 else:
     # Fake ParamSpec
     class ParamSpec:
@@ -83,7 +80,7 @@ def _wrap_channel_async(func: Callable[P, AsyncChannel]) -> Callable[P, AsyncCha
     @wraps(func)
     def patched_channel(  # type: ignore
         *args: P.args,
-        interceptors: Optional[Sequence[grpc.aio.ClientInterceptor]] = None,
+        interceptors: Sequence[grpc.aio.ClientInterceptor] | None = None,
         **kwargs: P.kwargs,
     ) -> Channel:
         sentry_interceptors = [
@@ -102,7 +99,7 @@ def _wrap_sync_server(func: Callable[P, Server]) -> Callable[P, Server]:
     @wraps(func)
     def patched_server(  # type: ignore
         *args: P.args,
-        interceptors: Optional[Sequence[grpc.ServerInterceptor]] = None,
+        interceptors: Sequence[grpc.ServerInterceptor] | None = None,
         **kwargs: P.kwargs,
     ) -> Server:
         interceptors = [
@@ -123,7 +120,7 @@ def _wrap_async_server(func: Callable[P, AsyncServer]) -> Callable[P, AsyncServe
     @wraps(func)
     def patched_aio_server(  # type: ignore
         *args: P.args,
-        interceptors: Optional[Sequence[grpc.ServerInterceptor]] = None,
+        interceptors: Sequence[grpc.ServerInterceptor] | None = None,
         **kwargs: P.kwargs,
     ) -> Server:
         server_interceptor = AsyncServerInterceptor()
diff --git a/src/sentry_sdk_alpha/integrations/grpc/aio/__init__.py b/src/sentry_sdk_alpha/integrations/grpc/aio/__init__.py
index 5b9e3b99495673..4d218152548e67 100644
--- a/src/sentry_sdk_alpha/integrations/grpc/aio/__init__.py
+++ b/src/sentry_sdk_alpha/integrations/grpc/aio/__init__.py
@@ -1,5 +1,5 @@
-from .server import ServerInterceptor
 from .client import ClientInterceptor
+from .server import ServerInterceptor
 
 __all__ = [
     "ClientInterceptor",
diff --git a/src/sentry_sdk_alpha/integrations/grpc/aio/client.py b/src/sentry_sdk_alpha/integrations/grpc/aio/client.py
index 47c2084a3e5316..34617a4c76d63d 100644
--- a/src/sentry_sdk_alpha/integrations/grpc/aio/client.py
+++ b/src/sentry_sdk_alpha/integrations/grpc/aio/client.py
@@ -1,14 +1,15 @@
-from typing import Callable, Union, AsyncIterable, Any
+from collections.abc import AsyncIterable, Callable
+from typing import Any, Union
 
+from google.protobuf.message import Message
 from grpc.aio import (
-    UnaryUnaryClientInterceptor,
-    UnaryStreamClientInterceptor,
     ClientCallDetails,
-    UnaryUnaryCall,
-    UnaryStreamCall,
     Metadata,
+    UnaryStreamCall,
+    UnaryStreamClientInterceptor,
+    UnaryUnaryCall,
+    UnaryUnaryClientInterceptor,
 )
-from google.protobuf.message import Message
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP
@@ -42,7 +43,7 @@ async def intercept_unary_unary(
         continuation: Callable[[ClientCallDetails, Message], UnaryUnaryCall],
         client_call_details: ClientCallDetails,
         request: Message,
-    ) -> Union[UnaryUnaryCall, Message]:
+    ) -> UnaryUnaryCall | Message:
         method = client_call_details.method
         if isinstance(method, bytes):
             method = method.decode()
@@ -75,7 +76,7 @@ async def intercept_unary_stream(
         continuation: Callable[[ClientCallDetails, Message], UnaryStreamCall],
         client_call_details: ClientCallDetails,
         request: Message,
-    ) -> Union[AsyncIterable[Any], UnaryStreamCall]:
+    ) -> AsyncIterable[Any] | UnaryStreamCall:
         method = client_call_details.method
         if isinstance(method, bytes):
             method = method.decode()
diff --git a/src/sentry_sdk_alpha/integrations/grpc/aio/server.py b/src/sentry_sdk_alpha/integrations/grpc/aio/server.py
index 4dc06777ba3a12..8daf0a5f74e6cf 100644
--- a/src/sentry_sdk_alpha/integrations/grpc/aio/server.py
+++ b/src/sentry_sdk_alpha/integrations/grpc/aio/server.py
@@ -1,3 +1,5 @@
+from typing import TYPE_CHECKING
+
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP
 from sentry_sdk_alpha.integrations import DidNotEnable
@@ -5,8 +7,6 @@
 from sentry_sdk_alpha.tracing import TransactionSource
 from sentry_sdk_alpha.utils import event_from_exception
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
     from collections.abc import Awaitable, Callable
     from typing import Any, Optional
diff --git a/src/sentry_sdk_alpha/integrations/grpc/client.py b/src/sentry_sdk_alpha/integrations/grpc/client.py
index 1198613d719e84..130a5c06b2afb7 100644
--- a/src/sentry_sdk_alpha/integrations/grpc/client.py
+++ b/src/sentry_sdk_alpha/integrations/grpc/client.py
@@ -1,19 +1,20 @@
+from typing import TYPE_CHECKING
+
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP
 from sentry_sdk_alpha.integrations import DidNotEnable
 from sentry_sdk_alpha.integrations.grpc.consts import SPAN_ORIGIN
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any, Callable, Iterator, Iterable, Union
+    from collections.abc import Callable, Iterable, Iterator
+    from typing import Any, Union
 
 try:
     import grpc
-    from grpc import ClientCallDetails, Call
+    from google.protobuf.message import Message
+    from grpc import Call, ClientCallDetails
     from grpc._interceptor import _UnaryOutcome
     from grpc.aio._interceptor import UnaryStreamCall
-    from google.protobuf.message import Message
 except ImportError:
     raise DidNotEnable("grpcio is not installed")
 
@@ -62,9 +63,7 @@ def intercept_unary_stream(self, continuation, client_call_details, request):
                 client_call_details
             )
 
-            response = continuation(
-                client_call_details, request
-            )  # type: UnaryStreamCall
+            response = continuation(client_call_details, request)  # type: UnaryStreamCall
             # Setting code on unary-stream leads to execution getting stuck
             # span.set_attribute("code", response.code().name)
 
@@ -73,9 +72,7 @@ def intercept_unary_stream(self, continuation, client_call_details, request):
     @staticmethod
     def _update_client_call_details_metadata_from_scope(client_call_details):
         # type: (ClientCallDetails) -> ClientCallDetails
-        metadata = (
-            list(client_call_details.metadata) if client_call_details.metadata else []
-        )
+        metadata = list(client_call_details.metadata) if client_call_details.metadata else []
         for (
             key,
             value,
diff --git a/src/sentry_sdk_alpha/integrations/grpc/server.py b/src/sentry_sdk_alpha/integrations/grpc/server.py
index 4467ea3075b180..ad91bb077c70cb 100644
--- a/src/sentry_sdk_alpha/integrations/grpc/server.py
+++ b/src/sentry_sdk_alpha/integrations/grpc/server.py
@@ -1,18 +1,20 @@
+from typing import TYPE_CHECKING
+
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP
 from sentry_sdk_alpha.integrations import DidNotEnable
 from sentry_sdk_alpha.integrations.grpc.consts import SPAN_ORIGIN
 from sentry_sdk_alpha.tracing import TransactionSource
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Callable, Optional
+    from collections.abc import Callable
+    from typing import Optional
+
     from google.protobuf.message import Message
 
 try:
     import grpc
-    from grpc import ServicerContext, HandlerCallDetails, RpcMethodHandler
+    from grpc import HandlerCallDetails, RpcMethodHandler, ServicerContext
 except ImportError:
     raise DidNotEnable("grpcio is not installed")
 
diff --git a/src/sentry_sdk_alpha/integrations/httpx.py b/src/sentry_sdk_alpha/integrations/httpx.py
index 68466ba00908eb..cd522348b5f173 100644
--- a/src/sentry_sdk_alpha/integrations/httpx.py
+++ b/src/sentry_sdk_alpha/integrations/httpx.py
@@ -1,6 +1,8 @@
+from typing import TYPE_CHECKING
+
 import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP, SPANDATA, BAGGAGE_HEADER_NAME
-from sentry_sdk_alpha.integrations import Integration, DidNotEnable
+from sentry_sdk_alpha.consts import BAGGAGE_HEADER_NAME, OP, SPANDATA
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration
 from sentry_sdk_alpha.tracing_utils import Baggage, should_propagate_trace
 from sentry_sdk_alpha.utils import (
     SENSITIVE_DATA_SUBSTITUTE,
@@ -12,8 +14,6 @@
     set_thread_info_from_span,
 )
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
     from collections.abc import MutableMapping
     from typing import Any
@@ -156,9 +156,7 @@ async def send(self, request, **kwargs):
                             key=key, value=value, url=request.url
                         )
                     )
-                    if key == BAGGAGE_HEADER_NAME and request.headers.get(
-                        BAGGAGE_HEADER_NAME
-                    ):
+                    if key == BAGGAGE_HEADER_NAME and request.headers.get(BAGGAGE_HEADER_NAME):
                         # do not overwrite any existing baggage, just append to it
                         request.headers[key] += "," + value
                     else:
@@ -197,6 +195,4 @@ def _add_sentry_baggage_to_headers(headers, sentry_baggage):
 
     separator = "," if len(stripped_existing_baggage) > 0 else ""
 
-    headers[BAGGAGE_HEADER_NAME] = (
-        stripped_existing_baggage + separator + sentry_baggage
-    )
+    headers[BAGGAGE_HEADER_NAME] = stripped_existing_baggage + separator + sentry_baggage
diff --git a/src/sentry_sdk_alpha/integrations/huey.py b/src/sentry_sdk_alpha/integrations/huey.py
index 4a18aace248a03..46f8b66d8ad793 100644
--- a/src/sentry_sdk_alpha/integrations/huey.py
+++ b/src/sentry_sdk_alpha/integrations/huey.py
@@ -1,37 +1,37 @@
 import sys
 from datetime import datetime
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.api import get_baggage, get_traceparent
 from sentry_sdk_alpha.consts import (
-    OP,
-    SPANSTATUS,
     BAGGAGE_HEADER_NAME,
+    OP,
     SENTRY_TRACE_HEADER_NAME,
+    SPANSTATUS,
     TransactionSource,
 )
 from sentry_sdk_alpha.integrations import DidNotEnable, Integration
 from sentry_sdk_alpha.scope import should_send_default_pii
 from sentry_sdk_alpha.utils import (
+    SENSITIVE_DATA_SUBSTITUTE,
     capture_internal_exceptions,
     ensure_integration_enabled,
     event_from_exception,
-    SENSITIVE_DATA_SUBSTITUTE,
     reraise,
 )
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any, Callable, Optional, Union, TypeVar
+    from collections.abc import Callable
+    from typing import Any, Optional, TypeVar, Union
 
-    from sentry_sdk_alpha._types import EventProcessor, Event, Hint
+    from sentry_sdk_alpha._types import Event, EventProcessor, Hint
     from sentry_sdk_alpha.utils import ExcInfo
 
     F = TypeVar("F", bound=Callable[..., Any])
 
 try:
-    from huey.api import Huey, Result, ResultGroup, Task, PeriodicTask
+    from huey.api import Huey, PeriodicTask, Result, ResultGroup, Task
     from huey.exceptions import CancelExecution, RetryTask, TaskLockedException
 except ImportError:
     raise DidNotEnable("Huey is not installed")
@@ -89,16 +89,8 @@ def event_processor(event, hint):
             extra = event.setdefault("extra", {})
             extra["huey-job"] = {
                 "task": task.name,
-                "args": (
-                    task.args
-                    if should_send_default_pii()
-                    else SENSITIVE_DATA_SUBSTITUTE
-                ),
-                "kwargs": (
-                    task.kwargs
-                    if should_send_default_pii()
-                    else SENSITIVE_DATA_SUBSTITUTE
-                ),
+                "args": (task.args if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE),
+                "kwargs": (task.kwargs if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE),
                 "retry": (task.default_retries or 0) - task.retries,
             }
 
diff --git a/src/sentry_sdk_alpha/integrations/huggingface_hub.py b/src/sentry_sdk_alpha/integrations/huggingface_hub.py
index 2ac1dedb5e50d0..135bfde34ce922 100644
--- a/src/sentry_sdk_alpha/integrations/huggingface_hub.py
+++ b/src/sentry_sdk_alpha/integrations/huggingface_hub.py
@@ -1,23 +1,18 @@
+from collections.abc import Callable, Iterable
 from functools import wraps
+from typing import Any
 
+import sentry_sdk_alpha
 from sentry_sdk_alpha import consts
 from sentry_sdk_alpha.ai.monitoring import record_token_usage
 from sentry_sdk_alpha.ai.utils import set_data_normalized
 from sentry_sdk_alpha.consts import SPANDATA
-
-from typing import Any, Iterable, Callable
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.scope import should_send_default_pii
 from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    event_from_exception,
-)
+from sentry_sdk_alpha.scope import should_send_default_pii
+from sentry_sdk_alpha.utils import capture_internal_exceptions, event_from_exception
 
 try:
     import huggingface_hub.inference._client
-
     from huggingface_hub import ChatCompletionStreamOutput, TextGenerationOutput
 except ImportError:
     raise DidNotEnable("Huggingface not installed")
@@ -34,10 +29,8 @@ def __init__(self, include_prompts=True):
     @staticmethod
     def setup_once():
         # type: () -> None
-        huggingface_hub.inference._client.InferenceClient.text_generation = (
-            _wrap_text_generation(
-                huggingface_hub.inference._client.InferenceClient.text_generation
-            )
+        huggingface_hub.inference._client.InferenceClient.text_generation = _wrap_text_generation(
+            huggingface_hub.inference._client.InferenceClient.text_generation
         )
 
 
@@ -132,9 +125,7 @@ def new_details_iterator():
                         for x in res:
                             if hasattr(x, "token") and hasattr(x.token, "text"):
                                 data_buf.append(x.token.text)
-                            if hasattr(x, "details") and hasattr(
-                                x.details, "generated_tokens"
-                            ):
+                            if hasattr(x, "details") and hasattr(x.details, "generated_tokens"):
                                 tokens_used = x.details.generated_tokens
                             yield x
                         if (
@@ -142,9 +133,7 @@ def new_details_iterator():
                             and should_send_default_pii()
                             and integration.include_prompts
                         ):
-                            set_data_normalized(
-                                span, SPANDATA.AI_RESPONSES, "".join(data_buf)
-                            )
+                            set_data_normalized(span, SPANDATA.AI_RESPONSES, "".join(data_buf))
                         if tokens_used > 0:
                             record_token_usage(span, total_tokens=tokens_used)
                     span.__exit__(None, None, None)
@@ -166,9 +155,7 @@ def new_iterator():
                             and should_send_default_pii()
                             and integration.include_prompts
                         ):
-                            set_data_normalized(
-                                span, SPANDATA.AI_RESPONSES, "".join(data_buf)
-                            )
+                            set_data_normalized(span, SPANDATA.AI_RESPONSES, "".join(data_buf))
                         span.__exit__(None, None, None)
 
                 return new_iterator()
diff --git a/src/sentry_sdk_alpha/integrations/langchain.py b/src/sentry_sdk_alpha/integrations/langchain.py
index 3accb4c0614933..38cf11d6ebd784 100644
--- a/src/sentry_sdk_alpha/integrations/langchain.py
+++ b/src/sentry_sdk_alpha/integrations/langchain.py
@@ -1,29 +1,26 @@
 from collections import OrderedDict
 from functools import wraps
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
-from sentry_sdk_alpha.ai.monitoring import set_ai_pipeline_name, record_token_usage
-from sentry_sdk_alpha.consts import OP, SPANDATA, SPANSTATUS
+from sentry_sdk_alpha.ai.monitoring import record_token_usage, set_ai_pipeline_name
 from sentry_sdk_alpha.ai.utils import set_data_normalized
+from sentry_sdk_alpha.consts import OP, SPANDATA, SPANSTATUS
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration
 from sentry_sdk_alpha.scope import should_send_default_pii
 from sentry_sdk_alpha.tracing import Span
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-from sentry_sdk_alpha.utils import logger, capture_internal_exceptions
-
-from typing import TYPE_CHECKING
+from sentry_sdk_alpha.utils import capture_internal_exceptions, logger
 
 if TYPE_CHECKING:
-    from typing import Any, List, Callable, Dict, Union, Optional
+    from collections.abc import Callable
+    from typing import Any, Dict, List, Optional, Union
     from uuid import UUID
 
 try:
+    from langchain_core.agents import AgentAction, AgentFinish
+    from langchain_core.callbacks import BaseCallbackHandler, manager
     from langchain_core.messages import BaseMessage
     from langchain_core.outputs import LLMResult
-    from langchain_core.callbacks import (
-        manager,
-        BaseCallbackHandler,
-    )
-    from langchain_core.agents import AgentAction, AgentFinish
 except ImportError:
     raise DidNotEnable("langchain not installed")
 
@@ -57,9 +54,7 @@ class LangchainIntegration(Integration):
     # The most number of spans (e.g., LLM calls) that can be processed at the same time.
     max_spans = 1024
 
-    def __init__(
-        self, include_prompts=True, max_spans=1024, tiktoken_encoding_name=None
-    ):
+    def __init__(self, include_prompts=True, max_spans=1024, tiktoken_encoding_name=None):
         # type: (LangchainIntegration, bool, int, Optional[str]) -> None
         self.include_prompts = include_prompts
         self.max_spans = max_spans
@@ -234,10 +229,7 @@ def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs):
                 set_data_normalized(
                     span,
                     SPANDATA.AI_INPUT_MESSAGES,
-                    [
-                        [self._normalize_langchain_message(x) for x in list_]
-                        for list_ in messages
-                    ],
+                    [[self._normalize_langchain_message(x) for x in list_] for list_ in messages],
                 )
             for k, v in DATA_FIELDS.items():
                 if k in all_params:
@@ -267,9 +259,7 @@ def on_llm_end(self, response, *, run_id, **kwargs):
             if not run_id:
                 return
 
-            token_usage = (
-                response.llm_output.get("token_usage") if response.llm_output else None
-            )
+            token_usage = response.llm_output.get("token_usage") if response.llm_output else None
 
             span_data = self.span_map[run_id]
             if not span_data:
diff --git a/src/sentry_sdk_alpha/integrations/launchdarkly.py b/src/sentry_sdk_alpha/integrations/launchdarkly.py
index 59390720bfc0fb..424c0605ad16ed 100644
--- a/src/sentry_sdk_alpha/integrations/launchdarkly.py
+++ b/src/sentry_sdk_alpha/integrations/launchdarkly.py
@@ -8,11 +8,11 @@
     from ldclient.hook import Hook, Metadata
 
     if TYPE_CHECKING:
+        from typing import Any
+
         from ldclient import LDClient
-        from ldclient.hook import EvaluationSeriesContext
         from ldclient.evaluation import EvaluationDetail
-
-        from typing import Any
+        from ldclient.hook import EvaluationSeriesContext
 except ImportError:
     raise DidNotEnable("LaunchDarkly is not installed")
 
diff --git a/src/sentry_sdk_alpha/integrations/litestar.py b/src/sentry_sdk_alpha/integrations/litestar.py
index 7993f99ecb4843..81e04eb31f82c5 100644
--- a/src/sentry_sdk_alpha/integrations/litestar.py
+++ b/src/sentry_sdk_alpha/integrations/litestar.py
@@ -1,6 +1,7 @@
 from collections.abc import Set
+
 import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP, TransactionSource, SOURCE_FOR_STYLE
+from sentry_sdk_alpha.consts import OP, SOURCE_FOR_STYLE, TransactionSource
 from sentry_sdk_alpha.integrations import (
     _DEFAULT_FAILED_REQUEST_STATUS_CODES,
     DidNotEnable,
@@ -16,12 +17,12 @@
 )
 
 try:
-    from litestar import Request, Litestar  # type: ignore
+    from litestar import Litestar, Request  # type: ignore
+    from litestar.data_extractors import ConnectionDataExtractor  # type: ignore
+    from litestar.exceptions import HTTPException  # type: ignore
     from litestar.handlers.base import BaseRouteHandler  # type: ignore
     from litestar.middleware import DefineMiddleware  # type: ignore
     from litestar.routes.http import HTTPRoute  # type: ignore
-    from litestar.data_extractors import ConnectionDataExtractor  # type: ignore
-    from litestar.exceptions import HTTPException  # type: ignore
 except ImportError:
     raise DidNotEnable("Litestar is not installed")
 
@@ -29,18 +30,13 @@
 
 if TYPE_CHECKING:
     from typing import Any, Optional, Union
-    from litestar.types.asgi_types import ASGIApp  # type: ignore
-    from litestar.types import (  # type: ignore
-        HTTPReceiveMessage,
-        HTTPScope,
-        Message,
-        Middleware,
-        Receive,
-        Scope as LitestarScope,
-        Send,
-        WebSocketReceiveMessage,
-    )
+
     from litestar.middleware import MiddlewareProtocol
+    from litestar.types import HTTPReceiveMessage, HTTPScope, Message, Middleware, Receive
+    from litestar.types import Scope as LitestarScope  # type: ignore
+    from litestar.types import Send, WebSocketReceiveMessage
+    from litestar.types.asgi_types import ASGIApp  # type: ignore
+
     from sentry_sdk_alpha._types import Event, Hint
 
 _DEFAULT_TRANSACTION_NAME = "generic Litestar request"
@@ -218,9 +214,7 @@ async def handle_wrapper(self, scope, receive, send):
         request = scope["app"].request_class(
             scope=scope, receive=receive, send=send
         )  # type: Request[Any, Any]
-        extracted_request_data = ConnectionDataExtractor(
-            parse_body=True, parse_query=True
-        )(request)
+        extracted_request_data = ConnectionDataExtractor(parse_body=True, parse_query=True)(request)
         body = extracted_request_data.pop("body")
 
         request_data = await body
diff --git a/src/sentry_sdk_alpha/integrations/logging.py b/src/sentry_sdk_alpha/integrations/logging.py
index 350a2209938dc1..577c83f0c6493e 100644
--- a/src/sentry_sdk_alpha/integrations/logging.py
+++ b/src/sentry_sdk_alpha/integrations/logging.py
@@ -2,26 +2,23 @@
 import sys
 from datetime import datetime, timezone
 from fnmatch import fnmatch
+from typing import TYPE_CHECKING, Tuple
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.client import BaseClient
+from sentry_sdk_alpha.integrations import Integration
 from sentry_sdk_alpha.utils import (
+    capture_internal_exceptions,
+    current_stacktrace,
+    event_from_exception,
     safe_repr,
     to_string,
-    event_from_exception,
-    current_stacktrace,
-    capture_internal_exceptions,
 )
-from sentry_sdk_alpha.integrations import Integration
-
-from typing import TYPE_CHECKING, Tuple
 
 if TYPE_CHECKING:
     from collections.abc import MutableMapping
     from logging import LogRecord
-    from typing import Any
-    from typing import Dict
-    from typing import Optional
+    from typing import Any, Dict, Optional
 
 DEFAULT_LEVEL = logging.INFO
 DEFAULT_EVENT_LEVEL = None  # None means no events are captured
@@ -42,14 +39,12 @@
 #
 # Note: Ignoring by logger name here is better than mucking with thread-locals.
 # We do not necessarily know whether thread-locals work 100% correctly in the user's environment.
-_IGNORED_LOGGERS = set(
-    [
-        "sentry_sdk.errors",
-        "urllib3.connectionpool",
-        "urllib3.connection",
-        "opentelemetry.*",
-    ]
-)
+_IGNORED_LOGGERS = {
+    "sentry_sdk.errors",
+    "urllib3.connectionpool",
+    "urllib3.connection",
+    "opentelemetry.*",
+}
 
 
 def ignore_logger(
@@ -125,9 +120,7 @@ def sentry_patched_callhandlers(self, record):
                 # into a recursion error when the integration is resolved
                 # (this also is slower).
                 if ignored_loggers is not None and record.name not in ignored_loggers:
-                    integration = sentry_sdk_alpha.get_client().get_integration(
-                        LoggingIntegration
-                    )
+                    integration = sentry_sdk_alpha.get_client().get_integration(LoggingIntegration)
                     if integration is not None:
                         integration._handle_record(record)
 
@@ -233,9 +226,7 @@ def _emit(self, record):
                     "values": [
                         {
                             "stacktrace": current_stacktrace(
-                                include_local_variables=client_options[
-                                    "include_local_variables"
-                                ],
+                                include_local_variables=client_options["include_local_variables"],
                                 max_value_length=client_options["max_value_length"],
                             ),
                             "crashed": False,
@@ -254,11 +245,7 @@ def _emit(self, record):
             event["level"] = level  # type: ignore[typeddict-item]
         event["logger"] = record.name
 
-        if (
-            sys.version_info < (3, 11)
-            and record.name == "py.warnings"
-            and record.msg == "%s"
-        ):
+        if sys.version_info < (3, 11) and record.name == "py.warnings" and record.msg == "%s":
             # warnings module on Python 3.10 and below sets record.msg to "%s"
             # and record.args[0] to the actual warning message.
             # This was fixed in https://github.com/python/cpython/pull/30975.
diff --git a/src/sentry_sdk_alpha/integrations/loguru.py b/src/sentry_sdk_alpha/integrations/loguru.py
index 3cbec227e698a4..9f93efc36c1558 100644
--- a/src/sentry_sdk_alpha/integrations/loguru.py
+++ b/src/sentry_sdk_alpha/integrations/loguru.py
@@ -1,17 +1,12 @@
 import enum
-
-from sentry_sdk_alpha.integrations import Integration, DidNotEnable
-from sentry_sdk_alpha.integrations.logging import (
-    BreadcrumbHandler,
-    EventHandler,
-    _BaseHandler,
-)
-
 from typing import TYPE_CHECKING
 
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration
+from sentry_sdk_alpha.integrations.logging import BreadcrumbHandler, EventHandler, _BaseHandler
+
 if TYPE_CHECKING:
     from logging import LogRecord
-    from typing import Optional, Tuple, Any
+    from typing import Any, Optional, Tuple
 
 try:
     import loguru
@@ -97,9 +92,7 @@ class _LoguruBaseHandler(_BaseHandler):
     def _logging_to_event_level(self, record):
         # type: (LogRecord) -> str
         try:
-            return SENTRY_LEVEL_FROM_LOGURU_LEVEL[
-                LoggingLevels(record.levelno).name
-            ].lower()
+            return SENTRY_LEVEL_FROM_LOGURU_LEVEL[LoggingLevels(record.levelno).name].lower()
         except (ValueError, KeyError):
             return record.levelname.lower() if record.levelname else ""
 
diff --git a/src/sentry_sdk_alpha/integrations/modules.py b/src/sentry_sdk_alpha/integrations/modules.py
index 6109613299d592..fbd5efbd7bfd83 100644
--- a/src/sentry_sdk_alpha/integrations/modules.py
+++ b/src/sentry_sdk_alpha/integrations/modules.py
@@ -1,12 +1,13 @@
+from typing import TYPE_CHECKING
+
 import sentry_sdk_alpha
 from sentry_sdk_alpha.integrations import Integration
 from sentry_sdk_alpha.scope import add_global_event_processor
 from sentry_sdk_alpha.utils import _get_installed_modules
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
     from typing import Any
+
     from sentry_sdk_alpha._types import Event
 
 
diff --git a/src/sentry_sdk_alpha/integrations/openai.py b/src/sentry_sdk_alpha/integrations/openai.py
index 87c112de74cd38..d51e5c2da8c2e7 100644
--- a/src/sentry_sdk_alpha/integrations/openai.py
+++ b/src/sentry_sdk_alpha/integrations/openai.py
@@ -1,4 +1,5 @@
 from functools import wraps
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha import consts
@@ -7,23 +8,20 @@
 from sentry_sdk_alpha.consts import SPANDATA
 from sentry_sdk_alpha.integrations import DidNotEnable, Integration
 from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    event_from_exception,
-)
-
-from typing import TYPE_CHECKING
+from sentry_sdk_alpha.utils import capture_internal_exceptions, event_from_exception
 
 if TYPE_CHECKING:
-    from typing import Any, Iterable, List, Optional, Callable, AsyncIterator, Iterator
+    from collections.abc import AsyncIterator, Callable, Iterable, Iterator
+    from typing import Any, List, Optional
+
     from sentry_sdk_alpha.tracing import Span
 
 try:
-    from openai.resources.chat.completions import Completions, AsyncCompletions
-    from openai.resources import Embeddings, AsyncEmbeddings
+    from openai.resources import AsyncEmbeddings, Embeddings
+    from openai.resources.chat.completions import AsyncCompletions, Completions
 
     if TYPE_CHECKING:
-        from openai.types.chat import ChatCompletionMessageParam, ChatCompletionChunk
+        from openai.types.chat import ChatCompletionChunk, ChatCompletionMessageParam
 except ImportError:
     raise DidNotEnable("OpenAI not installed")
 
@@ -48,9 +46,7 @@ def setup_once():
         Completions.create = _wrap_chat_completion_create(Completions.create)
         Embeddings.create = _wrap_embeddings_create(Embeddings.create)
 
-        AsyncCompletions.create = _wrap_async_chat_completion_create(
-            AsyncCompletions.create
-        )
+        AsyncCompletions.create = _wrap_async_chat_completion_create(AsyncCompletions.create)
         AsyncEmbeddings.create = _wrap_async_embeddings_create(AsyncEmbeddings.create)
 
     def count_tokens(self, s):
@@ -86,9 +82,7 @@ def _calculate_chat_completion_usage(
             response.usage.prompt_tokens, int
         ):
             prompt_tokens = response.usage.prompt_tokens
-        if hasattr(response.usage, "total_tokens") and isinstance(
-            response.usage.total_tokens, int
-        ):
+        if hasattr(response.usage, "total_tokens") and isinstance(response.usage.total_tokens, int):
             total_tokens = response.usage.total_tokens
 
     if prompt_tokens == 0:
@@ -159,9 +153,7 @@ def _new_chat_completion_common(f, *args, **kwargs):
                     SPANDATA.AI_RESPONSES,
                     list(map(lambda x: x.message, res.choices)),
                 )
-            _calculate_chat_completion_usage(
-                messages, res, span, None, integration.count_tokens
-            )
+            _calculate_chat_completion_usage(messages, res, span, None, integration.count_tokens)
             span.__exit__(None, None, None)
         elif hasattr(res, "_iterator"):
             data_buf: list[list[str]] = []  # one for each choice
@@ -175,9 +167,7 @@ def new_iterator():
                         if hasattr(x, "choices"):
                             choice_index = 0
                             for choice in x.choices:
-                                if hasattr(choice, "delta") and hasattr(
-                                    choice.delta, "content"
-                                ):
+                                if hasattr(choice, "delta") and hasattr(choice.delta, "content"):
                                     content = choice.delta.content
                                     if len(data_buf) <= choice_index:
                                         data_buf.append([])
@@ -185,13 +175,9 @@ def new_iterator():
                                 choice_index += 1
                         yield x
                     if len(data_buf) > 0:
-                        all_responses = list(
-                            map(lambda chunk: "".join(chunk), data_buf)
-                        )
+                        all_responses = list(map(lambda chunk: "".join(chunk), data_buf))
                         if should_send_default_pii() and integration.include_prompts:
-                            set_data_normalized(
-                                span, SPANDATA.AI_RESPONSES, all_responses
-                            )
+                            set_data_normalized(span, SPANDATA.AI_RESPONSES, all_responses)
                         _calculate_chat_completion_usage(
                             messages,
                             res,
@@ -208,9 +194,7 @@ async def new_iterator_async():
                         if hasattr(x, "choices"):
                             choice_index = 0
                             for choice in x.choices:
-                                if hasattr(choice, "delta") and hasattr(
-                                    choice.delta, "content"
-                                ):
+                                if hasattr(choice, "delta") and hasattr(choice.delta, "content"):
                                     content = choice.delta.content
                                     if len(data_buf) <= choice_index:
                                         data_buf.append([])
@@ -218,13 +202,9 @@ async def new_iterator_async():
                                 choice_index += 1
                         yield x
                     if len(data_buf) > 0:
-                        all_responses = list(
-                            map(lambda chunk: "".join(chunk), data_buf)
-                        )
+                        all_responses = list(map(lambda chunk: "".join(chunk), data_buf))
                         if should_send_default_pii() and integration.include_prompts:
-                            set_data_normalized(
-                                span, SPANDATA.AI_RESPONSES, all_responses
-                            )
+                            set_data_normalized(span, SPANDATA.AI_RESPONSES, all_responses)
                         _calculate_chat_completion_usage(
                             messages,
                             res,
@@ -327,9 +307,7 @@ def _new_embeddings_create_common(f, *args, **kwargs):
         origin=OpenAIIntegration.origin,
         only_if_parent=True,
     ) as span:
-        if "input" in kwargs and (
-            should_send_default_pii() and integration.include_prompts
-        ):
+        if "input" in kwargs and (should_send_default_pii() and integration.include_prompts):
             if isinstance(kwargs["input"], str):
                 set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, [kwargs["input"]])
             elif (
diff --git a/src/sentry_sdk_alpha/integrations/pure_eval.py b/src/sentry_sdk_alpha/integrations/pure_eval.py
index 0b41efa57bb418..bb8c2269099a8b 100644
--- a/src/sentry_sdk_alpha/integrations/pure_eval.py
+++ b/src/sentry_sdk_alpha/integrations/pure_eval.py
@@ -1,16 +1,15 @@
 import ast
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha import serializer
-from sentry_sdk_alpha.integrations import Integration, DidNotEnable
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration
 from sentry_sdk_alpha.scope import add_global_event_processor
-from sentry_sdk_alpha.utils import walk_exception_chain, iter_stacks
-
-from typing import TYPE_CHECKING
+from sentry_sdk_alpha.utils import iter_stacks, walk_exception_chain
 
 if TYPE_CHECKING:
-    from typing import Optional, Dict, Any, Tuple, List
     from types import FrameType
+    from typing import Any, Dict, List, Optional, Tuple
 
     from sentry_sdk_alpha._types import Event, Hint
 
@@ -75,9 +74,7 @@ def add_executing_info(event, hint):
                     continue
 
                 for sentry_frame, tb in zip(sentry_frames, tbs):
-                    sentry_frame["vars"] = (
-                        pure_eval_frame(tb.tb_frame) or sentry_frame["vars"]
-                    )
+                    sentry_frame["vars"] = pure_eval_frame(tb.tb_frame) or sentry_frame["vars"]
             return event
 
 
diff --git a/src/sentry_sdk_alpha/integrations/pymongo.py b/src/sentry_sdk_alpha/integrations/pymongo.py
index d9f169af249e17..27d343cce97749 100644
--- a/src/sentry_sdk_alpha/integrations/pymongo.py
+++ b/src/sentry_sdk_alpha/integrations/pymongo.py
@@ -1,11 +1,11 @@
 import copy
 
 import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import SPANSTATUS, SPANDATA, OP
+from sentry_sdk_alpha.consts import OP, SPANDATA, SPANSTATUS
 from sentry_sdk_alpha.integrations import DidNotEnable, Integration
 from sentry_sdk_alpha.scope import should_send_default_pii
 from sentry_sdk_alpha.tracing import Span
-from sentry_sdk_alpha.utils import capture_internal_exceptions, _serialize_span_attribute
+from sentry_sdk_alpha.utils import _serialize_span_attribute, capture_internal_exceptions
 
 try:
     from pymongo import monitoring
@@ -17,11 +17,7 @@
 if TYPE_CHECKING:
     from typing import Any, Dict, Union
 
-    from pymongo.monitoring import (
-        CommandFailedEvent,
-        CommandStartedEvent,
-        CommandSucceededEvent,
-    )
+    from pymongo.monitoring import CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent
 
 
 SAFE_COMMAND_ATTRIBUTES = [
diff --git a/src/sentry_sdk_alpha/integrations/pyramid.py b/src/sentry_sdk_alpha/integrations/pyramid.py
index 941aa40ec328f4..38da5cf723fa45 100644
--- a/src/sentry_sdk_alpha/integrations/pyramid.py
+++ b/src/sentry_sdk_alpha/integrations/pyramid.py
@@ -5,7 +5,7 @@
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import SOURCE_FOR_STYLE
-from sentry_sdk_alpha.integrations import Integration, DidNotEnable
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration
 from sentry_sdk_alpha.integrations._wsgi_common import RequestExtractor
 from sentry_sdk_alpha.integrations.wsgi import SentryWsgiMiddleware
 from sentry_sdk_alpha.scope import should_send_default_pii
@@ -25,17 +25,16 @@
 from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
+    from collections.abc import Callable
+    from typing import Any, Dict, Optional
+
     from pyramid.response import Response
-    from typing import Any
-    from sentry_sdk_alpha.integrations.wsgi import _ScopedResponse
-    from typing import Callable
-    from typing import Dict
-    from typing import Optional
     from webob.cookies import RequestCookies
     from webob.request import _FieldStorageWithFile
 
-    from sentry_sdk_alpha.utils import ExcInfo
     from sentry_sdk_alpha._types import Event, EventProcessor
+    from sentry_sdk_alpha.integrations.wsgi import _ScopedResponse
+    from sentry_sdk_alpha.utils import ExcInfo
 
 
 if getattr(Request, "authenticated_userid", None):
@@ -85,9 +84,7 @@ def sentry_patched_call_view(registry, request, *args, **kwargs):
                 sentry_sdk_alpha.get_current_scope(), integration.transaction_style, request
             )
             scope = sentry_sdk_alpha.get_isolation_scope()
-            scope.add_event_processor(
-                _make_event_processor(weakref.ref(request), integration)
-            )
+            scope.add_event_processor(_make_event_processor(weakref.ref(request), integration))
 
             return old_call_view(registry, request, *args, **kwargs)
 
diff --git a/src/sentry_sdk_alpha/integrations/quart.py b/src/sentry_sdk_alpha/integrations/quart.py
index a9405a61a81519..b42eb3d0e97af1 100644
--- a/src/sentry_sdk_alpha/integrations/quart.py
+++ b/src/sentry_sdk_alpha/integrations/quart.py
@@ -1,6 +1,7 @@
 import asyncio
 import inspect
 from functools import wraps
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import SOURCE_FOR_STYLE
@@ -13,11 +14,9 @@
     ensure_integration_enabled,
     event_from_exception,
 )
-from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Union
+    from typing import Any, Union
 
     from sentry_sdk_alpha._types import Event, EventProcessor
 
@@ -28,10 +27,10 @@
 
 try:
     from quart import (  # type: ignore
+        Quart,
+        Request,
         has_request_context,
         has_websocket_context,
-        Request,
-        Quart,
         request,
         websocket,
     )
@@ -113,9 +112,7 @@ def _sentry_route(*args, **kwargs):
         def decorator(old_func):
             # type: (Any) -> Any
 
-            if inspect.isfunction(old_func) and not asyncio.iscoroutinefunction(
-                old_func
-            ):
+            if inspect.isfunction(old_func) and not asyncio.iscoroutinefunction(old_func):
 
                 @wraps(old_func)
                 @ensure_integration_enabled(QuartIntegration, old_func)
diff --git a/src/sentry_sdk_alpha/integrations/ray.py b/src/sentry_sdk_alpha/integrations/ray.py
index 03bcd9fb15708e..c6400071544f9d 100644
--- a/src/sentry_sdk_alpha/integrations/ray.py
+++ b/src/sentry_sdk_alpha/integrations/ray.py
@@ -3,7 +3,7 @@
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP, SPANSTATUS
-from sentry_sdk_alpha.integrations import _check_minimum_version, DidNotEnable, Integration
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
 from sentry_sdk_alpha.tracing import TransactionSource
 from sentry_sdk_alpha.utils import (
     event_from_exception,
@@ -18,12 +18,12 @@
 except ImportError:
     raise DidNotEnable("Ray not installed.")
 import functools
-
 from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
     from collections.abc import Callable
     from typing import Any, Optional
+
     from sentry_sdk_alpha.utils import ExcInfo
 
 DEFAULT_TRANSACTION_NAME = "unknown Ray function"
diff --git a/src/sentry_sdk_alpha/integrations/redis/__init__.py b/src/sentry_sdk_alpha/integrations/redis/__init__.py
index 63e523b1f5f7ae..f69b8e926404e8 100644
--- a/src/sentry_sdk_alpha/integrations/redis/__init__.py
+++ b/src/sentry_sdk_alpha/integrations/redis/__init__.py
@@ -1,4 +1,6 @@
-from sentry_sdk_alpha.integrations import Integration, DidNotEnable
+from typing import TYPE_CHECKING
+
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration
 from sentry_sdk_alpha.integrations.redis.consts import _DEFAULT_MAX_DATA_SIZE
 from sentry_sdk_alpha.integrations.redis.rb import _patch_rb
 from sentry_sdk_alpha.integrations.redis.redis import _patch_redis
@@ -6,8 +8,6 @@
 from sentry_sdk_alpha.integrations.redis.redis_py_cluster_legacy import _patch_rediscluster
 from sentry_sdk_alpha.utils import logger
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
     from typing import Optional
 
diff --git a/src/sentry_sdk_alpha/integrations/redis/_async_common.py b/src/sentry_sdk_alpha/integrations/redis/_async_common.py
index 0200aa45423c02..8b6f9bb462a675 100644
--- a/src/sentry_sdk_alpha/integrations/redis/_async_common.py
+++ b/src/sentry_sdk_alpha/integrations/redis/_async_common.py
@@ -1,3 +1,5 @@
+from typing import TYPE_CHECKING
+
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP
 from sentry_sdk_alpha.integrations.redis.consts import SPAN_ORIGIN
@@ -14,18 +16,15 @@
 )
 from sentry_sdk_alpha.utils import capture_internal_exceptions
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
     from collections.abc import Callable
     from typing import Any, Union
+
     from redis.asyncio.client import Pipeline, StrictRedis
     from redis.asyncio.cluster import ClusterPipeline, RedisCluster
 
 
-def patch_redis_async_pipeline(
-    pipeline_cls, is_cluster, get_command_args_fn, get_db_data_fn
-):
+def patch_redis_async_pipeline(pipeline_cls, is_cluster, get_command_args_fn, get_db_data_fn):
     # type: (Union[type[Pipeline[Any]], type[ClusterPipeline[Any]]], bool, Any, Callable[[Any], dict[str, Any]]) -> None
     old_execute = pipeline_cls.execute
 
@@ -48,9 +47,7 @@ async def _sentry_execute(self, *args, **kwargs):
                     is_cluster=is_cluster,
                     get_command_args_fn=get_command_args_fn,
                     is_transaction=False if is_cluster else self.is_transaction,
-                    command_stack=(
-                        self._command_stack if is_cluster else self.command_stack
-                    ),
+                    command_stack=(self._command_stack if is_cluster else self.command_stack),
                 )
                 _update_span(span, span_data, pipeline_data)
                 _create_breadcrumb("redis.pipeline.execute", span_data, pipeline_data)
@@ -102,9 +99,7 @@ async def _sentry_execute_command(self, name, *args, **kwargs):
         db_span_data = get_db_data_fn(self)
         db_client_span_data = _get_client_data(is_cluster, name, *args)
         _update_span(db_span, db_span_data, db_client_span_data)
-        _create_breadcrumb(
-            db_properties["description"], db_span_data, db_client_span_data
-        )
+        _create_breadcrumb(db_properties["description"], db_span_data, db_client_span_data)
 
         value = await old_execute_command(self, name, *args, **kwargs)
 
diff --git a/src/sentry_sdk_alpha/integrations/redis/_sync_common.py b/src/sentry_sdk_alpha/integrations/redis/_sync_common.py
index e9a4c38d4c5e71..d9ea72b9aff661 100644
--- a/src/sentry_sdk_alpha/integrations/redis/_sync_common.py
+++ b/src/sentry_sdk_alpha/integrations/redis/_sync_common.py
@@ -1,3 +1,5 @@
+from typing import TYPE_CHECKING
+
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP
 from sentry_sdk_alpha.integrations.redis.consts import SPAN_ORIGIN
@@ -14,8 +16,6 @@
 )
 from sentry_sdk_alpha.utils import capture_internal_exceptions
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
     from collections.abc import Callable
     from typing import Any
@@ -105,9 +105,7 @@ def sentry_patched_execute_command(self, name, *args, **kwargs):
         db_span_data = get_db_data_fn(self)
         db_client_span_data = _get_client_data(is_cluster, name, *args)
         _update_span(db_span, db_span_data, db_client_span_data)
-        _create_breadcrumb(
-            db_properties["description"], db_span_data, db_client_span_data
-        )
+        _create_breadcrumb(db_properties["description"], db_span_data, db_client_span_data)
 
         value = old_execute_command(self, name, *args, **kwargs)
 
diff --git a/src/sentry_sdk_alpha/integrations/redis/modules/caches.py b/src/sentry_sdk_alpha/integrations/redis/modules/caches.py
index 63b983a688738e..89607c14c2f8c3 100644
--- a/src/sentry_sdk_alpha/integrations/redis/modules/caches.py
+++ b/src/sentry_sdk_alpha/integrations/redis/modules/caches.py
@@ -12,9 +12,10 @@
 from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
-    from sentry_sdk_alpha.integrations.redis import RedisIntegration
     from typing import Any, Optional
 
+    from sentry_sdk_alpha.integrations.redis import RedisIntegration
+
 
 def _get_op(name):
     # type: (str) -> Optional[str]
@@ -48,9 +49,7 @@ def _compile_cache_span_properties(redis_command, args, kwargs, integration):
 
     properties = {
         "op": _get_op(redis_command),
-        "description": _get_cache_span_description(
-            redis_command, args, kwargs, integration
-        ),
+        "description": _get_cache_span_description(redis_command, args, kwargs, integration),
         "key": key,
         "key_as_string": key_as_string,
         "redis_command": redis_command.lower(),
diff --git a/src/sentry_sdk_alpha/integrations/redis/modules/queries.py b/src/sentry_sdk_alpha/integrations/redis/modules/queries.py
index ec4db8756960c6..91bb11f9ce15c8 100644
--- a/src/sentry_sdk_alpha/integrations/redis/modules/queries.py
+++ b/src/sentry_sdk_alpha/integrations/redis/modules/queries.py
@@ -2,16 +2,18 @@
 Code used for the Queries module in Sentry
 """
 
+from typing import TYPE_CHECKING
+
 from sentry_sdk_alpha.consts import OP, SPANDATA
 from sentry_sdk_alpha.integrations.redis.utils import _get_safe_command
 from sentry_sdk_alpha.utils import capture_internal_exceptions
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
+    from typing import Any
+
     from redis import Redis
+
     from sentry_sdk_alpha.integrations.redis import RedisIntegration
-    from typing import Any
 
 
 def _compile_db_span_properties(integration, redis_command, args):
diff --git a/src/sentry_sdk_alpha/integrations/redis/redis.py b/src/sentry_sdk_alpha/integrations/redis/redis.py
index 560ff7f24f12d6..ddd4ec507fd894 100644
--- a/src/sentry_sdk_alpha/integrations/redis/redis.py
+++ b/src/sentry_sdk_alpha/integrations/redis/redis.py
@@ -4,16 +4,17 @@
 https://github.com/redis/redis-py
 """
 
+from typing import TYPE_CHECKING
+
 from sentry_sdk_alpha.integrations.redis._sync_common import (
     patch_redis_client,
     patch_redis_pipeline,
 )
 from sentry_sdk_alpha.integrations.redis.modules.queries import _get_db_data
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any, Sequence
+    from collections.abc import Sequence
+    from typing import Any
 
 
 def _get_redis_command_args(command):
diff --git a/src/sentry_sdk_alpha/integrations/redis/redis_cluster.py b/src/sentry_sdk_alpha/integrations/redis/redis_cluster.py
index 9bd393872388ae..9f7375b4d38515 100644
--- a/src/sentry_sdk_alpha/integrations/redis/redis_cluster.py
+++ b/src/sentry_sdk_alpha/integrations/redis/redis_cluster.py
@@ -5,24 +5,22 @@
 https://github.com/redis/redis-py/blob/master/redis/cluster.py
 """
 
+from typing import TYPE_CHECKING
+
 from sentry_sdk_alpha.integrations.redis._sync_common import (
     patch_redis_client,
     patch_redis_pipeline,
 )
 from sentry_sdk_alpha.integrations.redis.modules.queries import _get_connection_data
 from sentry_sdk_alpha.integrations.redis.utils import _parse_rediscluster_command
-
 from sentry_sdk_alpha.utils import capture_internal_exceptions
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
     from typing import Any
+
     from redis import RedisCluster
-    from redis.asyncio.cluster import (
-        RedisCluster as AsyncRedisCluster,
-        ClusterPipeline as AsyncClusterPipeline,
-    )
+    from redis.asyncio.cluster import ClusterPipeline as AsyncClusterPipeline
+    from redis.asyncio.cluster import RedisCluster as AsyncRedisCluster
 
 
 def _get_async_cluster_db_data(async_redis_cluster_instance):
diff --git a/src/sentry_sdk_alpha/integrations/redis/utils.py b/src/sentry_sdk_alpha/integrations/redis/utils.py
index 3b7a57bb8e0aa0..2b50fa893d3e91 100644
--- a/src/sentry_sdk_alpha/integrations/redis/utils.py
+++ b/src/sentry_sdk_alpha/integrations/redis/utils.py
@@ -1,3 +1,5 @@
+from typing import TYPE_CHECKING
+
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import SPANDATA
 from sentry_sdk_alpha.integrations.redis.consts import (
@@ -10,10 +12,10 @@
 from sentry_sdk_alpha.scope import should_send_default_pii
 from sentry_sdk_alpha.utils import SENSITIVE_DATA_SUBSTITUTE
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any, Optional, Sequence
+    from collections.abc import Sequence
+    from typing import Any, Optional
+
     from sentry_sdk_alpha.tracing import Span
 
 
diff --git a/src/sentry_sdk_alpha/integrations/rq.py b/src/sentry_sdk_alpha/integrations/rq.py
index 112075972157f2..f9142e61aee402 100644
--- a/src/sentry_sdk_alpha/integrations/rq.py
+++ b/src/sentry_sdk_alpha/integrations/rq.py
@@ -2,7 +2,7 @@
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations import _check_minimum_version, DidNotEnable, Integration
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
 from sentry_sdk_alpha.integrations.logging import ignore_logger
 from sentry_sdk_alpha.tracing import TransactionSource
 from sentry_sdk_alpha.utils import (
@@ -14,24 +14,25 @@
 )
 
 try:
+    from rq.job import JobStatus
     from rq.queue import Queue
     from rq.timeouts import JobTimeoutException
     from rq.version import VERSION as RQ_VERSION
     from rq.worker import Worker
-    from rq.job import JobStatus
 except ImportError:
     raise DidNotEnable("RQ not installed")
 
 from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
-    from typing import Any, Callable
+    from collections.abc import Callable
+    from typing import Any
+
+    from rq.job import Job
 
     from sentry_sdk_alpha._types import Event, EventProcessor
     from sentry_sdk_alpha.utils import ExcInfo
 
-    from rq.job import Job
-
 DEFAULT_TRANSACTION_NAME = "unknown RQ task"
 
 
@@ -65,15 +66,11 @@ def sentry_patched_perform_job(self, job, queue, *args, **kwargs):
                 except AttributeError:
                     transaction_name = DEFAULT_TRANSACTION_NAME
 
-                scope.set_transaction_name(
-                    transaction_name, source=TransactionSource.TASK
-                )
+                scope.set_transaction_name(transaction_name, source=TransactionSource.TASK)
                 scope.clear_breadcrumbs()
                 scope.add_event_processor(_make_event_processor(weakref.ref(job)))
 
-                with sentry_sdk_alpha.continue_trace(
-                    job.meta.get("_sentry_trace_headers") or {}
-                ):
+                with sentry_sdk_alpha.continue_trace(job.meta.get("_sentry_trace_headers") or {}):
                     with sentry_sdk_alpha.start_span(
                         op=OP.QUEUE_TASK_RQ,
                         name=transaction_name,
@@ -97,11 +94,7 @@ def sentry_patched_perform_job(self, job, queue, *args, **kwargs):
 
         def sentry_patched_handle_exception(self, job, *exc_info, **kwargs):
             # type: (Worker, Any, *Any, **Any) -> Any
-            retry = (
-                hasattr(job, "retries_left")
-                and job.retries_left
-                and job.retries_left > 0
-            )
+            retry = hasattr(job, "retries_left") and job.retries_left and job.retries_left > 0
             failed = job._status == JobStatus.FAILED or job.is_failed
             if failed and not retry:
                 _capture_exception(exc_info)
diff --git a/src/sentry_sdk_alpha/integrations/rust_tracing.py b/src/sentry_sdk_alpha/integrations/rust_tracing.py
index cb12e9241e8f8e..bbae321b27c1e8 100644
--- a/src/sentry_sdk_alpha/integrations/rust_tracing.py
+++ b/src/sentry_sdk_alpha/integrations/rust_tracing.py
@@ -31,8 +31,9 @@
 """
 
 import json
+from collections.abc import Callable
 from enum import Enum, auto
-from typing import Any, Callable, Dict, Optional
+from typing import Any, Dict, Optional
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.integrations import Integration
@@ -72,7 +73,7 @@ def tracing_level_to_sentry_level(level):
         return "info"
 
 
-def extract_contexts(event: Dict[str, Any]) -> Dict[str, Any]:
+def extract_contexts(event: dict[str, Any]) -> dict[str, Any]:
     metadata = event.get("metadata", {})
     contexts = {}
 
@@ -92,7 +93,7 @@ def extract_contexts(event: Dict[str, Any]) -> Dict[str, Any]:
     return contexts
 
 
-def process_event(event: Dict[str, Any]) -> None:
+def process_event(event: dict[str, Any]) -> None:
     metadata = event.get("metadata", {})
 
     logger = metadata.get("target")
@@ -110,18 +111,18 @@ def process_event(event: Dict[str, Any]) -> None:
     sentry_sdk_alpha.capture_event(sentry_event)
 
 
-def process_exception(event: Dict[str, Any]) -> None:
+def process_exception(event: dict[str, Any]) -> None:
     process_event(event)
 
 
-def process_breadcrumb(event: Dict[str, Any]) -> None:
+def process_breadcrumb(event: dict[str, Any]) -> None:
     level = tracing_level_to_sentry_level(event.get("metadata", {}).get("level"))
     message = event.get("message")
 
     sentry_sdk_alpha.add_breadcrumb(level=level, message=message)
 
 
-def default_span_filter(metadata: Dict[str, Any]) -> bool:
+def default_span_filter(metadata: dict[str, Any]) -> bool:
     return RustTracingLevel(metadata.get("level")) in (
         RustTracingLevel.Error,
         RustTracingLevel.Warn,
@@ -129,7 +130,7 @@ def default_span_filter(metadata: Dict[str, Any]) -> bool:
     )
 
 
-def default_event_type_mapping(metadata: Dict[str, Any]) -> EventTypeMapping:
+def default_event_type_mapping(metadata: dict[str, Any]) -> EventTypeMapping:
     level = RustTracingLevel(metadata.get("level"))
     if level == RustTracingLevel.Error:
         return EventTypeMapping.Exc
@@ -146,10 +147,10 @@ def __init__(
         self,
         origin: str,
         event_type_mapping: Callable[
-            [Dict[str, Any]], EventTypeMapping
+            [dict[str, Any]], EventTypeMapping
         ] = default_event_type_mapping,
-        span_filter: Callable[[Dict[str, Any]], bool] = default_span_filter,
-        include_tracing_fields: Optional[bool] = None,
+        span_filter: Callable[[dict[str, Any]], bool] = default_span_filter,
+        include_tracing_fields: bool | None = None,
     ):
         self.origin = origin
         self.event_type_mapping = event_type_mapping
@@ -169,7 +170,7 @@ def _include_tracing_fields(self) -> bool:
             else self.include_tracing_fields
         )
 
-    def on_event(self, event: str, _span_state: Optional[Span]) -> None:
+    def on_event(self, event: str, _span_state: Span | None) -> None:
         deserialized_event = json.loads(event)
         metadata = deserialized_event.get("metadata", {})
 
@@ -183,7 +184,7 @@ def on_event(self, event: str, _span_state: Optional[Span]) -> None:
         elif event_type == EventTypeMapping.Event:
             process_event(deserialized_event)
 
-    def on_new_span(self, attrs: str, span_id: str) -> Optional[Span]:
+    def on_new_span(self, attrs: str, span_id: str) -> Span | None:
         attrs = json.loads(attrs)
         metadata = attrs.get("metadata", {})
 
@@ -220,11 +221,11 @@ def on_new_span(self, attrs: str, span_id: str) -> Optional[Span]:
 
         return span
 
-    def on_close(self, span_id: str, span: Optional[Span]) -> None:
+    def on_close(self, span_id: str, span: Span | None) -> None:
         if span is not None:
             span.__exit__(None, None, None)
 
-    def on_record(self, span_id: str, values: str, span: Optional[Span]) -> None:
+    def on_record(self, span_id: str, values: str, span: Span | None) -> None:
         if span is not None:
             deserialized_values = json.loads(values)
             for key, value in deserialized_values.items():
@@ -251,10 +252,10 @@ def __init__(
         identifier: str,
         initializer: Callable[[RustTracingLayer], None],
         event_type_mapping: Callable[
-            [Dict[str, Any]], EventTypeMapping
+            [dict[str, Any]], EventTypeMapping
         ] = default_event_type_mapping,
-        span_filter: Callable[[Dict[str, Any]], bool] = default_span_filter,
-        include_tracing_fields: Optional[bool] = None,
+        span_filter: Callable[[dict[str, Any]], bool] = default_span_filter,
+        include_tracing_fields: bool | None = None,
     ):
         self.identifier = identifier
         origin = f"auto.function.rust_tracing.{identifier}"
diff --git a/src/sentry_sdk_alpha/integrations/sanic.py b/src/sentry_sdk_alpha/integrations/sanic.py
index dfe27fb754713b..65f2d1d0da1a11 100644
--- a/src/sentry_sdk_alpha/integrations/sanic.py
+++ b/src/sentry_sdk_alpha/integrations/sanic.py
@@ -1,45 +1,41 @@
 import sys
 import weakref
 from inspect import isawaitable
+from typing import TYPE_CHECKING
 from urllib.parse import urlsplit
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations import _check_minimum_version, Integration, DidNotEnable
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
 from sentry_sdk_alpha.integrations._wsgi_common import RequestExtractor, _filter_headers
 from sentry_sdk_alpha.integrations.logging import ignore_logger
 from sentry_sdk_alpha.tracing import TransactionSource
 from sentry_sdk_alpha.utils import (
+    CONTEXTVARS_ERROR_MESSAGE,
+    HAS_REAL_CONTEXTVARS,
     capture_internal_exceptions,
     ensure_integration_enabled,
     event_from_exception,
-    HAS_REAL_CONTEXTVARS,
-    CONTEXTVARS_ERROR_MESSAGE,
     parse_version,
     reraise,
 )
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from collections.abc import Container
-    from typing import Any
-    from typing import Callable
-    from typing import Optional
-    from typing import Union
-    from typing import Dict
+    from collections.abc import Callable, Container
+    from typing import Any, Dict, Optional, Union
 
     from sanic.request import Request, RequestParameters
     from sanic.response import BaseHTTPResponse
+    from sanic.router import Route
 
     from sentry_sdk_alpha._types import Event, EventProcessor, ExcInfo, Hint
-    from sanic.router import Route
 
 try:
-    from sanic import Sanic, __version__ as SANIC_VERSION
+    from sanic import Sanic
+    from sanic import __version__ as SANIC_VERSION
     from sanic.exceptions import SanicException
-    from sanic.router import Router
     from sanic.handlers import ErrorHandler
+    from sanic.router import Router
 except ImportError:
     raise DidNotEnable("Sanic not installed")
 
@@ -190,9 +186,7 @@ async def _context_enter(request):
     scope.add_event_processor(_make_request_processor(weak_request))
 
     # TODO-neel-potel test if this works
-    request.ctx._sentry_continue_trace = sentry_sdk_alpha.continue_trace(
-        dict(request.headers)
-    )
+    request.ctx._sentry_continue_trace = sentry_sdk_alpha.continue_trace(dict(request.headers))
     request.ctx._sentry_continue_trace.__enter__()
     request.ctx._sentry_transaction = sentry_sdk_alpha.start_span(
         op=OP.HTTP_SERVER,
@@ -223,9 +217,7 @@ async def _context_exit(request, response=None):
                 and response_status in integration._unsampled_statuses
             ):
                 # drop the event in an event processor
-                request.ctx._sentry_scope.add_event_processor(
-                    lambda _event, _hint: None
-                )
+                request.ctx._sentry_scope.add_event_processor(lambda _event, _hint: None)
 
             request.ctx._sentry_transaction.__exit__(None, None, None)
             request.ctx._sentry_continue_trace.__exit__(None, None, None)
@@ -313,13 +305,9 @@ def _legacy_router_get(self, *args):
                     # Format: app_name.route_name
                     sanic_route = sanic_route[len(sanic_app_name) + 1 :]
 
-                scope.set_transaction_name(
-                    sanic_route, source=TransactionSource.COMPONENT
-                )
+                scope.set_transaction_name(sanic_route, source=TransactionSource.COMPONENT)
             else:
-                scope.set_transaction_name(
-                    rv[0].__name__, source=TransactionSource.COMPONENT
-                )
+                scope.set_transaction_name(rv[0].__name__, source=TransactionSource.COMPONENT)
 
     return rv
 
@@ -362,7 +350,7 @@ def sanic_processor(event, hint):
             request_info = event["request"]
             urlparts = urlsplit(request.url)
 
-            request_info["url"] = "%s://%s%s" % (
+            request_info["url"] = "{}://{}{}".format(
                 urlparts.scheme,
                 urlparts.netloc,
                 urlparts.path,
diff --git a/src/sentry_sdk_alpha/integrations/serverless.py b/src/sentry_sdk_alpha/integrations/serverless.py
index 1c2c4bcaa1947b..50239d307d36ee 100644
--- a/src/sentry_sdk_alpha/integrations/serverless.py
+++ b/src/sentry_sdk_alpha/integrations/serverless.py
@@ -1,18 +1,13 @@
 import sys
 from functools import wraps
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.utils import event_from_exception, reraise
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Callable
-    from typing import TypeVar
-    from typing import Union
-    from typing import Optional
-    from typing import overload
+    from collections.abc import Callable
+    from typing import Any, Optional, TypeVar, Union, overload
 
     F = TypeVar("F", bound=Callable[..., Any])
 
diff --git a/src/sentry_sdk_alpha/integrations/socket.py b/src/sentry_sdk_alpha/integrations/socket.py
index ce1200407e5e39..6892eef1131683 100644
--- a/src/sentry_sdk_alpha/integrations/socket.py
+++ b/src/sentry_sdk_alpha/integrations/socket.py
@@ -7,7 +7,7 @@
 
 if MYPY:
     from socket import AddressFamily, SocketKind
-    from typing import Tuple, Optional, Union, List
+    from typing import List, Optional, Tuple, Union
 
 __all__ = ["SocketIntegration"]
 
@@ -39,7 +39,7 @@ def _get_span_description(host, port):
     except (UnicodeDecodeError, AttributeError):
         pass
 
-    description = "%s:%s" % (host, port)  # type: ignore
+    description = "{}:{}".format(host, port)  # type: ignore
     return description
 
 
diff --git a/src/sentry_sdk_alpha/integrations/spark/spark_driver.py b/src/sentry_sdk_alpha/integrations/spark/spark_driver.py
index eb12a5ce474a42..d7cba5d0065b77 100644
--- a/src/sentry_sdk_alpha/integrations/spark/spark_driver.py
+++ b/src/sentry_sdk_alpha/integrations/spark/spark_driver.py
@@ -1,16 +1,16 @@
+from typing import TYPE_CHECKING
+
 import sentry_sdk_alpha
 from sentry_sdk_alpha.integrations import Integration
 from sentry_sdk_alpha.utils import capture_internal_exceptions, ensure_integration_enabled
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Optional
+    from typing import Any, Optional
 
-    from sentry_sdk_alpha._types import Event, Hint
     from pyspark import SparkContext
 
+    from sentry_sdk_alpha._types import Event, Hint
+
 
 class SparkIntegration(Integration):
     identifier = "spark"
@@ -243,7 +243,7 @@ def onJobStart(self, jobStart):  # noqa: N802,N803
         # type: (Any) -> None
         sentry_sdk_alpha.get_isolation_scope().clear_breadcrumbs()
 
-        message = "Job {} Started".format(jobStart.jobId())
+        message = f"Job {jobStart.jobId()} Started"
         self._add_breadcrumb(level="info", message=message)
         _set_app_properties()
 
@@ -255,17 +255,17 @@ def onJobEnd(self, jobEnd):  # noqa: N802,N803
 
         if jobEnd.jobResult().toString() == "JobSucceeded":
             level = "info"
-            message = "Job {} Ended".format(jobEnd.jobId())
+            message = f"Job {jobEnd.jobId()} Ended"
         else:
             level = "warning"
-            message = "Job {} Failed".format(jobEnd.jobId())
+            message = f"Job {jobEnd.jobId()} Failed"
 
         self._add_breadcrumb(level=level, message=message, data=data)
 
     def onStageSubmitted(self, stageSubmitted):  # noqa: N802,N803
         # type: (Any) -> None
         stage_info = stageSubmitted.stageInfo()
-        message = "Stage {} Submitted".format(stage_info.stageId())
+        message = f"Stage {stage_info.stageId()} Submitted"
 
         data = {"name": stage_info.name()}
         attempt_id = _get_attempt_id(stage_info)
@@ -291,10 +291,10 @@ def onStageCompleted(self, stageCompleted):  # noqa: N802,N803
         # Have to Try Except because stageInfo.failureReason() is typed with Scala Option
         try:
             data["reason"] = stage_info.failureReason().get()
-            message = "Stage {} Failed".format(stage_info.stageId())
+            message = f"Stage {stage_info.stageId()} Failed"
             level = "warning"
         except Py4JJavaError:
-            message = "Stage {} Completed".format(stage_info.stageId())
+            message = f"Stage {stage_info.stageId()} Completed"
             level = "info"
 
         self._add_breadcrumb(level=level, message=message, data=data)
diff --git a/src/sentry_sdk_alpha/integrations/spark/spark_worker.py b/src/sentry_sdk_alpha/integrations/spark/spark_worker.py
index f0ed84b9b8f84e..6c68f3435e785c 100644
--- a/src/sentry_sdk_alpha/integrations/spark/spark_worker.py
+++ b/src/sentry_sdk_alpha/integrations/spark/spark_worker.py
@@ -1,22 +1,20 @@
 import sys
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.integrations import Integration
 from sentry_sdk_alpha.utils import (
     capture_internal_exceptions,
+    event_hint_with_exc_info,
     exc_info_from_error,
     single_exception_from_error_tuple,
     walk_exception_chain,
-    event_hint_with_exc_info,
 )
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Optional
+    from typing import Any, Optional
 
-    from sentry_sdk_alpha._types import ExcInfo, Event, Hint
+    from sentry_sdk_alpha._types import Event, ExcInfo, Hint
 
 
 class SparkWorkerIntegration(Integration):
@@ -70,17 +68,13 @@ def _tag_task_context():
     def process_event(event, hint):
         # type: (Event, Hint) -> Optional[Event]
         with capture_internal_exceptions():
-            integration = sentry_sdk_alpha.get_client().get_integration(
-                SparkWorkerIntegration
-            )
+            integration = sentry_sdk_alpha.get_client().get_integration(SparkWorkerIntegration)
             task_context = TaskContext.get()
 
             if integration is None or task_context is None:
                 return event
 
-            event.setdefault("tags", {}).setdefault(
-                "stageId", str(task_context.stageId())
-            )
+            event.setdefault("tags", {}).setdefault("stageId", str(task_context.stageId()))
             event["tags"].setdefault("partitionId", str(task_context.partitionId()))
             event["tags"].setdefault("attemptNumber", str(task_context.attemptNumber()))
             event["tags"].setdefault("taskAttemptId", str(task_context.taskAttemptId()))
diff --git a/src/sentry_sdk_alpha/integrations/sqlalchemy.py b/src/sentry_sdk_alpha/integrations/sqlalchemy.py
index 2a1237eb8fe288..cec206a8387e41 100644
--- a/src/sentry_sdk_alpha/integrations/sqlalchemy.py
+++ b/src/sentry_sdk_alpha/integrations/sqlalchemy.py
@@ -1,5 +1,5 @@
-from sentry_sdk_alpha.consts import SPANSTATUS, SPANDATA
-from sentry_sdk_alpha.integrations import _check_minimum_version, Integration, DidNotEnable
+from sentry_sdk_alpha.consts import SPANDATA, SPANSTATUS
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
 from sentry_sdk_alpha.tracing_utils import add_query_source, record_sql_queries
 from sentry_sdk_alpha.utils import (
     capture_internal_exceptions,
@@ -8,18 +8,16 @@
 )
 
 try:
+    from sqlalchemy import __version__ as SQLALCHEMY_VERSION  # type: ignore
     from sqlalchemy.engine import Engine  # type: ignore
     from sqlalchemy.event import listen  # type: ignore
-    from sqlalchemy import __version__ as SQLALCHEMY_VERSION  # type: ignore
 except ImportError:
     raise DidNotEnable("SQLAlchemy not installed.")
 
 from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import ContextManager
-    from typing import Optional
+    from typing import Any, ContextManager, Optional
 
     from sentry_sdk_alpha.tracing import Span
 
@@ -40,9 +38,7 @@ def setup_once():
 
 
 @ensure_integration_enabled(SqlalchemyIntegration)
-def _before_cursor_execute(
-    conn, cursor, statement, parameters, context, executemany, *args
-):
+def _before_cursor_execute(conn, cursor, statement, parameters, context, executemany, *args):
     # type: (Any, Any, Any, Any, Any, bool, *Any) -> None
     ctx_mgr = record_sql_queries(
         cursor,
diff --git a/src/sentry_sdk_alpha/integrations/starlette.py b/src/sentry_sdk_alpha/integrations/starlette.py
index c1a85877f64e45..e4ac77d80b1f24 100644
--- a/src/sentry_sdk_alpha/integrations/starlette.py
+++ b/src/sentry_sdk_alpha/integrations/starlette.py
@@ -3,13 +3,14 @@
 from collections.abc import Set
 from copy import deepcopy
 from json import JSONDecodeError
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP, SOURCE_FOR_STYLE, TransactionSource
 from sentry_sdk_alpha.integrations import (
+    _DEFAULT_FAILED_REQUEST_STATUS_CODES,
     DidNotEnable,
     Integration,
-    _DEFAULT_FAILED_REQUEST_STATUS_CODES,
 )
 from sentry_sdk_alpha.integrations._wsgi_common import (
     DEFAULT_HTTP_METHODS_TO_CAPTURE,
@@ -28,10 +29,9 @@
     transaction_from_function,
 )
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any, Awaitable, Callable, Dict, Optional, Tuple
+    from collections.abc import Awaitable, Callable
+    from typing import Any, Dict, Optional, Tuple
 
     from sentry_sdk_alpha._types import Event
 
@@ -41,12 +41,12 @@
     from starlette.applications import Starlette  # type: ignore
     from starlette.datastructures import UploadFile  # type: ignore
     from starlette.middleware import Middleware  # type: ignore
-    from starlette.middleware.authentication import (  # type: ignore
-        AuthenticationMiddleware,
-    )
+    from starlette.middleware.authentication import AuthenticationMiddleware  # type: ignore
     from starlette.requests import Request  # type: ignore
     from starlette.routing import Match  # type: ignore
-    from starlette.types import ASGIApp, Receive, Scope as StarletteScope, Send  # type: ignore
+    from starlette.types import ASGIApp, Receive
+    from starlette.types import Scope as StarletteScope  # type: ignore
+    from starlette.types import Send
 except ImportError:
     raise DidNotEnable("Starlette is not installed")
 
@@ -105,9 +105,7 @@ def setup_once():
         version = parse_version(STARLETTE_VERSION)
 
         if version is None:
-            raise DidNotEnable(
-                "Unparsable Starlette version: {}".format(STARLETTE_VERSION)
-            )
+            raise DidNotEnable(f"Unparsable Starlette version: {STARLETTE_VERSION}")
 
         patch_middlewares()
         patch_asgi_app()
@@ -224,9 +222,7 @@ def _sentry_middleware_init(self, *args, **kwargs):
 
             async def _sentry_patched_exception_handler(self, *args, **kwargs):
                 # type: (Any, Any, Any) -> None
-                integration = sentry_sdk_alpha.get_client().get_integration(
-                    StarletteIntegration
-                )
+                integration = sentry_sdk_alpha.get_client().get_integration(StarletteIntegration)
 
                 exp = args[0]
 
@@ -416,9 +412,7 @@ def _sentry_request_response(func):
 
             async def _sentry_async_func(*args, **kwargs):
                 # type: (*Any, **Any) -> Any
-                integration = sentry_sdk_alpha.get_client().get_integration(
-                    StarletteIntegration
-                )
+                integration = sentry_sdk_alpha.get_client().get_integration(StarletteIntegration)
                 if integration is None:
                     return await old_func(*args, **kwargs)
 
@@ -466,9 +460,7 @@ def event_processor(event, hint):
             @functools.wraps(old_func)
             def _sentry_sync_func(*args, **kwargs):
                 # type: (*Any, **Any) -> Any
-                integration = sentry_sdk_alpha.get_client().get_integration(
-                    StarletteIntegration
-                )
+                integration = sentry_sdk_alpha.get_client().get_integration(StarletteIntegration)
                 if integration is None:
                     return old_func(*args, **kwargs)
 
@@ -534,9 +526,7 @@ def patch_templates():
 
     old_jinja2templates_init = Jinja2Templates.__init__
 
-    not_yet_patched = "_sentry_jinja2templates_init" not in str(
-        old_jinja2templates_init
-    )
+    not_yet_patched = "_sentry_jinja2templates_init" not in str(old_jinja2templates_init)
 
     if not_yet_patched:
 
@@ -544,9 +534,7 @@ def _sentry_jinja2templates_init(self, *args, **kwargs):
             # type: (Jinja2Templates, *Any, **Any) -> None
             def add_sentry_trace_meta(request):
                 # type: (Request) -> Dict[str, Any]
-                trace_meta = Markup(
-                    sentry_sdk_alpha.get_current_scope().trace_propagation_meta()
-                )
+                trace_meta = Markup(sentry_sdk_alpha.get_current_scope().trace_propagation_meta())
                 return {
                     "sentry_trace_meta": trace_meta,
                 }
@@ -598,9 +586,7 @@ async def extract_request_info(self):
                 return request_info
 
             # Add annotation if body is too big
-            if content_length and not request_body_within_bounds(
-                client, content_length
-            ):
+            if content_length and not request_body_within_bounds(client, content_length):
                 request_info["data"] = AnnotatedValue.removed_because_over_size_limit()
                 return request_info
 
@@ -617,9 +603,7 @@ async def extract_request_info(self):
                 for key, val in form.items():
                     is_file = isinstance(val, UploadFile)
                     form_data[key] = (
-                        val
-                        if not is_file
-                        else AnnotatedValue.removed_because_raw_data()
+                        val if not is_file else AnnotatedValue.removed_because_raw_data()
                     )
 
                 request_info["data"] = form_data
@@ -703,9 +687,7 @@ def _set_transaction_name_and_source(scope, transaction_style, request):
         source = TransactionSource.ROUTE
 
     scope.set_transaction_name(name, source=source)
-    logger.debug(
-        "[Starlette] Set transaction name and source on scope: %s / %s", name, source
-    )
+    logger.debug("[Starlette] Set transaction name and source on scope: %s / %s", name, source)
 
 
 def _get_transaction_from_middleware(app, asgi_scope, integration):
diff --git a/src/sentry_sdk_alpha/integrations/starlite.py b/src/sentry_sdk_alpha/integrations/starlite.py
index 26069c72b06caf..11dc09e4cde925 100644
--- a/src/sentry_sdk_alpha/integrations/starlite.py
+++ b/src/sentry_sdk_alpha/integrations/starlite.py
@@ -10,13 +10,13 @@
 )
 
 try:
+    from pydantic import BaseModel  # type: ignore
     from starlite import Request, Starlite, State  # type: ignore
     from starlite.handlers.base import BaseRouteHandler  # type: ignore
     from starlite.middleware import DefineMiddleware  # type: ignore
     from starlite.plugins.base import get_plugin_for_value  # type: ignore
     from starlite.routes.http import HTTPRoute  # type: ignore
-    from starlite.utils import ConnectionDataExtractor, is_async_callable, Ref  # type: ignore
-    from pydantic import BaseModel  # type: ignore
+    from starlite.utils import ConnectionDataExtractor, Ref, is_async_callable  # type: ignore
 except ImportError:
     raise DidNotEnable("Starlite is not installed")
 
@@ -24,7 +24,9 @@
 
 if TYPE_CHECKING:
     from typing import Any, Optional, Union
-    from starlite.types import (  # type: ignore
+
+    from starlite import MiddlewareProtocol
+    from starlite.types import (
         ASGIApp,
         Hint,
         HTTPReceiveMessage,
@@ -32,11 +34,10 @@
         Message,
         Middleware,
         Receive,
-        Scope as StarliteScope,
-        Send,
-        WebSocketReceiveMessage,
     )
-    from starlite import MiddlewareProtocol
+    from starlite.types import Scope as StarliteScope  # type: ignore
+    from starlite.types import Send, WebSocketReceiveMessage
+
     from sentry_sdk_alpha._types import Event
 
 
@@ -85,11 +86,7 @@ def injection_wrapper(self, *args, **kwargs):
         kwargs.update(
             after_exception=[
                 exception_handler,
-                *(
-                    after_exception
-                    if isinstance(after_exception, list)
-                    else [after_exception]
-                ),
+                *(after_exception if isinstance(after_exception, list) else [after_exception]),
             ]
         )
 
@@ -205,9 +202,7 @@ async def handle_wrapper(self, scope, receive, send):
         request = scope["app"].request_class(
             scope=scope, receive=receive, send=send
         )  # type: Request[Any, Any]
-        extracted_request_data = ConnectionDataExtractor(
-            parse_body=True, parse_query=True
-        )(request)
+        extracted_request_data = ConnectionDataExtractor(parse_body=True, parse_query=True)(request)
         body = extracted_request_data.pop("body")
 
         request_data = await body
diff --git a/src/sentry_sdk_alpha/integrations/statsig.py b/src/sentry_sdk_alpha/integrations/statsig.py
index ff9fc94b18c158..c89a69512acda4 100644
--- a/src/sentry_sdk_alpha/integrations/statsig.py
+++ b/src/sentry_sdk_alpha/integrations/statsig.py
@@ -1,8 +1,8 @@
 from functools import wraps
-from typing import Any, TYPE_CHECKING
+from typing import TYPE_CHECKING, Any
 
 from sentry_sdk_alpha.feature_flags import add_feature_flag
-from sentry_sdk_alpha.integrations import Integration, DidNotEnable, _check_minimum_version
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
 from sentry_sdk_alpha.utils import parse_version
 
 try:
diff --git a/src/sentry_sdk_alpha/integrations/stdlib.py b/src/sentry_sdk_alpha/integrations/stdlib.py
index c68acb2c8b8451..01341b454dae52 100644
--- a/src/sentry_sdk_alpha/integrations/stdlib.py
+++ b/src/sentry_sdk_alpha/integrations/stdlib.py
@@ -1,8 +1,9 @@
 import os
+import platform
 import subprocess
 import sys
-import platform
 from http.client import HTTPConnection
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP, SPANDATA
@@ -17,19 +18,14 @@
     http_client_status_to_breadcrumb_level,
     is_sentry_url,
     logger,
-    safe_repr,
     parse_url,
+    safe_repr,
     set_thread_info_from_span,
 )
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Callable
-    from typing import Dict
-    from typing import Optional
-    from typing import List
+    from collections.abc import Callable
+    from typing import Any, Dict, List, Optional
 
     from sentry_sdk_alpha._types import Event, Hint
 
@@ -80,7 +76,7 @@ def putrequest(self, method, url, *args, **kwargs):
 
         real_url = url
         if real_url is None or not real_url.startswith(("http://", "https://")):
-            real_url = "%s://%s%s%s" % (
+            real_url = "{}://{}{}{}".format(
                 default_port == 443 and "https" or "http",
                 host,
                 port != default_port and ":%s" % port or "",
@@ -93,8 +89,7 @@ def putrequest(self, method, url, *args, **kwargs):
 
         span = sentry_sdk_alpha.start_span(
             op=OP.HTTP_CLIENT,
-            name="%s %s"
-            % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE),
+            name="%s %s" % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE),
             origin="auto.http.stdlib.httplib",
             only_if_parent=True,
         )
@@ -119,9 +114,7 @@ def putrequest(self, method, url, *args, **kwargs):
             for (
                 key,
                 value,
-            ) in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers(
-                span=span
-            ):
+            ) in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers(span=span):
                 logger.debug(
                     "[Tracing] Adding `{key}` header {value} to outgoing request to {real_url}.".format(
                         key=key, value=value, real_url=real_url
diff --git a/src/sentry_sdk_alpha/integrations/strawberry.py b/src/sentry_sdk_alpha/integrations/strawberry.py
index 09a7cfc3eebaaf..483b08051e9137 100644
--- a/src/sentry_sdk_alpha/integrations/strawberry.py
+++ b/src/sentry_sdk_alpha/integrations/strawberry.py
@@ -4,17 +4,17 @@
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations import _check_minimum_version, Integration, DidNotEnable
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
 from sentry_sdk_alpha.integrations.logging import ignore_logger
 from sentry_sdk_alpha.scope import should_send_default_pii
 from sentry_sdk_alpha.tracing import TransactionSource
 from sentry_sdk_alpha.utils import (
+    _get_installed_modules,
     capture_internal_exceptions,
     ensure_integration_enabled,
     event_from_exception,
     logger,
     package_version,
-    _get_installed_modules,
 )
 
 try:
@@ -39,6 +39,8 @@
 try:
     from strawberry.extensions.tracing import (
         SentryTracingExtension as StrawberrySentryAsyncExtension,
+    )
+    from strawberry.extensions.tracing import (
         SentryTracingExtensionSync as StrawberrySentrySyncExtension,
     )
 except ImportError:
@@ -48,10 +50,13 @@
 from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
-    from typing import Any, Callable, Generator, List, Optional
+    from collections.abc import Callable, Generator
+    from typing import Any, List, Optional
+
     from graphql import GraphQLError, GraphQLResolveInfo
     from strawberry.http import GraphQLHTTPResponse
     from strawberry.types import ExecutionContext
+
     from sentry_sdk_alpha._types import Event, EventProcessor
 
 
@@ -66,9 +71,7 @@ def __init__(self, async_execution=None):
         # type: (Optional[bool]) -> None
         if async_execution not in (None, False, True):
             raise ValueError(
-                'Invalid value for async_execution: "{}" (must be bool)'.format(
-                    async_execution
-                )
+                'Invalid value for async_execution: "{}" (must be bool)'.format(async_execution)
             )
         self.async_execution = async_execution
 
@@ -135,7 +138,7 @@ def _resource_name(self):
         query_hash = self.hash_query(self.execution_context.query)  # type: ignore
 
         if self.execution_context.operation_name:
-            return "{}:{}".format(self.execution_context.operation_name, query_hash)
+            return f"{self.execution_context.operation_name}:{query_hash}"
 
         return query_hash
 
@@ -162,7 +165,7 @@ def on_operation(self):
 
         description = operation_type
         if self._operation_name:
-            description += " {}".format(self._operation_name)
+            description += f" {self._operation_name}"
 
         sentry_sdk_alpha.add_breadcrumb(
             category="graphql.operation",
@@ -192,9 +195,7 @@ def on_operation(self):
             self._operation_name = self.execution_context.operation_name
 
             if self._operation_name is not None:
-                graphql_span.set_attribute(
-                    "graphql.operation.name", self._operation_name
-                )
+                graphql_span.set_attribute("graphql.operation.name", self._operation_name)
 
                 sentry_sdk_alpha.get_current_scope().set_transaction_name(
                     self._operation_name,
@@ -241,11 +242,11 @@ async def resolve(self, _next, root, info, *args, **kwargs):
         if self.should_skip_tracing(_next, info):
             return await self._resolve(_next, root, info, *args, **kwargs)
 
-        field_path = "{}.{}".format(info.parent_type, info.field_name)
+        field_path = f"{info.parent_type}.{info.field_name}"
 
         with sentry_sdk_alpha.start_span(
             op=OP.GRAPHQL_RESOLVE,
-            name="resolving {}".format(field_path),
+            name=f"resolving {field_path}",
             origin=StrawberryIntegration.origin,
         ) as span:
             span.set_attribute("graphql.field_name", info.field_name)
@@ -262,11 +263,11 @@ def resolve(self, _next, root, info, *args, **kwargs):
         if self.should_skip_tracing(_next, info):
             return _next(root, info, *args, **kwargs)
 
-        field_path = "{}.{}".format(info.parent_type, info.field_name)
+        field_path = f"{info.parent_type}.{info.field_name}"
 
         with sentry_sdk_alpha.start_span(
             op=OP.GRAPHQL_RESOLVE,
-            name="resolving {}".format(field_path),
+            name=f"resolving {field_path}",
             origin=StrawberryIntegration.origin,
         ) as span:
             span.set_attribute("graphql.field_name", info.field_name)
@@ -369,6 +370,4 @@ def inner(event, hint):
 
 def _guess_if_using_async(extensions):
     # type: (List[SchemaExtension]) -> bool
-    return bool(
-        {"starlette", "starlite", "litestar", "fastapi"} & set(_get_installed_modules())
-    )
+    return bool({"starlette", "starlite", "litestar", "fastapi"} & set(_get_installed_modules()))
diff --git a/src/sentry_sdk_alpha/integrations/sys_exit.py b/src/sentry_sdk_alpha/integrations/sys_exit.py
index 3ced5fe5db1fdc..9d98e20cf6b5d1 100644
--- a/src/sentry_sdk_alpha/integrations/sys_exit.py
+++ b/src/sentry_sdk_alpha/integrations/sys_exit.py
@@ -2,9 +2,9 @@
 import sys
 
 import sentry_sdk_alpha
-from sentry_sdk_alpha.utils import capture_internal_exceptions, event_from_exception
-from sentry_sdk_alpha.integrations import Integration
 from sentry_sdk_alpha._types import TYPE_CHECKING
+from sentry_sdk_alpha.integrations import Integration
+from sentry_sdk_alpha.utils import capture_internal_exceptions, event_from_exception
 
 if TYPE_CHECKING:
     from collections.abc import Callable
diff --git a/src/sentry_sdk_alpha/integrations/threading.py b/src/sentry_sdk_alpha/integrations/threading.py
index 4ee77c65065b9c..ff05d95863000c 100644
--- a/src/sentry_sdk_alpha/integrations/threading.py
+++ b/src/sentry_sdk_alpha/integrations/threading.py
@@ -2,23 +2,17 @@
 import warnings
 from functools import wraps
 from threading import Thread, current_thread
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha import Scope
-from sentry_sdk_alpha.scope import ScopeType
 from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.utils import (
-    event_from_exception,
-    capture_internal_exceptions,
-    reraise,
-)
-
-from typing import TYPE_CHECKING
+from sentry_sdk_alpha.scope import ScopeType
+from sentry_sdk_alpha.utils import capture_internal_exceptions, event_from_exception, reraise
 
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import TypeVar
-    from typing import Callable
+    from collections.abc import Callable
+    from typing import Any, TypeVar
 
     from sentry_sdk_alpha._types import ExcInfo
 
@@ -38,8 +32,8 @@ def setup_once():
         old_start = Thread.start
 
         try:
-            from django import VERSION as django_version  # noqa: N811
             import channels  # type: ignore[import-not-found]
+            from django import VERSION as django_version  # noqa: N811
 
             channels_version = channels.__version__
         except ImportError:
diff --git a/src/sentry_sdk_alpha/integrations/tornado.py b/src/sentry_sdk_alpha/integrations/tornado.py
index 057981ad3cb12c..da6bde6b93628e 100644
--- a/src/sentry_sdk_alpha/integrations/tornado.py
+++ b/src/sentry_sdk_alpha/integrations/tornado.py
@@ -1,44 +1,41 @@
-import weakref
 import contextlib
+import weakref
 from inspect import iscoroutinefunction
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import OP
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
+from sentry_sdk_alpha.integrations._wsgi_common import (
+    RequestExtractor,
+    _filter_headers,
+    _is_json_content_type,
+    _request_headers_to_span_attributes,
+)
+from sentry_sdk_alpha.integrations.logging import ignore_logger
 from sentry_sdk_alpha.scope import should_send_default_pii
 from sentry_sdk_alpha.tracing import TransactionSource
 from sentry_sdk_alpha.utils import (
-    HAS_REAL_CONTEXTVARS,
     CONTEXTVARS_ERROR_MESSAGE,
+    HAS_REAL_CONTEXTVARS,
+    capture_internal_exceptions,
     ensure_integration_enabled,
     event_from_exception,
-    capture_internal_exceptions,
     transaction_from_function,
 )
-from sentry_sdk_alpha.integrations import _check_minimum_version, Integration, DidNotEnable
-from sentry_sdk_alpha.integrations._wsgi_common import (
-    RequestExtractor,
-    _filter_headers,
-    _is_json_content_type,
-    _request_headers_to_span_attributes,
-)
-from sentry_sdk_alpha.integrations.logging import ignore_logger
 
 try:
     from tornado import version_info as TORNADO_VERSION
     from tornado.gen import coroutine
     from tornado.httputil import HTTPServerRequest
-    from tornado.web import RequestHandler, HTTPError
+    from tornado.web import HTTPError, RequestHandler
 except ImportError:
     raise DidNotEnable("Tornado not installed")
 
 from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Optional
-    from typing import Dict
-    from typing import Callable
-    from typing import Generator
+    from collections.abc import Callable, Generator
+    from typing import Any, Dict, Optional
 
     from sentry_sdk_alpha._types import Event, EventProcessor
 
@@ -171,7 +168,7 @@ def tornado_processor(event, hint):
 
             request_info = event["request"]
 
-            request_info["url"] = "%s://%s%s" % (
+            request_info["url"] = "{}://{}{}".format(
                 request.protocol,
                 request.host,
                 request.path,
diff --git a/src/sentry_sdk_alpha/integrations/trytond.py b/src/sentry_sdk_alpha/integrations/trytond.py
index b178c6bf3500e4..05841e840e6633 100644
--- a/src/sentry_sdk_alpha/integrations/trytond.py
+++ b/src/sentry_sdk_alpha/integrations/trytond.py
@@ -1,12 +1,11 @@
-import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import _check_minimum_version, Integration
-from sentry_sdk_alpha.integrations.wsgi import SentryWsgiMiddleware
-from sentry_sdk_alpha.utils import ensure_integration_enabled, event_from_exception
-
 from trytond import __version__ as trytond_version  # type: ignore
 from trytond.exceptions import TrytonException  # type: ignore
 from trytond.wsgi import app  # type: ignore
 
+import sentry_sdk_alpha
+from sentry_sdk_alpha.integrations import Integration, _check_minimum_version
+from sentry_sdk_alpha.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk_alpha.utils import ensure_integration_enabled, event_from_exception
 
 # TODO: trytond-worker, trytond-cron and trytond-admin intergations
 
diff --git a/src/sentry_sdk_alpha/integrations/typer.py b/src/sentry_sdk_alpha/integrations/typer.py
index 429f2e7a12acd1..b0b09decf3a8ef 100644
--- a/src/sentry_sdk_alpha/integrations/typer.py
+++ b/src/sentry_sdk_alpha/integrations/typer.py
@@ -1,22 +1,16 @@
-import sentry_sdk_alpha
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    event_from_exception,
-)
-from sentry_sdk_alpha.integrations import Integration, DidNotEnable
-
 from typing import TYPE_CHECKING
 
-if TYPE_CHECKING:
-    from typing import Callable
-    from typing import Any
-    from typing import Type
-    from typing import Optional
+import sentry_sdk_alpha
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration
+from sentry_sdk_alpha.utils import capture_internal_exceptions, event_from_exception
 
+if TYPE_CHECKING:
+    from collections.abc import Callable
     from types import TracebackType
+    from typing import Any, Optional, Type
 
     Excepthook = Callable[
-        [Type[BaseException], BaseException, Optional[TracebackType]],
+        [type[BaseException], BaseException, Optional[TracebackType]],
         Any,
     ]
 
diff --git a/src/sentry_sdk_alpha/integrations/unleash.py b/src/sentry_sdk_alpha/integrations/unleash.py
index 45809337d6d8f2..08abb8a6ccb3e9 100644
--- a/src/sentry_sdk_alpha/integrations/unleash.py
+++ b/src/sentry_sdk_alpha/integrations/unleash.py
@@ -2,7 +2,7 @@
 from typing import Any
 
 from sentry_sdk_alpha.feature_flags import add_feature_flag
-from sentry_sdk_alpha.integrations import Integration, DidNotEnable
+from sentry_sdk_alpha.integrations import DidNotEnable, Integration
 
 try:
     from UnleashClient import UnleashClient
diff --git a/src/sentry_sdk_alpha/integrations/wsgi.py b/src/sentry_sdk_alpha/integrations/wsgi.py
index f156d6f6fa9b55..9e33c8c15b7d93 100644
--- a/src/sentry_sdk_alpha/integrations/wsgi.py
+++ b/src/sentry_sdk_alpha/integrations/wsgi.py
@@ -1,15 +1,16 @@
 import sys
 from functools import partial
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
-from sentry_sdk_alpha._werkzeug import get_host, _get_headers
+from sentry_sdk_alpha._werkzeug import _get_headers, get_host
 from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.scope import should_send_default_pii
 from sentry_sdk_alpha.integrations._wsgi_common import (
     DEFAULT_HTTP_METHODS_TO_CAPTURE,
     _filter_headers,
     _request_headers_to_span_attributes,
 )
+from sentry_sdk_alpha.scope import should_send_default_pii
 from sentry_sdk_alpha.sessions import track_session
 from sentry_sdk_alpha.tracing import Span, TransactionSource
 from sentry_sdk_alpha.utils import (
@@ -19,20 +20,12 @@
     reraise,
 )
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Callable
-    from typing import Dict
-    from typing import Iterator
-    from typing import Any
-    from typing import Tuple
-    from typing import Optional
-    from typing import TypeVar
-    from typing import Protocol
+    from collections.abc import Callable, Iterator
+    from typing import Any, Dict, Optional, Protocol, Tuple, TypeVar
 
-    from sentry_sdk_alpha.utils import ExcInfo
     from sentry_sdk_alpha._types import Event, EventProcessor
+    from sentry_sdk_alpha.utils import ExcInfo
 
     WsgiResponseIter = TypeVar("WsgiResponseIter")
     WsgiResponseHeaders = TypeVar("WsgiResponseHeaders")
@@ -71,7 +64,7 @@ def get_request_url(environ, use_x_forwarded_for=False):
     path_info = environ.get("PATH_INFO", "").lstrip("/")
     path = f"{script_name}/{path_info}"
 
-    return "%s://%s/%s" % (
+    return "{}://{}/{}".format(
         environ.get("wsgi.url_scheme"),
         get_host(environ, use_x_forwarded_for),
         wsgi_decoding_dance(path).lstrip("/"),
@@ -107,18 +100,14 @@ def __call__(self, environ, start_response):
         _wsgi_middleware_applied.set(True)
         try:
             with sentry_sdk_alpha.isolation_scope() as scope:
-                scope.set_transaction_name(
-                    DEFAULT_TRANSACTION_NAME, source=TransactionSource.ROUTE
-                )
+                scope.set_transaction_name(DEFAULT_TRANSACTION_NAME, source=TransactionSource.ROUTE)
 
                 with track_session(scope, session_mode="request"):
                     with capture_internal_exceptions():
                         scope.clear_breadcrumbs()
                         scope._name = "wsgi"
                         scope.add_event_processor(
-                            _make_wsgi_event_processor(
-                                environ, self.use_x_forwarded_for
-                            )
+                            _make_wsgi_event_processor(environ, self.use_x_forwarded_for)
                         )
                     method = environ.get("REQUEST_METHOD", "").upper()
                     should_trace = method in self.http_methods_to_capture
@@ -133,9 +122,7 @@ def __call__(self, environ, start_response):
                                     environ, self.use_x_forwarded_for
                                 ),
                             ) as span:
-                                response = self._run_original_app(
-                                    environ, start_response, span
-                                )
+                                response = self._run_original_app(environ, start_response, span)
                     else:
                         response = self._run_original_app(environ, start_response, None)
 
@@ -349,8 +336,6 @@ def _prepopulate_attributes(wsgi_environ, use_x_forwarded_for=False):
         query = wsgi_environ.get("QUERY_STRING")
         attributes["url.full"] = f"{url}?{query}"
 
-    attributes.update(
-        _request_headers_to_span_attributes(dict(_get_headers(wsgi_environ)))
-    )
+    attributes.update(_request_headers_to_span_attributes(dict(_get_headers(wsgi_environ))))
 
     return attributes
diff --git a/src/sentry_sdk_alpha/monitor.py b/src/sentry_sdk_alpha/monitor.py
index 08820a7adb035d..5ca0516b2f9a18 100644
--- a/src/sentry_sdk_alpha/monitor.py
+++ b/src/sentry_sdk_alpha/monitor.py
@@ -1,12 +1,11 @@
 import os
 import time
-from threading import Thread, Lock
+from threading import Lock, Thread
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.utils import logger
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
     from typing import Optional
 
@@ -83,9 +82,7 @@ def set_downsample_factor(self):
         # type: () -> None
         if self._healthy:
             if self._downsample_factor > 0:
-                logger.debug(
-                    "[Monitor] health check positive, reverting to normal sampling"
-                )
+                logger.debug("[Monitor] health check positive, reverting to normal sampling")
             self._downsample_factor = 0
         else:
             if self.downsample_factor < MAX_DOWNSAMPLE_FACTOR:
diff --git a/src/sentry_sdk_alpha/opentelemetry/consts.py b/src/sentry_sdk_alpha/opentelemetry/consts.py
index 8da847b4c5159e..98999277209dc1 100644
--- a/src/sentry_sdk_alpha/opentelemetry/consts.py
+++ b/src/sentry_sdk_alpha/opentelemetry/consts.py
@@ -1,6 +1,6 @@
 from opentelemetry.context import create_key
-from sentry_sdk_alpha.tracing_utils import Baggage
 
+from sentry_sdk_alpha.tracing_utils import Baggage
 
 # propagation keys
 SENTRY_TRACE_KEY = create_key("sentry-trace")
diff --git a/src/sentry_sdk_alpha/opentelemetry/contextvars_context.py b/src/sentry_sdk_alpha/opentelemetry/contextvars_context.py
index 4f489ad11d8add..98485a9af8a20a 100644
--- a/src/sentry_sdk_alpha/opentelemetry/contextvars_context.py
+++ b/src/sentry_sdk_alpha/opentelemetry/contextvars_context.py
@@ -1,20 +1,21 @@
-from typing import cast, TYPE_CHECKING
+from typing import TYPE_CHECKING, cast
 
-from opentelemetry.trace import set_span_in_context
 from opentelemetry.context import Context, get_value, set_value
 from opentelemetry.context.contextvars_context import ContextVarsRuntimeContext
+from opentelemetry.trace import set_span_in_context
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.opentelemetry.consts import (
-    SENTRY_SCOPES_KEY,
     SENTRY_FORK_ISOLATION_SCOPE_KEY,
+    SENTRY_SCOPES_KEY,
     SENTRY_USE_CURRENT_SCOPE_KEY,
     SENTRY_USE_ISOLATION_SCOPE_KEY,
 )
 
 if TYPE_CHECKING:
-    from typing import Optional
     from contextvars import Token
+    from typing import Optional
+
     import sentry_sdk_alpha.opentelemetry.scope as scope
 
 
@@ -23,20 +24,14 @@ def attach(self, context):
         # type: (Context) -> Token[Context]
         scopes = get_value(SENTRY_SCOPES_KEY, context)
 
-        should_fork_isolation_scope = context.pop(
-            SENTRY_FORK_ISOLATION_SCOPE_KEY, False
-        )
+        should_fork_isolation_scope = context.pop(SENTRY_FORK_ISOLATION_SCOPE_KEY, False)
         should_fork_isolation_scope = cast("bool", should_fork_isolation_scope)
 
         should_use_isolation_scope = context.pop(SENTRY_USE_ISOLATION_SCOPE_KEY, None)
-        should_use_isolation_scope = cast(
-            "Optional[scope.PotelScope]", should_use_isolation_scope
-        )
+        should_use_isolation_scope = cast("Optional[scope.PotelScope]", should_use_isolation_scope)
 
         should_use_current_scope = context.pop(SENTRY_USE_CURRENT_SCOPE_KEY, None)
-        should_use_current_scope = cast(
-            "Optional[scope.PotelScope]", should_use_current_scope
-        )
+        should_use_current_scope = cast("Optional[scope.PotelScope]", should_use_current_scope)
 
         if scopes:
             scopes = cast("tuple[scope.PotelScope, scope.PotelScope]", scopes)
diff --git a/src/sentry_sdk_alpha/opentelemetry/propagator.py b/src/sentry_sdk_alpha/opentelemetry/propagator.py
index b4fb6bc73ce23b..a1d01b4cb9cfcb 100644
--- a/src/sentry_sdk_alpha/opentelemetry/propagator.py
+++ b/src/sentry_sdk_alpha/opentelemetry/propagator.py
@@ -1,12 +1,7 @@
-from typing import cast
+from typing import TYPE_CHECKING, cast
 
 from opentelemetry import trace
-from opentelemetry.context import (
-    Context,
-    get_current,
-    get_value,
-    set_value,
-)
+from opentelemetry.context import Context, get_current, get_value, set_value
 from opentelemetry.propagators.textmap import (
     CarrierT,
     Getter,
@@ -15,27 +10,19 @@
     default_getter,
     default_setter,
 )
-from opentelemetry.trace import (
-    NonRecordingSpan,
-    SpanContext,
-    TraceFlags,
-)
+from opentelemetry.trace import NonRecordingSpan, SpanContext, TraceFlags
 
-from sentry_sdk_alpha.consts import (
-    BAGGAGE_HEADER_NAME,
-    SENTRY_TRACE_HEADER_NAME,
-)
+from sentry_sdk_alpha.consts import BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME
 from sentry_sdk_alpha.opentelemetry.consts import (
     SENTRY_BAGGAGE_KEY,
-    SENTRY_TRACE_KEY,
     SENTRY_SCOPES_KEY,
+    SENTRY_TRACE_KEY,
 )
 from sentry_sdk_alpha.tracing_utils import Baggage, extract_sentrytrace_data
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
     from typing import Optional, Set
+
     import sentry_sdk.opentelemetry.scope as scope
 
 
diff --git a/src/sentry_sdk_alpha/opentelemetry/sampler.py b/src/sentry_sdk_alpha/opentelemetry/sampler.py
index 2bf2b4cadcb10f..05256820bce5b1 100644
--- a/src/sentry_sdk_alpha/opentelemetry/sampler.py
+++ b/src/sentry_sdk_alpha/opentelemetry/sampler.py
@@ -1,27 +1,24 @@
 from decimal import Decimal
-from typing import cast
+from typing import TYPE_CHECKING, cast
 
 from opentelemetry import trace
-from opentelemetry.sdk.trace.sampling import Sampler, SamplingResult, Decision
+from opentelemetry.sdk.trace.sampling import Decision, Sampler, SamplingResult
 from opentelemetry.trace.span import TraceState
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.opentelemetry.consts import (
-    TRACESTATE_SAMPLED_KEY,
     TRACESTATE_SAMPLE_RAND_KEY,
     TRACESTATE_SAMPLE_RATE_KEY,
+    TRACESTATE_SAMPLED_KEY,
     SentrySpanAttribute,
 )
-from sentry_sdk_alpha.tracing_utils import (
-    _generate_sample_rand,
-    has_tracing_enabled,
-)
+from sentry_sdk_alpha.tracing_utils import _generate_sample_rand, has_tracing_enabled
 from sentry_sdk_alpha.utils import is_valid_sample_rate, logger
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any, Optional, Sequence, Union
+    from collections.abc import Sequence
+    from typing import Any, Optional, Union
+
     from opentelemetry.context import Context
     from opentelemetry.trace import Link, SpanKind
     from opentelemetry.trace.span import SpanContext
@@ -212,9 +209,7 @@ def should_sample(
             sample_rand = cast(Decimal, _generate_sample_rand(str(trace_id), (0, 1)))
 
         # Explicit sampled value provided at start_span
-        custom_sampled = cast(
-            "Optional[bool]", attributes.get(SentrySpanAttribute.CUSTOM_SAMPLED)
-        )
+        custom_sampled = cast("Optional[bool]", attributes.get(SentrySpanAttribute.CUSTOM_SAMPLED))
         if custom_sampled is not None:
             if is_root_span:
                 sample_rate = float(custom_sampled)
@@ -233,9 +228,7 @@ def should_sample(
                         sample_rand=sample_rand,
                     )
             else:
-                logger.debug(
-                    f"[Tracing.Sampler] Ignoring sampled param for non-root span {name}"
-                )
+                logger.debug(f"[Tracing.Sampler] Ignoring sampled param for non-root span {name}")
 
         # Check if there is a traces_sampler
         # Traces_sampler is responsible to check parent sampled to have full transactions.
@@ -253,9 +246,7 @@ def should_sample(
             # Check if there is a parent with a sampling decision
             if parent_sampled is not None:
                 sample_rate = bool(parent_sampled)
-                sample_rate_to_propagate = (
-                    parent_sample_rate if parent_sample_rate else sample_rate
-                )
+                sample_rate_to_propagate = parent_sample_rate if parent_sample_rate else sample_rate
             else:
                 # Check if there is a traces_sample_rate
                 sample_rate = client.options.get("traces_sample_rate")
@@ -263,9 +254,7 @@ def should_sample(
 
         # If the sample rate is invalid, drop the span
         if not is_valid_sample_rate(sample_rate, source=self.__class__.__name__):
-            logger.warning(
-                f"[Tracing.Sampler] Discarding {name} because of invalid sample rate."
-            )
+            logger.warning(f"[Tracing.Sampler] Discarding {name} because of invalid sample rate.")
             return dropped_result(parent_span_context, attributes)
 
         # Down-sample in case of back pressure monitor says so
@@ -313,9 +302,7 @@ def create_sampling_context(name, attributes, parent_span_context, trace_id):
         "transaction_context": {
             "name": name,
             "op": attributes.get(SentrySpanAttribute.OP) if attributes else None,
-            "source": (
-                attributes.get(SentrySpanAttribute.SOURCE) if attributes else None
-            ),
+            "source": (attributes.get(SentrySpanAttribute.SOURCE) if attributes else None),
         },
         "parent_sampled": get_parent_sampled(parent_span_context, trace_id),
     }  # type: dict[str, Any]
diff --git a/src/sentry_sdk_alpha/opentelemetry/scope.py b/src/sentry_sdk_alpha/opentelemetry/scope.py
index 2c0b030ba3914c..c7f3eb24609b9d 100644
--- a/src/sentry_sdk_alpha/opentelemetry/scope.py
+++ b/src/sentry_sdk_alpha/opentelemetry/scope.py
@@ -1,39 +1,26 @@
-from typing import cast
-from contextlib import contextmanager
 import warnings
+from contextlib import contextmanager
+from typing import cast
 
-from opentelemetry.context import (
-    get_value,
-    set_value,
-    attach,
-    detach,
-    get_current,
-)
-from opentelemetry.trace import (
-    SpanContext,
-    NonRecordingSpan,
-    TraceFlags,
-    TraceState,
-    use_span,
-)
+from opentelemetry.context import attach, detach, get_current, get_value, set_value
+from opentelemetry.trace import NonRecordingSpan, SpanContext, TraceFlags, TraceState, use_span
 
+from sentry_sdk_alpha._types import TYPE_CHECKING
 from sentry_sdk_alpha.opentelemetry.consts import (
-    SENTRY_SCOPES_KEY,
     SENTRY_FORK_ISOLATION_SCOPE_KEY,
+    SENTRY_SCOPES_KEY,
     SENTRY_USE_CURRENT_SCOPE_KEY,
     SENTRY_USE_ISOLATION_SCOPE_KEY,
     TRACESTATE_SAMPLED_KEY,
 )
-from sentry_sdk_alpha.opentelemetry.contextvars_context import (
-    SentryContextVarsRuntimeContext,
-)
+from sentry_sdk_alpha.opentelemetry.contextvars_context import SentryContextVarsRuntimeContext
 from sentry_sdk_alpha.opentelemetry.utils import trace_state_from_baggage
 from sentry_sdk_alpha.scope import Scope, ScopeType
 from sentry_sdk_alpha.tracing import Span
-from sentry_sdk_alpha._types import TYPE_CHECKING
 
 if TYPE_CHECKING:
-    from typing import Tuple, Optional, Generator, Dict, Any
+    from collections.abc import Generator
+    from typing import Any, Dict, Optional, Tuple
 
 
 class PotelScope(Scope):
@@ -43,9 +30,7 @@ def _get_scopes(cls):
         """
         Returns the current scopes tuple on the otel context. Internal use only.
         """
-        return cast(
-            "Optional[Tuple[PotelScope, PotelScope]]", get_value(SENTRY_SCOPES_KEY)
-        )
+        return cast("Optional[Tuple[PotelScope, PotelScope]]", get_value(SENTRY_SCOPES_KEY))
 
     @classmethod
     def get_current_scope(cls):
@@ -107,9 +92,7 @@ def _incoming_otel_span_context(self):
             return None
 
         trace_flags = TraceFlags(
-            TraceFlags.SAMPLED
-            if self._propagation_context.parent_sampled
-            else TraceFlags.DEFAULT
+            TraceFlags.SAMPLED if self._propagation_context.parent_sampled else TraceFlags.DEFAULT
         )
 
         if self._propagation_context.baggage:
diff --git a/src/sentry_sdk_alpha/opentelemetry/span_processor.py b/src/sentry_sdk_alpha/opentelemetry/span_processor.py
index 614db99f8a8756..802306720e4e6d 100644
--- a/src/sentry_sdk_alpha/opentelemetry/span_processor.py
+++ b/src/sentry_sdk_alpha/opentelemetry/span_processor.py
@@ -1,45 +1,39 @@
-from collections import deque, defaultdict
+from collections import defaultdict, deque
 from typing import cast
 
-from opentelemetry.trace import (
-    format_trace_id,
-    format_span_id,
-    get_current_span,
-    INVALID_SPAN,
-    Span as AbstractSpan,
-)
 from opentelemetry.context import Context
-from opentelemetry.sdk.trace import Span, ReadableSpan, SpanProcessor
+from opentelemetry.sdk.trace import ReadableSpan, Span, SpanProcessor
+from opentelemetry.trace import INVALID_SPAN
+from opentelemetry.trace import Span as AbstractSpan
+from opentelemetry.trace import format_span_id, format_trace_id, get_current_span
 
 import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import SPANDATA, DEFAULT_SPAN_ORIGIN
-from sentry_sdk_alpha.utils import get_current_thread_meta
-from sentry_sdk_alpha.opentelemetry.consts import (
-    OTEL_SENTRY_CONTEXT,
-    SentrySpanAttribute,
-)
+from sentry_sdk_alpha._types import TYPE_CHECKING
+from sentry_sdk_alpha.consts import DEFAULT_SPAN_ORIGIN, SPANDATA
+from sentry_sdk_alpha.opentelemetry.consts import OTEL_SENTRY_CONTEXT, SentrySpanAttribute
 from sentry_sdk_alpha.opentelemetry.sampler import create_sampling_context
 from sentry_sdk_alpha.opentelemetry.utils import (
-    is_sentry_span,
     convert_from_otel_timestamp,
     extract_span_attributes,
     extract_span_data,
     extract_transaction_name_source,
-    get_trace_context,
     get_profile_context,
     get_sentry_meta,
+    get_trace_context,
+    is_sentry_span,
     set_sentry_meta,
 )
 from sentry_sdk_alpha.profiler.continuous_profiler import (
-    try_autostart_continuous_profiler,
     get_profiler_id,
+    try_autostart_continuous_profiler,
     try_profile_lifecycle_trace_start,
 )
 from sentry_sdk_alpha.profiler.transaction_profiler import Profile
-from sentry_sdk_alpha._types import TYPE_CHECKING
+from sentry_sdk_alpha.utils import get_current_thread_meta
 
 if TYPE_CHECKING:
-    from typing import Optional, List, Any, Deque, DefaultDict
+    from typing import Any, DefaultDict, Deque, List, Optional
+
     from sentry_sdk_alpha._types import Event
 
 
@@ -60,10 +54,8 @@ def __new__(cls):
 
     def __init__(self):
         # type: () -> None
-        self._children_spans = defaultdict(
-            list
-        )  # type: DefaultDict[int, List[ReadableSpan]]
-        self._dropped_spans = defaultdict(lambda: 0)  # type: DefaultDict[int, int]
+        self._children_spans = defaultdict(list)  # type: DefaultDict[int, List[ReadableSpan]]
+        self._dropped_spans = defaultdict(int)  # type: DefaultDict[int, int]
 
     def on_start(self, span, parent_context=None):
         # type: (Span, Optional[Context]) -> None
@@ -205,9 +197,7 @@ def _collect_children(self, span):
             node_children = self._children_spans.pop(parent_span_id, [])
             dropped_spans += self._dropped_spans.pop(parent_span_id, 0)
             children.extend(node_children)
-            bfs_queue.extend(
-                [child.context.span_id for child in node_children if child.context]
-            )
+            bfs_queue.extend([child.context.span_id for child in node_children if child.context])
 
         return children, dropped_spans
 
@@ -263,9 +253,7 @@ def _span_to_json(self, span):
             return None
 
         # This is a safe cast because dict[str, Any] is a superset of Event
-        span_json = cast(
-            "dict[str, Any]", self._common_span_transaction_attributes_as_json(span)
-        )
+        span_json = cast("dict[str, Any]", self._common_span_transaction_attributes_as_json(span))
         if span_json is None:
             return None
 
@@ -321,8 +309,7 @@ def _log_debug_info(self):
         pprint.pprint(
             {
                 format_span_id(span_id): [
-                    (format_span_id(child.context.span_id), child.name)
-                    for child in children
+                    (format_span_id(child.context.span_id), child.name) for child in children
                 ]
                 for span_id, children in self._children_spans.items()
             }
diff --git a/src/sentry_sdk_alpha/opentelemetry/tracing.py b/src/sentry_sdk_alpha/opentelemetry/tracing.py
index 0e7d8290605c47..51a101821e3da1 100644
--- a/src/sentry_sdk_alpha/opentelemetry/tracing.py
+++ b/src/sentry_sdk_alpha/opentelemetry/tracing.py
@@ -1,12 +1,8 @@
 from opentelemetry import trace
 from opentelemetry.propagate import set_global_textmap
-from opentelemetry.sdk.trace import TracerProvider, Span, ReadableSpan
+from opentelemetry.sdk.trace import ReadableSpan, Span, TracerProvider
 
-from sentry_sdk_alpha.opentelemetry import (
-    SentryPropagator,
-    SentrySampler,
-    SentrySpanProcessor,
-)
+from sentry_sdk_alpha.opentelemetry import SentryPropagator, SentrySampler, SentrySpanProcessor
 
 
 def patch_readable_span():
diff --git a/src/sentry_sdk_alpha/opentelemetry/utils.py b/src/sentry_sdk_alpha/opentelemetry/utils.py
index 609cee587687a0..98806f429c43d8 100644
--- a/src/sentry_sdk_alpha/opentelemetry/utils.py
+++ b/src/sentry_sdk_alpha/opentelemetry/utils.py
@@ -1,36 +1,31 @@
 import re
-from typing import cast
 from datetime import datetime, timezone
-
-from urllib3.util import parse_url as urlparse
+from typing import cast
 from urllib.parse import quote, unquote
-from opentelemetry.trace import (
-    Span as AbstractSpan,
-    SpanKind,
-    StatusCode,
-    format_trace_id,
-    format_span_id,
-    TraceState,
-)
-from opentelemetry.semconv.trace import SpanAttributes
+
 from opentelemetry.sdk.trace import ReadableSpan
+from opentelemetry.semconv.trace import SpanAttributes
+from opentelemetry.trace import Span as AbstractSpan
+from opentelemetry.trace import SpanKind, StatusCode, TraceState, format_span_id, format_trace_id
+from urllib3.util import parse_url as urlparse
 
 import sentry_sdk_alpha
-from sentry_sdk_alpha.utils import Dsn
+from sentry_sdk_alpha._types import TYPE_CHECKING
 from sentry_sdk_alpha.consts import (
-    SPANSTATUS,
-    OP,
-    SPANDATA,
     DEFAULT_SPAN_ORIGIN,
     LOW_QUALITY_TRANSACTION_SOURCES,
+    OP,
+    SPANDATA,
+    SPANSTATUS,
 )
 from sentry_sdk_alpha.opentelemetry.consts import SentrySpanAttribute
 from sentry_sdk_alpha.tracing_utils import Baggage, get_span_status_from_http_code
-
-from sentry_sdk_alpha._types import TYPE_CHECKING
+from sentry_sdk_alpha.utils import Dsn
 
 if TYPE_CHECKING:
-    from typing import Any, Optional, Mapping, Sequence, Union
+    from collections.abc import Mapping, Sequence
+    from typing import Any, Optional, Union
+
     from sentry_sdk_alpha._types import OtelExtractedSpanData
 
 
@@ -124,9 +119,7 @@ def extract_span_data(span):
 
     attribute_op = cast("Optional[str]", span.attributes.get(SentrySpanAttribute.OP))
     op = attribute_op or op
-    description = cast(
-        "str", span.attributes.get(SentrySpanAttribute.DESCRIPTION) or description
-    )
+    description = cast("str", span.attributes.get(SentrySpanAttribute.DESCRIPTION) or description)
     origin = cast("Optional[str]", span.attributes.get(SentrySpanAttribute.ORIGIN))
 
     http_method = span.attributes.get(SpanAttributes.HTTP_METHOD)
@@ -184,9 +177,9 @@ def span_data_for_http_method(span):
     peer_name = span_attributes.get(SpanAttributes.NET_PEER_NAME)
 
     # TODO-neel-potel remove description completely
-    description = span_attributes.get(
-        SentrySpanAttribute.DESCRIPTION
-    ) or span_attributes.get(SentrySpanAttribute.NAME)
+    description = span_attributes.get(SentrySpanAttribute.DESCRIPTION) or span_attributes.get(
+        SentrySpanAttribute.NAME
+    )
     description = cast("Optional[str]", description)
     if description is None:
         description = f"{http_method}"
@@ -203,9 +196,7 @@ def span_data_for_http_method(span):
 
             if url:
                 parsed_url = urlparse(url)
-                url = "{}://{}{}".format(
-                    parsed_url.scheme, parsed_url.netloc, parsed_url.path
-                )
+                url = "{}://{}{}".format(parsed_url.scheme, parsed_url.netloc, parsed_url.path)
                 description = f"{http_method} {url}"
 
     status, http_status = extract_span_status(span)
@@ -248,10 +239,7 @@ def extract_span_status(span):
             if http_status is not None:
                 return (inferred_status, http_status)
 
-            if (
-                status.description is not None
-                and status.description in GRPC_ERROR_MAP.values()
-            ):
+            if status.description is not None and status.description in GRPC_ERROR_MAP.values():
                 return (status.description, None)
             else:
                 return (SPANSTATUS.UNKNOWN_ERROR, None)
@@ -436,14 +424,9 @@ def get_trace_state(span):
 
         root_span = get_sentry_meta(span, "root_span")
         if root_span and isinstance(root_span, ReadableSpan):
-            transaction_name, transaction_source = extract_transaction_name_source(
-                root_span
-            )
+            transaction_name, transaction_source = extract_transaction_name_source(root_span)
 
-            if (
-                transaction_name
-                and transaction_source not in LOW_QUALITY_TRANSACTION_SOURCES
-            ):
+            if transaction_name and transaction_source not in LOW_QUALITY_TRANSACTION_SOURCES:
                 trace_state = trace_state.update(
                     Baggage.SENTRY_PREFIX + "transaction", quote(transaction_name)
                 )
diff --git a/src/sentry_sdk_alpha/profiler/__init__.py b/src/sentry_sdk_alpha/profiler/__init__.py
index 853eea8233485d..01d6ba03071e93 100644
--- a/src/sentry_sdk_alpha/profiler/__init__.py
+++ b/src/sentry_sdk_alpha/profiler/__init__.py
@@ -1,7 +1,4 @@
-from sentry_sdk_alpha.profiler.continuous_profiler import (
-    start_profiler,
-    stop_profiler,
-)
+from sentry_sdk_alpha.profiler.continuous_profiler import start_profiler, stop_profiler
 
 __all__ = [
     "start_profiler",
diff --git a/src/sentry_sdk_alpha/profiler/continuous_profiler.py b/src/sentry_sdk_alpha/profiler/continuous_profiler.py
index 6945f84b3ddcc4..18143bb0bb1fcb 100644
--- a/src/sentry_sdk_alpha/profiler/continuous_profiler.py
+++ b/src/sentry_sdk_alpha/profiler/continuous_profiler.py
@@ -7,14 +7,12 @@
 import uuid
 from collections import deque
 from datetime import datetime, timezone
+from typing import TYPE_CHECKING
 
+from sentry_sdk_alpha._lru_cache import LRUCache
 from sentry_sdk_alpha.consts import VERSION
 from sentry_sdk_alpha.envelope import Envelope
-from sentry_sdk_alpha._lru_cache import LRUCache
-from sentry_sdk_alpha.profiler.utils import (
-    DEFAULT_SAMPLING_FREQUENCY,
-    extract_stack,
-)
+from sentry_sdk_alpha.profiler.utils import DEFAULT_SAMPLING_FREQUENCY, extract_stack
 from sentry_sdk_alpha.utils import (
     capture_internal_exception,
     is_gevent,
@@ -23,37 +21,24 @@
     set_in_app_in_frames,
 )
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Callable
-    from typing import Deque
-    from typing import Dict
-    from typing import List
-    from typing import Optional
-    from typing import Set
-    from typing import Type
-    from typing import Union
-    from typing_extensions import TypedDict
+    from collections.abc import Callable
+    from typing import Any, Deque, Dict, List, Optional, Set, Type, TypedDict, Union
+
     from sentry_sdk_alpha._types import ContinuousProfilerMode, SDKInfo
     from sentry_sdk_alpha.profiler.utils import (
         ExtractedSample,
         FrameId,
-        StackId,
-        ThreadId,
         ProcessedFrame,
         ProcessedStack,
+        StackId,
+        ThreadId,
     )
 
-    ProcessedSample = TypedDict(
-        "ProcessedSample",
-        {
-            "timestamp": float,
-            "thread_id": ThreadId,
-            "stack_id": int,
-        },
-    )
+    class ProcessedSample(TypedDict):
+        timestamp: float
+        thread_id: ThreadId
+        stack_id: int
 
 
 try:
@@ -94,20 +79,14 @@ def setup_continuous_profiler(options, sdk_info, capture_func):
     frequency = DEFAULT_SAMPLING_FREQUENCY
 
     if profiler_mode == ThreadContinuousScheduler.mode:
-        _scheduler = ThreadContinuousScheduler(
-            frequency, options, sdk_info, capture_func
-        )
+        _scheduler = ThreadContinuousScheduler(frequency, options, sdk_info, capture_func)
     elif profiler_mode == GeventContinuousScheduler.mode:
-        _scheduler = GeventContinuousScheduler(
-            frequency, options, sdk_info, capture_func
-        )
+        _scheduler = GeventContinuousScheduler(frequency, options, sdk_info, capture_func)
     else:
-        raise ValueError("Unknown continuous profiler mode: {}".format(profiler_mode))
+        raise ValueError(f"Unknown continuous profiler mode: {profiler_mode}")
 
     logger.debug(
-        "[Profiling] Setting up continuous profiler in {mode} mode".format(
-            mode=_scheduler.mode
-        )
+        "[Profiling] Setting up continuous profiler in {mode} mode".format(mode=_scheduler.mode)
     )
 
     atexit.register(teardown_continuous_profiler)
@@ -198,9 +177,7 @@ def __init__(self, frequency, options, sdk_info, capture_func):
 
         self.lifecycle = self.options.get("profile_lifecycle")
         profile_session_sample_rate = self.options.get("profile_session_sample_rate")
-        self.sampled = determine_profile_session_sampling_decision(
-            profile_session_sample_rate
-        )
+        self.sampled = determine_profile_session_sampling_decision(profile_session_sample_rate)
 
         self.sampler = self.make_sampler()
         self.buffer = None  # type: Optional[ProfileBuffer]
@@ -494,7 +471,7 @@ def __init__(self, frequency, options, sdk_info, capture_func):
         # type: (int, Dict[str, Any], SDKInfo, Callable[[Envelope], None]) -> None
 
         if ThreadPool is None:
-            raise ValueError("Profiler mode: {} is not available".format(self.mode))
+            raise ValueError(f"Profiler mode: {self.mode} is not available")
 
         super().__init__(frequency, options, sdk_info, capture_func)
 
@@ -567,9 +544,7 @@ def __init__(self, options, sdk_info, buffer_size, capture_func):
         #
         # Subtracting the start_monotonic_time here to find a fixed starting position
         # for relative monotonic timestamps for each sample.
-        self.start_timestamp = (
-            datetime.now(timezone.utc).timestamp() - self.start_monotonic_time
-        )
+        self.start_timestamp = datetime.now(timezone.utc).timestamp() - self.start_monotonic_time
 
     def write(self, monotonic_time, sample):
         # type: (float, ExtractedSample) -> None
@@ -619,9 +594,7 @@ def write(self, ts, sample):
                             self.frames.append(frames[i])
 
                     self.indexed_stacks[stack_id] = len(self.indexed_stacks)
-                    self.stacks.append(
-                        [self.indexed_frames[frame_id] for frame_id in frame_ids]
-                    )
+                    self.stacks.append([self.indexed_frames[frame_id] for frame_id in frame_ids])
 
                 self.samples.append(
                     {
diff --git a/src/sentry_sdk_alpha/profiler/transaction_profiler.py b/src/sentry_sdk_alpha/profiler/transaction_profiler.py
index 266dc376734f34..dc9a573cd351e1 100644
--- a/src/sentry_sdk_alpha/profiler/transaction_profiler.py
+++ b/src/sentry_sdk_alpha/profiler/transaction_profiler.py
@@ -35,13 +35,11 @@
 import uuid
 from abc import ABC, abstractmethod
 from collections import deque
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha._lru_cache import LRUCache
-from sentry_sdk_alpha.profiler.utils import (
-    DEFAULT_SAMPLING_FREQUENCY,
-    extract_stack,
-)
+from sentry_sdk_alpha.profiler.utils import DEFAULT_SAMPLING_FREQUENCY, extract_stack
 from sentry_sdk_alpha.utils import (
     capture_internal_exception,
     get_current_thread_meta,
@@ -51,48 +49,31 @@
     set_in_app_in_frames,
 )
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Callable
-    from typing import Deque
-    from typing import Dict
-    from typing import List
-    from typing import Optional
-    from typing import Set
-    from typing import Type
-    from typing_extensions import TypedDict
+    from collections.abc import Callable
+    from typing import Any, Deque, Dict, List, Optional, Set, Type, TypedDict
 
+    from sentry_sdk_alpha._types import Event, ProfilerMode, SamplingContext
     from sentry_sdk_alpha.profiler.utils import (
-        ProcessedStack,
+        ExtractedSample,
+        FrameId,
         ProcessedFrame,
+        ProcessedStack,
         ProcessedThreadMetadata,
-        FrameId,
         StackId,
         ThreadId,
-        ExtractedSample,
-    )
-    from sentry_sdk_alpha._types import Event, SamplingContext, ProfilerMode
-
-    ProcessedSample = TypedDict(
-        "ProcessedSample",
-        {
-            "elapsed_since_start_ns": str,
-            "thread_id": ThreadId,
-            "stack_id": int,
-        },
     )
 
-    ProcessedProfile = TypedDict(
-        "ProcessedProfile",
-        {
-            "frames": List[ProcessedFrame],
-            "stacks": List[ProcessedStack],
-            "samples": List[ProcessedSample],
-            "thread_metadata": Dict[ThreadId, ProcessedThreadMetadata],
-        },
-    )
+    class ProcessedSample(TypedDict):
+        elapsed_since_start_ns: str
+        thread_id: ThreadId
+        stack_id: int
+
+    class ProcessedProfile(TypedDict):
+        frames: List[ProcessedFrame]
+        stacks: List[ProcessedStack]
+        samples: List[ProcessedSample]
+        thread_metadata: Dict[ThreadId, ProcessedThreadMetadata]
 
 
 try:
@@ -160,11 +141,9 @@ def setup_profiler(options):
     elif profiler_mode == GeventScheduler.mode:
         _scheduler = GeventScheduler(frequency=frequency)
     else:
-        raise ValueError("Unknown profiler mode: {}".format(profiler_mode))
+        raise ValueError(f"Unknown profiler mode: {profiler_mode}")
 
-    logger.debug(
-        "[Profiling] Setting up profiler in {mode} mode".format(mode=_scheduler.mode)
-    )
+    logger.debug(f"[Profiling] Setting up profiler in {_scheduler.mode} mode")
     _scheduler.setup()
 
     atexit.register(teardown_profiler)
@@ -225,9 +204,7 @@ def update_active_thread_id(self):
         # type: () -> None
         self.active_thread_id = get_current_thread_meta()[0]
         logger.debug(
-            "[Profiling] updating active thread id to {tid}".format(
-                tid=self.active_thread_id
-            )
+            "[Profiling] updating active thread id to {tid}".format(tid=self.active_thread_id)
         )
 
     def _set_initial_sampling_decision(self, sampling_context):
@@ -245,17 +222,13 @@ def _set_initial_sampling_decision(self, sampling_context):
         # The corresponding transaction was not sampled,
         # so don't generate a profile for it.
         if not self.sampled:
-            logger.debug(
-                "[Profiling] Discarding profile because transaction is discarded."
-            )
+            logger.debug("[Profiling] Discarding profile because transaction is discarded.")
             self.sampled = False
             return
 
         # The profiler hasn't been properly initialized.
         if self.scheduler is None:
-            logger.debug(
-                "[Profiling] Discarding profile because profiler was not started."
-            )
+            logger.debug("[Profiling] Discarding profile because profiler was not started.")
             self.sampled = False
             return
 
@@ -275,16 +248,12 @@ def _set_initial_sampling_decision(self, sampling_context):
         # The profiles_sample_rate option was not set, so profiling
         # was never enabled.
         if sample_rate is None:
-            logger.debug(
-                "[Profiling] Discarding profile because profiling was not enabled."
-            )
+            logger.debug("[Profiling] Discarding profile because profiling was not enabled.")
             self.sampled = False
             return
 
         if not is_valid_sample_rate(sample_rate, source="Profiling"):
-            logger.warning(
-                "[Profiling] Discarding profile because of invalid sample rate."
-            )
+            logger.warning("[Profiling] Discarding profile because of invalid sample rate.")
             self.sampled = False
             return
 
@@ -373,9 +342,7 @@ def write(self, ts, sample):
                             self.frames.append(frames[i])
 
                     self.indexed_stacks[stack_id] = len(self.indexed_stacks)
-                    self.stacks.append(
-                        [self.indexed_frames[frame_id] for frame_id in frame_ids]
-                    )
+                    self.stacks.append([self.indexed_frames[frame_id] for frame_id in frame_ids])
 
                 self.samples.append(
                     {
@@ -471,16 +438,12 @@ def valid(self):
 
         if self.sampled is None or not self.sampled:
             if client.transport:
-                client.transport.record_lost_event(
-                    "sample_rate", data_category="profile"
-                )
+                client.transport.record_lost_event("sample_rate", data_category="profile")
             return False
 
         if self.unique_samples < PROFILE_MINIMUM_SAMPLES:
             if client.transport:
-                client.transport.record_lost_event(
-                    "insufficient_data", data_category="profile"
-                )
+                client.transport.record_lost_event("insufficient_data", data_category="profile")
             logger.debug("[Profiling] Discarding profile because insufficient samples.")
             return False
 
@@ -714,7 +677,7 @@ def __init__(self, frequency):
         # type: (int) -> None
 
         if ThreadPool is None:
-            raise ValueError("Profiler mode: {} is not available".format(self.mode))
+            raise ValueError(f"Profiler mode: {self.mode} is not available")
 
         super().__init__(frequency=frequency)
 
diff --git a/src/sentry_sdk_alpha/profiler/utils.py b/src/sentry_sdk_alpha/profiler/utils.py
index a62c79aedbfa42..0e6fa5e1fc2148 100644
--- a/src/sentry_sdk_alpha/profiler/utils.py
+++ b/src/sentry_sdk_alpha/profiler/utils.py
@@ -1,56 +1,46 @@
 import os
 from collections import deque
+from typing import TYPE_CHECKING
 
 from sentry_sdk_alpha._compat import PY311
 from sentry_sdk_alpha.utils import filename_for_module
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from sentry_sdk_alpha._lru_cache import LRUCache
+    from collections.abc import Sequence
     from types import FrameType
-    from typing import Deque
-    from typing import List
-    from typing import Optional
-    from typing import Sequence
-    from typing import Tuple
-    from typing_extensions import TypedDict
+    from typing import Deque, List, Optional, Tuple, TypedDict
+
+    from sentry_sdk_alpha._lru_cache import LRUCache
 
     ThreadId = str
 
-    ProcessedStack = List[int]
-
-    ProcessedFrame = TypedDict(
-        "ProcessedFrame",
-        {
-            "abs_path": str,
-            "filename": Optional[str],
-            "function": str,
-            "lineno": int,
-            "module": Optional[str],
-        },
-    )
-
-    ProcessedThreadMetadata = TypedDict(
-        "ProcessedThreadMetadata",
-        {"name": str},
-    )
-
-    FrameId = Tuple[
+    ProcessedStack = list[int]
+
+    class ProcessedFrame(TypedDict):
+        abs_path: str
+        filename: Optional[str]
+        function: str
+        lineno: int
+        module: Optional[str]
+
+    class ProcessedThreadMetadata(TypedDict):
+        name: str
+
+    FrameId = tuple[
         str,  # abs_path
         int,  # lineno
         str,  # function
     ]
-    FrameIds = Tuple[FrameId, ...]
+    FrameIds = tuple[FrameId, ...]
 
     # The exact value of this id is not very meaningful. The purpose
     # of this id is to give us a compact and unique identifier for a
     # raw stack that can be used as a key to a dictionary so that it
     # can be used during the sampled format generation.
-    StackId = Tuple[int, int]
+    StackId = tuple[int, int]
 
-    ExtractedStack = Tuple[StackId, FrameIds, List[ProcessedFrame]]
-    ExtractedSample = Sequence[Tuple[ThreadId, ExtractedStack]]
+    ExtractedStack = tuple[StackId, FrameIds, list[ProcessedFrame]]
+    ExtractedSample = Sequence[tuple[ThreadId, ExtractedStack]]
 
 # The default sampling frequency to use. This is set at 101 in order to
 # mitigate the effects of lockstep sampling.
@@ -91,7 +81,7 @@ def get_frame_name(frame):
             ):
                 for cls in type(frame.f_locals["self"]).__mro__:
                     if name in cls.__dict__:
-                        return "{}.{}".format(cls.__name__, name)
+                        return f"{cls.__name__}.{name}"
         except (AttributeError, ValueError):
             pass
 
@@ -107,7 +97,7 @@ def get_frame_name(frame):
             ):
                 for cls in frame.f_locals["cls"].__mro__:
                     if name in cls.__dict__:
-                        return "{}.{}".format(cls.__name__, name)
+                        return f"{cls.__name__}.{name}"
         except (AttributeError, ValueError):
             pass
 
diff --git a/src/sentry_sdk_alpha/scope.py b/src/sentry_sdk_alpha/scope.py
index 49b14443731960..3697eb5d633c27 100644
--- a/src/sentry_sdk_alpha/scope.py
+++ b/src/sentry_sdk_alpha/scope.py
@@ -1,38 +1,32 @@
 import os
 import sys
 import warnings
-from copy import copy, deepcopy
 from collections import deque
 from contextlib import contextmanager
-from enum import Enum
+from copy import copy, deepcopy
 from datetime import datetime, timezone
+from enum import Enum
 from functools import wraps
 from itertools import chain
+from typing import TYPE_CHECKING
 
 from sentry_sdk_alpha._types import AnnotatedValue
 from sentry_sdk_alpha.attachments import Attachment
 from sentry_sdk_alpha.consts import (
+    BAGGAGE_HEADER_NAME,
     DEFAULT_MAX_BREADCRUMBS,
     FALSE_VALUES,
-    BAGGAGE_HEADER_NAME,
     SENTRY_TRACE_HEADER_NAME,
 )
-from sentry_sdk_alpha.feature_flags import FlagBuffer, DEFAULT_FLAG_CAPACITY
+from sentry_sdk_alpha.feature_flags import DEFAULT_FLAG_CAPACITY, FlagBuffer
 from sentry_sdk_alpha.profiler.transaction_profiler import Profile
 from sentry_sdk_alpha.session import Session
-from sentry_sdk_alpha.tracing_utils import (
-    Baggage,
-    has_tracing_enabled,
-    PropagationContext,
-)
-from sentry_sdk_alpha.tracing import (
-    NoOpSpan,
-    Span,
-)
+from sentry_sdk_alpha.tracing import NoOpSpan, Span
+from sentry_sdk_alpha.tracing_utils import Baggage, PropagationContext, has_tracing_enabled
 from sentry_sdk_alpha.utils import (
+    ContextVar,
     capture_internal_exception,
     capture_internal_exceptions,
-    ContextVar,
     datetime_from_isoformat,
     disable_capture_event,
     event_from_exception,
@@ -40,25 +34,11 @@
     logger,
 )
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from collections.abc import Mapping, MutableMapping
-
-    from typing import Any
-    from typing import Callable
-    from typing import Deque
-    from typing import Dict
-    from typing import Generator
-    from typing import Iterator
-    from typing import List
-    from typing import Optional
-    from typing import ParamSpec
-    from typing import Tuple
-    from typing import TypeVar
-    from typing import Union
-    from typing import Self
+    from collections.abc import Callable, Generator, Iterator, Mapping, MutableMapping
+    from typing import Any, Deque, Dict, List, Optional, ParamSpec, Self, Tuple, TypeVar, Union
 
+    import sentry_sdk_alpha
     from sentry_sdk_alpha._types import (
         Breadcrumb,
         BreadcrumbHint,
@@ -71,8 +51,6 @@
         Type,
     )
 
-    import sentry_sdk_alpha
-
     P = ParamSpec("P")
     R = TypeVar("R")
 
@@ -437,9 +415,7 @@ def _load_trace_data_from_env(self):
         """
         incoming_trace_information = None
 
-        sentry_use_environment = (
-            os.environ.get("SENTRY_USE_ENVIRONMENT") or ""
-        ).lower()
+        sentry_use_environment = (os.environ.get("SENTRY_USE_ENVIRONMENT") or "").lower()
         use_environment = sentry_use_environment not in FALSE_VALUES
         if use_environment:
             incoming_trace_information = {}
@@ -496,11 +472,7 @@ def get_traceparent(self, *args, **kwargs):
         client = self.get_client()
 
         # If we have an active span, return traceparent from there
-        if (
-            has_tracing_enabled(client.options)
-            and self.span is not None
-            and self.span.is_valid
-        ):
+        if has_tracing_enabled(client.options) and self.span is not None and self.span.is_valid:
             return self.span.to_traceparent()
 
         # If this scope has a propagation context, return traceparent from there
@@ -520,11 +492,7 @@ def get_baggage(self, *args, **kwargs):
         client = self.get_client()
 
         # If we have an active span, return baggage from there
-        if (
-            has_tracing_enabled(client.options)
-            and self.span is not None
-            and self.span.is_valid
-        ):
+        if has_tracing_enabled(client.options) and self.span is not None and self.span.is_valid:
             return self.span.to_baggage()
 
         # If this scope has a propagation context, return baggage from there
@@ -564,14 +532,14 @@ def trace_propagation_meta(self, *args, **kwargs):
 
         sentry_trace = self.get_traceparent()
         if sentry_trace is not None:
-            meta += '' % (
+            meta += ''.format(
                 SENTRY_TRACE_HEADER_NAME,
                 sentry_trace,
             )
 
         baggage = self.get_baggage()
         if baggage is not None:
-            meta += '' % (
+            meta += ''.format(
                 BAGGAGE_HEADER_NAME,
                 baggage.serialize(),
             )
@@ -606,26 +574,22 @@ def iter_trace_propagation_headers(self, *args, **kwargs):
         span = span or self.span
 
         if has_tracing_enabled(client.options) and span is not None and span.is_valid:
-            for header in span.iter_headers():
-                yield header
+            yield from span.iter_headers()
         else:
             # If this scope has a propagation context, return headers from there
             # (it could be that self is not the current scope nor the isolation scope)
             if self._propagation_context is not None:
-                for header in self.iter_headers():
-                    yield header
+                yield from self.iter_headers()
             else:
                 # otherwise try headers from current scope
                 current_scope = self.get_current_scope()
                 if current_scope._propagation_context is not None:
-                    for header in current_scope.iter_headers():
-                        yield header
+                    yield from current_scope.iter_headers()
                 else:
                     # otherwise fall back to headers from isolation scope
                     isolation_scope = self.get_isolation_scope()
                     if isolation_scope._propagation_context is not None:
-                        for header in isolation_scope.iter_headers():
-                            yield header
+                        yield from isolation_scope.iter_headers()
 
     def get_active_propagation_context(self):
         # type: () -> Optional[PropagationContext]
@@ -1018,9 +982,7 @@ def capture_exception(self, error=None, scope=None, **scope_kwargs):
         else:
             exc_info = sys.exc_info()
 
-        event, hint = event_from_exception(
-            exc_info, client_options=self.get_client().options
-        )
+        event, hint = event_from_exception(exc_info, client_options=self.get_client().options)
 
         try:
             return self.capture_event(event, hint=hint, scope=scope, **scope_kwargs)
@@ -1138,9 +1100,7 @@ def _apply_breadcrumbs_to_event(self, event, hint, options):
                     if isinstance(crumb["timestamp"], str):
                         crumb["timestamp"] = datetime_from_isoformat(crumb["timestamp"])
 
-                event["breadcrumbs"]["values"].sort(
-                    key=lambda crumb: crumb["timestamp"]
-                )
+                event["breadcrumbs"]["values"].sort(key=lambda crumb: crumb["timestamp"])
         except Exception as err:
             logger.debug("Error when sorting breadcrumbs", exc_info=err)
             pass
@@ -1184,11 +1144,7 @@ def _apply_contexts_to_event(self, event, hint, options):
 
         # Add "trace" context
         if contexts.get("trace") is None:
-            if (
-                has_tracing_enabled(options)
-                and self._span is not None
-                and self._span.is_valid
-            ):
+            if has_tracing_enabled(options) and self._span is not None and self._span.is_valid:
                 contexts["trace"] = self._span.get_trace_context()
             else:
                 contexts["trace"] = self.get_trace_context()
@@ -1197,9 +1153,7 @@ def _apply_flags_to_event(self, event, hint, options):
         # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
         flags = self.flags.get()
         if len(flags) > 0:
-            event.setdefault("contexts", {}).setdefault("flags", {}).update(
-                {"values": flags}
-            )
+            event.setdefault("contexts", {}).setdefault("flags", {}).update({"values": flags})
 
     def _drop(self, cause, ty):
         # type: (Any, str) -> Optional[Any]
@@ -1284,9 +1238,7 @@ def apply_to_event(
 
         if is_check_in:
             # Check-ins only support the trace context, strip all others
-            event["contexts"] = {
-                "trace": event.setdefault("contexts", {}).get("trace", {})
-            }
+            event["contexts"] = {"trace": event.setdefault("contexts", {}).get("trace", {})}
 
         if not is_check_in:
             self._apply_level_to_event(event, hint, options)
@@ -1379,7 +1331,7 @@ def update_from_kwargs(
 
     def __repr__(self):
         # type: () -> str
-        return "<%s id=%s name=%s type=%s>" % (
+        return "<{} id={} name={} type={}>".format(
             self.__class__.__name__,
             hex(id(self)),
             self._name,
@@ -1391,8 +1343,7 @@ def flags(self):
         # type: () -> FlagBuffer
         if self._flags is None:
             max_flags = (
-                self.get_client().options["_experiments"].get("max_flags")
-                or DEFAULT_FLAG_CAPACITY
+                self.get_client().options["_experiments"].get("max_flags") or DEFAULT_FLAG_CAPACITY
             )
             self._flags = FlagBuffer(capacity=max_flags)
         return self._flags
diff --git a/src/sentry_sdk_alpha/scrubber.py b/src/sentry_sdk_alpha/scrubber.py
index 77f41ee0a7cd6b..6bbbff36912f95 100644
--- a/src/sentry_sdk_alpha/scrubber.py
+++ b/src/sentry_sdk_alpha/scrubber.py
@@ -1,15 +1,12 @@
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    AnnotatedValue,
-    iter_event_frames,
-)
+from typing import TYPE_CHECKING, Dict, List, cast
 
-from typing import TYPE_CHECKING, cast, List, Dict
+from sentry_sdk_alpha.utils import AnnotatedValue, capture_internal_exceptions, iter_event_frames
 
 if TYPE_CHECKING:
-    from sentry_sdk_alpha._types import Event
     from typing import Optional
 
+    from sentry_sdk_alpha._types import Event
+
 
 DEFAULT_DENYLIST = [
     # stolen from relay
@@ -59,9 +56,7 @@
 
 
 class EventScrubber:
-    def __init__(
-        self, denylist=None, recursive=False, send_default_pii=False, pii_denylist=None
-    ):
+    def __init__(self, denylist=None, recursive=False, send_default_pii=False, pii_denylist=None):
         # type: (Optional[List[str]], bool, bool, Optional[List[str]]) -> None
         """
         A scrubber that goes through the event payload and removes sensitive data configured through denylists.
@@ -74,9 +69,7 @@ def __init__(
         self.denylist = DEFAULT_DENYLIST.copy() if denylist is None else denylist
 
         if not send_default_pii:
-            pii_denylist = (
-                DEFAULT_PII_DENYLIST.copy() if pii_denylist is None else pii_denylist
-            )
+            pii_denylist = DEFAULT_PII_DENYLIST.copy() if pii_denylist is None else pii_denylist
             self.denylist += pii_denylist
 
         self.denylist = [x.lower() for x in self.denylist]
@@ -163,7 +156,7 @@ def scrub_spans(self, event):
         # type: (Event) -> None
         with capture_internal_exceptions():
             if "spans" in event:
-                for span in cast(List[Dict[str, object]], event["spans"]):
+                for span in cast(list[dict[str, object]], event["spans"]):
                     if "data" in span:
                         self.scrub_dict(span["data"])
 
diff --git a/src/sentry_sdk_alpha/serializer.py b/src/sentry_sdk_alpha/serializer.py
index aaf5dc0a931652..8ecb9abf692ca8 100644
--- a/src/sentry_sdk_alpha/serializer.py
+++ b/src/sentry_sdk_alpha/serializer.py
@@ -1,7 +1,8 @@
-import sys
 import math
+import sys
 from collections.abc import Mapping, Sequence, Set
 from datetime import datetime
+from typing import TYPE_CHECKING
 
 from sentry_sdk_alpha.utils import (
     AnnotatedValue,
@@ -12,25 +13,16 @@
     strip_string,
 )
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
+    from collections.abc import Callable
     from types import TracebackType
-
-    from typing import Any
-    from typing import Callable
-    from typing import ContextManager
-    from typing import Dict
-    from typing import List
-    from typing import Optional
-    from typing import Type
-    from typing import Union
+    from typing import Any, ContextManager, Dict, List, Optional, Type, Union
 
     from sentry_sdk_alpha._types import NotImplementedType
 
-    Span = Dict[str, Any]
+    Span = dict[str, Any]
 
-    ReprProcessor = Callable[[Any, Dict[str, Any]], Union[NotImplementedType, str]]
+    ReprProcessor = Callable[[Any, dict[str, Any]], Union[NotImplementedType, str]]
     Segment = Union[str, int]
 
 
@@ -120,9 +112,7 @@ def serialize(event, **kwargs):
     path = []  # type: List[Segment]
     meta_stack = []  # type: List[Dict[str, Any]]
 
-    keep_request_bodies = (
-        kwargs.pop("max_request_body_size", None) == "always"
-    )  # type: bool
+    keep_request_bodies = kwargs.pop("max_request_body_size", None) == "always"  # type: bool
     max_value_length = kwargs.pop("max_value_length", None)  # type: Optional[int]
     is_vars = kwargs.pop("is_vars", False)
     custom_repr = kwargs.pop("custom_repr", None)  # type: Callable[..., Optional[str]]
@@ -296,9 +286,7 @@ def _serialize_node_impl(
 
         elif isinstance(obj, datetime):
             return (
-                str(format_timestamp(obj))
-                if not should_repr_strings
-                else _safe_repr_wrapper(obj)
+                str(format_timestamp(obj)) if not should_repr_strings else _safe_repr_wrapper(obj)
             )
 
         elif isinstance(obj, Mapping):
@@ -321,9 +309,7 @@ def _serialize_node_impl(
                     should_repr_strings=should_repr_strings,
                     is_databag=is_databag,
                     is_request_body=is_request_body,
-                    remaining_depth=(
-                        remaining_depth - 1 if remaining_depth is not None else None
-                    ),
+                    remaining_depth=(remaining_depth - 1 if remaining_depth is not None else None),
                     remaining_breadth=remaining_breadth,
                 )
                 rv_dict[str_k] = v
@@ -331,9 +317,7 @@ def _serialize_node_impl(
 
             return rv_dict
 
-        elif not isinstance(obj, serializable_str_types) and isinstance(
-            obj, (Set, Sequence)
-        ):
+        elif not isinstance(obj, serializable_str_types) and isinstance(obj, (Set, Sequence)):
             rv_list = []
 
             for i, v in enumerate(obj):
@@ -366,9 +350,7 @@ def _serialize_node_impl(
             if not isinstance(obj, str):
                 obj = _safe_repr_wrapper(obj)
 
-        is_span_description = (
-            len(path) == 3 and path[0] == "spans" and path[-1] == "description"
-        )
+        is_span_description = len(path) == 3 and path[0] == "spans" and path[-1] == "description"
         if is_span_description:
             return obj
 
diff --git a/src/sentry_sdk_alpha/session.py b/src/sentry_sdk_alpha/session.py
index c72a77f4f54c86..718a2acb15c9ff 100644
--- a/src/sentry_sdk_alpha/session.py
+++ b/src/sentry_sdk_alpha/session.py
@@ -1,15 +1,11 @@
 import uuid
 from datetime import datetime, timezone
+from typing import TYPE_CHECKING
 
 from sentry_sdk_alpha.utils import format_timestamp
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Optional
-    from typing import Union
-    from typing import Any
-    from typing import Dict
+    from typing import Any, Dict, Optional, Union
 
     from sentry_sdk_alpha._types import SessionStatus
 
diff --git a/src/sentry_sdk_alpha/sessions.py b/src/sentry_sdk_alpha/sessions.py
index a28bb601a3d282..61d3f061ff1989 100644
--- a/src/sentry_sdk_alpha/sessions.py
+++ b/src/sentry_sdk_alpha/sessions.py
@@ -1,22 +1,17 @@
 import os
 import time
-from threading import Thread, Lock
 from contextlib import contextmanager
+from threading import Lock, Thread
+from typing import TYPE_CHECKING
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.envelope import Envelope
 from sentry_sdk_alpha.session import Session
 from sentry_sdk_alpha.utils import format_timestamp
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Callable
-    from typing import Dict
-    from typing import Generator
-    from typing import List
-    from typing import Optional
+    from collections.abc import Callable, Generator
+    from typing import Any, Dict, List, Optional
 
 
 def _is_auto_session_tracking_enabled(scope):
diff --git a/src/sentry_sdk_alpha/spotlight.py b/src/sentry_sdk_alpha/spotlight.py
index 0aaa8127b713a3..6f830acf400be6 100644
--- a/src/sentry_sdk_alpha/spotlight.py
+++ b/src/sentry_sdk_alpha/spotlight.py
@@ -1,30 +1,25 @@
 import io
 import logging
 import os
+import sys
+import urllib.error
 import urllib.parse
 import urllib.request
-import urllib.error
-import urllib3
-import sys
-
 from itertools import chain, product
-
 from typing import TYPE_CHECKING
 
+import urllib3
+
 if TYPE_CHECKING:
     from typing import Any
-    from typing import Callable
+    from collections.abc import Callable
     from typing import Dict
     from typing import Optional
     from typing import Self
 
-from sentry_sdk_alpha.utils import (
-    logger as sentry_logger,
-    env_to_bool,
-    capture_internal_exceptions,
-)
 from sentry_sdk_alpha.envelope import Envelope
-
+from sentry_sdk_alpha.utils import capture_internal_exceptions, env_to_bool
+from sentry_sdk_alpha.utils import logger as sentry_logger
 
 logger = logging.getLogger("spotlight")
 
@@ -69,9 +64,9 @@ def capture_envelope(self, envelope):
 
 
 try:
-    from django.utils.deprecation import MiddlewareMixin
-    from django.http import HttpResponseServerError, HttpResponse, HttpRequest
     from django.conf import settings
+    from django.http import HttpRequest, HttpResponse, HttpResponseServerError
+    from django.utils.deprecation import MiddlewareMixin
 
     SPOTLIGHT_JS_ENTRY_PATH = "/assets/main.js"
     SPOTLIGHT_JS_SNIPPET_PATTERN = (
@@ -139,13 +134,10 @@ def spotlight_script(self):
         def process_response(self, _request, response):
             # type: (Self, HttpRequest, HttpResponse) -> Optional[HttpResponse]
             content_type_header = tuple(
-                p.strip()
-                for p in response.headers.get("Content-Type", "").lower().split(";")
+                p.strip() for p in response.headers.get("Content-Type", "").lower().split(";")
             )
             content_type = content_type_header[0]
-            if len(content_type_header) > 1 and content_type_header[1].startswith(
-                CHARSET_PREFIX
-            ):
+            if len(content_type_header) > 1 and content_type_header[1].startswith(CHARSET_PREFIX):
                 encoding = content_type_header[1][len(CHARSET_PREFIX) :]
             else:
                 encoding = "utf-8"
@@ -187,9 +179,7 @@ def process_exception(self, _request, exception):
                 return None
 
             try:
-                spotlight = (
-                    urllib.request.urlopen(self._spotlight_url).read().decode("utf-8")
-                )
+                spotlight = urllib.request.urlopen(self._spotlight_url).read().decode("utf-8")
             except urllib.error.URLError:
                 return None
             else:
diff --git a/src/sentry_sdk_alpha/tracing.py b/src/sentry_sdk_alpha/tracing.py
index 1cf54023e280a7..325235829bfadc 100644
--- a/src/sentry_sdk_alpha/tracing.py
+++ b/src/sentry_sdk_alpha/tracing.py
@@ -1,41 +1,35 @@
-from datetime import datetime
 import json
 import warnings
+from datetime import datetime
+from typing import TYPE_CHECKING, cast
 
-from opentelemetry import trace as otel_trace, context
-from opentelemetry.trace import (
-    format_trace_id,
-    format_span_id,
-    Span as OtelSpan,
-    TraceState,
-    get_current_span,
-    INVALID_SPAN,
-)
-from opentelemetry.trace.status import Status, StatusCode
+from opentelemetry import context
+from opentelemetry import trace as otel_trace
 from opentelemetry.sdk.trace import ReadableSpan
+from opentelemetry.trace import INVALID_SPAN
+from opentelemetry.trace import Span as OtelSpan
+from opentelemetry.trace import TraceState, format_span_id, format_trace_id, get_current_span
+from opentelemetry.trace.status import Status, StatusCode
 from opentelemetry.version import __version__ as otel_version
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import (
+    BAGGAGE_HEADER_NAME,
     DEFAULT_SPAN_NAME,
     DEFAULT_SPAN_ORIGIN,
-    BAGGAGE_HEADER_NAME,
     SENTRY_TRACE_HEADER_NAME,
-    SPANSTATUS,
     SPANDATA,
+    SPANSTATUS,
     TransactionSource,
 )
-from sentry_sdk_alpha.opentelemetry.consts import (
-    TRACESTATE_SAMPLE_RATE_KEY,
-    SentrySpanAttribute,
-)
+from sentry_sdk_alpha.opentelemetry.consts import TRACESTATE_SAMPLE_RATE_KEY, SentrySpanAttribute
 from sentry_sdk_alpha.opentelemetry.utils import (
     baggage_from_trace_state,
     convert_from_otel_timestamp,
     convert_to_otel_timestamp,
+    get_sentry_meta,
     get_trace_context,
     get_trace_state,
-    get_sentry_meta,
     serialize_trace_state,
 )
 from sentry_sdk_alpha.tracing_utils import get_span_status_from_http_code
@@ -46,28 +40,14 @@
     should_be_treated_as_error,
 )
 
-from typing import TYPE_CHECKING, cast
-
-
 if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any
-    from typing import Dict
-    from typing import Iterator
-    from typing import Optional
-    from typing import overload
-    from typing import ParamSpec
-    from typing import Tuple
-    from typing import Union
-    from typing import TypeVar
+    from collections.abc import Callable, Iterator
+    from typing import Any, Dict, Optional, ParamSpec, Tuple, TypeVar, Union, overload
 
     P = ParamSpec("P")
     R = TypeVar("R")
 
-    from sentry_sdk_alpha._types import (
-        SamplingContext,
-    )
-
+    from sentry_sdk_alpha._types import SamplingContext
     from sentry_sdk_alpha.tracing_utils import Baggage
 
 _FLAGS_CAPACITY = 10
@@ -202,9 +182,7 @@ def __init__(
             skip_span = False
             if only_if_parent and parent_span is None:
                 parent_span_context = get_current_span().get_span_context()
-                skip_span = (
-                    not parent_span_context.is_valid or parent_span_context.is_remote
-                )
+                skip_span = not parent_span_context.is_valid or parent_span_context.is_remote
 
             if skip_span:
                 self._otel_span = INVALID_SPAN
@@ -229,9 +207,7 @@ def __init__(
 
                 parent_context = None
                 if parent_span is not None:
-                    parent_context = otel_trace.set_span_in_context(
-                        parent_span._otel_span
-                    )
+                    parent_context = otel_trace.set_span_in_context(parent_span._otel_span)
 
                 self._otel_span = tracer.start_span(
                     span_name,
@@ -325,9 +301,7 @@ def origin(self, value):
     @property
     def root_span(self):
         # type: () -> Optional[Span]
-        root_otel_span = cast(
-            "Optional[OtelSpan]", get_sentry_meta(self._otel_span, "root_span")
-        )
+        root_otel_span = cast("Optional[OtelSpan]", get_sentry_meta(self._otel_span, "root_span"))
         return Span(otel_span=root_otel_span) if root_otel_span else None
 
     @property
@@ -338,10 +312,7 @@ def is_root_span(self):
     @property
     def parent_span_id(self):
         # type: () -> Optional[str]
-        if (
-            not isinstance(self._otel_span, ReadableSpan)
-            or self._otel_span.parent is None
-        ):
+        if not isinstance(self._otel_span, ReadableSpan) or self._otel_span.parent is None:
             return None
         return format_span_id(self._otel_span.parent.span_id)
 
@@ -370,9 +341,7 @@ def sampled(self):
     @property
     def sample_rate(self):
         # type: () -> Optional[float]
-        sample_rate = self._otel_span.get_span_context().trace_state.get(
-            TRACESTATE_SAMPLE_RATE_KEY
-        )
+        sample_rate = self._otel_span.get_span_context().trace_state.get(TRACESTATE_SAMPLE_RATE_KEY)
         return float(sample_rate) if sample_rate is not None else None
 
     @property
@@ -398,9 +367,7 @@ def name(self, value):
     @property
     def source(self):
         # type: () -> str
-        return (
-            self.get_attribute(SentrySpanAttribute.SOURCE) or TransactionSource.CUSTOM
-        )
+        return self.get_attribute(SentrySpanAttribute.SOURCE) or TransactionSource.CUSTOM
 
     @source.setter
     def source(self, value):
@@ -449,9 +416,9 @@ def to_traceparent(self):
         else:
             sampled = None
 
-        traceparent = "%s-%s" % (self.trace_id, self.span_id)
+        traceparent = "{}-{}".format(self.trace_id, self.span_id)
         if sampled is not None:
-            traceparent += "-%s" % (sampled,)
+            traceparent += "-{}".format(sampled)
 
         return traceparent
 
@@ -485,10 +452,7 @@ def set_data(self, key, value):
 
     def get_attribute(self, name):
         # type: (str) -> Optional[Any]
-        if (
-            not isinstance(self._otel_span, ReadableSpan)
-            or not self._otel_span.attributes
-        ):
+        if not isinstance(self._otel_span, ReadableSpan) or not self._otel_span.attributes:
             return None
         return self._otel_span.attributes.get(name)
 
diff --git a/src/sentry_sdk_alpha/tracing_utils.py b/src/sentry_sdk_alpha/tracing_utils.py
index 6841490be97682..c6763c6f58b1fe 100644
--- a/src/sentry_sdk_alpha/tracing_utils.py
+++ b/src/sentry_sdk_alpha/tracing_utils.py
@@ -10,39 +10,35 @@
 from decimal import ROUND_DOWN, Decimal, DefaultContext, localcontext
 from functools import wraps
 from random import Random
+from typing import TYPE_CHECKING
 from urllib.parse import quote, unquote
 
 import sentry_sdk_alpha
 from sentry_sdk_alpha.consts import (
+    BAGGAGE_HEADER_NAME,
     OP,
+    SENTRY_TRACE_HEADER_NAME,
     SPANDATA,
     SPANSTATUS,
-    BAGGAGE_HEADER_NAME,
-    SENTRY_TRACE_HEADER_NAME,
 )
 from sentry_sdk_alpha.utils import (
+    Dsn,
+    _is_external_source,
+    _is_in_project_root,
+    _module_in_list,
     capture_internal_exceptions,
     filename_for_module,
-    Dsn,
+    is_sentry_url,
     logger,
     match_regex_list,
     qualname_from_function,
     to_string,
-    is_sentry_url,
-    _is_external_source,
-    _is_in_project_root,
-    _module_in_list,
 )
 
-from typing import TYPE_CHECKING
-
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Dict
-    from typing import Generator
-    from typing import Optional
-    from typing import Union
+    from collections.abc import Generator
     from types import FrameType
+    from typing import Any, Dict, Optional, Union
 
 
 SENTRY_TRACE_REGEX = re.compile(
@@ -107,8 +103,7 @@ def has_tracing_enabled(options):
         return False
 
     return bool(
-        options.get("traces_sample_rate") is not None
-        or options.get("traces_sampler") is not None
+        options.get("traces_sample_rate") is not None or options.get("traces_sampler") is not None
     )
 
 
@@ -180,9 +175,7 @@ def _should_be_included(
     # type: (...) -> bool
     # in_app_include takes precedence over in_app_exclude
     should_be_included = _module_in_list(namespace, in_app_include)
-    should_be_excluded = _is_external_source(abs_path) or _module_in_list(
-        namespace, in_app_exclude
-    )
+    should_be_excluded = _is_external_source(abs_path) or _module_in_list(namespace, in_app_exclude)
     return not is_sentry_sdk_frame and (
         should_be_included
         or (_is_in_project_root(abs_path, project_root) and not should_be_excluded)
@@ -229,9 +222,7 @@ def add_query_source(span):
         except Exception:
             namespace = None
 
-        is_sentry_sdk_frame = namespace is not None and namespace.startswith(
-            "sentry_sdk."
-        )
+        is_sentry_sdk_frame = namespace is not None and namespace.startswith("sentry_sdk.")
 
         should_be_included = _should_be_included(
             is_sentry_sdk_frame=is_sentry_sdk_frame,
@@ -302,9 +293,9 @@ def extract_sentrytrace_data(header):
     parent_sampled = None
 
     if trace_id:
-        trace_id = "{:032x}".format(int(trace_id, 16))
+        trace_id = f"{int(trace_id, 16):032x}"
     if parent_span_id:
-        parent_span_id = "{:016x}".format(int(parent_span_id, 16))
+        parent_span_id = f"{int(parent_span_id, 16):016x}"
     if sampled_str:
         parent_sampled = sampled_str != "0"
 
@@ -441,9 +432,9 @@ def to_traceparent(self):
         else:
             sampled = None
 
-        traceparent = "%s-%s" % (self.trace_id, self.span_id)
+        traceparent = "{}-{}".format(self.trace_id, self.span_id)
         if sampled is not None:
-            traceparent += "-%s" % (sampled,)
+            traceparent += "-{}".format(sampled)
 
         return traceparent
 
@@ -488,9 +479,7 @@ def _fill_sample_rand(self):
             try:
                 sample_rand = Decimal(sentry_baggage["sample_rand"])
             except Exception:
-                logger.debug(
-                    f"Failed to convert incoming sample_rand to Decimal: {sample_rand}"
-                )
+                logger.debug(f"Failed to convert incoming sample_rand to Decimal: {sample_rand}")
 
         if sample_rand is not None and 0 <= sample_rand < 1:
             # sample_rand is present and valid, so don't overwrite it
@@ -501,9 +490,7 @@ def _fill_sample_rand(self):
             try:
                 sample_rate = float(sentry_baggage["sample_rate"])
             except Exception:
-                logger.debug(
-                    f"Failed to convert incoming sample_rate to float: {sample_rate}"
-                )
+                logger.debug(f"Failed to convert incoming sample_rate to float: {sample_rate}")
 
         lower, upper = _sample_rand_range(self.parent_sampled, sample_rate)
 
@@ -664,11 +651,9 @@ def strip_sentry_baggage(header):
         Given a Baggage header, return a new Baggage header with all Sentry baggage items removed.
         """
         return ",".join(
-            (
-                item
-                for item in header.split(",")
-                if not Baggage.SENTRY_PREFIX_REGEX.match(item.strip())
-            )
+            item
+            for item in header.split(",")
+            if not Baggage.SENTRY_PREFIX_REGEX.match(item.strip())
         )
 
     def __repr__(self):
diff --git a/src/sentry_sdk_alpha/transport.py b/src/sentry_sdk_alpha/transport.py
index 06d62269e872b3..49764738d9e069 100644
--- a/src/sentry_sdk_alpha/transport.py
+++ b/src/sentry_sdk_alpha/transport.py
@@ -1,12 +1,12 @@
-from abc import ABC, abstractmethod
+import gzip
 import io
 import os
-import gzip
 import socket
 import ssl
 import time
-from datetime import datetime, timedelta, timezone
+from abc import ABC, abstractmethod
 from collections import defaultdict
+from datetime import datetime, timedelta, timezone
 from urllib.request import getproxies
 
 try:
@@ -14,30 +14,21 @@
 except ImportError:
     brotli = None
 
-import urllib3
+from typing import TYPE_CHECKING, Dict, List, cast
+
 import certifi
+import urllib3
 
 from sentry_sdk_alpha.consts import EndpointType
-from sentry_sdk_alpha.utils import Dsn, logger, capture_internal_exceptions
-from sentry_sdk_alpha.worker import BackgroundWorker
 from sentry_sdk_alpha.envelope import Envelope, Item, PayloadRef
-
-from typing import TYPE_CHECKING, cast, List, Dict
+from sentry_sdk_alpha.utils import Dsn, capture_internal_exceptions, logger
+from sentry_sdk_alpha.worker import BackgroundWorker
 
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Callable
-    from typing import DefaultDict
-    from typing import Iterable
-    from typing import Mapping
-    from typing import Optional
-    from typing import Self
-    from typing import Tuple
-    from typing import Type
-    from typing import Union
-
-    from urllib3.poolmanager import PoolManager
-    from urllib3.poolmanager import ProxyManager
+    from collections.abc import Callable, Iterable, Mapping
+    from typing import Any, DefaultDict, Optional, Self, Tuple, Type, Union
+
+    from urllib3.poolmanager import PoolManager, ProxyManager
 
     from sentry_sdk_alpha._types import EventDataCategory
 
@@ -211,9 +202,7 @@ def __init__(self, options):
             compression_level = None
 
         if compression_algo not in ("br", "gzip"):
-            logger.warning(
-                "Unknown compression algo %s, disabling compression", compression_algo
-            )
+            logger.warning("Unknown compression algo %s, disabling compression", compression_algo)
             self._compression_level = 0
             self._compression_algo = None
         else:
@@ -247,9 +236,7 @@ def record_lost_event(
                 event = item.get_transaction_event() or {}
 
                 # +1 for the transaction itself
-                span_count = (
-                    len(cast(List[Dict[str, object]], event.get("spans") or [])) + 1
-                )
+                span_count = len(cast(list[dict[str, object]], event.get("spans") or [])) + 1
                 self.record_lost_event(reason, "span", quantity=span_count)
 
             elif data_category == "attachment":
@@ -287,9 +274,7 @@ def _update_rate_limits(self, response):
                 if retry_after_value is not None
                 else None
             ) or 60
-            self._disabled_until[None] = datetime.now(timezone.utc) + timedelta(
-                seconds=retry_after
-            )
+            self._disabled_until[None] = datetime.now(timezone.utc) + timedelta(seconds=retry_after)
 
     def _send_request(
         self,
@@ -343,7 +328,7 @@ def record_loss(reason):
                     response.status,
                     getattr(response, "data", getattr(response, "content", None)),
                 )
-                self.on_dropped_event("status_{}".format(response.status))
+                self.on_dropped_event(f"status_{response.status}")
                 record_loss("network_error")
         finally:
             response.close()
@@ -400,9 +385,7 @@ def _disabled(bucket):
 
     def _is_rate_limited(self):
         # type: (Self) -> bool
-        return any(
-            ts > datetime.now(timezone.utc) for ts in self._disabled_until.values()
-        )
+        return any(ts > datetime.now(timezone.utc) for ts in self._disabled_until.values())
 
     def _is_worker_full(self):
         # type: (Self) -> bool
@@ -474,11 +457,7 @@ def _serialize_envelope(self, envelope):
         else:
             content_encoding = self._compression_algo
             if self._compression_algo == "br" and brotli is not None:
-                body.write(
-                    brotli.compress(
-                        envelope.serialize(), quality=self._compression_level
-                    )
-                )
+                body.write(brotli.compress(envelope.serialize(), quality=self._compression_level))
             else:  # assume gzip as we sanitize the algo value in init
                 with gzip.GzipFile(
                     fileobj=body, mode="w", compresslevel=self._compression_level
@@ -551,7 +530,7 @@ def kill(self):
 
 class HttpTransport(BaseHttpTransport):
     if TYPE_CHECKING:
-        _pool: Union[PoolManager, ProxyManager]
+        _pool: PoolManager | ProxyManager
 
     def _get_pool_options(self):
         # type: (Self) -> Dict[str, Any]
@@ -587,12 +566,8 @@ def _get_pool_options(self):
             or certifi.where()
         )
 
-        options["cert_file"] = self.options["cert_file"] or os.environ.get(
-            "CLIENT_CERT_FILE"
-        )
-        options["key_file"] = self.options["key_file"] or os.environ.get(
-            "CLIENT_KEY_FILE"
-        )
+        options["cert_file"] = self.options["cert_file"] or os.environ.get("CLIENT_CERT_FILE")
+        options["key_file"] = self.options["key_file"] or os.environ.get("CLIENT_KEY_FILE")
 
         return options
 
@@ -659,8 +634,8 @@ def _request(
 
 
 try:
-    import httpcore
     import h2  # noqa: F401
+    import httpcore
 except ImportError:
     # Sorry, no Http2Transport for you
     class Http2Transport(HttpTransport):
@@ -679,9 +654,7 @@ class Http2Transport(BaseHttpTransport):  # type: ignore
         TIMEOUT = 15
 
         if TYPE_CHECKING:
-            _pool: Union[
-                httpcore.SOCKSProxy, httpcore.HTTPProxy, httpcore.ConnectionPool
-            ]
+            _pool: httpcore.SOCKSProxy | httpcore.HTTPProxy | httpcore.ConnectionPool
 
         def _get_header_value(self, response, header):
             # type: (Self, httpcore.Response, str) -> Optional[str]
@@ -721,15 +694,12 @@ def _request(
         def _get_pool_options(self):
             # type: (Self) -> Dict[str, Any]
             options = {
-                "http2": self.parsed_dsn is not None
-                and self.parsed_dsn.scheme == "https",
+                "http2": self.parsed_dsn is not None and self.parsed_dsn.scheme == "https",
                 "retries": 3,
             }  # type: Dict[str, Any]
 
             socket_options = (
-                self.options["socket_options"]
-                if self.options["socket_options"] is not None
-                else []
+                self.options["socket_options"] if self.options["socket_options"] is not None else []
             )
 
             used_options = {(o[0], o[1]) for o in socket_options}
diff --git a/src/sentry_sdk_alpha/utils.py b/src/sentry_sdk_alpha/utils.py
index 7b3e13b7b6f0ea..7e15f92465e3f6 100644
--- a/src/sentry_sdk_alpha/utils.py
+++ b/src/sentry_sdk_alpha/utils.py
@@ -24,7 +24,10 @@
     # Python 3.10 and below
     BaseExceptionGroup = None  # type: ignore
 
+from typing import TYPE_CHECKING
+
 import sentry_sdk_alpha
+from sentry_sdk_alpha._types import SENSITIVE_DATA_SUBSTITUTE, Annotated, AnnotatedValue
 from sentry_sdk_alpha.consts import (
     DEFAULT_ADD_FULL_STACK,
     DEFAULT_MAX_STACK_FRAMES,
@@ -32,29 +35,25 @@
     SPANDATA,
     EndpointType,
 )
-from sentry_sdk_alpha._types import Annotated, AnnotatedValue, SENSITIVE_DATA_SUBSTITUTE
-
-from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
+    from collections.abc import Callable, Iterator
     from types import FrameType, TracebackType
     from typing import (
         Any,
-        Callable,
-        cast,
         ContextManager,
         Dict,
-        Iterator,
         List,
         NoReturn,
         Optional,
-        overload,
         ParamSpec,
         Set,
         Tuple,
         Type,
         TypeVar,
         Union,
+        cast,
+        overload,
     )
 
     from gevent.hub import Hub as GeventHub
@@ -139,7 +138,7 @@ def get_git_revision():
                 .strip()
                 .decode("utf-8")
             )
-    except (OSError, IOError, FileNotFoundError):
+    except (OSError, FileNotFoundError):
         return None
 
     return revision
@@ -200,7 +199,7 @@ def get_sdk_name(installed_integrations):
 
     for integration in framework_integrations:
         if integration in installed_integrations:
-            return "sentry.python.{}".format(integration)
+            return f"sentry.python.{integration}"
 
     return "sentry.python"
 
@@ -319,7 +318,7 @@ def netloc(self):
         """The netloc part of a DSN."""
         rv = self.host
         if (self.scheme, self.port) not in (("http", 80), ("https", 443)):
-            rv = "%s:%s" % (rv, self.port)
+            rv = "{}:{}".format(rv, self.port)
         return rv
 
     def to_auth(self, client=None):
@@ -337,7 +336,7 @@ def to_auth(self, client=None):
 
     def __str__(self):
         # type: () -> str
-        return "%s://%s%s@%s%s%s" % (
+        return "{}://{}{}@{}{}{}".format(
             self.scheme,
             self.public_key,
             self.secret_key and "@" + self.secret_key or "",
@@ -376,7 +375,7 @@ def get_api_url(
     ):
         # type: (...) -> str
         """Returns the API url for storing events."""
-        return "%s://%s%sapi/%s/%s/" % (
+        return "{}://{}{}api/{}/{}/".format(
             self.scheme,
             self.host,
             self.path,
@@ -392,7 +391,7 @@ def to_header(self):
             rv.append(("sentry_client", self.client))
         if self.secret_key is not None:
             rv.append(("sentry_secret", self.secret_key))
-        return "Sentry " + ", ".join("%s=%s" % (key, value) for key, value in rv)
+        return "Sentry " + ", ".join("{}={}".format(key, value) for key, value in rv)
 
 
 def get_type_name(cls):
@@ -449,7 +448,7 @@ def get_lines_from_file(
     if loader is not None and hasattr(loader, "get_source"):
         try:
             source_str = loader.get_source(module)  # type: Optional[str]
-        except (ImportError, IOError):
+        except (ImportError, OSError):
             source_str = None
         if source_str is not None:
             source = source_str.splitlines()
@@ -457,7 +456,7 @@ def get_lines_from_file(
     if source is None:
         try:
             source = linecache.getlines(filename)
-        except (OSError, IOError):
+        except OSError:
             return [], None, []
 
     if not source:
@@ -503,9 +502,7 @@ def get_source_context(
 
     if tb_lineno is not None and abs_path:
         lineno = tb_lineno - 1
-        return get_lines_from_file(
-            abs_path, lineno, max_value_length, loader=loader, module=module
-        )
+        return get_lines_from_file(abs_path, lineno, max_value_length, loader=loader, module=module)
 
     return [], None, []
 
@@ -543,9 +540,7 @@ def filename_for_module(module, abs_path):
         if not base_module_path:
             return abs_path
 
-        return abs_path.split(base_module_path.rsplit(os.sep, 2)[0], 1)[-1].lstrip(
-            os.sep
-        )
+        return abs_path.split(base_module_path.rsplit(os.sep, 2)[0], 1)[-1].lstrip(os.sep)
     except Exception:
         return abs_path
 
@@ -590,9 +585,7 @@ def serialize_frame(
     if include_local_variables:
         from sentry_sdk_alpha.serializer import serialize
 
-        rv["vars"] = serialize(
-            dict(frame.f_locals), is_vars=True, custom_repr=custom_repr
-        )
+        rv["vars"] = serialize(dict(frame.f_locals), is_vars=True, custom_repr=custom_repr)
 
     return rv
 
@@ -632,9 +625,7 @@ def get_errno(exc_value):
 def get_error_message(exc_value):
     # type: (Optional[BaseException]) -> str
     message = (
-        getattr(exc_value, "message", "")
-        or getattr(exc_value, "detail", "")
-        or safe_str(exc_value)
+        getattr(exc_value, "message", "") or getattr(exc_value, "detail", "") or safe_str(exc_value)
     )  # type: str
 
     # __notes__ should be a list of strings when notes are added
@@ -679,9 +670,9 @@ def single_exception_from_error_tuple(
         errno = None
 
     if errno is not None:
-        exception_value["mechanism"].setdefault("meta", {}).setdefault(
-            "errno", {}
-        ).setdefault("number", errno)
+        exception_value["mechanism"].setdefault("meta", {}).setdefault("errno", {}).setdefault(
+            "number", errno
+        )
 
     if source is not None:
         exception_value["mechanism"]["source"] = source
@@ -736,9 +727,7 @@ def single_exception_from_error_tuple(
         # intelligently trim by removing frames in the middle of the stacktrace, but
         # since we don't have the whole stacktrace, we can't do that. Instead, we
         # drop the entire stacktrace.
-        exception_value["stacktrace"] = AnnotatedValue.removed_because_over_size_limit(
-            value=None
-        )
+        exception_value["stacktrace"] = AnnotatedValue.removed_because_over_size_limit(value=None)
 
     elif frames:
         if not full_stack:
@@ -844,9 +833,7 @@ def exceptions_from_error(
         # Explicitly chained exceptions (Like: raise NewException() from OriginalException())
         # The field `__cause__` is set to OriginalException
         has_explicit_causing_exception = (
-            exc_value
-            and hasattr(exc_value, "__cause__")
-            and exc_value.__cause__ is not None
+            exc_value and hasattr(exc_value, "__cause__") and exc_value.__cause__ is not None
         )
         if has_explicit_causing_exception:
             exception_source = "__cause__"
@@ -856,9 +843,7 @@ def exceptions_from_error(
         # The field `__context__` is set in the exception that occurs while handling another exception,
         # to the other exception.
         has_implicit_causing_exception = (
-            exc_value
-            and hasattr(exc_value, "__context__")
-            and exc_value.__context__ is not None
+            exc_value and hasattr(exc_value, "__context__") and exc_value.__context__ is not None
         )
         if has_implicit_causing_exception:
             exception_source = "__context__"
@@ -974,8 +959,7 @@ def iter_event_frames(event):
         if isinstance(stacktrace, AnnotatedValue):
             stacktrace = stacktrace.value or {}
 
-        for frame in stacktrace.get("frames") or ():
-            yield frame
+        yield from stacktrace.get("frames") or ()
 
 
 def handle_in_app(event, in_app_exclude=None, in_app_include=None, project_root=None):
@@ -1092,9 +1076,7 @@ def merge_stack_frames(frames, full_stack, client_options):
 
     # Limit the number of frames
     max_stack_frames = (
-        client_options.get("max_stack_frames", DEFAULT_MAX_STACK_FRAMES)
-        if client_options
-        else None
+        client_options.get("max_stack_frames", DEFAULT_MAX_STACK_FRAMES) if client_options else None
     )
     if max_stack_frames is not None:
         new_frames = new_frames[len(new_frames) - max_stack_frames :]
@@ -1153,9 +1135,7 @@ def _is_external_source(abs_path):
     if abs_path is None:
         return False
 
-    external_source = (
-        re.search(r"[\\/](?:dist|site)-packages[\\/]", abs_path) is not None
-    )
+    external_source = re.search(r"[\\/](?:dist|site)-packages[\\/]", abs_path) is not None
     return external_source
 
 
@@ -1398,7 +1378,7 @@ def qualname_from_function(func):
 
     # Python 2
     try:
-        return "%s.%s.%s" % (
+        return "{}.{}.{}".format(
             func.im_class.__module__,  # type: ignore
             func.im_class.__name__,  # type: ignore
             func.__name__,
@@ -1499,7 +1479,7 @@ def to_base64(original):
         base64_bytes = base64.b64encode(utf8_bytes)
         base64_string = base64_bytes.decode("UTF-8")
     except Exception as err:
-        logger.warning("Unable to encode {orig} to base64:".format(orig=original), err)
+        logger.warning(f"Unable to encode {original} to base64:", err)
 
     return base64_string
 
@@ -1519,9 +1499,7 @@ def from_base64(base64_string):
         utf8_bytes = base64.b64decode(base64_bytes)
         utf8_string = utf8_bytes.decode("UTF-8")
     except Exception as err:
-        logger.warning(
-            "Unable to decode {b64} from base64:".format(b64=base64_string), err
-        )
+        logger.warning(f"Unable to decode {base64_string} from base64:", err)
 
     return utf8_string
 
@@ -1541,7 +1519,7 @@ def sanitize_url(url, remove_authority=True, remove_query_values=True, split=Fal
     if remove_authority:
         netloc_parts = parsed_url.netloc.split("@")
         if len(netloc_parts) > 1:
-            netloc = "%s:%s@%s" % (
+            netloc = "{}:{}@{}".format(
                 SENSITIVE_DATA_SUBSTITUTE,
                 SENSITIVE_DATA_SUBSTITUTE,
                 netloc_parts[-1],
@@ -1553,9 +1531,7 @@ def sanitize_url(url, remove_authority=True, remove_query_values=True, split=Fal
 
     # strip values from query string
     if remove_query_values:
-        query_string = unquote(
-            urlencode({key: SENSITIVE_DATA_SUBSTITUTE for key in query_params})
-        )
+        query_string = unquote(urlencode({key: SENSITIVE_DATA_SUBSTITUTE for key in query_params}))
     else:
         query_string = parsed_url.query
 
@@ -1583,9 +1559,7 @@ def parse_url(url, sanitize=True):
     parameters will be sanitized to remove sensitive data. The autority (username and password)
     in the URL will always be removed.
     """
-    parsed_url = sanitize_url(
-        url, remove_authority=True, remove_query_values=sanitize, split=True
-    )
+    parsed_url = sanitize_url(url, remove_authority=True, remove_query_values=sanitize, split=True)
 
     base_url = urlunsplit(
         Components(
@@ -1908,9 +1882,7 @@ def datetime_from_isoformat(value):
         result = datetime.fromisoformat(value)
     except (AttributeError, ValueError):
         # py 3.6
-        timestamp_format = (
-            "%Y-%m-%dT%H:%M:%S.%f" if "." in value else "%Y-%m-%dT%H:%M:%S"
-        )
+        timestamp_format = "%Y-%m-%dT%H:%M:%S.%f" if "." in value else "%Y-%m-%dT%H:%M:%S"
         if value.endswith("Z"):
             value = value[:-1] + "+0000"
 
diff --git a/src/sentry_sdk_alpha/worker.py b/src/sentry_sdk_alpha/worker.py
index 4030ddf6c8042e..09828bdefb2178 100644
--- a/src/sentry_sdk_alpha/worker.py
+++ b/src/sentry_sdk_alpha/worker.py
@@ -1,17 +1,15 @@
 import os
 import threading
-
 from time import sleep, time
-from sentry_sdk_alpha._queue import Queue, FullError
-from sentry_sdk_alpha.utils import logger
-from sentry_sdk_alpha.consts import DEFAULT_QUEUE_SIZE
-
 from typing import TYPE_CHECKING
 
+from sentry_sdk_alpha._queue import FullError, Queue
+from sentry_sdk_alpha.consts import DEFAULT_QUEUE_SIZE
+from sentry_sdk_alpha.utils import logger
+
 if TYPE_CHECKING:
-    from typing import Any
-    from typing import Optional
-    from typing import Callable
+    from collections.abc import Callable
+    from typing import Any, Optional
 
 
 _TERMINATOR = object()

From cdb2f47725cf8befc5ed9afdef8acccb5d07054e Mon Sep 17 00:00:00 2001
From: "getsantry[bot]" <66042841+getsantry[bot]@users.noreply.github.com>
Date: Thu, 15 May 2025 08:22:14 +0000
Subject: [PATCH 10/22] :hammer_and_wrench: apply pre-commit fixes

---
 src/sentry_sdk_alpha/_types.py                | 12 +++++-----
 src/sentry_sdk_alpha/attachments.py           |  2 +-
 src/sentry_sdk_alpha/client.py                |  4 ++--
 src/sentry_sdk_alpha/consts.py                | 24 +++++++++----------
 src/sentry_sdk_alpha/envelope.py              |  4 ++--
 .../integrations/_asgi_common.py              |  6 ++---
 src/sentry_sdk_alpha/integrations/aiohttp.py  |  4 +++-
 src/sentry_sdk_alpha/integrations/boto3.py    |  2 +-
 .../integrations/cloud_resource_context.py    |  4 ++--
 .../integrations/excepthook.py                |  2 +-
 src/sentry_sdk_alpha/integrations/socket.py   |  2 +-
 src/sentry_sdk_alpha/integrations/stdlib.py   |  4 +++-
 .../integrations/strawberry.py                |  2 +-
 src/sentry_sdk_alpha/opentelemetry/utils.py   |  2 +-
 .../profiler/continuous_profiler.py           |  4 +---
 .../profiler/transaction_profiler.py          | 12 ++++------
 src/sentry_sdk_alpha/profiler/utils.py        |  4 ++--
 src/sentry_sdk_alpha/tracing.py               |  4 ++--
 src/sentry_sdk_alpha/tracing_utils.py         |  4 ++--
 src/sentry_sdk_alpha/utils.py                 |  4 ++--
 20 files changed, 53 insertions(+), 53 deletions(-)

diff --git a/src/sentry_sdk_alpha/_types.py b/src/sentry_sdk_alpha/_types.py
index efd482ab2d19b3..d1c7f8be01254a 100644
--- a/src/sentry_sdk_alpha/_types.py
+++ b/src/sentry_sdk_alpha/_types.py
@@ -116,7 +116,7 @@ class Event(TypedDict, total=False):
         check_in_id: str
         contexts: dict[str, dict[str, object]]
         dist: str
-        duration: Optional[float]
+        duration: float | None
         environment: str
         errors: list[dict[str, Any]]  # TODO: We can expand on this type
         event_id: str
@@ -130,7 +130,7 @@ class Event(TypedDict, total=False):
         message: str
         modules: dict[str, str]
         monitor_config: Mapping[str, object]
-        monitor_slug: Optional[str]
+        monitor_slug: str | None
         platform: Literal["python"]
         profile: object  # Should be sentry_sdk.profiler.Profile, but we can't import that here due to circular imports
         release: str
@@ -141,12 +141,12 @@ class Event(TypedDict, total=False):
         stacktrace: dict[str, object]
         # We access this key in the code, but I am unsure whether we ever set it
         start_timestamp: datetime
-        status: Optional[str]
+        status: str | None
         tags: MutableMapping[str, str]
         # Tags must be less than 200 characters each
         threads: dict[Literal["values"], list[dict[str, Any]]]
         # TODO: We can expand on this type
-        timestamp: Optional[datetime]  # Must be set before sending the event
+        timestamp: datetime | None  # Must be set before sending the event
         transaction: str
         transaction_info: Mapping[str, Any]  # TODO: We can expand on this type
         type: Literal["check_in", "transaction"]
@@ -167,7 +167,7 @@ class Log(TypedDict):
         body: str
         attributes: dict[str, str | bool | float | int]
         time_unix_nano: int
-        trace_id: Optional[str]
+        trace_id: str | None
 
     # TODO: Make a proper type definition for this (PRs welcome!)
     Breadcrumb = dict[str, Any]
@@ -222,7 +222,7 @@ class Log(TypedDict):
 
     class MonitorConfigSchedule(TypedDict, total=False):
         type: MonitorConfigScheduleType
-        value: Union[int, str]
+        value: int | str
         unit: MonitorConfigScheduleUnit
 
     class MonitorConfig(TypedDict, total=False):
diff --git a/src/sentry_sdk_alpha/attachments.py b/src/sentry_sdk_alpha/attachments.py
index 92b379713d70cd..30bccf87b84b9d 100644
--- a/src/sentry_sdk_alpha/attachments.py
+++ b/src/sentry_sdk_alpha/attachments.py
@@ -72,4 +72,4 @@ def to_envelope_item(self):
 
     def __repr__(self):
         # type: () -> str
-        return "<Attachment {!r}>".format(self.filename)
+        return f"<Attachment {self.filename!r}>"
diff --git a/src/sentry_sdk_alpha/client.py b/src/sentry_sdk_alpha/client.py
index d0667566411882..54482cc25dfebf 100644
--- a/src/sentry_sdk_alpha/client.py
+++ b/src/sentry_sdk_alpha/client.py
@@ -87,7 +87,7 @@ def _get_options(*args, **kwargs):
 
     for key, value in options.items():
         if key not in rv:
-            raise TypeError("Unknown option {!r}".format(key))
+            raise TypeError(f"Unknown option {key!r}")
 
         rv[key] = value
 
@@ -605,7 +605,7 @@ def _is_ignored_error(self, event, hint):
 
         error = exc_info[0]
         error_type_name = get_type_name(exc_info[0])
-        error_full_name = "{}.{}".format(exc_info[0].__module__, error_type_name)
+        error_full_name = f"{exc_info[0].__module__}.{error_type_name}"
 
         for ignored_error in self.options["ignore_errors"]:
             # String types are matched against the type name in the
diff --git a/src/sentry_sdk_alpha/consts.py b/src/sentry_sdk_alpha/consts.py
index fe76a2adcd8387..9d5532519e2f89 100644
--- a/src/sentry_sdk_alpha/consts.py
+++ b/src/sentry_sdk_alpha/consts.py
@@ -46,18 +46,18 @@ class CompressionAlgo(Enum):
     # code is highly discouraged. They are not subject to any stability
     # guarantees such as the ones from semantic versioning.
     class Experiments(TypedDict, total=False):
-        max_spans: Optional[int]
-        max_flags: Optional[int]
-        record_sql_params: Optional[bool]
-        continuous_profiling_auto_start: Optional[bool]
-        continuous_profiling_mode: Optional[ContinuousProfilerMode]
-        otel_powered_performance: Optional[bool]
-        transport_zlib_compression_level: Optional[int]
-        transport_compression_level: Optional[int]
-        transport_compression_algo: Optional[CompressionAlgo]
-        transport_num_pools: Optional[int]
-        transport_http2: Optional[bool]
-        enable_logs: Optional[bool]
+        max_spans: int | None
+        max_flags: int | None
+        record_sql_params: bool | None
+        continuous_profiling_auto_start: bool | None
+        continuous_profiling_mode: ContinuousProfilerMode | None
+        otel_powered_performance: bool | None
+        transport_zlib_compression_level: int | None
+        transport_compression_level: int | None
+        transport_compression_algo: CompressionAlgo | None
+        transport_num_pools: int | None
+        transport_http2: bool | None
+        enable_logs: bool | None
 
 
 DEFAULT_QUEUE_SIZE = 100
diff --git a/src/sentry_sdk_alpha/envelope.py b/src/sentry_sdk_alpha/envelope.py
index 71669671fd9034..628110d766bb80 100644
--- a/src/sentry_sdk_alpha/envelope.py
+++ b/src/sentry_sdk_alpha/envelope.py
@@ -165,7 +165,7 @@ def deserialize(
 
     def __repr__(self):
         # type: (...) -> str
-        return "<Envelope headers={!r} items={!r}>".format(self.headers, self.items)
+        return f"<Envelope headers={self.headers!r} items={self.items!r}>"
 
 
 class PayloadRef:
@@ -207,7 +207,7 @@ def inferred_content_type(self):
 
     def __repr__(self):
         # type: (...) -> str
-        return "<Payload {!r}>".format(self.inferred_content_type)
+        return f"<Payload {self.inferred_content_type!r}>"
 
 
 class Item:
diff --git a/src/sentry_sdk_alpha/integrations/_asgi_common.py b/src/sentry_sdk_alpha/integrations/_asgi_common.py
index 8b2b7893e47033..7e6c661a560436 100644
--- a/src/sentry_sdk_alpha/integrations/_asgi_common.py
+++ b/src/sentry_sdk_alpha/integrations/_asgi_common.py
@@ -38,14 +38,14 @@ def _get_url(asgi_scope, default_scheme=None, host=None):
     path = asgi_scope.get("root_path", "") + asgi_scope.get("path", "")
 
     if host:
-        return "{}://{}{}".format(scheme, host, path)
+        return f"{scheme}://{host}{path}"
 
     if server is not None:
         host, port = server
         default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}.get(scheme)
         if port != default_port:
-            return "{}://{}:{}{}".format(scheme, host, port, path)
-        return "{}://{}{}".format(scheme, host, path)
+            return f"{scheme}://{host}:{port}{path}"
+        return f"{scheme}://{host}{path}"
     return path
 
 
diff --git a/src/sentry_sdk_alpha/integrations/aiohttp.py b/src/sentry_sdk_alpha/integrations/aiohttp.py
index 75121b80eac38b..487f7a46a1cc87 100644
--- a/src/sentry_sdk_alpha/integrations/aiohttp.py
+++ b/src/sentry_sdk_alpha/integrations/aiohttp.py
@@ -227,7 +227,9 @@ async def on_request_start(session, trace_config_ctx, params):
 
         span = sentry_sdk_alpha.start_span(
             op=OP.HTTP_CLIENT,
-            name="%s %s" % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE),
+            name="{} {}".format(
+                method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE
+            ),
             origin=AioHttpIntegration.origin,
             only_if_parent=True,
         )
diff --git a/src/sentry_sdk_alpha/integrations/boto3.py b/src/sentry_sdk_alpha/integrations/boto3.py
index 466e0da371cc92..60d87d32c27e28 100644
--- a/src/sentry_sdk_alpha/integrations/boto3.py
+++ b/src/sentry_sdk_alpha/integrations/boto3.py
@@ -55,7 +55,7 @@ def sentry_patched_init(self, *args, **kwargs):
 @ensure_integration_enabled(Boto3Integration)
 def _sentry_request_created(service_id, request, operation_name, **kwargs):
     # type: (str, AWSRequest, str, **Any) -> None
-    description = "aws.{}.{}".format(service_id, operation_name)
+    description = f"aws.{service_id}.{operation_name}"
     span = sentry_sdk_alpha.start_span(
         op=OP.HTTP_CLIENT,
         name=description,
diff --git a/src/sentry_sdk_alpha/integrations/cloud_resource_context.py b/src/sentry_sdk_alpha/integrations/cloud_resource_context.py
index 7d40ec80668b1f..8101b9d081e5d2 100644
--- a/src/sentry_sdk_alpha/integrations/cloud_resource_context.py
+++ b/src/sentry_sdk_alpha/integrations/cloud_resource_context.py
@@ -17,10 +17,10 @@
 
 AWS_METADATA_HOST = "169.254.169.254"
 AWS_TOKEN_URL = f"http://{AWS_METADATA_HOST}/latest/api/token"
-AWS_METADATA_URL = "http://{}/latest/dynamic/instance-identity/document".format(AWS_METADATA_HOST)
+AWS_METADATA_URL = f"http://{AWS_METADATA_HOST}/latest/dynamic/instance-identity/document"
 
 GCP_METADATA_HOST = "metadata.google.internal"
-GCP_METADATA_URL = "http://{}/computeMetadata/v1/?recursive=true".format(GCP_METADATA_HOST)
+GCP_METADATA_URL = f"http://{GCP_METADATA_HOST}/computeMetadata/v1/?recursive=true"
 
 
 class CLOUD_PROVIDER:  # noqa: N801
diff --git a/src/sentry_sdk_alpha/integrations/excepthook.py b/src/sentry_sdk_alpha/integrations/excepthook.py
index ac33bbcf28fa38..33b65a9700b70d 100644
--- a/src/sentry_sdk_alpha/integrations/excepthook.py
+++ b/src/sentry_sdk_alpha/integrations/excepthook.py
@@ -26,7 +26,7 @@ def __init__(self, always_run=False):
 
         if not isinstance(always_run, bool):
             raise ValueError(
-                "Invalid value for always_run: %s (must be type boolean)" % (always_run,)
+                "Invalid value for always_run: {} (must be type boolean)".format(always_run)
             )
         self.always_run = always_run
 
diff --git a/src/sentry_sdk_alpha/integrations/socket.py b/src/sentry_sdk_alpha/integrations/socket.py
index 6892eef1131683..ee1950e3dd9f92 100644
--- a/src/sentry_sdk_alpha/integrations/socket.py
+++ b/src/sentry_sdk_alpha/integrations/socket.py
@@ -39,7 +39,7 @@ def _get_span_description(host, port):
     except (UnicodeDecodeError, AttributeError):
         pass
 
-    description = "{}:{}".format(host, port)  # type: ignore
+    description = f"{host}:{port}"  # type: ignore
     return description
 
 
diff --git a/src/sentry_sdk_alpha/integrations/stdlib.py b/src/sentry_sdk_alpha/integrations/stdlib.py
index 01341b454dae52..7e8bb9e825b8c1 100644
--- a/src/sentry_sdk_alpha/integrations/stdlib.py
+++ b/src/sentry_sdk_alpha/integrations/stdlib.py
@@ -89,7 +89,9 @@ def putrequest(self, method, url, *args, **kwargs):
 
         span = sentry_sdk_alpha.start_span(
             op=OP.HTTP_CLIENT,
-            name="%s %s" % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE),
+            name="{} {}".format(
+                method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE
+            ),
             origin="auto.http.stdlib.httplib",
             only_if_parent=True,
         )
diff --git a/src/sentry_sdk_alpha/integrations/strawberry.py b/src/sentry_sdk_alpha/integrations/strawberry.py
index 483b08051e9137..8ec6f0d12d98bc 100644
--- a/src/sentry_sdk_alpha/integrations/strawberry.py
+++ b/src/sentry_sdk_alpha/integrations/strawberry.py
@@ -71,7 +71,7 @@ def __init__(self, async_execution=None):
         # type: (Optional[bool]) -> None
         if async_execution not in (None, False, True):
             raise ValueError(
-                'Invalid value for async_execution: "{}" (must be bool)'.format(async_execution)
+                f'Invalid value for async_execution: "{async_execution}" (must be bool)'
             )
         self.async_execution = async_execution
 
diff --git a/src/sentry_sdk_alpha/opentelemetry/utils.py b/src/sentry_sdk_alpha/opentelemetry/utils.py
index 98806f429c43d8..d32dcf63695a85 100644
--- a/src/sentry_sdk_alpha/opentelemetry/utils.py
+++ b/src/sentry_sdk_alpha/opentelemetry/utils.py
@@ -196,7 +196,7 @@ def span_data_for_http_method(span):
 
             if url:
                 parsed_url = urlparse(url)
-                url = "{}://{}{}".format(parsed_url.scheme, parsed_url.netloc, parsed_url.path)
+                url = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
                 description = f"{http_method} {url}"
 
     status, http_status = extract_span_status(span)
diff --git a/src/sentry_sdk_alpha/profiler/continuous_profiler.py b/src/sentry_sdk_alpha/profiler/continuous_profiler.py
index 18143bb0bb1fcb..a8ca6d91ece119 100644
--- a/src/sentry_sdk_alpha/profiler/continuous_profiler.py
+++ b/src/sentry_sdk_alpha/profiler/continuous_profiler.py
@@ -85,9 +85,7 @@ def setup_continuous_profiler(options, sdk_info, capture_func):
     else:
         raise ValueError(f"Unknown continuous profiler mode: {profiler_mode}")
 
-    logger.debug(
-        "[Profiling] Setting up continuous profiler in {mode} mode".format(mode=_scheduler.mode)
-    )
+    logger.debug(f"[Profiling] Setting up continuous profiler in {_scheduler.mode} mode")
 
     atexit.register(teardown_continuous_profiler)
 
diff --git a/src/sentry_sdk_alpha/profiler/transaction_profiler.py b/src/sentry_sdk_alpha/profiler/transaction_profiler.py
index dc9a573cd351e1..b5e9887dc70593 100644
--- a/src/sentry_sdk_alpha/profiler/transaction_profiler.py
+++ b/src/sentry_sdk_alpha/profiler/transaction_profiler.py
@@ -70,10 +70,10 @@ class ProcessedSample(TypedDict):
         stack_id: int
 
     class ProcessedProfile(TypedDict):
-        frames: List[ProcessedFrame]
-        stacks: List[ProcessedStack]
-        samples: List[ProcessedSample]
-        thread_metadata: Dict[ThreadId, ProcessedThreadMetadata]
+        frames: list[ProcessedFrame]
+        stacks: list[ProcessedStack]
+        samples: list[ProcessedSample]
+        thread_metadata: dict[ThreadId, ProcessedThreadMetadata]
 
 
 try:
@@ -203,9 +203,7 @@ def __init__(
     def update_active_thread_id(self):
         # type: () -> None
         self.active_thread_id = get_current_thread_meta()[0]
-        logger.debug(
-            "[Profiling] updating active thread id to {tid}".format(tid=self.active_thread_id)
-        )
+        logger.debug(f"[Profiling] updating active thread id to {self.active_thread_id}")
 
     def _set_initial_sampling_decision(self, sampling_context):
         # type: (SamplingContext) -> None
diff --git a/src/sentry_sdk_alpha/profiler/utils.py b/src/sentry_sdk_alpha/profiler/utils.py
index 0e6fa5e1fc2148..0905189397aaa5 100644
--- a/src/sentry_sdk_alpha/profiler/utils.py
+++ b/src/sentry_sdk_alpha/profiler/utils.py
@@ -18,10 +18,10 @@
 
     class ProcessedFrame(TypedDict):
         abs_path: str
-        filename: Optional[str]
+        filename: str | None
         function: str
         lineno: int
-        module: Optional[str]
+        module: str | None
 
     class ProcessedThreadMetadata(TypedDict):
         name: str
diff --git a/src/sentry_sdk_alpha/tracing.py b/src/sentry_sdk_alpha/tracing.py
index 325235829bfadc..6fd621fc26c737 100644
--- a/src/sentry_sdk_alpha/tracing.py
+++ b/src/sentry_sdk_alpha/tracing.py
@@ -416,9 +416,9 @@ def to_traceparent(self):
         else:
             sampled = None
 
-        traceparent = "{}-{}".format(self.trace_id, self.span_id)
+        traceparent = f"{self.trace_id}-{self.span_id}"
         if sampled is not None:
-            traceparent += "-{}".format(sampled)
+            traceparent += f"-{sampled}"
 
         return traceparent
 
diff --git a/src/sentry_sdk_alpha/tracing_utils.py b/src/sentry_sdk_alpha/tracing_utils.py
index c6763c6f58b1fe..ec317d7675be18 100644
--- a/src/sentry_sdk_alpha/tracing_utils.py
+++ b/src/sentry_sdk_alpha/tracing_utils.py
@@ -432,9 +432,9 @@ def to_traceparent(self):
         else:
             sampled = None
 
-        traceparent = "{}-{}".format(self.trace_id, self.span_id)
+        traceparent = f"{self.trace_id}-{self.span_id}"
         if sampled is not None:
-            traceparent += "-{}".format(sampled)
+            traceparent += f"-{sampled}"
 
         return traceparent
 
diff --git a/src/sentry_sdk_alpha/utils.py b/src/sentry_sdk_alpha/utils.py
index 7e15f92465e3f6..b7890ea40edd9a 100644
--- a/src/sentry_sdk_alpha/utils.py
+++ b/src/sentry_sdk_alpha/utils.py
@@ -318,7 +318,7 @@ def netloc(self):
         """The netloc part of a DSN."""
         rv = self.host
         if (self.scheme, self.port) not in (("http", 80), ("https", 443)):
-            rv = "{}:{}".format(rv, self.port)
+            rv = f"{rv}:{self.port}"
         return rv
 
     def to_auth(self, client=None):
@@ -391,7 +391,7 @@ def to_header(self):
             rv.append(("sentry_client", self.client))
         if self.secret_key is not None:
             rv.append(("sentry_secret", self.secret_key))
-        return "Sentry " + ", ".join("{}={}".format(key, value) for key, value in rv)
+        return "Sentry " + ", ".join(f"{key}={value}" for key, value in rv)
 
 
 def get_type_name(cls):

From 14372d8152f19e0ccf2273144cf37f4f00bd07ae Mon Sep 17 00:00:00 2001
From: "getsantry[bot]" <66042841+getsantry[bot]@users.noreply.github.com>
Date: Thu, 15 May 2025 08:23:10 +0000
Subject: [PATCH 11/22] :hammer_and_wrench: apply pre-commit fixes

---
 src/sentry_sdk_alpha/integrations/excepthook.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/src/sentry_sdk_alpha/integrations/excepthook.py b/src/sentry_sdk_alpha/integrations/excepthook.py
index 33b65a9700b70d..f61bcf59569982 100644
--- a/src/sentry_sdk_alpha/integrations/excepthook.py
+++ b/src/sentry_sdk_alpha/integrations/excepthook.py
@@ -25,9 +25,7 @@ def __init__(self, always_run=False):
         # type: (bool) -> None
 
         if not isinstance(always_run, bool):
-            raise ValueError(
-                "Invalid value for always_run: {} (must be type boolean)".format(always_run)
-            )
+            raise ValueError(f"Invalid value for always_run: {always_run} (must be type boolean)")
         self.always_run = always_run
 
     @staticmethod

From a6dfd34b6de3a9354be3d439296c20be1e9cd1b3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Vjeran=20Grozdani=C4=87?= 
Date: Thu, 15 May 2025 13:37:01 +0200
Subject: [PATCH 12/22] del all cached imports (#91705)

---
 src/sentry/utils/sdk.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py
index a5f26733682fa6..fc7a1eb6ab6560 100644
--- a/src/sentry/utils/sdk.py
+++ b/src/sentry/utils/sdk.py
@@ -518,9 +518,11 @@ def redirect_import(original_module, target_module):
         redirector = ImportRedirector(original_module, target_module)
         sys.meta_path.insert(0, redirector)
         # TODO: Not sure the original module should be deleted....
-        if original_module in sys.modules:
-            # cleaning up cache if the module is already imported
-            del sys.modules[original_module]
+        # iterating over a copy to be able to delete from the original
+        for cached_module in sys.modules.copy():
+            if cached_module.startswith(original_module):
+                # cleaning up cache if the module is already imported
+                del sys.modules[cached_module]
 
     # monkey patch to anything but sentry_sdk
     if in_random_rollout("sentry-sdk.use-python-sdk-alpha") or True:

From 83414aeac8fafe5d6e1e165d7c068bac18bcde70 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 19 May 2025 12:32:54 +0200
Subject: [PATCH 13/22] removed dummy alpha code

---
 src/sentry_sdk_alpha/__init__.py              |   52 -
 src/sentry_sdk_alpha/_compat.py               |   95 -
 src/sentry_sdk_alpha/_init_implementation.py  |   46 -
 src/sentry_sdk_alpha/_log_batcher.py          |  157 --
 src/sentry_sdk_alpha/_lru_cache.py            |   47 -
 src/sentry_sdk_alpha/_queue.py                |  287 ---
 src/sentry_sdk_alpha/_types.py                |  238 --
 src/sentry_sdk_alpha/_werkzeug.py             |   97 -
 src/sentry_sdk_alpha/ai/__init__.py           |    0
 src/sentry_sdk_alpha/ai/monitoring.py         |  110 -
 src/sentry_sdk_alpha/ai/utils.py              |   32 -
 src/sentry_sdk_alpha/api.py                   |  328 ---
 src/sentry_sdk_alpha/attachments.py           |   75 -
 src/sentry_sdk_alpha/client.py                |  975 ---------
 src/sentry_sdk_alpha/consts.py                | 1048 ---------
 src/sentry_sdk_alpha/crons/__init__.py        |    9 -
 src/sentry_sdk_alpha/crons/api.py             |   57 -
 src/sentry_sdk_alpha/crons/consts.py          |    4 -
 src/sentry_sdk_alpha/crons/decorator.py       |  126 --
 src/sentry_sdk_alpha/debug.py                 |   31 -
 src/sentry_sdk_alpha/envelope.py              |  348 ---
 src/sentry_sdk_alpha/feature_flags.py         |   70 -
 src/sentry_sdk_alpha/integrations/__init__.py |  278 ---
 .../integrations/_asgi_common.py              |  103 -
 .../integrations/_wsgi_common.py              |  233 --
 src/sentry_sdk_alpha/integrations/aiohttp.py  |  395 ----
 .../integrations/anthropic.py                 |  283 ---
 src/sentry_sdk_alpha/integrations/argv.py     |   30 -
 src/sentry_sdk_alpha/integrations/ariadne.py  |  166 --
 src/sentry_sdk_alpha/integrations/arq.py      |  247 ---
 src/sentry_sdk_alpha/integrations/asgi.py     |  344 ---
 src/sentry_sdk_alpha/integrations/asyncio.py  |  124 --
 src/sentry_sdk_alpha/integrations/asyncpg.py  |  221 --
 src/sentry_sdk_alpha/integrations/atexit.py   |   56 -
 .../integrations/aws_lambda.py                |  492 -----
 src/sentry_sdk_alpha/integrations/beam.py     |  173 --
 src/sentry_sdk_alpha/integrations/boto3.py    |  162 --
 src/sentry_sdk_alpha/integrations/bottle.py   |  209 --
 .../integrations/celery/__init__.py           |  516 -----
 .../integrations/celery/beat.py               |  277 ---
 .../integrations/celery/utils.py              |   44 -
 src/sentry_sdk_alpha/integrations/chalice.py  |  126 --
 .../integrations/clickhouse_driver.py         |  185 --
 .../integrations/cloud_resource_context.py    |  264 ---
 src/sentry_sdk_alpha/integrations/cohere.py   |  264 ---
 src/sentry_sdk_alpha/integrations/dedupe.py   |   51 -
 .../integrations/django/__init__.py           |  713 ------
 .../integrations/django/asgi.py               |  242 ---
 .../integrations/django/caching.py            |  171 --
 .../integrations/django/middleware.py         |  183 --
 .../integrations/django/signals_handlers.py   |   86 -
 .../integrations/django/templates.py          |  179 --
 .../integrations/django/transactions.py       |  153 --
 .../integrations/django/views.py              |   99 -
 src/sentry_sdk_alpha/integrations/dramatiq.py |  165 --
 .../integrations/excepthook.py                |   73 -
 .../integrations/executing.py                 |   67 -
 src/sentry_sdk_alpha/integrations/falcon.py   |  245 ---
 src/sentry_sdk_alpha/integrations/fastapi.py  |  141 --
 src/sentry_sdk_alpha/integrations/flask.py    |  273 ---
 src/sentry_sdk_alpha/integrations/gcp.py      |  249 ---
 .../integrations/gnu_backtrace.py             |  107 -
 src/sentry_sdk_alpha/integrations/gql.py      |  136 --
 src/sentry_sdk_alpha/integrations/graphene.py |  146 --
 .../integrations/grpc/__init__.py             |  148 --
 .../integrations/grpc/aio/__init__.py         |    7 -
 .../integrations/grpc/aio/client.py           |  101 -
 .../integrations/grpc/aio/server.py           |   98 -
 .../integrations/grpc/client.py               |   91 -
 .../integrations/grpc/consts.py               |    1 -
 .../integrations/grpc/server.py               |   66 -
 src/sentry_sdk_alpha/integrations/httpx.py    |  198 --
 src/sentry_sdk_alpha/integrations/huey.py     |  170 --
 .../integrations/huggingface_hub.py           |  163 --
 .../integrations/langchain.py                 |  462 ----
 .../integrations/launchdarkly.py              |   62 -
 src/sentry_sdk_alpha/integrations/litestar.py |  302 ---
 src/sentry_sdk_alpha/integrations/logging.py  |  400 ----
 src/sentry_sdk_alpha/integrations/loguru.py   |  123 --
 src/sentry_sdk_alpha/integrations/modules.py  |   30 -
 src/sentry_sdk_alpha/integrations/openai.py   |  409 ----
 .../integrations/openfeature.py               |   37 -
 .../integrations/pure_eval.py                 |  136 --
 src/sentry_sdk_alpha/integrations/pymongo.py  |  203 --
 src/sentry_sdk_alpha/integrations/pyramid.py  |  226 --
 src/sentry_sdk_alpha/integrations/quart.py    |  234 --
 src/sentry_sdk_alpha/integrations/ray.py      |  147 --
 .../integrations/redis/__init__.py            |   38 -
 .../integrations/redis/_async_common.py       |  115 -
 .../integrations/redis/_sync_common.py        |  121 --
 .../integrations/redis/consts.py              |   19 -
 .../integrations/redis/modules/__init__.py    |    0
 .../integrations/redis/modules/caches.py      |  123 --
 .../integrations/redis/modules/queries.py     |   73 -
 src/sentry_sdk_alpha/integrations/redis/rb.py |   32 -
 .../integrations/redis/redis.py               |   70 -
 .../integrations/redis/redis_cluster.py       |   99 -
 .../redis/redis_py_cluster_legacy.py          |   50 -
 .../integrations/redis/utils.py               |  190 --
 src/sentry_sdk_alpha/integrations/rq.py       |  199 --
 .../integrations/rust_tracing.py              |  270 ---
 src/sentry_sdk_alpha/integrations/sanic.py    |  366 ----
 .../integrations/serverless.py                |   71 -
 src/sentry_sdk_alpha/integrations/socket.py   |  100 -
 .../integrations/spark/__init__.py            |    4 -
 .../integrations/spark/spark_driver.py        |  315 ---
 .../integrations/spark/spark_worker.py        |  110 -
 .../integrations/sqlalchemy.py                |  142 --
 .../integrations/starlette.py                 |  705 ------
 src/sentry_sdk_alpha/integrations/starlite.py |  289 ---
 src/sentry_sdk_alpha/integrations/statsig.py  |   37 -
 src/sentry_sdk_alpha/integrations/stdlib.py   |  307 ---
 .../integrations/strawberry.py                |  373 ----
 src/sentry_sdk_alpha/integrations/sys_exit.py |   70 -
 .../integrations/threading.py                 |  128 --
 src/sentry_sdk_alpha/integrations/tornado.py  |  256 ---
 src/sentry_sdk_alpha/integrations/trytond.py  |   52 -
 src/sentry_sdk_alpha/integrations/typer.py    |   54 -
 src/sentry_sdk_alpha/integrations/unleash.py  |   33 -
 src/sentry_sdk_alpha/integrations/wsgi.py     |  341 ---
 src/sentry_sdk_alpha/logger.py                |   56 -
 src/sentry_sdk_alpha/monitor.py               |  121 --
 .../opentelemetry/__init__.py                 |    9 -
 src/sentry_sdk_alpha/opentelemetry/consts.py  |   33 -
 .../opentelemetry/contextvars_context.py      |   68 -
 .../opentelemetry/propagator.py               |   95 -
 src/sentry_sdk_alpha/opentelemetry/sampler.py |  313 ---
 src/sentry_sdk_alpha/opentelemetry/scope.py   |  201 --
 .../opentelemetry/span_processor.py           |  316 ---
 src/sentry_sdk_alpha/opentelemetry/tracing.py |   31 -
 src/sentry_sdk_alpha/opentelemetry/utils.py   |  459 ----
 src/sentry_sdk_alpha/profiler/__init__.py     |    6 -
 .../profiler/continuous_profiler.py           |  646 ------
 .../profiler/transaction_profiler.py          |  747 -------
 src/sentry_sdk_alpha/profiler/utils.py        |  189 --
 src/sentry_sdk_alpha/py.typed                 |    0
 src/sentry_sdk_alpha/scope.py                 | 1514 -------------
 src/sentry_sdk_alpha/scrubber.py              |  170 --
 src/sentry_sdk_alpha/serializer.py            |  370 ----
 src/sentry_sdk_alpha/session.py               |  171 --
 src/sentry_sdk_alpha/sessions.py              |  186 --
 src/sentry_sdk_alpha/spotlight.py             |  232 --
 src/sentry_sdk_alpha/tracing.py               |  606 ------
 src/sentry_sdk_alpha/tracing_utils.py         |  854 --------
 src/sentry_sdk_alpha/transport.py             |  793 -------
 src/sentry_sdk_alpha/types.py                 |   49 -
 src/sentry_sdk_alpha/utils.py                 | 1924 -----------------
 src/sentry_sdk_alpha/worker.py                |  139 --
 148 files changed, 32167 deletions(-)
 delete mode 100644 src/sentry_sdk_alpha/__init__.py
 delete mode 100644 src/sentry_sdk_alpha/_compat.py
 delete mode 100644 src/sentry_sdk_alpha/_init_implementation.py
 delete mode 100644 src/sentry_sdk_alpha/_log_batcher.py
 delete mode 100644 src/sentry_sdk_alpha/_lru_cache.py
 delete mode 100644 src/sentry_sdk_alpha/_queue.py
 delete mode 100644 src/sentry_sdk_alpha/_types.py
 delete mode 100644 src/sentry_sdk_alpha/_werkzeug.py
 delete mode 100644 src/sentry_sdk_alpha/ai/__init__.py
 delete mode 100644 src/sentry_sdk_alpha/ai/monitoring.py
 delete mode 100644 src/sentry_sdk_alpha/ai/utils.py
 delete mode 100644 src/sentry_sdk_alpha/api.py
 delete mode 100644 src/sentry_sdk_alpha/attachments.py
 delete mode 100644 src/sentry_sdk_alpha/client.py
 delete mode 100644 src/sentry_sdk_alpha/consts.py
 delete mode 100644 src/sentry_sdk_alpha/crons/__init__.py
 delete mode 100644 src/sentry_sdk_alpha/crons/api.py
 delete mode 100644 src/sentry_sdk_alpha/crons/consts.py
 delete mode 100644 src/sentry_sdk_alpha/crons/decorator.py
 delete mode 100644 src/sentry_sdk_alpha/debug.py
 delete mode 100644 src/sentry_sdk_alpha/envelope.py
 delete mode 100644 src/sentry_sdk_alpha/feature_flags.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/__init__.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/_asgi_common.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/_wsgi_common.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/aiohttp.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/anthropic.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/argv.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/ariadne.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/arq.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/asgi.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/asyncio.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/asyncpg.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/atexit.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/aws_lambda.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/beam.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/boto3.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/bottle.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/celery/__init__.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/celery/beat.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/celery/utils.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/chalice.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/clickhouse_driver.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/cloud_resource_context.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/cohere.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/dedupe.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/django/__init__.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/django/asgi.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/django/caching.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/django/middleware.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/django/signals_handlers.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/django/templates.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/django/transactions.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/django/views.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/dramatiq.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/excepthook.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/executing.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/falcon.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/fastapi.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/flask.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/gcp.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/gnu_backtrace.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/gql.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/graphene.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/grpc/__init__.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/grpc/aio/__init__.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/grpc/aio/client.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/grpc/aio/server.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/grpc/client.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/grpc/consts.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/grpc/server.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/httpx.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/huey.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/huggingface_hub.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/langchain.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/launchdarkly.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/litestar.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/logging.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/loguru.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/modules.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/openai.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/openfeature.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/pure_eval.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/pymongo.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/pyramid.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/quart.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/ray.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/redis/__init__.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/redis/_async_common.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/redis/_sync_common.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/redis/consts.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/redis/modules/__init__.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/redis/modules/caches.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/redis/modules/queries.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/redis/rb.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/redis/redis.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/redis/redis_cluster.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/redis/redis_py_cluster_legacy.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/redis/utils.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/rq.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/rust_tracing.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/sanic.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/serverless.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/socket.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/spark/__init__.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/spark/spark_driver.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/spark/spark_worker.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/sqlalchemy.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/starlette.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/starlite.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/statsig.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/stdlib.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/strawberry.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/sys_exit.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/threading.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/tornado.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/trytond.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/typer.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/unleash.py
 delete mode 100644 src/sentry_sdk_alpha/integrations/wsgi.py
 delete mode 100644 src/sentry_sdk_alpha/logger.py
 delete mode 100644 src/sentry_sdk_alpha/monitor.py
 delete mode 100644 src/sentry_sdk_alpha/opentelemetry/__init__.py
 delete mode 100644 src/sentry_sdk_alpha/opentelemetry/consts.py
 delete mode 100644 src/sentry_sdk_alpha/opentelemetry/contextvars_context.py
 delete mode 100644 src/sentry_sdk_alpha/opentelemetry/propagator.py
 delete mode 100644 src/sentry_sdk_alpha/opentelemetry/sampler.py
 delete mode 100644 src/sentry_sdk_alpha/opentelemetry/scope.py
 delete mode 100644 src/sentry_sdk_alpha/opentelemetry/span_processor.py
 delete mode 100644 src/sentry_sdk_alpha/opentelemetry/tracing.py
 delete mode 100644 src/sentry_sdk_alpha/opentelemetry/utils.py
 delete mode 100644 src/sentry_sdk_alpha/profiler/__init__.py
 delete mode 100644 src/sentry_sdk_alpha/profiler/continuous_profiler.py
 delete mode 100644 src/sentry_sdk_alpha/profiler/transaction_profiler.py
 delete mode 100644 src/sentry_sdk_alpha/profiler/utils.py
 delete mode 100644 src/sentry_sdk_alpha/py.typed
 delete mode 100644 src/sentry_sdk_alpha/scope.py
 delete mode 100644 src/sentry_sdk_alpha/scrubber.py
 delete mode 100644 src/sentry_sdk_alpha/serializer.py
 delete mode 100644 src/sentry_sdk_alpha/session.py
 delete mode 100644 src/sentry_sdk_alpha/sessions.py
 delete mode 100644 src/sentry_sdk_alpha/spotlight.py
 delete mode 100644 src/sentry_sdk_alpha/tracing.py
 delete mode 100644 src/sentry_sdk_alpha/tracing_utils.py
 delete mode 100644 src/sentry_sdk_alpha/transport.py
 delete mode 100644 src/sentry_sdk_alpha/types.py
 delete mode 100644 src/sentry_sdk_alpha/utils.py
 delete mode 100644 src/sentry_sdk_alpha/worker.py

diff --git a/src/sentry_sdk_alpha/__init__.py b/src/sentry_sdk_alpha/__init__.py
deleted file mode 100644
index 8c908ce9184d15..00000000000000
--- a/src/sentry_sdk_alpha/__init__.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# TODO-neel scope switch
-# TODO-neel avoid duplication between api and __init__
-from sentry_sdk_alpha.api import *  # noqa
-from sentry_sdk_alpha.client import Client
-from sentry_sdk_alpha.consts import VERSION  # noqa
-from sentry_sdk_alpha.opentelemetry.scope import PotelScope as Scope
-from sentry_sdk_alpha.transport import HttpTransport, Transport
-
-__all__ = [  # noqa
-    "Scope",
-    "Client",
-    "Transport",
-    "HttpTransport",
-    "integrations",
-    # From sentry_sdk.api
-    "init",
-    "add_attachment",
-    "add_breadcrumb",
-    "capture_event",
-    "capture_exception",
-    "capture_message",
-    "continue_trace",
-    "flush",
-    "get_baggage",
-    "get_client",
-    "get_global_scope",
-    "get_isolation_scope",
-    "get_current_scope",
-    "get_current_span",
-    "get_traceparent",
-    "is_initialized",
-    "isolation_scope",
-    "last_event_id",
-    "new_scope",
-    "set_context",
-    "set_extra",
-    "set_level",
-    "set_tag",
-    "set_tags",
-    "set_user",
-    "start_span",
-    "start_transaction",
-    "trace",
-    "monitor",
-    "logger",
-]
-
-# Initialize the debug support after everything is loaded
-from sentry_sdk_alpha.debug import init_debug_support
-
-init_debug_support()
-del init_debug_support
diff --git a/src/sentry_sdk_alpha/_compat.py b/src/sentry_sdk_alpha/_compat.py
deleted file mode 100644
index f43ea04f35b9c3..00000000000000
--- a/src/sentry_sdk_alpha/_compat.py
+++ /dev/null
@@ -1,95 +0,0 @@
-import sys
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from typing import Any, TypeVar
-
-    T = TypeVar("T")
-
-
-PY38 = sys.version_info[0] == 3 and sys.version_info[1] >= 8
-PY310 = sys.version_info[0] == 3 and sys.version_info[1] >= 10
-PY311 = sys.version_info[0] == 3 and sys.version_info[1] >= 11
-
-
-def with_metaclass(meta, *bases):
-    # type: (Any, *Any) -> Any
-    class MetaClass(type):
-        def __new__(metacls, name, this_bases, d):
-            # type: (Any, Any, Any, Any) -> Any
-            return meta(name, bases, d)
-
-    return type.__new__(MetaClass, "temporary_class", (), {})
-
-
-def check_uwsgi_thread_support():
-    # type: () -> bool
-    # We check two things here:
-    #
-    # 1. uWSGI doesn't run in threaded mode by default -- issue a warning if
-    #    that's the case.
-    #
-    # 2. Additionally, if uWSGI is running in preforking mode (default), it needs
-    #    the --py-call-uwsgi-fork-hooks option for the SDK to work properly. This
-    #    is because any background threads spawned before the main process is
-    #    forked are NOT CLEANED UP IN THE CHILDREN BY DEFAULT even if
-    #    --enable-threads is on. One has to explicitly provide
-    #    --py-call-uwsgi-fork-hooks to force uWSGI to run regular cpython
-    #    after-fork hooks that take care of cleaning up stale thread data.
-    try:
-        from uwsgi import opt  # type: ignore
-    except ImportError:
-        return True
-
-    from sentry_sdk_alpha.consts import FALSE_VALUES
-
-    def enabled(option):
-        # type: (str) -> bool
-        value = opt.get(option, False)
-        if isinstance(value, bool):
-            return value
-
-        if isinstance(value, bytes):
-            try:
-                value = value.decode()
-            except Exception:
-                pass
-
-        return value and str(value).lower() not in FALSE_VALUES
-
-    # When `threads` is passed in as a uwsgi option,
-    # `enable-threads` is implied on.
-    threads_enabled = "threads" in opt or enabled("enable-threads")
-    fork_hooks_on = enabled("py-call-uwsgi-fork-hooks")
-    lazy_mode = enabled("lazy-apps") or enabled("lazy")
-
-    if lazy_mode and not threads_enabled:
-        from warnings import warn
-
-        warn(
-            Warning(
-                "IMPORTANT: "
-                "We detected the use of uWSGI without thread support. "
-                "This might lead to unexpected issues. "
-                'Please run uWSGI with "--enable-threads" for full support.'
-            )
-        )
-
-        return False
-
-    elif not lazy_mode and (not threads_enabled or not fork_hooks_on):
-        from warnings import warn
-
-        warn(
-            Warning(
-                "IMPORTANT: "
-                "We detected the use of uWSGI in preforking mode without "
-                "thread support. This might lead to crashing workers. "
-                'Please run uWSGI with both "--enable-threads" and '
-                '"--py-call-uwsgi-fork-hooks" for full support.'
-            )
-        )
-
-        return False
-
-    return True
diff --git a/src/sentry_sdk_alpha/_init_implementation.py b/src/sentry_sdk_alpha/_init_implementation.py
deleted file mode 100644
index 2799e179d765e4..00000000000000
--- a/src/sentry_sdk_alpha/_init_implementation.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import ClientConstructor
-from sentry_sdk_alpha.opentelemetry.scope import setup_scope_context_management
-
-if TYPE_CHECKING:
-    from typing import Any, Optional
-
-
-def _check_python_deprecations():
-    # type: () -> None
-    # Since we're likely to deprecate Python versions in the future, I'm keeping
-    # this handy function around. Use this to detect the Python version used and
-    # to output logger.warning()s if it's deprecated.
-    pass
-
-
-def _init(*args, **kwargs):
-    # type: (*Optional[str], **Any) -> None
-    """Initializes the SDK and optionally integrations.
-
-    This takes the same arguments as the client constructor.
-    """
-    setup_scope_context_management()
-    client = sentry_sdk_alpha.Client(*args, **kwargs)
-    sentry_sdk_alpha.get_global_scope().set_client(client)
-    _check_python_deprecations()
-
-
-if TYPE_CHECKING:
-    # Make mypy, PyCharm and other static analyzers think `init` is a type to
-    # have nicer autocompletion for params.
-    #
-    # Use `ClientConstructor` to define the argument types of `init` and
-    # `ContextManager[Any]` to tell static analyzers about the return type.
-
-    class init(ClientConstructor):  # noqa: N801
-        pass
-
-else:
-    # Alias `init` for actual usage. Go through the lambda indirection to throw
-    # PyCharm off of the weakly typed signature (it would otherwise discover
-    # both the weakly typed signature of `_init` and our faked `init` type).
-
-    init = (lambda: _init)()
diff --git a/src/sentry_sdk_alpha/_log_batcher.py b/src/sentry_sdk_alpha/_log_batcher.py
deleted file mode 100644
index 49f40a882eb9e3..00000000000000
--- a/src/sentry_sdk_alpha/_log_batcher.py
+++ /dev/null
@@ -1,157 +0,0 @@
-import os
-import random
-import threading
-from collections.abc import Callable
-from datetime import datetime, timezone
-from typing import TYPE_CHECKING, Any, List, Optional
-
-from sentry_sdk_alpha.envelope import Envelope, Item, PayloadRef
-from sentry_sdk_alpha.utils import format_timestamp, safe_repr
-
-if TYPE_CHECKING:
-    from sentry_sdk_alpha._types import Log
-
-
-class LogBatcher:
-    MAX_LOGS_BEFORE_FLUSH = 100
-    FLUSH_WAIT_TIME = 5.0
-
-    def __init__(
-        self,
-        capture_func,  # type: Callable[[Envelope], None]
-    ):
-        # type: (...) -> None
-        self._log_buffer = []  # type: List[Log]
-        self._capture_func = capture_func
-        self._running = True
-        self._lock = threading.Lock()
-
-        self._flush_event = threading.Event()  # type: threading.Event
-
-        self._flusher = None  # type: Optional[threading.Thread]
-        self._flusher_pid = None  # type: Optional[int]
-
-    def _ensure_thread(self):
-        # type: (...) -> bool
-        """For forking processes we might need to restart this thread.
-        This ensures that our process actually has that thread running.
-        """
-        if not self._running:
-            return False
-
-        pid = os.getpid()
-        if self._flusher_pid == pid:
-            return True
-
-        with self._lock:
-            # Recheck to make sure another thread didn't get here and start the
-            # the flusher in the meantime
-            if self._flusher_pid == pid:
-                return True
-
-            self._flusher_pid = pid
-
-            self._flusher = threading.Thread(target=self._flush_loop)
-            self._flusher.daemon = True
-
-            try:
-                self._flusher.start()
-            except RuntimeError:
-                # Unfortunately at this point the interpreter is in a state that no
-                # longer allows us to spawn a thread and we have to bail.
-                self._running = False
-                return False
-
-        return True
-
-    def _flush_loop(self):
-        # type: (...) -> None
-        while self._running:
-            self._flush_event.wait(self.FLUSH_WAIT_TIME + random.random())
-            self._flush_event.clear()
-            self._flush()
-
-    def add(
-        self,
-        log,  # type: Log
-    ):
-        # type: (...) -> None
-        if not self._ensure_thread() or self._flusher is None:
-            return None
-
-        with self._lock:
-            self._log_buffer.append(log)
-            if len(self._log_buffer) >= self.MAX_LOGS_BEFORE_FLUSH:
-                self._flush_event.set()
-
-    def kill(self):
-        # type: (...) -> None
-        if self._flusher is None:
-            return
-
-        self._running = False
-        self._flush_event.set()
-        self._flusher = None
-
-    def flush(self):
-        # type: (...) -> None
-        self._flush()
-
-    @staticmethod
-    def _log_to_transport_format(log):
-        # type: (Log) -> Any
-        def format_attribute(val):
-            # type: (int | float | str | bool) -> Any
-            if isinstance(val, bool):
-                return {"value": val, "type": "boolean"}
-            if isinstance(val, int):
-                return {"value": val, "type": "integer"}
-            if isinstance(val, float):
-                return {"value": val, "type": "double"}
-            if isinstance(val, str):
-                return {"value": val, "type": "string"}
-            return {"value": safe_repr(val), "type": "string"}
-
-        if "sentry.severity_number" not in log["attributes"]:
-            log["attributes"]["sentry.severity_number"] = log["severity_number"]
-        if "sentry.severity_text" not in log["attributes"]:
-            log["attributes"]["sentry.severity_text"] = log["severity_text"]
-
-        res = {
-            "timestamp": int(log["time_unix_nano"]) / 1.0e9,
-            "trace_id": log.get("trace_id", "00000000-0000-0000-0000-000000000000"),
-            "level": str(log["severity_text"]),
-            "body": str(log["body"]),
-            "attributes": {k: format_attribute(v) for (k, v) in log["attributes"].items()},
-        }
-
-        return res
-
-    def _flush(self):
-        # type: (...) -> Optional[Envelope]
-
-        envelope = Envelope(headers={"sent_at": format_timestamp(datetime.now(timezone.utc))})
-        with self._lock:
-            if len(self._log_buffer) == 0:
-                return None
-
-            envelope.add_item(
-                Item(
-                    type="log",
-                    content_type="application/vnd.sentry.items.log+json",
-                    headers={
-                        "item_count": len(self._log_buffer),
-                    },
-                    payload=PayloadRef(
-                        json={
-                            "items": [
-                                self._log_to_transport_format(log) for log in self._log_buffer
-                            ]
-                        }
-                    ),
-                )
-            )
-            self._log_buffer.clear()
-
-        self._capture_func(envelope)
-        return envelope
diff --git a/src/sentry_sdk_alpha/_lru_cache.py b/src/sentry_sdk_alpha/_lru_cache.py
deleted file mode 100644
index cbadd9723b6fc5..00000000000000
--- a/src/sentry_sdk_alpha/_lru_cache.py
+++ /dev/null
@@ -1,47 +0,0 @@
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from typing import Any
-
-
-_SENTINEL = object()
-
-
-class LRUCache:
-    def __init__(self, max_size):
-        # type: (int) -> None
-        if max_size <= 0:
-            raise AssertionError(f"invalid max_size: {max_size}")
-        self.max_size = max_size
-        self._data = {}  # type: dict[Any, Any]
-        self.hits = self.misses = 0
-        self.full = False
-
-    def set(self, key, value):
-        # type: (Any, Any) -> None
-        current = self._data.pop(key, _SENTINEL)
-        if current is not _SENTINEL:
-            self._data[key] = value
-        elif self.full:
-            self._data.pop(next(iter(self._data)))
-            self._data[key] = value
-        else:
-            self._data[key] = value
-        self.full = len(self._data) >= self.max_size
-
-    def get(self, key, default=None):
-        # type: (Any, Any) -> Any
-        try:
-            ret = self._data.pop(key)
-        except KeyError:
-            self.misses += 1
-            ret = default
-        else:
-            self.hits += 1
-            self._data[key] = ret
-
-        return ret
-
-    def get_all(self):
-        # type: () -> list[tuple[Any, Any]]
-        return list(self._data.items())
diff --git a/src/sentry_sdk_alpha/_queue.py b/src/sentry_sdk_alpha/_queue.py
deleted file mode 100644
index 99b3916bb65396..00000000000000
--- a/src/sentry_sdk_alpha/_queue.py
+++ /dev/null
@@ -1,287 +0,0 @@
-"""
-A fork of Python 3.6's stdlib queue (found in Pythons 'cpython/Lib/queue.py')
-with Lock swapped out for RLock to avoid a deadlock while garbage collecting.
-
-https://github.com/python/cpython/blob/v3.6.12/Lib/queue.py
-
-
-See also
-https://codewithoutrules.com/2017/08/16/concurrency-python/
-https://bugs.python.org/issue14976
-https://github.com/sqlalchemy/sqlalchemy/blob/4eb747b61f0c1b1c25bdee3856d7195d10a0c227/lib/sqlalchemy/queue.py#L1
-
-We also vendor the code to evade eventlet's broken monkeypatching, see
-https://github.com/getsentry/sentry-python/pull/484
-
-
-Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation;
-
-All Rights Reserved
-
-
-PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
---------------------------------------------
-
-1. This LICENSE AGREEMENT is between the Python Software Foundation
-("PSF"), and the Individual or Organization ("Licensee") accessing and
-otherwise using this software ("Python") in source or binary form and
-its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, PSF hereby
-grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
-analyze, test, perform and/or display publicly, prepare derivative works,
-distribute, and otherwise use Python alone or in any derivative version,
-provided, however, that PSF's License Agreement and PSF's notice of copyright,
-i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation;
-All Rights Reserved" are retained in Python alone or in any derivative version
-prepared by Licensee.
-
-3. In the event Licensee prepares a derivative work that is based on
-or incorporates Python or any part thereof, and wants to make
-the derivative work available to others as provided herein, then
-Licensee hereby agrees to include in any such work a brief summary of
-the changes made to Python.
-
-4. PSF is making Python available to Licensee on an "AS IS"
-basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
-FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
-A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
-OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-7. Nothing in this License Agreement shall be deemed to create any
-relationship of agency, partnership, or joint venture between PSF and
-Licensee.  This License Agreement does not grant permission to use PSF
-trademarks or trade name in a trademark sense to endorse or promote
-products or services of Licensee, or any third party.
-
-8. By copying, installing or otherwise using Python, Licensee
-agrees to be bound by the terms and conditions of this License
-Agreement.
-
-"""
-
-import threading
-from collections import deque
-from time import time
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from typing import Any
-
-__all__ = ["EmptyError", "FullError", "Queue"]
-
-
-class EmptyError(Exception):
-    "Exception raised by Queue.get(block=0)/get_nowait()."
-
-    pass
-
-
-class FullError(Exception):
-    "Exception raised by Queue.put(block=0)/put_nowait()."
-
-    pass
-
-
-class Queue:
-    """Create a queue object with a given maximum size.
-
-    If maxsize is <= 0, the queue size is infinite.
-    """
-
-    def __init__(self, maxsize=0):
-        self.maxsize = maxsize
-        self._init(maxsize)
-
-        # mutex must be held whenever the queue is mutating.  All methods
-        # that acquire mutex must release it before returning.  mutex
-        # is shared between the three conditions, so acquiring and
-        # releasing the conditions also acquires and releases mutex.
-        self.mutex = threading.RLock()
-
-        # Notify not_empty whenever an item is added to the queue; a
-        # thread waiting to get is notified then.
-        self.not_empty = threading.Condition(self.mutex)
-
-        # Notify not_full whenever an item is removed from the queue;
-        # a thread waiting to put is notified then.
-        self.not_full = threading.Condition(self.mutex)
-
-        # Notify all_tasks_done whenever the number of unfinished tasks
-        # drops to zero; thread waiting to join() is notified to resume
-        self.all_tasks_done = threading.Condition(self.mutex)
-        self.unfinished_tasks = 0
-
-    def task_done(self):
-        """Indicate that a formerly enqueued task is complete.
-
-        Used by Queue consumer threads.  For each get() used to fetch a task,
-        a subsequent call to task_done() tells the queue that the processing
-        on the task is complete.
-
-        If a join() is currently blocking, it will resume when all items
-        have been processed (meaning that a task_done() call was received
-        for every item that had been put() into the queue).
-
-        Raises a ValueError if called more times than there were items
-        placed in the queue.
-        """
-        with self.all_tasks_done:
-            unfinished = self.unfinished_tasks - 1
-            if unfinished <= 0:
-                if unfinished < 0:
-                    raise ValueError("task_done() called too many times")
-                self.all_tasks_done.notify_all()
-            self.unfinished_tasks = unfinished
-
-    def join(self):
-        """Blocks until all items in the Queue have been gotten and processed.
-
-        The count of unfinished tasks goes up whenever an item is added to the
-        queue. The count goes down whenever a consumer thread calls task_done()
-        to indicate the item was retrieved and all work on it is complete.
-
-        When the count of unfinished tasks drops to zero, join() unblocks.
-        """
-        with self.all_tasks_done:
-            while self.unfinished_tasks:
-                self.all_tasks_done.wait()
-
-    def qsize(self):
-        """Return the approximate size of the queue (not reliable!)."""
-        with self.mutex:
-            return self._qsize()
-
-    def empty(self):
-        """Return True if the queue is empty, False otherwise (not reliable!).
-
-        This method is likely to be removed at some point.  Use qsize() == 0
-        as a direct substitute, but be aware that either approach risks a race
-        condition where a queue can grow before the result of empty() or
-        qsize() can be used.
-
-        To create code that needs to wait for all queued tasks to be
-        completed, the preferred technique is to use the join() method.
-        """
-        with self.mutex:
-            return not self._qsize()
-
-    def full(self):
-        """Return True if the queue is full, False otherwise (not reliable!).
-
-        This method is likely to be removed at some point.  Use qsize() >= n
-        as a direct substitute, but be aware that either approach risks a race
-        condition where a queue can shrink before the result of full() or
-        qsize() can be used.
-        """
-        with self.mutex:
-            return 0 < self.maxsize <= self._qsize()
-
-    def put(self, item, block=True, timeout=None):
-        """Put an item into the queue.
-
-        If optional args 'block' is true and 'timeout' is None (the default),
-        block if necessary until a free slot is available. If 'timeout' is
-        a non-negative number, it blocks at most 'timeout' seconds and raises
-        the FullError exception if no free slot was available within that time.
-        Otherwise ('block' is false), put an item on the queue if a free slot
-        is immediately available, else raise the FullError exception ('timeout'
-        is ignored in that case).
-        """
-        with self.not_full:
-            if self.maxsize > 0:
-                if not block:
-                    if self._qsize() >= self.maxsize:
-                        raise FullError()
-                elif timeout is None:
-                    while self._qsize() >= self.maxsize:
-                        self.not_full.wait()
-                elif timeout < 0:
-                    raise ValueError("'timeout' must be a non-negative number")
-                else:
-                    endtime = time() + timeout
-                    while self._qsize() >= self.maxsize:
-                        remaining = endtime - time()
-                        if remaining <= 0.0:
-                            raise FullError()
-                        self.not_full.wait(remaining)
-            self._put(item)
-            self.unfinished_tasks += 1
-            self.not_empty.notify()
-
-    def get(self, block=True, timeout=None):
-        """Remove and return an item from the queue.
-
-        If optional args 'block' is true and 'timeout' is None (the default),
-        block if necessary until an item is available. If 'timeout' is
-        a non-negative number, it blocks at most 'timeout' seconds and raises
-        the EmptyError exception if no item was available within that time.
-        Otherwise ('block' is false), return an item if one is immediately
-        available, else raise the EmptyError exception ('timeout' is ignored
-        in that case).
-        """
-        with self.not_empty:
-            if not block:
-                if not self._qsize():
-                    raise EmptyError()
-            elif timeout is None:
-                while not self._qsize():
-                    self.not_empty.wait()
-            elif timeout < 0:
-                raise ValueError("'timeout' must be a non-negative number")
-            else:
-                endtime = time() + timeout
-                while not self._qsize():
-                    remaining = endtime - time()
-                    if remaining <= 0.0:
-                        raise EmptyError()
-                    self.not_empty.wait(remaining)
-            item = self._get()
-            self.not_full.notify()
-            return item
-
-    def put_nowait(self, item):
-        """Put an item into the queue without blocking.
-
-        Only enqueue the item if a free slot is immediately available.
-        Otherwise raise the FullError exception.
-        """
-        return self.put(item, block=False)
-
-    def get_nowait(self):
-        """Remove and return an item from the queue without blocking.
-
-        Only get an item if one is immediately available. Otherwise
-        raise the EmptyError exception.
-        """
-        return self.get(block=False)
-
-    # Override these methods to implement other queue organizations
-    # (e.g. stack or priority queue).
-    # These will only be called with appropriate locks held
-
-    # Initialize the queue representation
-    def _init(self, maxsize):
-        self.queue = deque()  # type: Any
-
-    def _qsize(self):
-        return len(self.queue)
-
-    # Put a new item in the queue
-    def _put(self, item):
-        self.queue.append(item)
-
-    # Get an item from the queue
-    def _get(self):
-        return self.queue.popleft()
diff --git a/src/sentry_sdk_alpha/_types.py b/src/sentry_sdk_alpha/_types.py
deleted file mode 100644
index d1c7f8be01254a..00000000000000
--- a/src/sentry_sdk_alpha/_types.py
+++ /dev/null
@@ -1,238 +0,0 @@
-from typing import TYPE_CHECKING, TypeVar, Union
-
-# Re-exported for compat, since code out there in the wild might use this variable.
-MYPY = TYPE_CHECKING
-
-
-SENSITIVE_DATA_SUBSTITUTE = "[Filtered]"
-
-
-class AnnotatedValue:
-    """
-    Meta information for a data field in the event payload.
-    This is to tell Relay that we have tampered with the fields value.
-    See:
-    https://github.com/getsentry/relay/blob/be12cd49a0f06ea932ed9b9f93a655de5d6ad6d1/relay-general/src/types/meta.rs#L407-L423
-    """
-
-    __slots__ = ("value", "metadata")
-
-    def __init__(self, value, metadata):
-        # type: (Optional[Any], Dict[str, Any]) -> None
-        self.value = value
-        self.metadata = metadata
-
-    def __eq__(self, other):
-        # type: (Any) -> bool
-        if not isinstance(other, AnnotatedValue):
-            return False
-
-        return self.value == other.value and self.metadata == other.metadata
-
-    def __str__(self):
-        # type: (AnnotatedValue) -> str
-        return str({"value": str(self.value), "metadata": str(self.metadata)})
-
-    def __len__(self):
-        # type: (AnnotatedValue) -> int
-        if self.value is not None:
-            return len(self.value)
-        else:
-            return 0
-
-    @classmethod
-    def removed_because_raw_data(cls):
-        # type: () -> AnnotatedValue
-        """The value was removed because it could not be parsed. This is done for request body values that are not json nor a form."""
-        return AnnotatedValue(
-            value="",
-            metadata={
-                "rem": [  # Remark
-                    [
-                        "!raw",  # Unparsable raw data
-                        "x",  # The fields original value was removed
-                    ]
-                ]
-            },
-        )
-
-    @classmethod
-    def removed_because_over_size_limit(cls, value=""):
-        # type: (Any) -> AnnotatedValue
-        """
-        The actual value was removed because the size of the field exceeded the configured maximum size,
-        for example specified with the max_request_body_size sdk option.
-        """
-        return AnnotatedValue(
-            value=value,
-            metadata={
-                "rem": [  # Remark
-                    [
-                        "!config",  # Because of configured maximum size
-                        "x",  # The fields original value was removed
-                    ]
-                ]
-            },
-        )
-
-    @classmethod
-    def substituted_because_contains_sensitive_data(cls):
-        # type: () -> AnnotatedValue
-        """The actual value was removed because it contained sensitive information."""
-        return AnnotatedValue(
-            value=SENSITIVE_DATA_SUBSTITUTE,
-            metadata={
-                "rem": [  # Remark
-                    [
-                        "!config",  # Because of SDK configuration (in this case the config is the hard coded removal of certain django cookies)
-                        "s",  # The fields original value was substituted
-                    ]
-                ]
-            },
-        )
-
-
-T = TypeVar("T")
-Annotated = Union[AnnotatedValue, T]
-
-
-if TYPE_CHECKING:
-    from collections.abc import Callable, Container, Mapping, MutableMapping, Sequence
-    from datetime import datetime
-    from types import TracebackType
-    from typing import Any, Dict, Literal, Optional, Type, TypedDict
-
-    class SDKInfo(TypedDict):
-        name: str
-        version: str
-        packages: Sequence[Mapping[str, str]]
-
-    # "critical" is an alias of "fatal" recognized by Relay
-    LogLevelStr = Literal["fatal", "critical", "error", "warning", "info", "debug"]
-
-    class Event(TypedDict, total=False):
-        breadcrumbs: Annotated[dict[Literal["values"], list[dict[str, Any]]]]
-        # TODO: We can expand on this type
-        check_in_id: str
-        contexts: dict[str, dict[str, object]]
-        dist: str
-        duration: float | None
-        environment: str
-        errors: list[dict[str, Any]]  # TODO: We can expand on this type
-        event_id: str
-        exception: dict[Literal["values"], list[dict[str, Any]]]
-        # TODO: We can expand on this type
-        extra: MutableMapping[str, object]
-        fingerprint: list[str]
-        level: LogLevelStr
-        logentry: Mapping[str, object]
-        logger: str
-        message: str
-        modules: dict[str, str]
-        monitor_config: Mapping[str, object]
-        monitor_slug: str | None
-        platform: Literal["python"]
-        profile: object  # Should be sentry_sdk.profiler.Profile, but we can't import that here due to circular imports
-        release: str
-        request: dict[str, object]
-        sdk: Mapping[str, object]
-        server_name: str
-        spans: Annotated[list[dict[str, object]]]
-        stacktrace: dict[str, object]
-        # We access this key in the code, but I am unsure whether we ever set it
-        start_timestamp: datetime
-        status: str | None
-        tags: MutableMapping[str, str]
-        # Tags must be less than 200 characters each
-        threads: dict[Literal["values"], list[dict[str, Any]]]
-        # TODO: We can expand on this type
-        timestamp: datetime | None  # Must be set before sending the event
-        transaction: str
-        transaction_info: Mapping[str, Any]  # TODO: We can expand on this type
-        type: Literal["check_in", "transaction"]
-        user: dict[str, object]
-        _dropped_spans: int
-
-    ExcInfo = Union[
-        tuple[type[BaseException], BaseException, Optional[TracebackType]],
-        tuple[None, None, None],
-    ]
-
-    # TODO: Make a proper type definition for this (PRs welcome!)
-    Hint = dict[str, Any]
-
-    class Log(TypedDict):
-        severity_text: str
-        severity_number: int
-        body: str
-        attributes: dict[str, str | bool | float | int]
-        time_unix_nano: int
-        trace_id: str | None
-
-    # TODO: Make a proper type definition for this (PRs welcome!)
-    Breadcrumb = dict[str, Any]
-
-    # TODO: Make a proper type definition for this (PRs welcome!)
-    BreadcrumbHint = dict[str, Any]
-
-    # TODO: Make a proper type definition for this (PRs welcome!)
-    SamplingContext = dict[str, Any]
-
-    EventProcessor = Callable[[Event, Hint], Optional[Event]]
-    ErrorProcessor = Callable[[Event, ExcInfo], Optional[Event]]
-    BreadcrumbProcessor = Callable[[Breadcrumb, BreadcrumbHint], Optional[Breadcrumb]]
-    TransactionProcessor = Callable[[Event, Hint], Optional[Event]]
-    LogProcessor = Callable[[Log, Hint], Optional[Log]]
-
-    TracesSampler = Callable[[SamplingContext], Union[float, int, bool]]
-
-    # https://github.com/python/mypy/issues/5710
-    NotImplementedType = Any
-
-    EventDataCategory = Literal[
-        "default",
-        "error",
-        "crash",
-        "transaction",
-        "security",
-        "attachment",
-        "session",
-        "internal",
-        "profile",
-        "profile_chunk",
-        "monitor",
-        "span",
-        "log",
-    ]
-    SessionStatus = Literal["ok", "exited", "crashed", "abnormal"]
-
-    ContinuousProfilerMode = Literal["thread", "gevent", "unknown"]
-    ProfilerMode = Union[ContinuousProfilerMode, Literal["sleep"]]
-
-    MonitorConfigScheduleType = Literal["crontab", "interval"]
-    MonitorConfigScheduleUnit = Literal[
-        "year",
-        "month",
-        "week",
-        "day",
-        "hour",
-        "minute",
-        "second",  # not supported in Sentry and will result in a warning
-    ]
-
-    class MonitorConfigSchedule(TypedDict, total=False):
-        type: MonitorConfigScheduleType
-        value: int | str
-        unit: MonitorConfigScheduleUnit
-
-    class MonitorConfig(TypedDict, total=False):
-        schedule: MonitorConfigSchedule
-        timezone: str
-        checkin_margin: int
-        max_runtime: int
-        failure_issue_threshold: int
-        recovery_threshold: int
-
-    HttpStatusCodeRange = Union[int, Container[int]]
-
-    OtelExtractedSpanData = tuple[str, str, Optional[str], Optional[int], Optional[str]]
diff --git a/src/sentry_sdk_alpha/_werkzeug.py b/src/sentry_sdk_alpha/_werkzeug.py
deleted file mode 100644
index 79dda2f0529f3e..00000000000000
--- a/src/sentry_sdk_alpha/_werkzeug.py
+++ /dev/null
@@ -1,97 +0,0 @@
-"""
-Copyright (c) 2007 by the Pallets team.
-
-Some rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-* Redistributions of source code must retain the above copyright notice,
-  this list of conditions and the following disclaimer.
-
-* Redistributions in binary form must reproduce the above copyright
-  notice, this list of conditions and the following disclaimer in the
-  documentation and/or other materials provided with the distribution.
-
-* Neither the name of the copyright holder nor the names of its
-  contributors may be used to endorse or promote products derived from
-  this software without specific prior written permission.
-
-THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
-CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
-BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
-NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
-USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
-ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
-THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGE.
-"""
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from collections.abc import Iterator
-    from typing import Dict, Tuple
-
-
-#
-# `get_headers` comes from `werkzeug.datastructures.EnvironHeaders`
-# https://github.com/pallets/werkzeug/blob/0.14.1/werkzeug/datastructures.py#L1361
-#
-# We need this function because Django does not give us a "pure" http header
-# dict. So we might as well use it for all WSGI integrations.
-#
-def _get_headers(environ):
-    # type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
-    """
-    Returns only proper HTTP headers.
-    """
-    for key, value in environ.items():
-        key = str(key)
-        if key.startswith("HTTP_") and key not in (
-            "HTTP_CONTENT_TYPE",
-            "HTTP_CONTENT_LENGTH",
-        ):
-            yield key[5:].replace("_", "-").title(), value
-        elif key in ("CONTENT_TYPE", "CONTENT_LENGTH"):
-            yield key.replace("_", "-").title(), value
-
-
-#
-# `get_host` comes from `werkzeug.wsgi.get_host`
-# https://github.com/pallets/werkzeug/blob/1.0.1/src/werkzeug/wsgi.py#L145
-#
-def get_host(environ, use_x_forwarded_for=False):
-    # type: (Dict[str, str], bool) -> str
-    """
-    Return the host for the given WSGI environment.
-    """
-    if use_x_forwarded_for and "HTTP_X_FORWARDED_HOST" in environ:
-        rv = environ["HTTP_X_FORWARDED_HOST"]
-        if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"):
-            rv = rv[:-3]
-        elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"):
-            rv = rv[:-4]
-    elif environ.get("HTTP_HOST"):
-        rv = environ["HTTP_HOST"]
-        if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"):
-            rv = rv[:-3]
-        elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"):
-            rv = rv[:-4]
-    elif environ.get("SERVER_NAME"):
-        rv = environ["SERVER_NAME"]
-        if (environ["wsgi.url_scheme"], environ["SERVER_PORT"]) not in (
-            ("https", "443"),
-            ("http", "80"),
-        ):
-            rv += ":" + environ["SERVER_PORT"]
-    else:
-        # In spite of the WSGI spec, SERVER_NAME might not be present.
-        rv = "unknown"
-
-    return rv
diff --git a/src/sentry_sdk_alpha/ai/__init__.py b/src/sentry_sdk_alpha/ai/__init__.py
deleted file mode 100644
index e69de29bb2d1d6..00000000000000
diff --git a/src/sentry_sdk_alpha/ai/monitoring.py b/src/sentry_sdk_alpha/ai/monitoring.py
deleted file mode 100644
index 1f824d9bc3b0ae..00000000000000
--- a/src/sentry_sdk_alpha/ai/monitoring.py
+++ /dev/null
@@ -1,110 +0,0 @@
-import inspect
-from functools import wraps
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha.utils
-from sentry_sdk_alpha import start_span
-from sentry_sdk_alpha.consts import SPANDATA
-from sentry_sdk_alpha.tracing import Span
-from sentry_sdk_alpha.utils import ContextVar
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Optional
-
-_ai_pipeline_name = ContextVar("ai_pipeline_name", default=None)
-
-
-def set_ai_pipeline_name(name):
-    # type: (Optional[str]) -> None
-    _ai_pipeline_name.set(name)
-
-
-def get_ai_pipeline_name():
-    # type: () -> Optional[str]
-    return _ai_pipeline_name.get()
-
-
-def ai_track(description, **span_kwargs):
-    # type: (str, Any) -> Callable[..., Any]
-    def decorator(f):
-        # type: (Callable[..., Any]) -> Callable[..., Any]
-        def sync_wrapped(*args, **kwargs):
-            # type: (Any, Any) -> Any
-            curr_pipeline = _ai_pipeline_name.get()
-            op = span_kwargs.get("op", "ai.run" if curr_pipeline else "ai.pipeline")
-
-            with start_span(name=description, op=op, only_if_parent=True, **span_kwargs) as span:
-                for k, v in kwargs.pop("sentry_tags", {}).items():
-                    span.set_tag(k, v)
-                for k, v in kwargs.pop("sentry_data", {}).items():
-                    span.set_attribute(k, v)
-                if curr_pipeline:
-                    span.set_attribute(SPANDATA.AI_PIPELINE_NAME, curr_pipeline)
-                    return f(*args, **kwargs)
-                else:
-                    _ai_pipeline_name.set(description)
-                    try:
-                        res = f(*args, **kwargs)
-                    except Exception as e:
-                        event, hint = sentry_sdk_alpha.utils.event_from_exception(
-                            e,
-                            client_options=sentry_sdk_alpha.get_client().options,
-                            mechanism={"type": "ai_monitoring", "handled": False},
-                        )
-                        sentry_sdk_alpha.capture_event(event, hint=hint)
-                        raise e from None
-                    finally:
-                        _ai_pipeline_name.set(None)
-                    return res
-
-        async def async_wrapped(*args, **kwargs):
-            # type: (Any, Any) -> Any
-            curr_pipeline = _ai_pipeline_name.get()
-            op = span_kwargs.get("op", "ai.run" if curr_pipeline else "ai.pipeline")
-
-            with start_span(name=description, op=op, only_if_parent=True, **span_kwargs) as span:
-                for k, v in kwargs.pop("sentry_tags", {}).items():
-                    span.set_tag(k, v)
-                for k, v in kwargs.pop("sentry_data", {}).items():
-                    span.set_attribute(k, v)
-                if curr_pipeline:
-                    span.set_attribute(SPANDATA.AI_PIPELINE_NAME, curr_pipeline)
-                    return await f(*args, **kwargs)
-                else:
-                    _ai_pipeline_name.set(description)
-                    try:
-                        res = await f(*args, **kwargs)
-                    except Exception as e:
-                        event, hint = sentry_sdk_alpha.utils.event_from_exception(
-                            e,
-                            client_options=sentry_sdk_alpha.get_client().options,
-                            mechanism={"type": "ai_monitoring", "handled": False},
-                        )
-                        sentry_sdk_alpha.capture_event(event, hint=hint)
-                        raise e from None
-                    finally:
-                        _ai_pipeline_name.set(None)
-                    return res
-
-        if inspect.iscoroutinefunction(f):
-            return wraps(f)(async_wrapped)
-        else:
-            return wraps(f)(sync_wrapped)
-
-    return decorator
-
-
-def record_token_usage(span, prompt_tokens=None, completion_tokens=None, total_tokens=None):
-    # type: (Span, Optional[int], Optional[int], Optional[int]) -> None
-    ai_pipeline_name = get_ai_pipeline_name()
-    if ai_pipeline_name:
-        span.set_attribute(SPANDATA.AI_PIPELINE_NAME, ai_pipeline_name)
-    if prompt_tokens is not None:
-        span.set_attribute(SPANDATA.AI_PROMPT_TOKENS_USED, prompt_tokens)
-    if completion_tokens is not None:
-        span.set_attribute(SPANDATA.AI_COMPLETION_TOKENS_USED, completion_tokens)
-    if total_tokens is None and prompt_tokens is not None and completion_tokens is not None:
-        total_tokens = prompt_tokens + completion_tokens
-    if total_tokens is not None:
-        span.set_attribute(SPANDATA.AI_TOTAL_TOKENS_USED, total_tokens)
diff --git a/src/sentry_sdk_alpha/ai/utils.py b/src/sentry_sdk_alpha/ai/utils.py
deleted file mode 100644
index b1789aa9582915..00000000000000
--- a/src/sentry_sdk_alpha/ai/utils.py
+++ /dev/null
@@ -1,32 +0,0 @@
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from typing import Any
-
-from sentry_sdk_alpha.tracing import Span
-from sentry_sdk_alpha.utils import logger
-
-
-def _normalize_data(data):
-    # type: (Any) -> Any
-
-    # convert pydantic data (e.g. OpenAI v1+) to json compatible format
-    if hasattr(data, "model_dump"):
-        try:
-            return data.model_dump()
-        except Exception as e:
-            logger.warning("Could not convert pydantic data to JSON: %s", e)
-            return data
-    if isinstance(data, list):
-        if len(data) == 1:
-            return _normalize_data(data[0])  # remove empty dimensions
-        return list(_normalize_data(x) for x in data)
-    if isinstance(data, dict):
-        return {k: _normalize_data(v) for (k, v) in data.items()}
-    return data
-
-
-def set_data_normalized(span, key, value):
-    # type: (Span, str, Any) -> None
-    normalized = _normalize_data(value)
-    span.set_attribute(key, normalized)
diff --git a/src/sentry_sdk_alpha/api.py b/src/sentry_sdk_alpha/api.py
deleted file mode 100644
index 78eaf237951796..00000000000000
--- a/src/sentry_sdk_alpha/api.py
+++ /dev/null
@@ -1,328 +0,0 @@
-import inspect
-from contextlib import contextmanager
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha import Client, tracing_utils
-from sentry_sdk_alpha._init_implementation import init
-from sentry_sdk_alpha.crons import monitor
-from sentry_sdk_alpha.opentelemetry.scope import PotelScope as Scope
-from sentry_sdk_alpha.opentelemetry.scope import (
-    isolation_scope,
-    new_scope,
-    use_isolation_scope,
-    use_scope,
-)
-
-# TODO-neel-potel make 2 scope strategies/impls and switch
-from sentry_sdk_alpha.scope import Scope as BaseScope
-from sentry_sdk_alpha.tracing import trace
-
-if TYPE_CHECKING:
-    from collections.abc import Callable, Generator, Mapping
-    from typing import Any, Dict, Optional, TypeVar, Union
-
-    import sentry_sdk_alpha
-
-    T = TypeVar("T")
-    F = TypeVar("F", bound=Callable[..., Any])
-
-
-# When changing this, update __all__ in __init__.py too
-__all__ = [
-    "init",
-    "add_attachment",
-    "add_breadcrumb",
-    "capture_event",
-    "capture_exception",
-    "capture_message",
-    "continue_trace",
-    "flush",
-    "get_baggage",
-    "get_client",
-    "get_global_scope",
-    "get_isolation_scope",
-    "get_current_scope",
-    "get_current_span",
-    "get_traceparent",
-    "is_initialized",
-    "isolation_scope",
-    "last_event_id",
-    "new_scope",
-    "set_context",
-    "set_extra",
-    "set_level",
-    "set_tag",
-    "set_tags",
-    "set_user",
-    "start_span",
-    "start_transaction",
-    "trace",
-    "monitor",
-    "use_scope",
-    "use_isolation_scope",
-]
-
-
-def scopemethod(f):
-    # type: (F) -> F
-    f.__doc__ = "{}\n\n{}".format(
-        "Alias for :py:meth:`sentry_sdk.Scope.%s`" % f.__name__,
-        inspect.getdoc(getattr(Scope, f.__name__)),
-    )
-    return f
-
-
-def clientmethod(f):
-    # type: (F) -> F
-    f.__doc__ = "{}\n\n{}".format(
-        "Alias for :py:meth:`sentry_sdk.Client.%s`" % f.__name__,
-        inspect.getdoc(getattr(Client, f.__name__)),
-    )
-    return f
-
-
-@scopemethod
-def get_client():
-    # type: () -> sentry_sdk.client.BaseClient
-    return Scope.get_client()
-
-
-def is_initialized():
-    # type: () -> bool
-    """
-    .. versionadded:: 2.0.0
-
-    Returns whether Sentry has been initialized or not.
-
-    If a client is available and the client is active
-    (meaning it is configured to send data) then
-    Sentry is initialized.
-    """
-    return get_client().is_active()
-
-
-@scopemethod
-def get_global_scope():
-    # type: () -> BaseScope
-    return Scope.get_global_scope()
-
-
-@scopemethod
-def get_isolation_scope():
-    # type: () -> Scope
-    return Scope.get_isolation_scope()
-
-
-@scopemethod
-def get_current_scope():
-    # type: () -> Scope
-    return Scope.get_current_scope()
-
-
-@scopemethod
-def last_event_id():
-    # type: () -> Optional[str]
-    """
-    See :py:meth:`sentry_sdk.Scope.last_event_id` documentation regarding
-    this method's limitations.
-    """
-    return Scope.last_event_id()
-
-
-@scopemethod
-def capture_event(
-    event,  # type: sentry_sdk._types.Event
-    hint=None,  # type: Optional[sentry_sdk._types.Hint]
-    scope=None,  # type: Optional[Any]
-    **scope_kwargs,  # type: Any
-):
-    # type: (...) -> Optional[str]
-    return get_current_scope().capture_event(event, hint, scope=scope, **scope_kwargs)
-
-
-@scopemethod
-def capture_message(
-    message,  # type: str
-    level=None,  # type: Optional[sentry_sdk._types.LogLevelStr]
-    scope=None,  # type: Optional[Any]
-    **scope_kwargs,  # type: Any
-):
-    # type: (...) -> Optional[str]
-    return get_current_scope().capture_message(message, level, scope=scope, **scope_kwargs)
-
-
-@scopemethod
-def capture_exception(
-    error=None,  # type: Optional[Union[BaseException, sentry_sdk._types.ExcInfo]]
-    scope=None,  # type: Optional[Any]
-    **scope_kwargs,  # type: Any
-):
-    # type: (...) -> Optional[str]
-    return get_current_scope().capture_exception(error, scope=scope, **scope_kwargs)
-
-
-@scopemethod
-def add_attachment(
-    bytes=None,  # type: Union[None, bytes, Callable[[], bytes]]
-    filename=None,  # type: Optional[str]
-    path=None,  # type: Optional[str]
-    content_type=None,  # type: Optional[str]
-    add_to_transactions=False,  # type: bool
-):
-    # type: (...) -> None
-    return get_isolation_scope().add_attachment(
-        bytes, filename, path, content_type, add_to_transactions
-    )
-
-
-@scopemethod
-def add_breadcrumb(
-    crumb=None,  # type: Optional[sentry_sdk._types.Breadcrumb]
-    hint=None,  # type: Optional[sentry_sdk._types.BreadcrumbHint]
-    **kwargs,  # type: Any
-):
-    # type: (...) -> None
-    return get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs)
-
-
-@scopemethod
-def set_tag(key, value):
-    # type: (str, Any) -> None
-    return get_isolation_scope().set_tag(key, value)
-
-
-@scopemethod
-def set_tags(tags):
-    # type: (Mapping[str, object]) -> None
-    return get_isolation_scope().set_tags(tags)
-
-
-@scopemethod
-def set_context(key, value):
-    # type: (str, Dict[str, Any]) -> None
-    return get_isolation_scope().set_context(key, value)
-
-
-@scopemethod
-def set_extra(key, value):
-    # type: (str, Any) -> None
-    return get_isolation_scope().set_extra(key, value)
-
-
-@scopemethod
-def set_user(value):
-    # type: (Optional[Dict[str, Any]]) -> None
-    return get_isolation_scope().set_user(value)
-
-
-@scopemethod
-def set_level(value):
-    # type: (sentry_sdk._types.LogLevelStr) -> None
-    return get_isolation_scope().set_level(value)
-
-
-@clientmethod
-def flush(
-    timeout=None,  # type: Optional[float]
-    callback=None,  # type: Optional[Callable[[int, float], None]]
-):
-    # type: (...) -> None
-    return get_client().flush(timeout=timeout, callback=callback)
-
-
-def start_span(**kwargs):
-    # type: (Any) -> sentry_sdk.tracing.Span
-    """
-    Start and return a span.
-
-    This is the entry point to manual tracing instrumentation.
-
-    A tree structure can be built by adding child spans to the span.
-    To start a new child span within the span, call the `start_child()` method.
-
-    When used as a context manager, spans are automatically finished at the end
-    of the `with` block. If not using context managers, call the `finish()`
-    method.
-    """
-    return get_current_scope().start_span(**kwargs)
-
-
-def start_transaction(
-    transaction=None,  # type: Optional[sentry_sdk.tracing.Span]
-    **kwargs,  # type: Any
-):
-    # type: (...) -> sentry_sdk.tracing.Span
-    """
-    .. deprecated:: 3.0.0
-        This function is deprecated and will be removed in a future release.
-        Use :py:meth:`sentry_sdk.start_span` instead.
-
-    Start and return a transaction on the current scope.
-
-    Start an existing transaction if given, otherwise create and start a new
-    transaction with kwargs.
-
-    This is the entry point to manual tracing instrumentation.
-
-    A tree structure can be built by adding child spans to the transaction,
-    and child spans to other spans. To start a new child span within the
-    transaction or any span, call the respective `.start_child()` method.
-
-    Every child span must be finished before the transaction is finished,
-    otherwise the unfinished spans are discarded.
-
-    When used as context managers, spans and transactions are automatically
-    finished at the end of the `with` block. If not using context managers,
-    call the `.finish()` method.
-
-    When the transaction is finished, it will be sent to Sentry with all its
-    finished child spans.
-
-    :param transaction: The transaction to start. If omitted, we create and
-        start a new transaction.
-    :param kwargs: Optional keyword arguments to be passed to the Transaction
-        constructor. See :py:class:`sentry_sdk.tracing.Transaction` for
-        available arguments.
-    """
-    return start_span(
-        span=transaction,
-        **kwargs,
-    )
-
-
-def get_current_span(scope=None):
-    # type: (Optional[Scope]) -> Optional[sentry_sdk.tracing.Span]
-    """
-    Returns the currently active span if there is one running, otherwise `None`
-    """
-    return tracing_utils.get_current_span(scope)
-
-
-def get_traceparent():
-    # type: () -> Optional[str]
-    """
-    Returns the traceparent either from the active span or from the scope.
-    """
-    return get_current_scope().get_traceparent()
-
-
-def get_baggage():
-    # type: () -> Optional[str]
-    """
-    Returns Baggage either from the active span or from the scope.
-    """
-    baggage = get_current_scope().get_baggage()
-    if baggage is not None:
-        return baggage.serialize()
-
-    return None
-
-
-@contextmanager
-def continue_trace(environ_or_headers):
-    # type: (Dict[str, Any]) -> Generator[None, None, None]
-    """
-    Sets the propagation context from environment or headers to continue an incoming trace.
-    """
-    with get_isolation_scope().continue_trace(environ_or_headers):
-        yield
diff --git a/src/sentry_sdk_alpha/attachments.py b/src/sentry_sdk_alpha/attachments.py
deleted file mode 100644
index 30bccf87b84b9d..00000000000000
--- a/src/sentry_sdk_alpha/attachments.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import mimetypes
-import os
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha.envelope import Item, PayloadRef
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Optional, Union
-
-
-class Attachment:
-    """Additional files/data to send along with an event.
-
-    This class stores attachments that can be sent along with an event. Attachments are files or other data, e.g.
-    config or log files, that are relevant to an event. Attachments are set on the ``Scope``, and are sent along with
-    all non-transaction events (or all events including transactions if ``add_to_transactions`` is ``True``) that are
-    captured within the ``Scope``.
-
-    To add an attachment to a ``Scope``, use :py:meth:`sentry_sdk.Scope.add_attachment`. The parameters for
-    ``add_attachment`` are the same as the parameters for this class's constructor.
-
-    :param bytes: Raw bytes of the attachment, or a function that returns the raw bytes. Must be provided unless
-                  ``path`` is provided.
-    :param filename: The filename of the attachment. Must be provided unless ``path`` is provided.
-    :param path: Path to a file to attach. Must be provided unless ``bytes`` is provided.
-    :param content_type: The content type of the attachment. If not provided, it will be guessed from the ``filename``
-                         parameter, if available, or the ``path`` parameter if ``filename`` is ``None``.
-    :param add_to_transactions: Whether to add this attachment to transactions. Defaults to ``False``.
-    """
-
-    def __init__(
-        self,
-        bytes=None,  # type: Union[None, bytes, Callable[[], bytes]]
-        filename=None,  # type: Optional[str]
-        path=None,  # type: Optional[str]
-        content_type=None,  # type: Optional[str]
-        add_to_transactions=False,  # type: bool
-    ):
-        # type: (...) -> None
-        if bytes is None and path is None:
-            raise TypeError("path or raw bytes required for attachment")
-        if filename is None and path is not None:
-            filename = os.path.basename(path)
-        if filename is None:
-            raise TypeError("filename is required for attachment")
-        if content_type is None:
-            content_type = mimetypes.guess_type(filename)[0]
-        self.bytes = bytes
-        self.filename = filename
-        self.path = path
-        self.content_type = content_type
-        self.add_to_transactions = add_to_transactions
-
-    def to_envelope_item(self):
-        # type: () -> Item
-        """Returns an envelope item for this attachment."""
-        payload = None  # type: Union[None, PayloadRef, bytes]
-        if self.bytes is not None:
-            if callable(self.bytes):
-                payload = self.bytes()
-            else:
-                payload = self.bytes
-        else:
-            payload = PayloadRef(path=self.path)
-        return Item(
-            payload=payload,
-            type="attachment",
-            content_type=self.content_type,
-            filename=self.filename,
-        )
-
-    def __repr__(self):
-        # type: () -> str
-        return f"<Attachment {self.filename!r}>"
diff --git a/src/sentry_sdk_alpha/client.py b/src/sentry_sdk_alpha/client.py
deleted file mode 100644
index 54482cc25dfebf..00000000000000
--- a/src/sentry_sdk_alpha/client.py
+++ /dev/null
@@ -1,975 +0,0 @@
-import os
-import random
-import socket
-import uuid
-from collections.abc import Mapping
-from datetime import datetime, timezone
-from importlib import import_module
-from typing import TYPE_CHECKING, Dict, List, cast, overload
-
-from sentry_sdk_alpha._compat import check_uwsgi_thread_support
-from sentry_sdk_alpha.consts import (
-    DEFAULT_MAX_VALUE_LENGTH,
-    DEFAULT_OPTIONS,
-    SPANDATA,
-    VERSION,
-    ClientConstructor,
-)
-from sentry_sdk_alpha.envelope import Envelope
-from sentry_sdk_alpha.integrations import setup_integrations
-from sentry_sdk_alpha.integrations.dedupe import DedupeIntegration
-from sentry_sdk_alpha.monitor import Monitor
-from sentry_sdk_alpha.profiler.continuous_profiler import setup_continuous_profiler
-from sentry_sdk_alpha.profiler.transaction_profiler import (
-    Profile,
-    has_profiling_enabled,
-    setup_profiler,
-)
-from sentry_sdk_alpha.scrubber import EventScrubber
-from sentry_sdk_alpha.serializer import serialize
-from sentry_sdk_alpha.sessions import SessionFlusher
-from sentry_sdk_alpha.spotlight import setup_spotlight
-from sentry_sdk_alpha.tracing import trace
-from sentry_sdk_alpha.transport import BaseHttpTransport, make_transport
-from sentry_sdk_alpha.utils import (
-    AnnotatedValue,
-    ContextVar,
-    capture_internal_exceptions,
-    current_stacktrace,
-    env_to_bool,
-    format_timestamp,
-    get_default_release,
-    get_sdk_name,
-    get_type_name,
-    handle_in_app,
-    logger,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Callable, Sequence
-    from typing import Any, Optional, Type, TypeVar, Union
-
-    from sentry_sdk_alpha._log_batcher import LogBatcher
-    from sentry_sdk_alpha._types import Event, Hint, Log, SDKInfo
-    from sentry_sdk_alpha.integrations import Integration
-    from sentry_sdk_alpha.scope import Scope
-    from sentry_sdk_alpha.session import Session
-    from sentry_sdk_alpha.spotlight import SpotlightClient
-    from sentry_sdk_alpha.transport import Transport
-
-    I = TypeVar("I", bound=Integration)  # noqa: E741
-
-_client_init_debug = ContextVar("client_init_debug")
-
-
-SDK_INFO = {
-    "name": "sentry.python",  # SDK name will be overridden after integrations have been loaded with sentry_sdk.integrations.setup_integrations()
-    "version": VERSION,
-    "packages": [{"name": "pypi:sentry-sdk", "version": VERSION}],
-}  # type: SDKInfo
-
-
-def _get_options(*args, **kwargs):
-    # type: (*Optional[str], **Any) -> Dict[str, Any]
-    if args and (isinstance(args[0], (bytes, str)) or args[0] is None):
-        dsn = args[0]  # type: Optional[str]
-        args = args[1:]
-    else:
-        dsn = None
-
-    if len(args) > 1:
-        raise TypeError("Only single positional argument is expected")
-
-    rv = dict(DEFAULT_OPTIONS)
-    options = dict(*args, **kwargs)
-    if dsn is not None and options.get("dsn") is None:
-        options["dsn"] = dsn
-
-    for key, value in options.items():
-        if key not in rv:
-            raise TypeError(f"Unknown option {key!r}")
-
-        rv[key] = value
-
-    if rv["dsn"] is None:
-        rv["dsn"] = os.environ.get("SENTRY_DSN")
-
-    if rv["release"] is None:
-        rv["release"] = get_default_release()
-
-    if rv["environment"] is None:
-        rv["environment"] = os.environ.get("SENTRY_ENVIRONMENT") or "production"
-
-    if rv["debug"] is None:
-        rv["debug"] = env_to_bool(os.environ.get("SENTRY_DEBUG", "False"), strict=True)
-
-    if rv["server_name"] is None and hasattr(socket, "gethostname"):
-        rv["server_name"] = socket.gethostname()
-
-    if rv["project_root"] is None:
-        try:
-            project_root = os.getcwd()
-        except Exception:
-            project_root = None
-
-        rv["project_root"] = project_root
-
-    if rv["event_scrubber"] is None:
-        rv["event_scrubber"] = EventScrubber(
-            send_default_pii=(False if rv["send_default_pii"] is None else rv["send_default_pii"])
-        )
-
-    if rv["socket_options"] and not isinstance(rv["socket_options"], list):
-        logger.warning(
-            "Ignoring socket_options because of unexpected format. See urllib3.HTTPConnection.socket_options for the expected format."
-        )
-        rv["socket_options"] = None
-
-    return rv
-
-
-class BaseClient:
-    """
-    .. versionadded:: 2.0.0
-
-    The basic definition of a client that is used for sending data to Sentry.
-    """
-
-    spotlight = None  # type: Optional[SpotlightClient]
-
-    def __init__(self, options=None):
-        # type: (Optional[Dict[str, Any]]) -> None
-        self.options = options if options is not None else DEFAULT_OPTIONS  # type: Dict[str, Any]
-
-        self.transport = None  # type: Optional[Transport]
-        self.monitor = None  # type: Optional[Monitor]
-        self.log_batcher = None  # type: Optional[LogBatcher]
-
-    def __getstate__(self, *args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        return {"options": {}}
-
-    def __setstate__(self, *args, **kwargs):
-        # type: (*Any, **Any) -> None
-        pass
-
-    @property
-    def dsn(self):
-        # type: () -> Optional[str]
-        return None
-
-    def should_send_default_pii(self):
-        # type: () -> bool
-        return False
-
-    def is_active(self):
-        # type: () -> bool
-        """
-        .. versionadded:: 2.0.0
-
-        Returns whether the client is active (able to send data to Sentry)
-        """
-        return False
-
-    def capture_event(self, *args, **kwargs):
-        # type: (*Any, **Any) -> Optional[str]
-        return None
-
-    def _capture_experimental_log(self, scope, log):
-        # type: (Scope, Log) -> None
-        pass
-
-    def capture_session(self, *args, **kwargs):
-        # type: (*Any, **Any) -> None
-        return None
-
-    if TYPE_CHECKING:
-
-        @overload
-        def get_integration(self, name_or_class):
-            # type: (str) -> Optional[Integration]
-            ...
-
-        @overload
-        def get_integration(self, name_or_class):
-            # type: (type[I]) -> Optional[I]
-            ...
-
-    def get_integration(self, name_or_class):
-        # type: (Union[str, type[Integration]]) -> Optional[Integration]
-        return None
-
-    def close(self, *args, **kwargs):
-        # type: (*Any, **Any) -> None
-        return None
-
-    def flush(self, *args, **kwargs):
-        # type: (*Any, **Any) -> None
-        return None
-
-    def __enter__(self):
-        # type: () -> BaseClient
-        return self
-
-    def __exit__(self, exc_type, exc_value, tb):
-        # type: (Any, Any, Any) -> None
-        return None
-
-
-class NonRecordingClient(BaseClient):
-    """
-    .. versionadded:: 2.0.0
-
-    A client that does not send any events to Sentry. This is used as a fallback when the Sentry SDK is not yet initialized.
-    """
-
-    pass
-
-
-class _Client(BaseClient):
-    """
-    The client is internally responsible for capturing the events and
-    forwarding them to sentry through the configured transport.  It takes
-    the client options as keyword arguments and optionally the DSN as first
-    argument.
-
-    Alias of :py:class:`sentry_sdk.Client`. (Was created for better intelisense support)
-    """
-
-    def __init__(self, *args, **kwargs):
-        # type: (*Any, **Any) -> None
-        super().__init__(options=get_options(*args, **kwargs))
-        self._init_impl()
-
-    def __getstate__(self):
-        # type: () -> Any
-        return {"options": self.options}
-
-    def __setstate__(self, state):
-        # type: (Any) -> None
-        self.options = state["options"]
-        self._init_impl()
-
-    def _setup_instrumentation(self, functions_to_trace):
-        # type: (Sequence[Dict[str, str]]) -> None
-        """
-        Instruments the functions given in the list `functions_to_trace` with the `@sentry_sdk.tracing.trace` decorator.
-        """
-        for function in functions_to_trace:
-            class_name = None
-            function_qualname = function["qualified_name"]
-            module_name, function_name = function_qualname.rsplit(".", 1)
-
-            try:
-                # Try to import module and function
-                # ex: "mymodule.submodule.funcname"
-
-                module_obj = import_module(module_name)
-                function_obj = getattr(module_obj, function_name)
-                setattr(module_obj, function_name, trace(function_obj))
-                logger.debug("Enabled tracing for %s", function_qualname)
-            except ModuleNotFoundError:
-                try:
-                    # Try to import a class
-                    # ex: "mymodule.submodule.MyClassName.member_function"
-
-                    module_name, class_name = module_name.rsplit(".", 1)
-                    module_obj = import_module(module_name)
-                    class_obj = getattr(module_obj, class_name)
-                    function_obj = getattr(class_obj, function_name)
-                    function_type = type(class_obj.__dict__[function_name])
-                    traced_function = trace(function_obj)
-
-                    if function_type in (staticmethod, classmethod):
-                        traced_function = staticmethod(traced_function)
-
-                    setattr(class_obj, function_name, traced_function)
-                    setattr(module_obj, class_name, class_obj)
-                    logger.debug("Enabled tracing for %s", function_qualname)
-
-                except Exception as e:
-                    logger.warning(
-                        "Can not enable tracing for '%s'. (%s) Please check your `functions_to_trace` parameter.",
-                        function_qualname,
-                        e,
-                    )
-
-            except Exception as e:
-                logger.warning(
-                    "Can not enable tracing for '%s'. (%s) Please check your `functions_to_trace` parameter.",
-                    function_qualname,
-                    e,
-                )
-
-    def _init_impl(self):
-        # type: () -> None
-        old_debug = _client_init_debug.get(False)
-
-        def _capture_envelope(envelope):
-            # type: (Envelope) -> None
-            if self.transport is not None:
-                self.transport.capture_envelope(envelope)
-
-        try:
-            _client_init_debug.set(self.options["debug"])
-            self.transport = make_transport(self.options)
-
-            self.monitor = None
-            if self.transport:
-                if self.options["enable_backpressure_handling"]:
-                    self.monitor = Monitor(self.transport)
-
-            self.session_flusher = SessionFlusher(capture_func=_capture_envelope)
-
-            experiments = self.options.get("_experiments", {})
-            self.log_batcher = None
-            if experiments.get("enable_logs", False):
-                from sentry_sdk_alpha._log_batcher import LogBatcher
-
-                self.log_batcher = LogBatcher(capture_func=_capture_envelope)
-
-            max_request_body_size = ("always", "never", "small", "medium")
-            if self.options["max_request_body_size"] not in max_request_body_size:
-                raise ValueError(
-                    "Invalid value for max_request_body_size. Must be one of {}".format(
-                        max_request_body_size
-                    )
-                )
-
-            self.integrations = setup_integrations(
-                self.options["integrations"],
-                with_defaults=self.options["default_integrations"],
-                with_auto_enabling_integrations=self.options["auto_enabling_integrations"],
-                disabled_integrations=self.options["disabled_integrations"],
-            )
-
-            spotlight_config = self.options.get("spotlight")
-            if spotlight_config is None and "SENTRY_SPOTLIGHT" in os.environ:
-                spotlight_env_value = os.environ["SENTRY_SPOTLIGHT"]
-                spotlight_config = env_to_bool(spotlight_env_value, strict=True)
-                self.options["spotlight"] = (
-                    spotlight_config if spotlight_config is not None else spotlight_env_value
-                )
-
-            if self.options.get("spotlight"):
-                self.spotlight = setup_spotlight(self.options)
-                if not self.options["dsn"]:
-                    sample_all = lambda *_args, **_kwargs: 1.0
-                    self.options["send_default_pii"] = True
-                    self.options["error_sampler"] = sample_all
-                    self.options["traces_sampler"] = sample_all
-                    self.options["profiles_sampler"] = sample_all
-
-            sdk_name = get_sdk_name(list(self.integrations.keys()))
-            SDK_INFO["name"] = sdk_name
-            logger.debug("Setting SDK name to '%s'", sdk_name)
-
-            if has_profiling_enabled(self.options):
-                try:
-                    setup_profiler(self.options)
-                except Exception as e:
-                    logger.debug("Can not set up profiler. (%s)", e)
-            else:
-                try:
-                    setup_continuous_profiler(
-                        self.options,
-                        sdk_info=SDK_INFO,
-                        capture_func=_capture_envelope,
-                    )
-                except Exception as e:
-                    logger.debug("Can not set up continuous profiler. (%s)", e)
-
-            from sentry_sdk_alpha.opentelemetry.tracing import (
-                patch_readable_span,
-                setup_sentry_tracing,
-            )
-
-            patch_readable_span()
-            setup_sentry_tracing()
-        finally:
-            _client_init_debug.set(old_debug)
-
-        self._setup_instrumentation(self.options.get("functions_to_trace", []))
-
-        if (
-            self.monitor
-            or self.log_batcher
-            or has_profiling_enabled(self.options)
-            or isinstance(self.transport, BaseHttpTransport)
-        ):
-            # If we have anything on that could spawn a background thread, we
-            # need to check if it's safe to use them.
-            check_uwsgi_thread_support()
-
-    def is_active(self):
-        # type: () -> bool
-        """
-        .. versionadded:: 2.0.0
-
-        Returns whether the client is active (able to send data to Sentry)
-        """
-        return True
-
-    def should_send_default_pii(self):
-        # type: () -> bool
-        """
-        .. versionadded:: 2.0.0
-
-        Returns whether the client should send default PII (Personally Identifiable Information) data to Sentry.
-        """
-        return self.options.get("send_default_pii") or False
-
-    @property
-    def dsn(self):
-        # type: () -> Optional[str]
-        """Returns the configured DSN as string."""
-        return self.options["dsn"]
-
-    def _prepare_event(
-        self,
-        event,  # type: Event
-        hint,  # type: Hint
-        scope,  # type: Optional[Scope]
-    ):
-        # type: (...) -> Optional[Event]
-
-        previous_total_spans = None  # type: Optional[int]
-        previous_total_breadcrumbs = None  # type: Optional[int]
-
-        if event.get("timestamp") is None:
-            event["timestamp"] = datetime.now(timezone.utc)
-
-        if scope is not None:
-            is_transaction = event.get("type") == "transaction"
-            spans_before = len(cast(list[dict[str, object]], event.get("spans", [])))
-            event_ = scope.apply_to_event(event, hint, self.options)
-
-            # one of the event/error processors returned None
-            if event_ is None:
-                if self.transport:
-                    self.transport.record_lost_event(
-                        "event_processor",
-                        data_category=("transaction" if is_transaction else "error"),
-                    )
-                    if is_transaction:
-                        self.transport.record_lost_event(
-                            "event_processor",
-                            data_category="span",
-                            quantity=spans_before + 1,  # +1 for the transaction itself
-                        )
-                return None
-
-            event = event_  # type: Optional[Event]  # type: ignore[no-redef]
-            spans_delta = spans_before - len(cast(list[dict[str, object]], event.get("spans", [])))
-            if is_transaction and spans_delta > 0 and self.transport is not None:
-                self.transport.record_lost_event(
-                    "event_processor", data_category="span", quantity=spans_delta
-                )
-
-            dropped_spans = event.pop("_dropped_spans", 0) + spans_delta  # type: int
-            if dropped_spans > 0:
-                previous_total_spans = spans_before + dropped_spans
-            if scope._n_breadcrumbs_truncated > 0:
-                breadcrumbs = event.get("breadcrumbs", {})
-                values = (
-                    breadcrumbs.get("values", [])
-                    if not isinstance(breadcrumbs, AnnotatedValue)
-                    else []
-                )
-                previous_total_breadcrumbs = len(values) + scope._n_breadcrumbs_truncated
-
-        if (
-            self.options["attach_stacktrace"]
-            and "exception" not in event
-            and "stacktrace" not in event
-            and "threads" not in event
-        ):
-            with capture_internal_exceptions():
-                event["threads"] = {
-                    "values": [
-                        {
-                            "stacktrace": current_stacktrace(
-                                include_local_variables=self.options.get(
-                                    "include_local_variables", True
-                                ),
-                                max_value_length=self.options.get(
-                                    "max_value_length", DEFAULT_MAX_VALUE_LENGTH
-                                ),
-                            ),
-                            "crashed": False,
-                            "current": True,
-                        }
-                    ]
-                }
-
-        for key in "release", "environment", "server_name", "dist":
-            if event.get(key) is None and self.options[key] is not None:
-                event[key] = str(self.options[key]).strip()
-        if event.get("sdk") is None:
-            sdk_info = dict(SDK_INFO)
-            sdk_info["integrations"] = sorted(self.integrations.keys())
-            event["sdk"] = sdk_info
-
-        if event.get("platform") is None:
-            event["platform"] = "python"
-
-        event = handle_in_app(
-            event,
-            self.options["in_app_exclude"],
-            self.options["in_app_include"],
-            self.options["project_root"],
-        )
-
-        if event is not None:
-            event_scrubber = self.options["event_scrubber"]
-            if event_scrubber:
-                event_scrubber.scrub_event(event)
-
-        if previous_total_spans is not None:
-            event["spans"] = AnnotatedValue(event.get("spans", []), {"len": previous_total_spans})
-        if previous_total_breadcrumbs is not None:
-            event["breadcrumbs"] = AnnotatedValue(
-                event.get("breadcrumbs", []), {"len": previous_total_breadcrumbs}
-            )
-        # Postprocess the event here so that annotated types do
-        # generally not surface in before_send
-        if event is not None:
-            event = cast(
-                "Event",
-                serialize(
-                    cast("Dict[str, Any]", event),
-                    max_request_body_size=self.options.get("max_request_body_size"),
-                    max_value_length=self.options.get("max_value_length"),
-                    custom_repr=self.options.get("custom_repr"),
-                ),
-            )
-
-        before_send = self.options["before_send"]
-        if before_send is not None and event is not None and event.get("type") != "transaction":
-            new_event = None  # type: Optional[Event]
-            with capture_internal_exceptions():
-                new_event = before_send(event, hint or {})
-            if new_event is None:
-                logger.info("before send dropped event")
-                if self.transport:
-                    self.transport.record_lost_event("before_send", data_category="error")
-
-                # If this is an exception, reset the DedupeIntegration. It still
-                # remembers the dropped exception as the last exception, meaning
-                # that if the same exception happens again and is not dropped
-                # in before_send, it'd get dropped by DedupeIntegration.
-                if event.get("exception"):
-                    DedupeIntegration.reset_last_seen()
-
-            event = new_event  # type: Optional[Event]  # type: ignore[no-redef]
-
-        before_send_transaction = self.options["before_send_transaction"]
-        if (
-            before_send_transaction is not None
-            and event is not None
-            and event.get("type") == "transaction"
-        ):
-            new_event = None
-            spans_before = len(cast(list[dict[str, object]], event.get("spans", [])))
-            with capture_internal_exceptions():
-                new_event = before_send_transaction(event, hint or {})
-            if new_event is None:
-                logger.info("before send transaction dropped event")
-                if self.transport:
-                    self.transport.record_lost_event(
-                        reason="before_send", data_category="transaction"
-                    )
-                    self.transport.record_lost_event(
-                        reason="before_send",
-                        data_category="span",
-                        quantity=spans_before + 1,  # +1 for the transaction itself
-                    )
-            else:
-                spans_delta = spans_before - len(
-                    cast(list[dict[str, object]], new_event.get("spans", []))
-                )
-                if spans_delta > 0 and self.transport is not None:
-                    self.transport.record_lost_event(
-                        reason="before_send", data_category="span", quantity=spans_delta
-                    )
-
-            event = new_event  # type: Optional[Event]  # type: ignore[no-redef]
-
-        return event
-
-    def _is_ignored_error(self, event, hint):
-        # type: (Event, Hint) -> bool
-        exc_info = hint.get("exc_info")
-        if exc_info is None:
-            return False
-
-        error = exc_info[0]
-        error_type_name = get_type_name(exc_info[0])
-        error_full_name = f"{exc_info[0].__module__}.{error_type_name}"
-
-        for ignored_error in self.options["ignore_errors"]:
-            # String types are matched against the type name in the
-            # exception only
-            if isinstance(ignored_error, str):
-                if ignored_error == error_full_name or ignored_error == error_type_name:
-                    return True
-            else:
-                if issubclass(error, ignored_error):
-                    return True
-
-        return False
-
-    def _should_capture(
-        self,
-        event,  # type: Event
-        hint,  # type: Hint
-        scope=None,  # type: Optional[Scope]
-    ):
-        # type: (...) -> bool
-        # Transactions are sampled independent of error events.
-        is_transaction = event.get("type") == "transaction"
-        if is_transaction:
-            return True
-
-        ignoring_prevents_recursion = scope is not None and not scope._should_capture
-        if ignoring_prevents_recursion:
-            return False
-
-        ignored_by_config_option = self._is_ignored_error(event, hint)
-        if ignored_by_config_option:
-            return False
-
-        return True
-
-    def _should_sample_error(
-        self,
-        event,  # type: Event
-        hint,  # type: Hint
-    ):
-        # type: (...) -> bool
-        error_sampler = self.options.get("error_sampler", None)
-
-        if callable(error_sampler):
-            with capture_internal_exceptions():
-                sample_rate = error_sampler(event, hint)
-        else:
-            sample_rate = self.options["sample_rate"]
-
-        try:
-            not_in_sample_rate = sample_rate < 1.0 and random.random() >= sample_rate
-        except NameError:
-            logger.warning(
-                "The provided error_sampler raised an error. Defaulting to sampling the event."
-            )
-
-            # If the error_sampler raised an error, we should sample the event, since the default behavior
-            # (when no sample_rate or error_sampler is provided) is to sample all events.
-            not_in_sample_rate = False
-        except TypeError:
-            parameter, verb = (
-                ("error_sampler", "returned")
-                if callable(error_sampler)
-                else ("sample_rate", "contains")
-            )
-            logger.warning(
-                "The provided %s %s an invalid value of %s. The value should be a float or a bool. Defaulting to sampling the event."
-                % (parameter, verb, repr(sample_rate))
-            )
-
-            # If the sample_rate has an invalid value, we should sample the event, since the default behavior
-            # (when no sample_rate or error_sampler is provided) is to sample all events.
-            not_in_sample_rate = False
-
-        if not_in_sample_rate:
-            # because we will not sample this event, record a "lost event".
-            if self.transport:
-                self.transport.record_lost_event("sample_rate", data_category="error")
-
-            return False
-
-        return True
-
-    def _update_session_from_event(
-        self,
-        session,  # type: Session
-        event,  # type: Event
-    ):
-        # type: (...) -> None
-
-        crashed = False
-        errored = False
-        user_agent = None
-
-        exceptions = (event.get("exception") or {}).get("values")
-        if exceptions:
-            errored = True
-            for error in exceptions:
-                if isinstance(error, AnnotatedValue):
-                    error = error.value or {}
-                mechanism = error.get("mechanism")
-                if isinstance(mechanism, Mapping) and mechanism.get("handled") is False:
-                    crashed = True
-                    break
-
-        user = event.get("user")
-
-        if session.user_agent is None:
-            headers = (event.get("request") or {}).get("headers")
-            headers_dict = headers if isinstance(headers, dict) else {}
-            for k, v in headers_dict.items():
-                if k.lower() == "user-agent":
-                    user_agent = v
-                    break
-
-        session.update(
-            status="crashed" if crashed else None,
-            user=user,
-            user_agent=user_agent,
-            errors=session.errors + (errored or crashed),
-        )
-
-    def capture_event(
-        self,
-        event,  # type: Event
-        hint=None,  # type: Optional[Hint]
-        scope=None,  # type: Optional[Scope]
-    ):
-        # type: (...) -> Optional[str]
-        """Captures an event.
-
-        :param event: A ready-made event that can be directly sent to Sentry.
-
-        :param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or a HTTP request object.
-
-        :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
-
-        :returns: An event ID. May be `None` if there is no DSN set or of if the SDK decided to discard the event for other reasons. In such situations setting `debug=True` on `init()` may help.
-        """
-        hint = dict(hint or ())  # type: Hint
-
-        if not self._should_capture(event, hint, scope):
-            return None
-
-        profile = event.pop("profile", None)
-
-        event_id = event.get("event_id")
-        if event_id is None:
-            event["event_id"] = event_id = uuid.uuid4().hex
-        event_opt = self._prepare_event(event, hint, scope)
-        if event_opt is None:
-            return None
-
-        # whenever we capture an event we also check if the session needs
-        # to be updated based on that information.
-        session = scope._session if scope else None
-        if session:
-            self._update_session_from_event(session, event)
-
-        is_transaction = event_opt.get("type") == "transaction"
-        is_checkin = event_opt.get("type") == "check_in"
-
-        if not is_transaction and not is_checkin and not self._should_sample_error(event, hint):
-            return None
-
-        attachments = hint.get("attachments")
-
-        trace_context = event_opt.get("contexts", {}).get("trace") or {}
-        dynamic_sampling_context = trace_context.pop("dynamic_sampling_context", {})
-
-        headers = {
-            "event_id": event_opt["event_id"],
-            "sent_at": format_timestamp(datetime.now(timezone.utc)),
-        }  # type: dict[str, object]
-
-        if dynamic_sampling_context:
-            headers["trace"] = dynamic_sampling_context
-
-        envelope = Envelope(headers=headers)
-
-        if is_transaction:
-            if isinstance(profile, Profile):
-                envelope.add_profile(profile.to_json(event_opt, self.options))
-            envelope.add_transaction(event_opt)
-        elif is_checkin:
-            envelope.add_checkin(event_opt)
-        else:
-            envelope.add_event(event_opt)
-
-        for attachment in attachments or ():
-            envelope.add_item(attachment.to_envelope_item())
-
-        return_value = None
-        if self.spotlight:
-            self.spotlight.capture_envelope(envelope)
-            return_value = event_id
-
-        if self.transport is not None:
-            self.transport.capture_envelope(envelope)
-            return_value = event_id
-
-        return return_value
-
-    def _capture_experimental_log(self, current_scope, log):
-        # type: (Scope, Log) -> None
-        logs_enabled = self.options["_experiments"].get("enable_logs", False)
-        if not logs_enabled:
-            return
-        isolation_scope = current_scope.get_isolation_scope()
-
-        log["attributes"]["sentry.sdk.name"] = SDK_INFO["name"]
-        log["attributes"]["sentry.sdk.version"] = SDK_INFO["version"]
-
-        server_name = self.options.get("server_name")
-        if server_name is not None and SPANDATA.SERVER_ADDRESS not in log["attributes"]:
-            log["attributes"][SPANDATA.SERVER_ADDRESS] = server_name
-
-        environment = self.options.get("environment")
-        if environment is not None and "sentry.environment" not in log["attributes"]:
-            log["attributes"]["sentry.environment"] = environment
-
-        release = self.options.get("release")
-        if release is not None and "sentry.release" not in log["attributes"]:
-            log["attributes"]["sentry.release"] = release
-
-        span = current_scope.span
-        if span is not None and "sentry.trace.parent_span_id" not in log["attributes"]:
-            log["attributes"]["sentry.trace.parent_span_id"] = span.span_id
-
-        if log.get("trace_id") is None:
-            transaction = current_scope.root_span
-            propagation_context = isolation_scope.get_active_propagation_context()
-            if transaction is not None:
-                log["trace_id"] = transaction.trace_id
-            elif propagation_context is not None:
-                log["trace_id"] = propagation_context.trace_id
-
-        # If debug is enabled, log the log to the console
-        debug = self.options.get("debug", False)
-        if debug:
-            logger.debug(f'[Sentry Logs] [{log.get("severity_text")}] {log.get("body")}')
-
-        before_send_log = self.options["_experiments"].get("before_send_log")
-        if before_send_log is not None:
-            log = before_send_log(log, {})
-        if log is None:
-            return
-
-        if self.log_batcher:
-            self.log_batcher.add(log)
-
-    def capture_session(
-        self, session  # type: Session
-    ):
-        # type: (...) -> None
-        if not session.release:
-            logger.info("Discarded session update because of missing release")
-        else:
-            self.session_flusher.add_session(session)
-
-    if TYPE_CHECKING:
-
-        @overload
-        def get_integration(self, name_or_class):
-            # type: (str) -> Optional[Integration]
-            ...
-
-        @overload
-        def get_integration(self, name_or_class):
-            # type: (type[I]) -> Optional[I]
-            ...
-
-    def get_integration(
-        self, name_or_class  # type: Union[str, Type[Integration]]
-    ):
-        # type: (...) -> Optional[Integration]
-        """Returns the integration for this client by name or class.
-        If the client does not have that integration then `None` is returned.
-        """
-        if isinstance(name_or_class, str):
-            integration_name = name_or_class
-        elif name_or_class.identifier is not None:
-            integration_name = name_or_class.identifier
-        else:
-            raise ValueError("Integration has no name")
-
-        return self.integrations.get(integration_name)
-
-    def close(
-        self,
-        timeout=None,  # type: Optional[float]
-        callback=None,  # type: Optional[Callable[[int, float], None]]
-    ):
-        # type: (...) -> None
-        """
-        Close the client and shut down the transport. Arguments have the same
-        semantics as :py:meth:`Client.flush`.
-        """
-        if self.transport is not None:
-            self.flush(timeout=timeout, callback=callback)
-
-            self.session_flusher.kill()
-
-            if self.log_batcher is not None:
-                self.log_batcher.kill()
-
-            if self.monitor:
-                self.monitor.kill()
-
-            self.transport.kill()
-            self.transport = None
-
-    def flush(
-        self,
-        timeout=None,  # type: Optional[float]
-        callback=None,  # type: Optional[Callable[[int, float], None]]
-    ):
-        # type: (...) -> None
-        """
-        Wait for the current events to be sent.
-
-        :param timeout: Wait for at most `timeout` seconds. If no `timeout` is provided, the `shutdown_timeout` option value is used.
-
-        :param callback: Is invoked with the number of pending events and the configured timeout.
-        """
-        if self.transport is not None:
-            if timeout is None:
-                timeout = self.options["shutdown_timeout"]
-            self.session_flusher.flush()
-
-            if self.log_batcher is not None:
-                self.log_batcher.flush()
-
-            self.transport.flush(timeout=timeout, callback=callback)
-
-    def __enter__(self):
-        # type: () -> _Client
-        return self
-
-    def __exit__(self, exc_type, exc_value, tb):
-        # type: (Any, Any, Any) -> None
-        self.close()
-
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    # Make mypy, PyCharm and other static analyzers think `get_options` is a
-    # type to have nicer autocompletion for params.
-    #
-    # Use `ClientConstructor` to define the argument types of `init` and
-    # `Dict[str, Any]` to tell static analyzers about the return type.
-
-    class get_options(ClientConstructor, dict[str, Any]):  # noqa: N801
-        pass
-
-    class Client(ClientConstructor, _Client):
-        pass
-
-else:
-    # Alias `get_options` for actual usage. Go through the lambda indirection
-    # to throw PyCharm off of the weakly typed signature (it would otherwise
-    # discover both the weakly typed signature of `_init` and our faked `init`
-    # type).
-
-    get_options = (lambda: _get_options)()
-    Client = (lambda: _Client)()
diff --git a/src/sentry_sdk_alpha/consts.py b/src/sentry_sdk_alpha/consts.py
deleted file mode 100644
index 9d5532519e2f89..00000000000000
--- a/src/sentry_sdk_alpha/consts.py
+++ /dev/null
@@ -1,1048 +0,0 @@
-import itertools
-from enum import Enum
-from typing import TYPE_CHECKING
-
-# up top to prevent circular import due to integration import
-DEFAULT_MAX_VALUE_LENGTH = 1024
-
-DEFAULT_MAX_STACK_FRAMES = 100
-DEFAULT_ADD_FULL_STACK = False
-
-
-# Also needs to be at the top to prevent circular import
-class EndpointType(Enum):
-    """
-    The type of an endpoint. This is an enum, rather than a constant, for historical reasons
-    (the old /store endpoint). The enum also preserve future compatibility, in case we ever
-    have a new endpoint.
-    """
-
-    ENVELOPE = "envelope"
-
-
-class CompressionAlgo(Enum):
-    GZIP = "gzip"
-    BROTLI = "br"
-
-
-if TYPE_CHECKING:
-    from collections.abc import Callable, Sequence
-    from typing import Any, Dict, List, Literal, Optional, Tuple, Type, TypedDict, Union
-
-    import sentry_sdk_alpha
-    from sentry_sdk_alpha._types import (
-        BreadcrumbProcessor,
-        ContinuousProfilerMode,
-        Event,
-        EventProcessor,
-        Hint,
-        ProfilerMode,
-        TracesSampler,
-        TransactionProcessor,
-    )
-
-    # Experiments are feature flags to enable and disable certain unstable SDK
-    # functionality. Changing them from the defaults (`None`) in production
-    # code is highly discouraged. They are not subject to any stability
-    # guarantees such as the ones from semantic versioning.
-    class Experiments(TypedDict, total=False):
-        max_spans: int | None
-        max_flags: int | None
-        record_sql_params: bool | None
-        continuous_profiling_auto_start: bool | None
-        continuous_profiling_mode: ContinuousProfilerMode | None
-        otel_powered_performance: bool | None
-        transport_zlib_compression_level: int | None
-        transport_compression_level: int | None
-        transport_compression_algo: CompressionAlgo | None
-        transport_num_pools: int | None
-        transport_http2: bool | None
-        enable_logs: bool | None
-
-
-DEFAULT_QUEUE_SIZE = 100
-DEFAULT_MAX_BREADCRUMBS = 100
-MATCH_ALL = r".*"
-
-FALSE_VALUES = [
-    "false",
-    "no",
-    "off",
-    "n",
-    "0",
-]
-
-
-class SPANDATA:
-    """
-    Additional information describing the type of the span.
-    See: https://develop.sentry.dev/sdk/performance/span-data-conventions/
-    """
-
-    AI_FREQUENCY_PENALTY = "ai.frequency_penalty"
-    """
-    Used to reduce repetitiveness of generated tokens.
-    Example: 0.5
-    """
-
-    AI_PRESENCE_PENALTY = "ai.presence_penalty"
-    """
-    Used to reduce repetitiveness of generated tokens.
-    Example: 0.5
-    """
-
-    AI_INPUT_MESSAGES = "ai.input_messages"
-    """
-    The input messages to an LLM call.
-    Example: [{"role": "user", "message": "hello"}]
-    """
-
-    AI_MODEL_ID = "ai.model_id"
-    """
-    The unique descriptor of the model being execugted
-    Example: gpt-4
-    """
-
-    AI_METADATA = "ai.metadata"
-    """
-    Extra metadata passed to an AI pipeline step.
-    Example: {"executed_function": "add_integers"}
-    """
-
-    AI_TAGS = "ai.tags"
-    """
-    Tags that describe an AI pipeline step.
-    Example: {"executed_function": "add_integers"}
-    """
-
-    AI_STREAMING = "ai.streaming"
-    """
-    Whether or not the AI model call's repsonse was streamed back asynchronously
-    Example: true
-    """
-
-    AI_TEMPERATURE = "ai.temperature"
-    """
-    For an AI model call, the temperature parameter. Temperature essentially means how random the output will be.
-    Example: 0.5
-    """
-
-    AI_TOP_P = "ai.top_p"
-    """
-    For an AI model call, the top_p parameter. Top_p essentially controls how random the output will be.
-    Example: 0.5
-    """
-
-    AI_TOP_K = "ai.top_k"
-    """
-    For an AI model call, the top_k parameter. Top_k essentially controls how random the output will be.
-    Example: 35
-    """
-
-    AI_FUNCTION_CALL = "ai.function_call"
-    """
-    For an AI model call, the function that was called. This is deprecated for OpenAI, and replaced by tool_calls
-    """
-
-    AI_TOOL_CALLS = "ai.tool_calls"
-    """
-    For an AI model call, the function that was called.
-    """
-
-    AI_TOOLS = "ai.tools"
-    """
-    For an AI model call, the functions that are available
-    """
-
-    AI_RESPONSE_FORMAT = "ai.response_format"
-    """
-    For an AI model call, the format of the response
-    """
-
-    AI_LOGIT_BIAS = "ai.logit_bias"
-    """
-    For an AI model call, the logit bias
-    """
-
-    AI_PREAMBLE = "ai.preamble"
-    """
-    For an AI model call, the preamble parameter.
-    Preambles are a part of the prompt used to adjust the model's overall behavior and conversation style.
-    Example: "You are now a clown."
-    """
-
-    AI_RAW_PROMPTING = "ai.raw_prompting"
-    """
-    Minimize pre-processing done to the prompt sent to the LLM.
-    Example: true
-    """
-    AI_RESPONSES = "ai.responses"
-    """
-    The responses to an AI model call. Always as a list.
-    Example: ["hello", "world"]
-    """
-
-    AI_SEED = "ai.seed"
-    """
-    The seed, ideally models given the same seed and same other parameters will produce the exact same output.
-    Example: 123.45
-    """
-
-    AI_CITATIONS = "ai.citations"
-    """
-    References or sources cited by the AI model in its response.
-    Example: ["Smith et al. 2020", "Jones 2019"]
-    """
-
-    AI_DOCUMENTS = "ai.documents"
-    """
-    Documents or content chunks used as context for the AI model.
-    Example: ["doc1.txt", "doc2.pdf"]
-    """
-
-    AI_SEARCH_QUERIES = "ai.search_queries"
-    """
-    Queries used to search for relevant context or documents.
-    Example: ["climate change effects", "renewable energy"]
-    """
-
-    AI_SEARCH_RESULTS = "ai.search_results"
-    """
-    Results returned from search queries for context.
-    Example: ["Result 1", "Result 2"]
-    """
-
-    AI_GENERATION_ID = "ai.generation_id"
-    """
-    Unique identifier for the completion.
-    Example: "gen_123abc"
-    """
-
-    AI_SEARCH_REQUIRED = "ai.is_search_required"
-    """
-    Boolean indicating if the model needs to perform a search.
-    Example: true
-    """
-
-    AI_FINISH_REASON = "ai.finish_reason"
-    """
-    The reason why the model stopped generating.
-    Example: "length"
-    """
-
-    AI_PIPELINE_NAME = "ai.pipeline.name"
-    """
-    Name of the AI pipeline or chain being executed.
-    Example: "qa-pipeline"
-    """
-
-    AI_PROMPT_TOKENS_USED = "ai.prompt_tokens.used"
-    """
-    The number of input prompt tokens used by the model.
-    Example: 10
-    """
-
-    AI_COMPLETION_TOKENS_USED = "ai.completion_tokens.used"
-    """
-    The number of output completion tokens used by the model.
-    Example: 10
-    """
-
-    AI_TOTAL_TOKENS_USED = "ai.total_tokens.used"
-    """
-    The total number of tokens (input + output) used by the request to the model.
-    Example: 20
-    """
-
-    AI_TEXTS = "ai.texts"
-    """
-    Raw text inputs provided to the model.
-    Example: ["What is machine learning?"]
-    """
-
-    AI_WARNINGS = "ai.warnings"
-    """
-    Warning messages generated during model execution.
-    Example: ["Token limit exceeded"]
-    """
-
-    DB_NAME = "db.name"
-    """
-    The name of the database being accessed. For commands that switch the database, this should be set to the target database (even if the command fails).
-    Example: myDatabase
-    """
-
-    DB_USER = "db.user"
-    """
-    The name of the database user used for connecting to the database.
-    See: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/database.md
-    Example: my_user
-    """
-
-    DB_OPERATION = "db.operation"
-    """
-    The name of the operation being executed, e.g. the MongoDB command name such as findAndModify, or the SQL keyword.
-    See: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/database.md
-    Example: findAndModify, HMSET, SELECT
-    """
-
-    DB_SYSTEM = "db.system"
-    """
-    An identifier for the database management system (DBMS) product being used.
-    See: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/database.md
-    Example: postgresql
-    """
-
-    DB_MONGODB_COLLECTION = "db.mongodb.collection"
-    """
-    The MongoDB collection being accessed within the database.
-    See: https://github.com/open-telemetry/semantic-conventions/blob/main/docs/database/mongodb.md#attributes
-    Example: public.users; customers
-    """
-
-    CACHE_HIT = "cache.hit"
-    """
-    A boolean indicating whether the requested data was found in the cache.
-    Example: true
-    """
-
-    CACHE_ITEM_SIZE = "cache.item_size"
-    """
-    The size of the requested data in bytes.
-    Example: 58
-    """
-
-    CACHE_KEY = "cache.key"
-    """
-    The key of the requested data.
-    Example: template.cache.some_item.867da7e2af8e6b2f3aa7213a4080edb3
-    """
-
-    NETWORK_PEER_ADDRESS = "network.peer.address"
-    """
-    Peer address of the network connection - IP address or Unix domain socket name.
-    Example: 10.1.2.80, /tmp/my.sock, localhost
-    """
-
-    NETWORK_PEER_PORT = "network.peer.port"
-    """
-    Peer port number of the network connection.
-    Example: 6379
-    """
-
-    HTTP_QUERY = "http.query"
-    """
-    The Query string present in the URL.
-    Example: ?foo=bar&bar=baz
-    """
-
-    HTTP_FRAGMENT = "http.fragment"
-    """
-    The Fragments present in the URL.
-    Example: #foo=bar
-    """
-
-    HTTP_METHOD = "http.method"
-    """
-    The HTTP method used.
-    Example: GET
-    """
-
-    HTTP_STATUS_CODE = "http.response.status_code"
-    """
-    The HTTP status code as an integer.
-    Example: 418
-    """
-
-    MESSAGING_DESTINATION_NAME = "messaging.destination.name"
-    """
-    The destination name where the message is being consumed from,
-    e.g. the queue name or topic.
-    """
-
-    MESSAGING_MESSAGE_ID = "messaging.message.id"
-    """
-    The message's identifier.
-    """
-
-    MESSAGING_MESSAGE_RETRY_COUNT = "messaging.message.retry.count"
-    """
-    Number of retries/attempts to process a message.
-    """
-
-    MESSAGING_MESSAGE_RECEIVE_LATENCY = "messaging.message.receive.latency"
-    """
-    The latency between when the task was enqueued and when it was started to be processed.
-    """
-
-    MESSAGING_SYSTEM = "messaging.system"
-    """
-    The messaging system's name, e.g. `kafka`, `aws_sqs`
-    """
-
-    SERVER_ADDRESS = "server.address"
-    """
-    Name of the database host.
-    Example: example.com
-    """
-
-    SERVER_PORT = "server.port"
-    """
-    Logical server port number
-    Example: 80; 8080; 443
-    """
-
-    SERVER_SOCKET_ADDRESS = "server.socket.address"
-    """
-    Physical server IP address or Unix socket address.
-    Example: 10.5.3.2
-    """
-
-    SERVER_SOCKET_PORT = "server.socket.port"
-    """
-    Physical server port.
-    Recommended: If different than server.port.
-    Example: 16456
-    """
-
-    CODE_FILEPATH = "code.filepath"
-    """
-    The source code file name that identifies the code unit as uniquely as possible (preferably an absolute file path).
-    Example: "/app/myapplication/http/handler/server.py"
-    """
-
-    CODE_LINENO = "code.lineno"
-    """
-    The line number in `code.filepath` best representing the operation. It SHOULD point within the code unit named in `code.function`.
-    Example: 42
-    """
-
-    CODE_FUNCTION = "code.function"
-    """
-    The method or function name, or equivalent (usually rightmost part of the code unit's name).
-    Example: "server_request"
-    """
-
-    CODE_NAMESPACE = "code.namespace"
-    """
-    The "namespace" within which `code.function` is defined. Usually the qualified class or module name, such that `code.namespace` + some separator + `code.function` form a unique identifier for the code unit.
-    Example: "http.handler"
-    """
-
-    THREAD_ID = "thread.id"
-    """
-    Identifier of a thread from where the span originated. This should be a string.
-    Example: "7972576320"
-    """
-
-    THREAD_NAME = "thread.name"
-    """
-    Label identifying a thread from where the span originated. This should be a string.
-    Example: "MainThread"
-    """
-
-    PROFILER_ID = "profiler_id"
-    """
-    Label identifying the profiler id that the span occurred in. This should be a string.
-    Example: "5249fbada8d5416482c2f6e47e337372"
-    """
-
-
-class SPANSTATUS:
-    """
-    The status of a Sentry span.
-
-    See: https://develop.sentry.dev/sdk/event-payloads/contexts/#trace-context
-    """
-
-    ABORTED = "aborted"
-    ALREADY_EXISTS = "already_exists"
-    CANCELLED = "cancelled"
-    DATA_LOSS = "data_loss"
-    DEADLINE_EXCEEDED = "deadline_exceeded"
-    FAILED_PRECONDITION = "failed_precondition"
-    INTERNAL_ERROR = "internal_error"
-    INVALID_ARGUMENT = "invalid_argument"
-    NOT_FOUND = "not_found"
-    OK = "ok"
-    OUT_OF_RANGE = "out_of_range"
-    PERMISSION_DENIED = "permission_denied"
-    RESOURCE_EXHAUSTED = "resource_exhausted"
-    UNAUTHENTICATED = "unauthenticated"
-    UNAVAILABLE = "unavailable"
-    UNIMPLEMENTED = "unimplemented"
-    UNKNOWN_ERROR = "unknown_error"
-
-
-class OP:
-    ANTHROPIC_MESSAGES_CREATE = "ai.messages.create.anthropic"
-    CACHE_GET = "cache.get"
-    CACHE_PUT = "cache.put"
-    COHERE_CHAT_COMPLETIONS_CREATE = "ai.chat_completions.create.cohere"
-    COHERE_EMBEDDINGS_CREATE = "ai.embeddings.create.cohere"
-    DB = "db"
-    DB_REDIS = "db.redis"
-    EVENT_DJANGO = "event.django"
-    FUNCTION = "function"
-    FUNCTION_AWS = "function.aws"
-    FUNCTION_GCP = "function.gcp"
-    GRAPHQL_EXECUTE = "graphql.execute"
-    GRAPHQL_MUTATION = "graphql.mutation"
-    GRAPHQL_PARSE = "graphql.parse"
-    GRAPHQL_RESOLVE = "graphql.resolve"
-    GRAPHQL_SUBSCRIPTION = "graphql.subscription"
-    GRAPHQL_QUERY = "graphql.query"
-    GRAPHQL_VALIDATE = "graphql.validate"
-    GRPC_CLIENT = "grpc.client"
-    GRPC_SERVER = "grpc.server"
-    HTTP_CLIENT = "http.client"
-    HTTP_CLIENT_STREAM = "http.client.stream"
-    HTTP_SERVER = "http.server"
-    MIDDLEWARE_DJANGO = "middleware.django"
-    MIDDLEWARE_LITESTAR = "middleware.litestar"
-    MIDDLEWARE_LITESTAR_RECEIVE = "middleware.litestar.receive"
-    MIDDLEWARE_LITESTAR_SEND = "middleware.litestar.send"
-    MIDDLEWARE_STARLETTE = "middleware.starlette"
-    MIDDLEWARE_STARLETTE_RECEIVE = "middleware.starlette.receive"
-    MIDDLEWARE_STARLETTE_SEND = "middleware.starlette.send"
-    MIDDLEWARE_STARLITE = "middleware.starlite"
-    MIDDLEWARE_STARLITE_RECEIVE = "middleware.starlite.receive"
-    MIDDLEWARE_STARLITE_SEND = "middleware.starlite.send"
-    OPENAI_CHAT_COMPLETIONS_CREATE = "ai.chat_completions.create.openai"
-    OPENAI_EMBEDDINGS_CREATE = "ai.embeddings.create.openai"
-    HUGGINGFACE_HUB_CHAT_COMPLETIONS_CREATE = "ai.chat_completions.create.huggingface_hub"
-    LANGCHAIN_PIPELINE = "ai.pipeline.langchain"
-    LANGCHAIN_RUN = "ai.run.langchain"
-    LANGCHAIN_TOOL = "ai.tool.langchain"
-    LANGCHAIN_AGENT = "ai.agent.langchain"
-    LANGCHAIN_CHAT_COMPLETIONS_CREATE = "ai.chat_completions.create.langchain"
-    QUEUE_PROCESS = "queue.process"
-    QUEUE_PUBLISH = "queue.publish"
-    QUEUE_SUBMIT_ARQ = "queue.submit.arq"
-    QUEUE_TASK_ARQ = "queue.task.arq"
-    QUEUE_SUBMIT_CELERY = "queue.submit.celery"
-    QUEUE_TASK_CELERY = "queue.task.celery"
-    QUEUE_TASK_RQ = "queue.task.rq"
-    QUEUE_SUBMIT_HUEY = "queue.submit.huey"
-    QUEUE_TASK_HUEY = "queue.task.huey"
-    QUEUE_SUBMIT_RAY = "queue.submit.ray"
-    QUEUE_TASK_RAY = "queue.task.ray"
-    SUBPROCESS = "subprocess"
-    SUBPROCESS_WAIT = "subprocess.wait"
-    SUBPROCESS_COMMUNICATE = "subprocess.communicate"
-    TEMPLATE_RENDER = "template.render"
-    VIEW_RENDER = "view.render"
-    VIEW_RESPONSE_RENDER = "view.response.render"
-    WEBSOCKET_SERVER = "websocket.server"
-    SOCKET_CONNECTION = "socket.connection"
-    SOCKET_DNS = "socket.dns"
-
-
-BAGGAGE_HEADER_NAME = "baggage"
-SENTRY_TRACE_HEADER_NAME = "sentry-trace"
-
-DEFAULT_SPAN_ORIGIN = "manual"
-DEFAULT_SPAN_NAME = ""
-
-
-# Transaction source
-# see https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations
-class TransactionSource(str, Enum):
-    COMPONENT = "component"
-    CUSTOM = "custom"
-    ROUTE = "route"
-    TASK = "task"
-    URL = "url"
-    VIEW = "view"
-
-    def __str__(self):
-        # type: () -> str
-        return self.value
-
-
-# These are typically high cardinality and the server hates them
-LOW_QUALITY_TRANSACTION_SOURCES = [
-    TransactionSource.URL,
-]
-
-SOURCE_FOR_STYLE = {
-    "endpoint": TransactionSource.COMPONENT,
-    "function_name": TransactionSource.COMPONENT,
-    "handler_name": TransactionSource.COMPONENT,
-    "method_and_path_pattern": TransactionSource.ROUTE,
-    "path": TransactionSource.URL,
-    "route_name": TransactionSource.COMPONENT,
-    "route_pattern": TransactionSource.ROUTE,
-    "uri_template": TransactionSource.ROUTE,
-    "url": TransactionSource.ROUTE,
-}
-
-
-# This type exists to trick mypy and PyCharm into thinking `init` and `Client`
-# take these arguments (even though they take opaque **kwargs)
-class ClientConstructor:
-
-    def __init__(
-        self,
-        dsn=None,  # type: Optional[str]
-        *,
-        max_breadcrumbs=DEFAULT_MAX_BREADCRUMBS,  # type: int
-        release=None,  # type: Optional[str]
-        environment=None,  # type: Optional[str]
-        server_name=None,  # type: Optional[str]
-        shutdown_timeout=2,  # type: float
-        integrations=[],  # type: Sequence[sentry_sdk.integrations.Integration]  # noqa: B006
-        in_app_include=[],  # type: List[str]  # noqa: B006
-        in_app_exclude=[],  # type: List[str]  # noqa: B006
-        default_integrations=True,  # type: bool
-        dist=None,  # type: Optional[str]
-        transport=None,  # type: Optional[Union[sentry_sdk.transport.Transport, Type[sentry_sdk.transport.Transport], Callable[[Event], None]]]
-        transport_queue_size=DEFAULT_QUEUE_SIZE,  # type: int
-        sample_rate=1.0,  # type: float
-        send_default_pii=None,  # type: Optional[bool]
-        http_proxy=None,  # type: Optional[str]
-        https_proxy=None,  # type: Optional[str]
-        ignore_errors=[],  # type: Sequence[Union[type, str]]  # noqa: B006
-        max_request_body_size="medium",  # type: str
-        socket_options=None,  # type: Optional[List[Tuple[int, int, int | bytes]]]
-        keep_alive=False,  # type: bool
-        before_send=None,  # type: Optional[EventProcessor]
-        before_breadcrumb=None,  # type: Optional[BreadcrumbProcessor]
-        debug=None,  # type: Optional[bool]
-        attach_stacktrace=False,  # type: bool
-        ca_certs=None,  # type: Optional[str]
-        traces_sample_rate=None,  # type: Optional[float]
-        traces_sampler=None,  # type: Optional[TracesSampler]
-        profiles_sample_rate=None,  # type: Optional[float]
-        profiles_sampler=None,  # type: Optional[TracesSampler]
-        profiler_mode=None,  # type: Optional[ProfilerMode]
-        profile_lifecycle="manual",  # type: Literal["manual", "trace"]
-        profile_session_sample_rate=None,  # type: Optional[float]
-        auto_enabling_integrations=True,  # type: bool
-        disabled_integrations=None,  # type: Optional[Sequence[sentry_sdk.integrations.Integration]]
-        auto_session_tracking=True,  # type: bool
-        send_client_reports=True,  # type: bool
-        _experiments={},  # type: Experiments  # noqa: B006
-        proxy_headers=None,  # type: Optional[Dict[str, str]]
-        before_send_transaction=None,  # type: Optional[TransactionProcessor]
-        project_root=None,  # type: Optional[str]
-        include_local_variables=True,  # type: Optional[bool]
-        include_source_context=True,  # type: Optional[bool]
-        trace_propagation_targets=[  # noqa: B006
-            MATCH_ALL
-        ],  # type: Optional[Sequence[str]]
-        functions_to_trace=[],  # type: Sequence[Dict[str, str]]  # noqa: B006
-        event_scrubber=None,  # type: Optional[sentry_sdk.scrubber.EventScrubber]
-        max_value_length=DEFAULT_MAX_VALUE_LENGTH,  # type: int
-        enable_backpressure_handling=True,  # type: bool
-        error_sampler=None,  # type: Optional[Callable[[Event, Hint], Union[float, bool]]]
-        enable_db_query_source=True,  # type: bool
-        db_query_source_threshold_ms=100,  # type: int
-        spotlight=None,  # type: Optional[Union[bool, str]]
-        cert_file=None,  # type: Optional[str]
-        key_file=None,  # type: Optional[str]
-        custom_repr=None,  # type: Optional[Callable[..., Optional[str]]]
-        add_full_stack=DEFAULT_ADD_FULL_STACK,  # type: bool
-        max_stack_frames=DEFAULT_MAX_STACK_FRAMES,  # type: Optional[int]
-    ):
-        # type: (...) -> None
-        """Initialize the Sentry SDK with the given parameters. All parameters described here can be used in a call to `sentry_sdk.init()`.
-
-        :param dsn: The DSN tells the SDK where to send the events.
-
-            If this option is not set, the SDK will just not send any data.
-
-            The `dsn` config option takes precedence over the environment variable.
-
-            Learn more about `DSN utilization `_.
-
-        :param debug: Turns debug mode on or off.
-
-            When `True`, the SDK will attempt to print out debugging information. This can be useful if something goes
-            wrong with event sending.
-
-            The default is always `False`. It's generally not recommended to turn it on in production because of the
-            increase in log output.
-
-            The `debug` config option takes precedence over the environment variable.
-
-        :param release: Sets the release.
-
-            If not set, the SDK will try to automatically configure a release out of the box but it's a better idea to
-            manually set it to guarantee that the release is in sync with your deploy integrations.
-
-            Release names are strings, but some formats are detected by Sentry and might be rendered differently.
-
-            See `the releases documentation `_ to learn how the SDK tries to
-            automatically configure a release.
-
-            The `release` config option takes precedence over the environment variable.
-
-            Learn more about how to send release data so Sentry can tell you about regressions between releases and
-            identify the potential source in `the product documentation `_.
-
-        :param environment: Sets the environment. This string is freeform and set to `production` by default.
-
-            A release can be associated with more than one environment to separate them in the UI (think `staging` vs
-            `production` or similar).
-
-            The `environment` config option takes precedence over the environment variable.
-
-        :param dist: The distribution of the application.
-
-            Distributions are used to disambiguate build or deployment variants of the same release of an application.
-
-            The dist can be for example a build number.
-
-        :param sample_rate: Configures the sample rate for error events, in the range of `0.0` to `1.0`.
-
-            The default is `1.0`, which means that 100% of error events will be sent. If set to `0.1`, only 10% of
-            error events will be sent.
-
-            Events are picked randomly.
-
-        :param error_sampler: Dynamically configures the sample rate for error events on a per-event basis.
-
-            This configuration option accepts a function, which takes two parameters (the `event` and the `hint`), and
-            which returns a boolean (indicating whether the event should be sent to Sentry) or a floating-point number
-            between `0.0` and `1.0`, inclusive.
-
-            The number indicates the probability the event is sent to Sentry; the SDK will randomly decide whether to
-            send the event with the given probability.
-
-            If this configuration option is specified, the `sample_rate` option is ignored.
-
-        :param ignore_errors: A list of exception class names that shouldn't be sent to Sentry.
-
-            Errors that are an instance of these exceptions or a subclass of them, will be filtered out before they're
-            sent to Sentry.
-
-            By default, all errors are sent.
-
-        :param max_breadcrumbs: This variable controls the total amount of breadcrumbs that should be captured.
-
-            This defaults to `100`, but you can set this to any number.
-
-            However, you should be aware that Sentry has a `maximum payload size `_
-            and any events exceeding that payload size will be dropped.
-
-        :param attach_stacktrace: When enabled, stack traces are automatically attached to all messages logged.
-
-            Stack traces are always attached to exceptions; however, when this option is set, stack traces are also
-            sent with messages.
-
-            This option means that stack traces appear next to all log messages.
-
-            Grouping in Sentry is different for events with stack traces and without. As a result, you will get new
-            groups as you enable or disable this flag for certain events.
-
-        :param send_default_pii: If this flag is enabled, `certain personally identifiable information (PII)
-            `_ is added by active integrations.
-
-            If you enable this option, be sure to manually remove what you don't want to send using our features for
-            managing `Sensitive Data `_.
-
-        :param event_scrubber: Scrubs the event payload for sensitive information such as cookies, sessions, and
-            passwords from a `denylist`.
-
-            It can additionally be used to scrub from another `pii_denylist` if `send_default_pii` is disabled.
-
-            See how to `configure the scrubber here `_.
-
-        :param include_source_context: When enabled, source context will be included in events sent to Sentry.
-
-            This source context includes the five lines of code above and below the line of code where an error
-            happened.
-
-        :param include_local_variables: When enabled, the SDK will capture a snapshot of local variables to send with
-            the event to help with debugging.
-
-        :param add_full_stack: When capturing errors, Sentry stack traces typically only include frames that start the
-            moment an error occurs.
-
-            But if the `add_full_stack` option is enabled (set to `True`), all frames from the start of execution will
-            be included in the stack trace sent to Sentry.
-
-        :param max_stack_frames: This option limits the number of stack frames that will be captured when
-            `add_full_stack` is enabled.
-
-        :param server_name: This option can be used to supply a server name.
-
-            When provided, the name of the server is sent along and persisted in the event.
-
-            For many integrations, the server name actually corresponds to the device hostname, even in situations
-            where the machine is not actually a server.
-
-        :param project_root: The full path to the root directory of your application.
-
-            The `project_root` is used to mark frames in a stack trace either as being in your application or outside
-            of the application.
-
-        :param in_app_include: A list of string prefixes of module names that belong to the app.
-
-            This option takes precedence over `in_app_exclude`.
-
-            Sentry differentiates stack frames that are directly related to your application ("in application") from
-            stack frames that come from other packages such as the standard library, frameworks, or other dependencies.
-
-            The application package is automatically marked as `inApp`.
-
-            The difference is visible in [sentry.io](https://sentry.io), where only the "in application" frames are
-            displayed by default.
-
-        :param in_app_exclude: A list of string prefixes of module names that do not belong to the app, but rather to
-            third-party packages.
-
-            Modules considered not part of the app will be hidden from stack traces by default.
-
-            This option can be overridden using `in_app_include`.
-
-        :param max_request_body_size: This parameter controls whether integrations should capture HTTP request bodies.
-            It can be set to one of the following values:
-
-            - `never`: Request bodies are never sent.
-            - `small`: Only small request bodies will be captured. The cutoff for small depends on the SDK (typically
-              4KB).
-            - `medium`: Medium and small requests will be captured (typically 10KB).
-            - `always`: The SDK will always capture the request body as long as Sentry can make sense of it.
-
-            Please note that the Sentry server [limits HTTP request body size](https://develop.sentry.dev/sdk/
-            expected-features/data-handling/#variable-size). The server always enforces its size limit, regardless of
-            how you configure this option.
-
-        :param max_value_length: The number of characters after which the values containing text in the event payload
-            will be truncated.
-
-            WARNING: If the value you set for this is exceptionally large, the event may exceed 1 MiB and will be
-            dropped by Sentry.
-
-        :param ca_certs: A path to an alternative CA bundle file in PEM-format.
-
-        :param send_client_reports: Set this boolean to `False` to disable sending of client reports.
-
-            Client reports allow the client to send status reports about itself to Sentry, such as information about
-            events that were dropped before being sent.
-
-        :param integrations: List of integrations to enable in addition to `auto-enabling integrations (overview)
-            `_.
-
-            This setting can be used to override the default config options for a specific auto-enabling integration
-            or to add an integration that is not auto-enabled.
-
-        :param disabled_integrations: List of integrations that will be disabled.
-
-            This setting can be used to explicitly turn off specific `auto-enabling integrations (list)
-            `_ or
-            `default `_ integrations.
-
-        :param auto_enabling_integrations: Configures whether `auto-enabling integrations (configuration)
-            `_ should be enabled.
-
-            When set to `False`, no auto-enabling integrations will be enabled by default, even if the corresponding
-            framework/library is detected.
-
-        :param default_integrations: Configures whether `default integrations
-            `_ should be enabled.
-
-            Setting `default_integrations` to `False` disables all default integrations **as well as all auto-enabling
-            integrations**, unless they are specifically added in the `integrations` option, described above.
-
-        :param before_send: This function is called with an SDK-specific message or error event object, and can return
-            a modified event object, or `null` to skip reporting the event.
-
-            This can be used, for instance, for manual PII stripping before sending.
-
-            By the time `before_send` is executed, all scope data has already been applied to the event. Further
-            modification of the scope won't have any effect.
-
-        :param before_send_transaction: This function is called with an SDK-specific transaction event object, and can
-            return a modified transaction event object, or `null` to skip reporting the event.
-
-            One way this might be used is for manual PII stripping before sending.
-
-        :param before_breadcrumb: This function is called with an SDK-specific breadcrumb object before the breadcrumb
-            is added to the scope.
-
-            When nothing is returned from the function, the breadcrumb is dropped.
-
-            To pass the breadcrumb through, return the first argument, which contains the breadcrumb object.
-
-            The callback typically gets a second argument (called a "hint") which contains the original object from
-            which the breadcrumb was created to further customize what the breadcrumb should look like.
-
-        :param transport: Switches out the transport used to send events.
-
-            How this works depends on the SDK. It can, for instance, be used to capture events for unit-testing or to
-            send it through some more complex setup that requires proxy authentication.
-
-        :param transport_queue_size: The maximum number of events that will be queued before the transport is forced to
-            flush.
-
-        :param http_proxy: When set, a proxy can be configured that should be used for outbound requests.
-
-            This is also used for HTTPS requests unless a separate `https_proxy` is configured. However, not all SDKs
-            support a separate HTTPS proxy.
-
-            SDKs will attempt to default to the system-wide configured proxy, if possible. For instance, on Unix
-            systems, the `http_proxy` environment variable will be picked up.
-
-        :param https_proxy: Configures a separate proxy for outgoing HTTPS requests.
-
-            This value might not be supported by all SDKs. When not supported the `http-proxy` value is also used for
-            HTTPS requests at all times.
-
-        :param proxy_headers: A dict containing additional proxy headers (usually for authentication) to be forwarded
-            to `urllib3`'s `ProxyManager `_.
-
-        :param shutdown_timeout: Controls how many seconds to wait before shutting down.
-
-            Sentry SDKs send events from a background queue. This queue is given a certain amount to drain pending
-            events. The default is SDK specific but typically around two seconds.
-
-            Setting this value too low may cause problems for sending events from command line applications.
-
-            Setting the value too high will cause the application to block for a long time for users experiencing
-            network connectivity problems.
-
-        :param keep_alive: Determines whether to keep the connection alive between requests.
-
-            This can be useful in environments where you encounter frequent network issues such as connection resets.
-
-        :param cert_file: Path to the client certificate to use.
-
-            If set, supersedes the `CLIENT_CERT_FILE` environment variable.
-
-        :param key_file: Path to the key file to use.
-
-            If set, supersedes the `CLIENT_KEY_FILE` environment variable.
-
-        :param socket_options: An optional list of socket options to use.
-
-            These provide fine-grained, low-level control over the way the SDK connects to Sentry.
-
-            If provided, the options will override the default `urllib3` `socket options
-            `_.
-
-        :param traces_sample_rate: A number between `0` and `1`, controlling the percentage chance a given transaction
-            will be sent to Sentry.
-
-            (`0` represents 0% while `1` represents 100%.) Applies equally to all transactions created in the app.
-
-            Either this or `traces_sampler` must be defined to enable tracing.
-
-            If `traces_sample_rate` is `0`, this means that no new traces will be created. However, if you have
-            another service (for example a JS frontend) that makes requests to your service that include trace
-            information, those traces will be continued and thus transactions will be sent to Sentry.
-
-            If you want to disable all tracing you need to set `traces_sample_rate=None`. In this case, no new traces
-            will be started and no incoming traces will be continued.
-
-        :param traces_sampler: A function responsible for determining the percentage chance a given transaction will be
-            sent to Sentry.
-
-            It will automatically be passed information about the transaction and the context in which it's being
-            created, and must return a number between `0` (0% chance of being sent) and `1` (100% chance of being
-            sent).
-
-            Can also be used for filtering transactions, by returning `0` for those that are unwanted.
-
-            Either this or `traces_sample_rate` must be defined to enable tracing.
-
-        :param trace_propagation_targets: An optional property that controls which downstream services receive tracing
-            data, in the form of a `sentry-trace` and a `baggage` header attached to any outgoing HTTP requests.
-
-            The option may contain a list of strings or regex against which the URLs of outgoing requests are matched.
-
-            If one of the entries in the list matches the URL of an outgoing request, trace data will be attached to
-            that request.
-
-            String entries do not have to be full matches, meaning the URL of a request is matched when it _contains_
-            a string provided through the option.
-
-            If `trace_propagation_targets` is not provided, trace data is attached to every outgoing request from the
-            instrumented client.
-
-        :param functions_to_trace: An optional list of functions that should be set up for tracing.
-
-            For each function in the list, a span will be created when the function is executed.
-
-            Functions in the list are represented as strings containing the fully qualified name of the function.
-
-            This is a convenient option, making it possible to have one central place for configuring what functions
-            to trace, instead of having custom instrumentation scattered all over your code base.
-
-            To learn more, see the `Custom Instrumentation `_ documentation.
-
-        :param enable_backpressure_handling: When enabled, a new monitor thread will be spawned to perform health
-            checks on the SDK.
-
-            If the system is unhealthy, the SDK will keep halving the `traces_sample_rate` set by you in 10 second
-            intervals until recovery.
-
-            This down sampling helps ensure that the system stays stable and reduces SDK overhead under high load.
-
-            This option is enabled by default.
-
-        :param enable_db_query_source: When enabled, the source location will be added to database queries.
-
-        :param db_query_source_threshold_ms: The threshold in milliseconds for adding the source location to database
-            queries.
-
-            The query location will be added to the query for queries slower than the specified threshold.
-
-        :param custom_repr: A custom `repr `_ function to run
-            while serializing an object.
-
-            Use this to control how your custom objects and classes are visible in Sentry.
-
-            Return a string for that repr value to be used or `None` to continue serializing how Sentry would have
-            done it anyway.
-
-        :param profiles_sample_rate: A number between `0` and `1`, controlling the percentage chance a given sampled
-            transaction will be profiled.
-
-            (`0` represents 0% while `1` represents 100%.) Applies equally to all transactions created in the app.
-
-            This is relative to the tracing sample rate - e.g. `0.5` means 50% of sampled transactions will be
-            profiled.
-
-        :param profiles_sampler:
-
-        :param profiler_mode:
-
-        :param profile_lifecycle:
-
-        :param profile_session_sample_rate:
-
-        :param auto_session_tracking:
-
-        :param spotlight:
-
-        :param instrumenter:
-
-        :param _experiments:
-        """
-        pass
-
-
-def _get_default_options():
-    # type: () -> dict[str, Any]
-    import inspect
-
-    a = inspect.getfullargspec(ClientConstructor.__init__)
-    defaults = a.defaults or ()
-    kwonlydefaults = a.kwonlydefaults or {}
-
-    return dict(
-        itertools.chain(
-            zip(a.args[-len(defaults) :], defaults),
-            kwonlydefaults.items(),
-        )
-    )
-
-
-DEFAULT_OPTIONS = _get_default_options()
-del _get_default_options
-
-
-VERSION = "3.3.3a1"
diff --git a/src/sentry_sdk_alpha/crons/__init__.py b/src/sentry_sdk_alpha/crons/__init__.py
deleted file mode 100644
index 067105ca922273..00000000000000
--- a/src/sentry_sdk_alpha/crons/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from sentry_sdk_alpha.crons.api import capture_checkin
-from sentry_sdk_alpha.crons.consts import MonitorStatus
-from sentry_sdk_alpha.crons.decorator import monitor
-
-__all__ = [
-    "capture_checkin",
-    "MonitorStatus",
-    "monitor",
-]
diff --git a/src/sentry_sdk_alpha/crons/api.py b/src/sentry_sdk_alpha/crons/api.py
deleted file mode 100644
index 0ac8d81baf2b14..00000000000000
--- a/src/sentry_sdk_alpha/crons/api.py
+++ /dev/null
@@ -1,57 +0,0 @@
-import uuid
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-
-if TYPE_CHECKING:
-    from typing import Optional
-
-    from sentry_sdk_alpha._types import Event, MonitorConfig
-
-
-def _create_check_in_event(
-    monitor_slug=None,  # type: Optional[str]
-    check_in_id=None,  # type: Optional[str]
-    status=None,  # type: Optional[str]
-    duration_s=None,  # type: Optional[float]
-    monitor_config=None,  # type: Optional[MonitorConfig]
-):
-    # type: (...) -> Event
-    options = sentry_sdk_alpha.get_client().options
-    check_in_id = check_in_id or uuid.uuid4().hex  # type: str
-
-    check_in = {
-        "type": "check_in",
-        "monitor_slug": monitor_slug,
-        "check_in_id": check_in_id,
-        "status": status,
-        "duration": duration_s,
-        "environment": options.get("environment", None),
-        "release": options.get("release", None),
-    }  # type: Event
-
-    if monitor_config:
-        check_in["monitor_config"] = monitor_config
-
-    return check_in
-
-
-def capture_checkin(
-    monitor_slug=None,  # type: Optional[str]
-    check_in_id=None,  # type: Optional[str]
-    status=None,  # type: Optional[str]
-    duration=None,  # type: Optional[float]
-    monitor_config=None,  # type: Optional[MonitorConfig]
-):
-    # type: (...) -> str
-    check_in_event = _create_check_in_event(
-        monitor_slug=monitor_slug,
-        check_in_id=check_in_id,
-        status=status,
-        duration_s=duration,
-        monitor_config=monitor_config,
-    )
-
-    sentry_sdk_alpha.capture_event(check_in_event)
-
-    return check_in_event["check_in_id"]
diff --git a/src/sentry_sdk_alpha/crons/consts.py b/src/sentry_sdk_alpha/crons/consts.py
deleted file mode 100644
index be686b4539439d..00000000000000
--- a/src/sentry_sdk_alpha/crons/consts.py
+++ /dev/null
@@ -1,4 +0,0 @@
-class MonitorStatus:
-    IN_PROGRESS = "in_progress"
-    OK = "ok"
-    ERROR = "error"
diff --git a/src/sentry_sdk_alpha/crons/decorator.py b/src/sentry_sdk_alpha/crons/decorator.py
deleted file mode 100644
index 986fad2f23a1b9..00000000000000
--- a/src/sentry_sdk_alpha/crons/decorator.py
+++ /dev/null
@@ -1,126 +0,0 @@
-from functools import wraps
-from inspect import iscoroutinefunction
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha.crons import capture_checkin
-from sentry_sdk_alpha.crons.consts import MonitorStatus
-from sentry_sdk_alpha.utils import now
-
-if TYPE_CHECKING:
-    from collections.abc import Awaitable, Callable
-    from types import TracebackType
-    from typing import Any, Optional, ParamSpec, Type, TypeVar, Union, cast, overload
-
-    from sentry_sdk_alpha._types import MonitorConfig
-
-    P = ParamSpec("P")
-    R = TypeVar("R")
-
-
-class monitor:  # noqa: N801
-    """
-    Decorator/context manager to capture checkin events for a monitor.
-
-    Usage (as decorator):
-    ```
-    import sentry_sdk
-
-    app = Celery()
-
-    @app.task
-    @sentry_sdk.monitor(monitor_slug='my-fancy-slug')
-    def test(arg):
-        print(arg)
-    ```
-
-    This does not have to be used with Celery, but if you do use it with celery,
-    put the `@sentry_sdk.monitor` decorator below Celery's `@app.task` decorator.
-
-    Usage (as context manager):
-    ```
-    import sentry_sdk
-
-    def test(arg):
-        with sentry_sdk.monitor(monitor_slug='my-fancy-slug'):
-            print(arg)
-    ```
-    """
-
-    def __init__(self, monitor_slug=None, monitor_config=None):
-        # type: (Optional[str], Optional[MonitorConfig]) -> None
-        self.monitor_slug = monitor_slug
-        self.monitor_config = monitor_config
-
-    def __enter__(self):
-        # type: () -> None
-        self.start_timestamp = now()
-        self.check_in_id = capture_checkin(
-            monitor_slug=self.monitor_slug,
-            status=MonitorStatus.IN_PROGRESS,
-            monitor_config=self.monitor_config,
-        )
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]) -> None
-        duration_s = now() - self.start_timestamp
-
-        if exc_type is None and exc_value is None and traceback is None:
-            status = MonitorStatus.OK
-        else:
-            status = MonitorStatus.ERROR
-
-        capture_checkin(
-            monitor_slug=self.monitor_slug,
-            check_in_id=self.check_in_id,
-            status=status,
-            duration=duration_s,
-            monitor_config=self.monitor_config,
-        )
-
-    if TYPE_CHECKING:
-
-        @overload
-        def __call__(self, fn):
-            # type: (Callable[P, Awaitable[Any]]) -> Callable[P, Awaitable[Any]]
-            # Unfortunately, mypy does not give us any reliable way to type check the
-            # return value of an Awaitable (i.e. async function) for this overload,
-            # since calling iscouroutinefunction narrows the type to Callable[P, Awaitable[Any]].
-            ...
-
-        @overload
-        def __call__(self, fn):
-            # type: (Callable[P, R]) -> Callable[P, R]
-            ...
-
-    def __call__(
-        self,
-        fn,  # type: Union[Callable[P, R], Callable[P, Awaitable[Any]]]
-    ):
-        # type: (...) -> Union[Callable[P, R], Callable[P, Awaitable[Any]]]
-        if iscoroutinefunction(fn):
-            return self._async_wrapper(fn)
-
-        else:
-            if TYPE_CHECKING:
-                fn = cast("Callable[P, R]", fn)
-            return self._sync_wrapper(fn)
-
-    def _async_wrapper(self, fn):
-        # type: (Callable[P, Awaitable[Any]]) -> Callable[P, Awaitable[Any]]
-        @wraps(fn)
-        async def inner(*args: "P.args", **kwargs: "P.kwargs"):
-            # type: (...) -> R
-            with self:
-                return await fn(*args, **kwargs)
-
-        return inner
-
-    def _sync_wrapper(self, fn):
-        # type: (Callable[P, R]) -> Callable[P, R]
-        @wraps(fn)
-        def inner(*args: "P.args", **kwargs: "P.kwargs"):
-            # type: (...) -> R
-            with self:
-                return fn(*args, **kwargs)
-
-        return inner
diff --git a/src/sentry_sdk_alpha/debug.py b/src/sentry_sdk_alpha/debug.py
deleted file mode 100644
index 8f24cb91e2667f..00000000000000
--- a/src/sentry_sdk_alpha/debug.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import logging
-import sys
-from logging import LogRecord
-
-from sentry_sdk_alpha import get_client
-from sentry_sdk_alpha.client import _client_init_debug
-from sentry_sdk_alpha.utils import logger
-
-
-class _DebugFilter(logging.Filter):
-    def filter(self, record):
-        # type: (LogRecord) -> bool
-        if _client_init_debug.get(False):
-            return True
-
-        return get_client().options["debug"]
-
-
-def init_debug_support():
-    # type: () -> None
-    if not logger.handlers:
-        configure_logger()
-
-
-def configure_logger():
-    # type: () -> None
-    _handler = logging.StreamHandler(sys.stderr)
-    _handler.setFormatter(logging.Formatter(" [sentry] %(levelname)s: %(message)s"))
-    logger.addHandler(_handler)
-    logger.setLevel(logging.DEBUG)
-    logger.addFilter(_DebugFilter())
diff --git a/src/sentry_sdk_alpha/envelope.py b/src/sentry_sdk_alpha/envelope.py
deleted file mode 100644
index 628110d766bb80..00000000000000
--- a/src/sentry_sdk_alpha/envelope.py
+++ /dev/null
@@ -1,348 +0,0 @@
-import io
-import json
-import mimetypes
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha.session import Session
-from sentry_sdk_alpha.utils import capture_internal_exceptions, json_dumps
-
-if TYPE_CHECKING:
-    from collections.abc import Iterator
-    from typing import Any, Dict, List, Optional, Union
-
-    from sentry_sdk_alpha._types import Event, EventDataCategory
-
-
-def parse_json(data):
-    # type: (Union[bytes, str]) -> Any
-    # on some python 3 versions this needs to be bytes
-    if isinstance(data, bytes):
-        data = data.decode("utf-8", "replace")
-    return json.loads(data)
-
-
-class Envelope:
-    """
-    Represents a Sentry Envelope. The calling code is responsible for adhering to the constraints
-    documented in the Sentry docs: https://develop.sentry.dev/sdk/envelopes/#data-model. In particular,
-    each envelope may have at most one Item with type "event" or "transaction" (but not both).
-    """
-
-    def __init__(
-        self,
-        headers=None,  # type: Optional[Dict[str, Any]]
-        items=None,  # type: Optional[List[Item]]
-    ):
-        # type: (...) -> None
-        if headers is not None:
-            headers = dict(headers)
-        self.headers = headers or {}
-        if items is None:
-            items = []
-        else:
-            items = list(items)
-        self.items = items
-
-    @property
-    def description(self):
-        # type: (...) -> str
-        return "envelope with {} items ({})".format(
-            len(self.items),
-            ", ".join(x.data_category for x in self.items),
-        )
-
-    def add_event(
-        self, event  # type: Event
-    ):
-        # type: (...) -> None
-        self.add_item(Item(payload=PayloadRef(json=event), type="event"))
-
-    def add_transaction(
-        self, transaction  # type: Event
-    ):
-        # type: (...) -> None
-        self.add_item(Item(payload=PayloadRef(json=transaction), type="transaction"))
-
-    def add_profile(
-        self, profile  # type: Any
-    ):
-        # type: (...) -> None
-        self.add_item(Item(payload=PayloadRef(json=profile), type="profile"))
-
-    def add_profile_chunk(
-        self, profile_chunk  # type: Any
-    ):
-        # type: (...) -> None
-        self.add_item(
-            Item(
-                payload=PayloadRef(json=profile_chunk),
-                type="profile_chunk",
-                headers={"platform": profile_chunk.get("platform", "python")},
-            )
-        )
-
-    def add_checkin(
-        self, checkin  # type: Any
-    ):
-        # type: (...) -> None
-        self.add_item(Item(payload=PayloadRef(json=checkin), type="check_in"))
-
-    def add_session(
-        self, session  # type: Union[Session, Any]
-    ):
-        # type: (...) -> None
-        if isinstance(session, Session):
-            session = session.to_json()
-        self.add_item(Item(payload=PayloadRef(json=session), type="session"))
-
-    def add_sessions(
-        self, sessions  # type: Any
-    ):
-        # type: (...) -> None
-        self.add_item(Item(payload=PayloadRef(json=sessions), type="sessions"))
-
-    def add_item(
-        self, item  # type: Item
-    ):
-        # type: (...) -> None
-        self.items.append(item)
-
-    def get_event(self):
-        # type: (...) -> Optional[Event]
-        for items in self.items:
-            event = items.get_event()
-            if event is not None:
-                return event
-        return None
-
-    def get_transaction_event(self):
-        # type: (...) -> Optional[Event]
-        for item in self.items:
-            event = item.get_transaction_event()
-            if event is not None:
-                return event
-        return None
-
-    def __iter__(self):
-        # type: (...) -> Iterator[Item]
-        return iter(self.items)
-
-    def serialize_into(
-        self, f  # type: Any
-    ):
-        # type: (...) -> None
-        f.write(json_dumps(self.headers))
-        f.write(b"\n")
-        for item in self.items:
-            item.serialize_into(f)
-
-    def serialize(self):
-        # type: (...) -> bytes
-        out = io.BytesIO()
-        self.serialize_into(out)
-        return out.getvalue()
-
-    @classmethod
-    def deserialize_from(
-        cls, f  # type: Any
-    ):
-        # type: (...) -> Envelope
-        headers = parse_json(f.readline())
-        items = []
-        while 1:
-            item = Item.deserialize_from(f)
-            if item is None:
-                break
-            items.append(item)
-        return cls(headers=headers, items=items)
-
-    @classmethod
-    def deserialize(
-        cls, bytes  # type: bytes
-    ):
-        # type: (...) -> Envelope
-        return cls.deserialize_from(io.BytesIO(bytes))
-
-    def __repr__(self):
-        # type: (...) -> str
-        return f""
-
-
-class PayloadRef:
-    def __init__(
-        self,
-        bytes=None,  # type: Optional[bytes]
-        path=None,  # type: Optional[Union[bytes, str]]
-        json=None,  # type: Optional[Any]
-    ):
-        # type: (...) -> None
-        self.json = json
-        self.bytes = bytes
-        self.path = path
-
-    def get_bytes(self):
-        # type: (...) -> bytes
-        if self.bytes is None:
-            if self.path is not None:
-                with capture_internal_exceptions():
-                    with open(self.path, "rb") as f:
-                        self.bytes = f.read()
-            elif self.json is not None:
-                self.bytes = json_dumps(self.json)
-        return self.bytes or b""
-
-    @property
-    def inferred_content_type(self):
-        # type: (...) -> str
-        if self.json is not None:
-            return "application/json"
-        elif self.path is not None:
-            path = self.path
-            if isinstance(path, bytes):
-                path = path.decode("utf-8", "replace")
-            ty = mimetypes.guess_type(path)[0]
-            if ty:
-                return ty
-        return "application/octet-stream"
-
-    def __repr__(self):
-        # type: (...) -> str
-        return f""
-
-
-class Item:
-    def __init__(
-        self,
-        payload,  # type: Union[bytes, str, PayloadRef]
-        headers=None,  # type: Optional[Dict[str, Any]]
-        type=None,  # type: Optional[str]
-        content_type=None,  # type: Optional[str]
-        filename=None,  # type: Optional[str]
-    ):
-        if headers is not None:
-            headers = dict(headers)
-        elif headers is None:
-            headers = {}
-        self.headers = headers
-        if isinstance(payload, bytes):
-            payload = PayloadRef(bytes=payload)
-        elif isinstance(payload, str):
-            payload = PayloadRef(bytes=payload.encode("utf-8"))
-        else:
-            payload = payload
-
-        if filename is not None:
-            headers["filename"] = filename
-        if type is not None:
-            headers["type"] = type
-        if content_type is not None:
-            headers["content_type"] = content_type
-        elif "content_type" not in headers:
-            headers["content_type"] = payload.inferred_content_type
-
-        self.payload = payload
-
-    def __repr__(self):
-        # type: (...) -> str
-        return "".format(
-            self.headers,
-            self.payload,
-            self.data_category,
-        )
-
-    @property
-    def type(self):
-        # type: (...) -> Optional[str]
-        return self.headers.get("type")
-
-    @property
-    def data_category(self):
-        # type: (...) -> EventDataCategory
-        ty = self.headers.get("type")
-        if ty == "session" or ty == "sessions":
-            return "session"
-        elif ty == "attachment":
-            return "attachment"
-        elif ty == "transaction":
-            return "transaction"
-        elif ty == "event":
-            return "error"
-        elif ty == "log":
-            return "log"
-        elif ty == "client_report":
-            return "internal"
-        elif ty == "profile":
-            return "profile"
-        elif ty == "profile_chunk":
-            return "profile_chunk"
-        elif ty == "check_in":
-            return "monitor"
-        else:
-            return "default"
-
-    def get_bytes(self):
-        # type: (...) -> bytes
-        return self.payload.get_bytes()
-
-    def get_event(self):
-        # type: (...) -> Optional[Event]
-        """
-        Returns an error event if there is one.
-        """
-        if self.type == "event" and self.payload.json is not None:
-            return self.payload.json
-        return None
-
-    def get_transaction_event(self):
-        # type: (...) -> Optional[Event]
-        if self.type == "transaction" and self.payload.json is not None:
-            return self.payload.json
-        return None
-
-    def serialize_into(
-        self, f  # type: Any
-    ):
-        # type: (...) -> None
-        headers = dict(self.headers)
-        bytes = self.get_bytes()
-        headers["length"] = len(bytes)
-        f.write(json_dumps(headers))
-        f.write(b"\n")
-        f.write(bytes)
-        f.write(b"\n")
-
-    def serialize(self):
-        # type: (...) -> bytes
-        out = io.BytesIO()
-        self.serialize_into(out)
-        return out.getvalue()
-
-    @classmethod
-    def deserialize_from(
-        cls, f  # type: Any
-    ):
-        # type: (...) -> Optional[Item]
-        line = f.readline().rstrip()
-        if not line:
-            return None
-        headers = parse_json(line)
-        length = headers.get("length")
-        if length is not None:
-            payload = f.read(length)
-            f.readline()
-        else:
-            # if no length was specified we need to read up to the end of line
-            # and remove it (if it is present, i.e. not the very last char in an eof terminated envelope)
-            payload = f.readline().rstrip(b"\n")
-        if headers.get("type") in ("event", "transaction"):
-            rv = cls(headers=headers, payload=PayloadRef(json=parse_json(payload)))
-        else:
-            rv = cls(headers=headers, payload=payload)
-        return rv
-
-    @classmethod
-    def deserialize(
-        cls, bytes  # type: bytes
-    ):
-        # type: (...) -> Optional[Item]
-        return cls.deserialize_from(io.BytesIO(bytes))
diff --git a/src/sentry_sdk_alpha/feature_flags.py b/src/sentry_sdk_alpha/feature_flags.py
deleted file mode 100644
index a54d1fd841391f..00000000000000
--- a/src/sentry_sdk_alpha/feature_flags.py
+++ /dev/null
@@ -1,70 +0,0 @@
-import copy
-from threading import Lock
-from typing import TYPE_CHECKING, Any
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha._lru_cache import LRUCache
-
-if TYPE_CHECKING:
-    from typing import TypedDict
-
-    class FlagData(TypedDict):
-        flag: str
-        result: bool
-
-
-DEFAULT_FLAG_CAPACITY = 100
-
-
-class FlagBuffer:
-
-    def __init__(self, capacity):
-        # type: (int) -> None
-        self.capacity = capacity
-        self.lock = Lock()
-
-        # Buffer is private. The name is mangled to discourage use. If you use this attribute
-        # directly you're on your own!
-        self.__buffer = LRUCache(capacity)
-
-    def clear(self):
-        # type: () -> None
-        self.__buffer = LRUCache(self.capacity)
-
-    def __deepcopy__(self, memo):
-        # type: (dict[int, Any]) -> FlagBuffer
-        with self.lock:
-            buffer = FlagBuffer(self.capacity)
-            buffer.__buffer = copy.deepcopy(self.__buffer, memo)
-            return buffer
-
-    def get(self):
-        # type: () -> list[FlagData]
-        with self.lock:
-            return [{"flag": key, "result": value} for key, value in self.__buffer.get_all()]
-
-    def set(self, flag, result):
-        # type: (str, bool) -> None
-        if isinstance(result, FlagBuffer):
-            # If someone were to insert `self` into `self` this would create a circular dependency
-            # on the lock. This is of course a deadlock. However, this is far outside the expected
-            # usage of this class. We guard against it here for completeness and to document this
-            # expected failure mode.
-            raise ValueError("FlagBuffer instances can not be inserted into the dictionary.")
-
-        with self.lock:
-            self.__buffer.set(flag, result)
-
-
-def add_feature_flag(flag, result):
-    # type: (str, bool) -> None
-    """
-    Records a flag and its value to be sent on subsequent error events.
-    We recommend you do this on flag evaluations. Flags are buffered per Sentry scope.
-    """
-    flags = sentry_sdk_alpha.get_isolation_scope().flags
-    flags.set(flag, result)
-
-    span = sentry_sdk_alpha.get_current_span()
-    if span:
-        span.set_flag(flag, result)
diff --git a/src/sentry_sdk_alpha/integrations/__init__.py b/src/sentry_sdk_alpha/integrations/__init__.py
deleted file mode 100644
index 6e06c85b994271..00000000000000
--- a/src/sentry_sdk_alpha/integrations/__init__.py
+++ /dev/null
@@ -1,278 +0,0 @@
-from abc import ABC, abstractmethod
-from threading import Lock
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha.utils import logger
-
-if TYPE_CHECKING:
-    from collections.abc import Callable, Iterator, Sequence
-    from typing import Dict, List, Optional, Set, Type, Union
-
-
-_DEFAULT_FAILED_REQUEST_STATUS_CODES = frozenset(range(500, 600))
-
-
-_installer_lock = Lock()
-
-# Set of all integration identifiers we have attempted to install
-_processed_integrations = set()  # type: Set[str]
-
-# Set of all integration identifiers we have actually installed
-_installed_integrations = set()  # type: Set[str]
-
-
-def _generate_default_integrations_iterator(
-    integrations,  # type: List[str]
-    auto_enabling_integrations,  # type: List[str]
-):
-    # type: (...) -> Callable[[bool], Iterator[Type[Integration]]]
-
-    def iter_default_integrations(with_auto_enabling_integrations):
-        # type: (bool) -> Iterator[Type[Integration]]
-        """Returns an iterator of the default integration classes:"""
-        from importlib import import_module
-
-        if with_auto_enabling_integrations:
-            all_import_strings = integrations + auto_enabling_integrations
-        else:
-            all_import_strings = integrations
-
-        for import_string in all_import_strings:
-            try:
-                module, cls = import_string.rsplit(".", 1)
-                yield getattr(import_module(module), cls)
-            except (DidNotEnable, SyntaxError) as e:
-                logger.debug("Did not import default integration %s: %s", import_string, e)
-
-    if isinstance(iter_default_integrations.__doc__, str):
-        for import_string in integrations:
-            iter_default_integrations.__doc__ += f"\n- `{import_string}`"
-
-    return iter_default_integrations
-
-
-_DEFAULT_INTEGRATIONS = [
-    # stdlib/base runtime integrations
-    "sentry_sdk.integrations.argv.ArgvIntegration",
-    "sentry_sdk.integrations.atexit.AtexitIntegration",
-    "sentry_sdk.integrations.dedupe.DedupeIntegration",
-    "sentry_sdk.integrations.excepthook.ExcepthookIntegration",
-    "sentry_sdk.integrations.logging.LoggingIntegration",
-    "sentry_sdk.integrations.modules.ModulesIntegration",
-    "sentry_sdk.integrations.stdlib.StdlibIntegration",
-    "sentry_sdk.integrations.threading.ThreadingIntegration",
-]
-
-_AUTO_ENABLING_INTEGRATIONS = [
-    "sentry_sdk.integrations.aiohttp.AioHttpIntegration",
-    "sentry_sdk.integrations.anthropic.AnthropicIntegration",
-    "sentry_sdk.integrations.ariadne.AriadneIntegration",
-    "sentry_sdk.integrations.arq.ArqIntegration",
-    "sentry_sdk.integrations.asyncpg.AsyncPGIntegration",
-    "sentry_sdk.integrations.boto3.Boto3Integration",
-    "sentry_sdk.integrations.bottle.BottleIntegration",
-    "sentry_sdk.integrations.celery.CeleryIntegration",
-    "sentry_sdk.integrations.chalice.ChaliceIntegration",
-    "sentry_sdk.integrations.clickhouse_driver.ClickhouseDriverIntegration",
-    "sentry_sdk.integrations.cohere.CohereIntegration",
-    "sentry_sdk.integrations.django.DjangoIntegration",
-    "sentry_sdk.integrations.falcon.FalconIntegration",
-    "sentry_sdk.integrations.fastapi.FastApiIntegration",
-    "sentry_sdk.integrations.flask.FlaskIntegration",
-    "sentry_sdk.integrations.gql.GQLIntegration",
-    "sentry_sdk.integrations.graphene.GrapheneIntegration",
-    "sentry_sdk.integrations.httpx.HttpxIntegration",
-    "sentry_sdk.integrations.huey.HueyIntegration",
-    "sentry_sdk.integrations.huggingface_hub.HuggingfaceHubIntegration",
-    "sentry_sdk.integrations.langchain.LangchainIntegration",
-    "sentry_sdk.integrations.litestar.LitestarIntegration",
-    "sentry_sdk.integrations.loguru.LoguruIntegration",
-    "sentry_sdk.integrations.openai.OpenAIIntegration",
-    "sentry_sdk.integrations.pymongo.PyMongoIntegration",
-    "sentry_sdk.integrations.pyramid.PyramidIntegration",
-    "sentry_sdk.integrations.quart.QuartIntegration",
-    "sentry_sdk.integrations.redis.RedisIntegration",
-    "sentry_sdk.integrations.rq.RqIntegration",
-    "sentry_sdk.integrations.sanic.SanicIntegration",
-    "sentry_sdk.integrations.sqlalchemy.SqlalchemyIntegration",
-    "sentry_sdk.integrations.starlette.StarletteIntegration",
-    "sentry_sdk.integrations.starlite.StarliteIntegration",
-    "sentry_sdk.integrations.strawberry.StrawberryIntegration",
-    "sentry_sdk.integrations.tornado.TornadoIntegration",
-]
-
-iter_default_integrations = _generate_default_integrations_iterator(
-    integrations=_DEFAULT_INTEGRATIONS,
-    auto_enabling_integrations=_AUTO_ENABLING_INTEGRATIONS,
-)
-
-del _generate_default_integrations_iterator
-
-
-_MIN_VERSIONS = {
-    "aiohttp": (3, 4),
-    "anthropic": (0, 16),
-    "ariadne": (0, 20),
-    "arq": (0, 23),
-    "asyncpg": (0, 23),
-    "beam": (2, 12),
-    "boto3": (1, 12),  # botocore
-    "bottle": (0, 12),
-    "celery": (4, 4, 7),
-    "chalice": (1, 16, 0),
-    "clickhouse_driver": (0, 2, 0),
-    "common": (1, 4, 0),  # opentelemetry-sdk
-    "cohere": (5, 4, 0),
-    "django": (2, 0),
-    "dramatiq": (1, 9),
-    "falcon": (3, 0),
-    "fastapi": (0, 79, 0),
-    "flask": (1, 1, 4),
-    "gql": (3, 4, 1),
-    "graphene": (3, 3),
-    "grpc": (1, 32, 0),  # grpcio
-    "huggingface_hub": (0, 22),
-    "langchain": (0, 0, 210),
-    "launchdarkly": (9, 8, 0),
-    "loguru": (0, 7, 0),
-    "openai": (1, 0, 0),
-    "openfeature": (0, 7, 1),
-    "quart": (0, 16, 0),
-    "ray": (2, 7, 0),
-    "requests": (2, 0, 0),
-    "rq": (0, 6),
-    "sanic": (0, 8),
-    "sqlalchemy": (1, 2),
-    "starlette": (0, 16),
-    "starlite": (1, 48),
-    "statsig": (0, 55, 3),
-    "strawberry": (0, 209, 5),
-    "tornado": (6, 0),
-    "trytond": (5, 0),
-    "typer": (0, 15),
-    "unleash": (6, 0, 1),
-}
-
-
-def setup_integrations(
-    integrations,
-    with_defaults=True,
-    with_auto_enabling_integrations=False,
-    disabled_integrations=None,
-):
-    # type: (Sequence[Integration], bool, bool, Optional[Sequence[Union[type[Integration], Integration]]]) -> Dict[str, Integration]
-    """
-    Given a list of integration instances, this installs them all.
-
-    When `with_defaults` is set to `True` all default integrations are added
-    unless they were already provided before.
-
-    `disabled_integrations` takes precedence over `with_defaults` and
-    `with_auto_enabling_integrations`.
-    """
-    integrations = {integration.identifier: integration for integration in integrations or ()}
-
-    logger.debug("Setting up integrations (with default = %s)", with_defaults)
-
-    # Integrations that will not be enabled
-    disabled_integrations = [
-        integration if isinstance(integration, type) else type(integration)
-        for integration in disabled_integrations or []
-    ]
-
-    # Integrations that are not explicitly set up by the user.
-    used_as_default_integration = set()
-
-    if with_defaults:
-        for integration_cls in iter_default_integrations(with_auto_enabling_integrations):
-            if integration_cls.identifier not in integrations:
-                instance = integration_cls()
-                integrations[instance.identifier] = instance
-                used_as_default_integration.add(instance.identifier)
-
-    for identifier, integration in integrations.items():
-        with _installer_lock:
-            if identifier not in _processed_integrations:
-                if type(integration) in disabled_integrations:
-                    logger.debug("Ignoring integration %s", identifier)
-                else:
-                    logger.debug("Setting up previously not enabled integration %s", identifier)
-                    try:
-                        type(integration).setup_once()
-                    except DidNotEnable as e:
-                        if identifier not in used_as_default_integration:
-                            raise
-
-                        logger.debug("Did not enable default integration %s: %s", identifier, e)
-                    else:
-                        _installed_integrations.add(identifier)
-
-                _processed_integrations.add(identifier)
-
-    integrations = {
-        identifier: integration
-        for identifier, integration in integrations.items()
-        if identifier in _installed_integrations
-    }
-
-    for identifier in integrations:
-        logger.debug("Enabling integration %s", identifier)
-
-    return integrations
-
-
-def _check_minimum_version(integration, version, package=None):
-    # type: (type[Integration], Optional[tuple[int, ...]], Optional[str]) -> None
-    package = package or integration.identifier
-
-    if version is None:
-        raise DidNotEnable(f"Unparsable {package} version.")
-
-    min_version = _MIN_VERSIONS.get(integration.identifier)
-    if min_version is None:
-        return
-
-    if version < min_version:
-        raise DidNotEnable(
-            f"Integration only supports {package} {'.'.join(map(str, min_version))} or newer."
-        )
-
-
-class DidNotEnable(Exception):  # noqa: N818
-    """
-    The integration could not be enabled due to a trivial user error like
-    `flask` not being installed for the `FlaskIntegration`.
-
-    This exception is silently swallowed for default integrations, but reraised
-    for explicitly enabled integrations.
-    """
-
-
-class Integration(ABC):
-    """Baseclass for all integrations.
-
-    To accept options for an integration, implement your own constructor that
-    saves those options on `self`.
-    """
-
-    install = None
-    """Legacy method, do not implement."""
-
-    identifier = None  # type: str
-    """String unique ID of integration type"""
-
-    @staticmethod
-    @abstractmethod
-    def setup_once():
-        # type: () -> None
-        """
-        Initialize the integration.
-
-        This function is only called once, ever. Configuration is not available
-        at this point, so the only thing to do here is to hook into exception
-        handlers, and perhaps do monkeypatches.
-
-        Inside those hooks `Integration.current` can be used to access the
-        instance again.
-        """
-        pass
diff --git a/src/sentry_sdk_alpha/integrations/_asgi_common.py b/src/sentry_sdk_alpha/integrations/_asgi_common.py
deleted file mode 100644
index 7e6c661a560436..00000000000000
--- a/src/sentry_sdk_alpha/integrations/_asgi_common.py
+++ /dev/null
@@ -1,103 +0,0 @@
-import urllib
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha.integrations._wsgi_common import _filter_headers
-from sentry_sdk_alpha.scope import should_send_default_pii
-
-if TYPE_CHECKING:
-    from typing import Any, Dict, Literal, Optional, Union
-
-    from sentry_sdk_alpha.utils import AnnotatedValue
-
-
-def _get_headers(asgi_scope):
-    # type: (Any) -> Dict[str, str]
-    """
-    Extract headers from the ASGI scope, in the format that the Sentry protocol expects.
-    """
-    headers = {}  # type: Dict[str, str]
-    for raw_key, raw_value in asgi_scope.get("headers", {}):
-        key = raw_key.decode("latin-1")
-        value = raw_value.decode("latin-1")
-        if key in headers:
-            headers[key] = headers[key] + ", " + value
-        else:
-            headers[key] = value
-
-    return headers
-
-
-def _get_url(asgi_scope, default_scheme=None, host=None):
-    # type: (Dict[str, Any], Optional[Literal["ws", "http"]], Optional[Union[AnnotatedValue, str]]) -> str
-    """
-    Extract URL from the ASGI scope, without also including the querystring.
-    """
-    scheme = asgi_scope.get("scheme", default_scheme)
-
-    server = asgi_scope.get("server", None)
-    path = asgi_scope.get("root_path", "") + asgi_scope.get("path", "")
-
-    if host:
-        return f"{scheme}://{host}{path}"
-
-    if server is not None:
-        host, port = server
-        default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}.get(scheme)
-        if port != default_port:
-            return f"{scheme}://{host}:{port}{path}"
-        return f"{scheme}://{host}{path}"
-    return path
-
-
-def _get_query(asgi_scope):
-    # type: (Any) -> Any
-    """
-    Extract querystring from the ASGI scope, in the format that the Sentry protocol expects.
-    """
-    qs = asgi_scope.get("query_string")
-    if not qs:
-        return None
-    return urllib.parse.unquote(qs.decode("latin-1"))
-
-
-def _get_ip(asgi_scope):
-    # type: (Any) -> str
-    """
-    Extract IP Address from the ASGI scope based on request headers with fallback to scope client.
-    """
-    headers = _get_headers(asgi_scope)
-    try:
-        return headers["x-forwarded-for"].split(",")[0].strip()
-    except (KeyError, IndexError):
-        pass
-
-    try:
-        return headers["x-real-ip"]
-    except KeyError:
-        pass
-
-    return asgi_scope.get("client")[0]
-
-
-def _get_request_data(asgi_scope):
-    # type: (Any) -> Dict[str, Any]
-    """
-    Returns data related to the HTTP request from the ASGI scope.
-    """
-    request_data = {}  # type: Dict[str, Any]
-    ty = asgi_scope["type"]
-    if ty in ("http", "websocket"):
-        request_data["method"] = asgi_scope.get("method")
-
-        request_data["headers"] = headers = _filter_headers(_get_headers(asgi_scope))
-        request_data["query_string"] = _get_query(asgi_scope)
-
-        request_data["url"] = _get_url(
-            asgi_scope, "http" if ty == "http" else "ws", headers.get("host")
-        )
-
-    client = asgi_scope.get("client")
-    if client and should_send_default_pii():
-        request_data["env"] = {"REMOTE_ADDR": _get_ip(asgi_scope)}
-
-    return request_data
diff --git a/src/sentry_sdk_alpha/integrations/_wsgi_common.py b/src/sentry_sdk_alpha/integrations/_wsgi_common.py
deleted file mode 100644
index d01ea6aa9b0935..00000000000000
--- a/src/sentry_sdk_alpha/integrations/_wsgi_common.py
+++ /dev/null
@@ -1,233 +0,0 @@
-import json
-from copy import deepcopy
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import SENSITIVE_DATA_SUBSTITUTE, AnnotatedValue
-
-try:
-    from django.http.request import RawPostDataException
-except ImportError:
-    RawPostDataException = None
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from collections.abc import Mapping, MutableMapping
-    from typing import Any, Dict, Optional, Union
-
-    from sentry_sdk_alpha._types import Event
-
-
-SENSITIVE_ENV_KEYS = (
-    "REMOTE_ADDR",
-    "HTTP_X_FORWARDED_FOR",
-    "HTTP_SET_COOKIE",
-    "HTTP_COOKIE",
-    "HTTP_AUTHORIZATION",
-    "HTTP_X_API_KEY",
-    "HTTP_X_FORWARDED_FOR",
-    "HTTP_X_REAL_IP",
-)
-
-SENSITIVE_HEADERS = tuple(x[len("HTTP_") :] for x in SENSITIVE_ENV_KEYS if x.startswith("HTTP_"))
-
-DEFAULT_HTTP_METHODS_TO_CAPTURE = (
-    "CONNECT",
-    "DELETE",
-    "GET",
-    # "HEAD",  # do not capture HEAD requests by default
-    # "OPTIONS",  # do not capture OPTIONS requests by default
-    "PATCH",
-    "POST",
-    "PUT",
-    "TRACE",
-)
-
-
-def request_body_within_bounds(client, content_length):
-    # type: (Optional[sentry_sdk.client.BaseClient], int) -> bool
-    if client is None:
-        return False
-
-    bodies = client.options["max_request_body_size"]
-    return not (
-        bodies == "never"
-        or (bodies == "small" and content_length > 10**3)
-        or (bodies == "medium" and content_length > 10**4)
-    )
-
-
-class RequestExtractor:
-    """
-    Base class for request extraction.
-    """
-
-    # It does not make sense to make this class an ABC because it is not used
-    # for typing, only so that child classes can inherit common methods from
-    # it. Only some child classes implement all methods that raise
-    # NotImplementedError in this class.
-
-    def __init__(self, request):
-        # type: (Any) -> None
-        self.request = request
-
-    def extract_into_event(self, event):
-        # type: (Event) -> None
-        client = sentry_sdk_alpha.get_client()
-        if not client.is_active():
-            return
-
-        data = None  # type: Optional[Union[AnnotatedValue, Dict[str, Any]]]
-
-        content_length = self.content_length()
-        request_info = event.get("request", {})
-
-        if should_send_default_pii():
-            request_info["cookies"] = dict(self.cookies())
-
-        if not request_body_within_bounds(client, content_length):
-            data = AnnotatedValue.removed_because_over_size_limit()
-        else:
-            # First read the raw body data
-            # It is important to read this first because if it is Django
-            # it will cache the body and then we can read the cached version
-            # again in parsed_body() (or json() or wherever).
-            raw_data = None
-            try:
-                raw_data = self.raw_data()
-            except (RawPostDataException, ValueError):
-                # If DjangoRestFramework is used it already read the body for us
-                # so reading it here will fail. We can ignore this.
-                pass
-
-            parsed_body = self.parsed_body()
-            if parsed_body is not None:
-                data = parsed_body
-            elif raw_data:
-                data = AnnotatedValue.removed_because_raw_data()
-            else:
-                data = None
-
-        if data is not None:
-            request_info["data"] = data
-
-        event["request"] = deepcopy(request_info)
-
-    def content_length(self):
-        # type: () -> int
-        try:
-            return int(self.env().get("CONTENT_LENGTH", 0))
-        except ValueError:
-            return 0
-
-    def cookies(self):
-        # type: () -> MutableMapping[str, Any]
-        raise NotImplementedError()
-
-    def raw_data(self):
-        # type: () -> Optional[Union[str, bytes]]
-        raise NotImplementedError()
-
-    def form(self):
-        # type: () -> Optional[Dict[str, Any]]
-        raise NotImplementedError()
-
-    def parsed_body(self):
-        # type: () -> Optional[Dict[str, Any]]
-        try:
-            form = self.form()
-        except Exception:
-            form = None
-        try:
-            files = self.files()
-        except Exception:
-            files = None
-
-        if form or files:
-            data = {}
-            if form:
-                data = dict(form.items())
-            if files:
-                for key in files.keys():
-                    data[key] = AnnotatedValue.removed_because_raw_data()
-
-            return data
-
-        return self.json()
-
-    def is_json(self):
-        # type: () -> bool
-        return _is_json_content_type(self.env().get("CONTENT_TYPE"))
-
-    def json(self):
-        # type: () -> Optional[Any]
-        try:
-            if not self.is_json():
-                return None
-
-            try:
-                raw_data = self.raw_data()
-            except (RawPostDataException, ValueError):
-                # The body might have already been read, in which case this will
-                # fail
-                raw_data = None
-
-            if raw_data is None:
-                return None
-
-            if isinstance(raw_data, str):
-                return json.loads(raw_data)
-            else:
-                return json.loads(raw_data.decode("utf-8"))
-        except ValueError:
-            pass
-
-        return None
-
-    def files(self):
-        # type: () -> Optional[Dict[str, Any]]
-        raise NotImplementedError()
-
-    def size_of_file(self, file):
-        # type: (Any) -> int
-        raise NotImplementedError()
-
-    def env(self):
-        # type: () -> Dict[str, Any]
-        raise NotImplementedError()
-
-
-def _is_json_content_type(ct):
-    # type: (Optional[str]) -> bool
-    mt = (ct or "").split(";", 1)[0]
-    return mt == "application/json" or (mt.startswith("application/")) and mt.endswith("+json")
-
-
-def _filter_headers(headers):
-    # type: (Mapping[str, str]) -> Mapping[str, Union[AnnotatedValue, str]]
-    if should_send_default_pii():
-        return headers
-
-    return {
-        k: (
-            v
-            if k.upper().replace("-", "_") not in SENSITIVE_HEADERS
-            else AnnotatedValue.removed_because_over_size_limit()
-        )
-        for k, v in headers.items()
-    }
-
-
-def _request_headers_to_span_attributes(headers):
-    # type: (dict[str, str]) -> dict[str, str]
-    attributes = {}
-
-    headers = _filter_headers(headers)
-
-    for header, value in headers.items():
-        if isinstance(value, AnnotatedValue):
-            value = SENSITIVE_DATA_SUBSTITUTE
-        attributes[f"http.request.header.{header.lower()}"] = value
-
-    return attributes
diff --git a/src/sentry_sdk_alpha/integrations/aiohttp.py b/src/sentry_sdk_alpha/integrations/aiohttp.py
deleted file mode 100644
index 487f7a46a1cc87..00000000000000
--- a/src/sentry_sdk_alpha/integrations/aiohttp.py
+++ /dev/null
@@ -1,395 +0,0 @@
-import sys
-import weakref
-from functools import wraps
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import (
-    BAGGAGE_HEADER_NAME,
-    OP,
-    SOURCE_FOR_STYLE,
-    SPANDATA,
-    SPANSTATUS,
-    TransactionSource,
-)
-from sentry_sdk_alpha.integrations import (
-    _DEFAULT_FAILED_REQUEST_STATUS_CODES,
-    DidNotEnable,
-    Integration,
-    _check_minimum_version,
-)
-from sentry_sdk_alpha.integrations._wsgi_common import (
-    _filter_headers,
-    _request_headers_to_span_attributes,
-    request_body_within_bounds,
-)
-from sentry_sdk_alpha.integrations.logging import ignore_logger
-from sentry_sdk_alpha.sessions import track_session
-from sentry_sdk_alpha.tracing_utils import should_propagate_trace
-from sentry_sdk_alpha.utils import (
-    CONTEXTVARS_ERROR_MESSAGE,
-    HAS_REAL_CONTEXTVARS,
-    SENSITIVE_DATA_SUBSTITUTE,
-    AnnotatedValue,
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    event_from_exception,
-    http_client_status_to_breadcrumb_level,
-    logger,
-    parse_url,
-    parse_version,
-    reraise,
-    set_thread_info_from_span,
-    transaction_from_function,
-)
-
-try:
-    import asyncio
-
-    from aiohttp import ClientSession, TraceConfig
-    from aiohttp import __version__ as AIOHTTP_VERSION
-    from aiohttp.web import Application, HTTPException, UrlDispatcher
-except ImportError:
-    raise DidNotEnable("AIOHTTP not installed")
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from collections.abc import Set
-    from types import SimpleNamespace
-    from typing import Any, Optional, Tuple, Union
-
-    from aiohttp import TraceRequestEndParams, TraceRequestStartParams
-    from aiohttp.web_request import Request
-    from aiohttp.web_urldispatcher import UrlMappingMatchInfo
-
-    from sentry_sdk_alpha._types import Event, EventProcessor
-    from sentry_sdk_alpha.utils import ExcInfo
-
-
-TRANSACTION_STYLE_VALUES = ("handler_name", "method_and_path_pattern")
-
-REQUEST_PROPERTY_TO_ATTRIBUTE = {
-    "query_string": "url.query",
-    "method": "http.request.method",
-    "scheme": "url.scheme",
-    "path": "url.path",
-}
-
-
-class AioHttpIntegration(Integration):
-    identifier = "aiohttp"
-    origin = f"auto.http.{identifier}"
-
-    def __init__(
-        self,
-        transaction_style="handler_name",  # type: str
-        *,
-        failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES,  # type: Set[int]
-    ):
-        # type: (...) -> None
-        if transaction_style not in TRANSACTION_STYLE_VALUES:
-            raise ValueError(
-                "Invalid value for transaction_style: %s (must be in %s)"
-                % (transaction_style, TRANSACTION_STYLE_VALUES)
-            )
-        self.transaction_style = transaction_style
-        self._failed_request_status_codes = failed_request_status_codes
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-
-        version = parse_version(AIOHTTP_VERSION)
-        _check_minimum_version(AioHttpIntegration, version)
-
-        if not HAS_REAL_CONTEXTVARS:
-            # We better have contextvars or we're going to leak state between
-            # requests.
-            raise DidNotEnable(
-                "The aiohttp integration for Sentry requires Python 3.7+ "
-                " or aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE
-            )
-
-        ignore_logger("aiohttp.server")
-
-        old_handle = Application._handle
-
-        async def sentry_app_handle(self, request, *args, **kwargs):
-            # type: (Any, Request, *Any, **Any) -> Any
-            integration = sentry_sdk_alpha.get_client().get_integration(AioHttpIntegration)
-            if integration is None:
-                return await old_handle(self, request, *args, **kwargs)
-
-            weak_request = weakref.ref(request)
-
-            with sentry_sdk_alpha.isolation_scope() as scope:
-                with track_session(scope, session_mode="request"):
-                    # Scope data will not leak between requests because aiohttp
-                    # create a task to wrap each request.
-                    scope.generate_propagation_context()
-                    scope.clear_breadcrumbs()
-                    scope.add_event_processor(_make_request_processor(weak_request))
-
-                    headers = dict(request.headers)
-                    with sentry_sdk_alpha.continue_trace(headers):
-                        with sentry_sdk_alpha.start_span(
-                            op=OP.HTTP_SERVER,
-                            # If this transaction name makes it to the UI, AIOHTTP's
-                            # URL resolver did not find a route or died trying.
-                            name="generic AIOHTTP request",
-                            source=TransactionSource.ROUTE,
-                            origin=AioHttpIntegration.origin,
-                            attributes=_prepopulate_attributes(request),
-                        ) as span:
-                            try:
-                                response = await old_handle(self, request)
-                            except HTTPException as e:
-                                span.set_http_status(e.status_code)
-
-                                if e.status_code in integration._failed_request_status_codes:
-                                    _capture_exception()
-
-                                raise
-                            except (asyncio.CancelledError, ConnectionResetError):
-                                span.set_status(SPANSTATUS.CANCELLED)
-                                raise
-                            except Exception:
-                                # This will probably map to a 500 but seems like we
-                                # have no way to tell. Do not set span status.
-                                reraise(*_capture_exception())
-
-                            span.set_http_status(response.status)
-                            return response
-
-        Application._handle = sentry_app_handle
-
-        old_urldispatcher_resolve = UrlDispatcher.resolve
-
-        @wraps(old_urldispatcher_resolve)
-        async def sentry_urldispatcher_resolve(self, request):
-            # type: (UrlDispatcher, Request) -> UrlMappingMatchInfo
-            rv = await old_urldispatcher_resolve(self, request)
-
-            integration = sentry_sdk_alpha.get_client().get_integration(AioHttpIntegration)
-            if integration is None:
-                return rv
-
-            name = None
-
-            try:
-                if integration.transaction_style == "handler_name":
-                    name = transaction_from_function(rv.handler)
-                elif integration.transaction_style == "method_and_path_pattern":
-                    route_info = rv.get_info()
-                    pattern = route_info.get("path") or route_info.get("formatter")
-                    name = f"{request.method} {pattern}"
-            except Exception:
-                pass
-
-            if name is not None:
-                sentry_sdk_alpha.get_current_scope().set_transaction_name(
-                    name,
-                    source=SOURCE_FOR_STYLE[integration.transaction_style],
-                )
-
-            return rv
-
-        UrlDispatcher.resolve = sentry_urldispatcher_resolve
-
-        old_client_session_init = ClientSession.__init__
-
-        @ensure_integration_enabled(AioHttpIntegration, old_client_session_init)
-        def init(*args, **kwargs):
-            # type: (Any, Any) -> None
-            client_trace_configs = list(kwargs.get("trace_configs") or ())
-            trace_config = create_trace_config()
-            client_trace_configs.append(trace_config)
-
-            kwargs["trace_configs"] = client_trace_configs
-            return old_client_session_init(*args, **kwargs)
-
-        ClientSession.__init__ = init
-
-
-def create_trace_config():
-    # type: () -> TraceConfig
-
-    async def on_request_start(session, trace_config_ctx, params):
-        # type: (ClientSession, SimpleNamespace, TraceRequestStartParams) -> None
-        if sentry_sdk_alpha.get_client().get_integration(AioHttpIntegration) is None:
-            return
-
-        method = params.method.upper()
-
-        parsed_url = None
-        with capture_internal_exceptions():
-            parsed_url = parse_url(str(params.url), sanitize=False)
-
-        span = sentry_sdk_alpha.start_span(
-            op=OP.HTTP_CLIENT,
-            name="{} {}".format(
-                method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE
-            ),
-            origin=AioHttpIntegration.origin,
-            only_if_parent=True,
-        )
-
-        data = {
-            SPANDATA.HTTP_METHOD: method,
-        }
-        set_thread_info_from_span(data, span)
-
-        if parsed_url is not None:
-            data["url"] = parsed_url.url
-            data[SPANDATA.HTTP_QUERY] = parsed_url.query
-            data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment
-
-        for key, value in data.items():
-            span.set_attribute(key, value)
-
-        client = sentry_sdk_alpha.get_client()
-
-        if should_propagate_trace(client, str(params.url)):
-            for (
-                key,
-                value,
-            ) in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers(span=span):
-                logger.debug(
-                    "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
-                        key=key, value=value, url=params.url
-                    )
-                )
-                if key == BAGGAGE_HEADER_NAME and params.headers.get(BAGGAGE_HEADER_NAME):
-                    # do not overwrite any existing baggage, just append to it
-                    params.headers[key] += "," + value
-                else:
-                    params.headers[key] = value
-
-        trace_config_ctx.span = span
-        trace_config_ctx.span_data = data
-
-    async def on_request_end(session, trace_config_ctx, params):
-        # type: (ClientSession, SimpleNamespace, TraceRequestEndParams) -> None
-        if trace_config_ctx.span is None:
-            return
-
-        span_data = trace_config_ctx.span_data or {}
-        status_code = int(params.response.status)
-        span_data[SPANDATA.HTTP_STATUS_CODE] = status_code
-        span_data["reason"] = params.response.reason
-
-        sentry_sdk_alpha.add_breadcrumb(
-            type="http",
-            category="httplib",
-            data=span_data,
-            level=http_client_status_to_breadcrumb_level(status_code),
-        )
-
-        span = trace_config_ctx.span
-        span.set_http_status(int(params.response.status))
-        span.set_attribute("reason", params.response.reason)
-        span.finish()
-
-    trace_config = TraceConfig()
-
-    trace_config.on_request_start.append(on_request_start)
-    trace_config.on_request_end.append(on_request_end)
-
-    return trace_config
-
-
-def _make_request_processor(weak_request):
-    # type: (weakref.ReferenceType[Request]) -> EventProcessor
-    def aiohttp_processor(
-        event,  # type: Event
-        hint,  # type: dict[str, Tuple[type, BaseException, Any]]
-    ):
-        # type: (...) -> Event
-        request = weak_request()
-        if request is None:
-            return event
-
-        with capture_internal_exceptions():
-            request_info = event.setdefault("request", {})
-
-            request_info["url"] = "{}://{}{}".format(
-                request.scheme,
-                request.host,
-                request.path,
-            )
-
-            request_info["query_string"] = request.query_string
-            request_info["method"] = request.method
-            request_info["env"] = {"REMOTE_ADDR": request.remote}
-            request_info["headers"] = _filter_headers(dict(request.headers))
-
-            # Just attach raw data here if it is within bounds, if available.
-            # Unfortunately there's no way to get structured data from aiohttp
-            # without awaiting on some coroutine.
-            request_info["data"] = get_aiohttp_request_data(request)
-
-        return event
-
-    return aiohttp_processor
-
-
-def _capture_exception():
-    # type: () -> ExcInfo
-    exc_info = sys.exc_info()
-    event, hint = event_from_exception(
-        exc_info,
-        client_options=sentry_sdk_alpha.get_client().options,
-        mechanism={"type": "aiohttp", "handled": False},
-    )
-    sentry_sdk_alpha.capture_event(event, hint=hint)
-    return exc_info
-
-
-BODY_NOT_READ_MESSAGE = "[Can't show request body due to implementation details.]"
-
-
-def get_aiohttp_request_data(request):
-    # type: (Request) -> Union[Optional[str], AnnotatedValue]
-    bytes_body = request._read_bytes
-
-    if bytes_body is not None:
-        # we have body to show
-        if not request_body_within_bounds(sentry_sdk_alpha.get_client(), len(bytes_body)):
-            return AnnotatedValue.removed_because_over_size_limit()
-
-        encoding = request.charset or "utf-8"
-        return bytes_body.decode(encoding, "replace")
-
-    if request.can_read_body:
-        # body exists but we can't show it
-        return BODY_NOT_READ_MESSAGE
-
-    # request has no body
-    return None
-
-
-def _prepopulate_attributes(request):
-    # type: (Request) -> dict[str, Any]
-    """Construct initial span attributes that can be used in traces sampler."""
-    attributes = {}
-
-    for prop, attr in REQUEST_PROPERTY_TO_ATTRIBUTE.items():
-        if getattr(request, prop, None) is not None:
-            attributes[attr] = getattr(request, prop)
-
-    if getattr(request, "host", None) is not None:
-        try:
-            host, port = request.host.split(":")
-            attributes["server.address"] = host
-            attributes["server.port"] = port
-        except ValueError:
-            attributes["server.address"] = request.host
-
-    with capture_internal_exceptions():
-        url = f"{request.scheme}://{request.host}{request.path}"  # noqa: E231
-        if request.query_string:
-            attributes["url.full"] = f"{url}?{request.query_string}"
-
-    attributes.update(_request_headers_to_span_attributes(dict(request.headers)))
-
-    return attributes
diff --git a/src/sentry_sdk_alpha/integrations/anthropic.py b/src/sentry_sdk_alpha/integrations/anthropic.py
deleted file mode 100644
index c95807d509dd0b..00000000000000
--- a/src/sentry_sdk_alpha/integrations/anthropic.py
+++ /dev/null
@@ -1,283 +0,0 @@
-from functools import wraps
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.ai.monitoring import record_token_usage
-from sentry_sdk_alpha.consts import OP, SPANDATA
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    event_from_exception,
-    package_version,
-)
-
-try:
-    from anthropic.resources import AsyncMessages, Messages
-
-    if TYPE_CHECKING:
-        from anthropic.types import MessageStreamEvent
-except ImportError:
-    raise DidNotEnable("Anthropic not installed")
-
-if TYPE_CHECKING:
-    from collections.abc import AsyncIterator, Iterator
-    from typing import Any
-
-    from sentry_sdk_alpha.tracing import Span
-
-
-class AnthropicIntegration(Integration):
-    identifier = "anthropic"
-    origin = f"auto.ai.{identifier}"
-
-    def __init__(self, include_prompts=True):
-        # type: (AnthropicIntegration, bool) -> None
-        self.include_prompts = include_prompts
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        version = package_version("anthropic")
-        _check_minimum_version(AnthropicIntegration, version)
-
-        Messages.create = _wrap_message_create(Messages.create)
-        AsyncMessages.create = _wrap_message_create_async(AsyncMessages.create)
-
-
-def _capture_exception(exc):
-    # type: (Any) -> None
-    event, hint = event_from_exception(
-        exc,
-        client_options=sentry_sdk_alpha.get_client().options,
-        mechanism={"type": "anthropic", "handled": False},
-    )
-    sentry_sdk_alpha.capture_event(event, hint=hint)
-
-
-def _calculate_token_usage(result, span):
-    # type: (Messages, Span) -> None
-    input_tokens = 0
-    output_tokens = 0
-    if hasattr(result, "usage"):
-        usage = result.usage
-        if hasattr(usage, "input_tokens") and isinstance(usage.input_tokens, int):
-            input_tokens = usage.input_tokens
-        if hasattr(usage, "output_tokens") and isinstance(usage.output_tokens, int):
-            output_tokens = usage.output_tokens
-
-    total_tokens = input_tokens + output_tokens
-    record_token_usage(span, input_tokens, output_tokens, total_tokens)
-
-
-def _get_responses(content):
-    # type: (list[Any]) -> list[dict[str, Any]]
-    """
-    Get JSON of a Anthropic responses.
-    """
-    responses = []
-    for item in content:
-        if hasattr(item, "text"):
-            responses.append(
-                {
-                    "type": item.type,
-                    "text": item.text,
-                }
-            )
-    return responses
-
-
-def _collect_ai_data(event, input_tokens, output_tokens, content_blocks):
-    # type: (MessageStreamEvent, int, int, list[str]) -> tuple[int, int, list[str]]
-    """
-    Count token usage and collect content blocks from the AI streaming response.
-    """
-    with capture_internal_exceptions():
-        if hasattr(event, "type"):
-            if event.type == "message_start":
-                usage = event.message.usage
-                input_tokens += usage.input_tokens
-                output_tokens += usage.output_tokens
-            elif event.type == "content_block_start":
-                pass
-            elif event.type == "content_block_delta":
-                if hasattr(event.delta, "text"):
-                    content_blocks.append(event.delta.text)
-                elif hasattr(event.delta, "partial_json"):
-                    content_blocks.append(event.delta.partial_json)
-            elif event.type == "content_block_stop":
-                pass
-            elif event.type == "message_delta":
-                output_tokens += event.usage.output_tokens
-
-    return input_tokens, output_tokens, content_blocks
-
-
-def _add_ai_data_to_span(span, integration, input_tokens, output_tokens, content_blocks):
-    # type: (Span, AnthropicIntegration, int, int, list[str]) -> None
-    """
-    Add token usage and content blocks from the AI streaming response to the span.
-    """
-    with capture_internal_exceptions():
-        if should_send_default_pii() and integration.include_prompts:
-            complete_message = "".join(content_blocks)
-            span.set_attribute(
-                SPANDATA.AI_RESPONSES,
-                [{"type": "text", "text": complete_message}],
-            )
-        total_tokens = input_tokens + output_tokens
-        record_token_usage(span, input_tokens, output_tokens, total_tokens)
-        span.set_attribute(SPANDATA.AI_STREAMING, True)
-
-
-def _sentry_patched_create_common(f, *args, **kwargs):
-    # type: (Any, *Any, **Any) -> Any
-    integration = kwargs.pop("integration")
-    if integration is None:
-        return f(*args, **kwargs)
-
-    if "messages" not in kwargs:
-        return f(*args, **kwargs)
-
-    try:
-        iter(kwargs["messages"])
-    except TypeError:
-        return f(*args, **kwargs)
-
-    span = sentry_sdk_alpha.start_span(
-        op=OP.ANTHROPIC_MESSAGES_CREATE,
-        description="Anthropic messages create",
-        origin=AnthropicIntegration.origin,
-        only_if_parent=True,
-    )
-    span.__enter__()
-
-    result = yield f, args, kwargs
-
-    # add data to span and finish it
-    messages = list(kwargs["messages"])
-    model = kwargs.get("model")
-
-    with capture_internal_exceptions():
-        span.set_attribute(SPANDATA.AI_MODEL_ID, model)
-        span.set_attribute(SPANDATA.AI_STREAMING, False)
-
-        if should_send_default_pii() and integration.include_prompts:
-            span.set_attribute(SPANDATA.AI_INPUT_MESSAGES, messages)
-
-        if hasattr(result, "content"):
-            if should_send_default_pii() and integration.include_prompts:
-                span.set_attribute(SPANDATA.AI_RESPONSES, _get_responses(result.content))
-            _calculate_token_usage(result, span)
-            span.__exit__(None, None, None)
-
-        # Streaming response
-        elif hasattr(result, "_iterator"):
-            old_iterator = result._iterator
-
-            def new_iterator():
-                # type: () -> Iterator[MessageStreamEvent]
-                input_tokens = 0
-                output_tokens = 0
-                content_blocks = []  # type: list[str]
-
-                for event in old_iterator:
-                    input_tokens, output_tokens, content_blocks = _collect_ai_data(
-                        event, input_tokens, output_tokens, content_blocks
-                    )
-                    yield event
-
-                _add_ai_data_to_span(span, integration, input_tokens, output_tokens, content_blocks)
-                span.__exit__(None, None, None)
-
-            async def new_iterator_async():
-                # type: () -> AsyncIterator[MessageStreamEvent]
-                input_tokens = 0
-                output_tokens = 0
-                content_blocks = []  # type: list[str]
-
-                async for event in old_iterator:
-                    input_tokens, output_tokens, content_blocks = _collect_ai_data(
-                        event, input_tokens, output_tokens, content_blocks
-                    )
-                    yield event
-
-                _add_ai_data_to_span(span, integration, input_tokens, output_tokens, content_blocks)
-                span.__exit__(None, None, None)
-
-            if str(type(result._iterator)) == "<class 'async_generator'>":
-                result._iterator = new_iterator_async()
-            else:
-                result._iterator = new_iterator()
-
-        else:
-            span.set_attribute("unknown_response", True)
-            span.__exit__(None, None, None)
-
-    return result
-
-
-def _wrap_message_create(f):
-    # type: (Any) -> Any
-    def _execute_sync(f, *args, **kwargs):
-        # type: (Any, *Any, **Any) -> Any
-        gen = _sentry_patched_create_common(f, *args, **kwargs)
-
-        try:
-            f, args, kwargs = next(gen)
-        except StopIteration as e:
-            return e.value
-
-        try:
-            try:
-                result = f(*args, **kwargs)
-            except Exception as exc:
-                _capture_exception(exc)
-                raise exc from None
-
-            return gen.send(result)
-        except StopIteration as e:
-            return e.value
-
-    @wraps(f)
-    def _sentry_patched_create_sync(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        integration = sentry_sdk_alpha.get_client().get_integration(AnthropicIntegration)
-        kwargs["integration"] = integration
-
-        return _execute_sync(f, *args, **kwargs)
-
-    return _sentry_patched_create_sync
-
-
-def _wrap_message_create_async(f):
-    # type: (Any) -> Any
-    async def _execute_async(f, *args, **kwargs):
-        # type: (Any, *Any, **Any) -> Any
-        gen = _sentry_patched_create_common(f, *args, **kwargs)
-
-        try:
-            f, args, kwargs = next(gen)
-        except StopIteration as e:
-            return await e.value
-
-        try:
-            try:
-                result = await f(*args, **kwargs)
-            except Exception as exc:
-                _capture_exception(exc)
-                raise exc from None
-
-            return gen.send(result)
-        except StopIteration as e:
-            return e.value
-
-    @wraps(f)
-    async def _sentry_patched_create_async(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        integration = sentry_sdk_alpha.get_client().get_integration(AnthropicIntegration)
-        kwargs["integration"] = integration
-
-        return await _execute_async(f, *args, **kwargs)
-
-    return _sentry_patched_create_async
diff --git a/src/sentry_sdk_alpha/integrations/argv.py b/src/sentry_sdk_alpha/integrations/argv.py
deleted file mode 100644
index c505e05e24500c..00000000000000
--- a/src/sentry_sdk_alpha/integrations/argv.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import sys
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.scope import add_global_event_processor
-
-if TYPE_CHECKING:
-    from typing import Optional
-
-    from sentry_sdk_alpha._types import Event, Hint
-
-
-class ArgvIntegration(Integration):
-    identifier = "argv"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        @add_global_event_processor
-        def processor(event, hint):
-            # type: (Event, Optional[Hint]) -> Optional[Event]
-            if sentry_sdk_alpha.get_client().get_integration(ArgvIntegration) is not None:
-                extra = event.setdefault("extra", {})
-                # If some event processor decided to set extra to e.g. an
-                # `int`, don't crash. Not here.
-                if isinstance(extra, dict):
-                    extra["sys.argv"] = sys.argv
-
-            return event
diff --git a/src/sentry_sdk_alpha/integrations/ariadne.py b/src/sentry_sdk_alpha/integrations/ariadne.py
deleted file mode 100644
index 3ee8db08b90088..00000000000000
--- a/src/sentry_sdk_alpha/integrations/ariadne.py
+++ /dev/null
@@ -1,166 +0,0 @@
-from importlib import import_module
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha import capture_event, get_client
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
-from sentry_sdk_alpha.integrations._wsgi_common import request_body_within_bounds
-from sentry_sdk_alpha.integrations.logging import ignore_logger
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    event_from_exception,
-    package_version,
-)
-
-try:
-    # importing like this is necessary due to name shadowing in ariadne
-    # (ariadne.graphql is also a function)
-    ariadne_graphql = import_module("ariadne.graphql")
-except ImportError:
-    raise DidNotEnable("ariadne is not installed")
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from typing import Any, Dict, List, Optional
-
-    from ariadne.types import (  # type: ignore
-        GraphQLError,
-        GraphQLResult,
-        GraphQLSchema,
-        QueryParser,
-    )
-    from graphql.language.ast import DocumentNode
-
-    from sentry_sdk_alpha._types import Event, EventProcessor
-
-
-class AriadneIntegration(Integration):
-    identifier = "ariadne"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        version = package_version("ariadne")
-        _check_minimum_version(AriadneIntegration, version)
-
-        ignore_logger("ariadne")
-
-        _patch_graphql()
-
-
-def _patch_graphql():
-    # type: () -> None
-    old_parse_query = ariadne_graphql.parse_query
-    old_handle_errors = ariadne_graphql.handle_graphql_errors
-    old_handle_query_result = ariadne_graphql.handle_query_result
-
-    @ensure_integration_enabled(AriadneIntegration, old_parse_query)
-    def _sentry_patched_parse_query(context_value, query_parser, data):
-        # type: (Optional[Any], Optional[QueryParser], Any) -> DocumentNode
-        event_processor = _make_request_event_processor(data)
-        sentry_sdk_alpha.get_isolation_scope().add_event_processor(event_processor)
-
-        result = old_parse_query(context_value, query_parser, data)
-        return result
-
-    @ensure_integration_enabled(AriadneIntegration, old_handle_errors)
-    def _sentry_patched_handle_graphql_errors(errors, *args, **kwargs):
-        # type: (List[GraphQLError], Any, Any) -> GraphQLResult
-        result = old_handle_errors(errors, *args, **kwargs)
-
-        event_processor = _make_response_event_processor(result[1])
-        sentry_sdk_alpha.get_isolation_scope().add_event_processor(event_processor)
-
-        client = get_client()
-        if client.is_active():
-            with capture_internal_exceptions():
-                for error in errors:
-                    event, hint = event_from_exception(
-                        error,
-                        client_options=client.options,
-                        mechanism={
-                            "type": AriadneIntegration.identifier,
-                            "handled": False,
-                        },
-                    )
-                    capture_event(event, hint=hint)
-
-        return result
-
-    @ensure_integration_enabled(AriadneIntegration, old_handle_query_result)
-    def _sentry_patched_handle_query_result(result, *args, **kwargs):
-        # type: (Any, Any, Any) -> GraphQLResult
-        query_result = old_handle_query_result(result, *args, **kwargs)
-
-        event_processor = _make_response_event_processor(query_result[1])
-        sentry_sdk_alpha.get_isolation_scope().add_event_processor(event_processor)
-
-        client = get_client()
-        if client.is_active():
-            with capture_internal_exceptions():
-                for error in result.errors or []:
-                    event, hint = event_from_exception(
-                        error,
-                        client_options=client.options,
-                        mechanism={
-                            "type": AriadneIntegration.identifier,
-                            "handled": False,
-                        },
-                    )
-                    capture_event(event, hint=hint)
-
-        return query_result
-
-    ariadne_graphql.parse_query = _sentry_patched_parse_query  # type: ignore
-    ariadne_graphql.handle_graphql_errors = _sentry_patched_handle_graphql_errors  # type: ignore
-    ariadne_graphql.handle_query_result = _sentry_patched_handle_query_result  # type: ignore
-
-
-def _make_request_event_processor(data):
-    # type: (GraphQLSchema) -> EventProcessor
-    """Add request data and api_target to events."""
-
-    def inner(event, hint):
-        # type: (Event, dict[str, Any]) -> Event
-        if not isinstance(data, dict):
-            return event
-
-        with capture_internal_exceptions():
-            try:
-                content_length = int((data.get("headers") or {}).get("Content-Length", 0))
-            except (TypeError, ValueError):
-                return event
-
-            if should_send_default_pii() and request_body_within_bounds(
-                get_client(), content_length
-            ):
-                request_info = event.setdefault("request", {})
-                request_info["api_target"] = "graphql"
-                request_info["data"] = data
-
-            elif event.get("request", {}).get("data"):
-                del event["request"]["data"]
-
-        return event
-
-    return inner
-
-
-def _make_response_event_processor(response):
-    # type: (Dict[str, Any]) -> EventProcessor
-    """Add response data to the event's response context."""
-
-    def inner(event, hint):
-        # type: (Event, dict[str, Any]) -> Event
-        with capture_internal_exceptions():
-            if should_send_default_pii() and response.get("errors"):
-                contexts = event.setdefault("contexts", {})
-                contexts["response"] = {
-                    "data": response,
-                }
-
-        return event
-
-    return inner
diff --git a/src/sentry_sdk_alpha/integrations/arq.py b/src/sentry_sdk_alpha/integrations/arq.py
deleted file mode 100644
index 6cbc85a53a63e5..00000000000000
--- a/src/sentry_sdk_alpha/integrations/arq.py
+++ /dev/null
@@ -1,247 +0,0 @@
-import sys
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP, SPANSTATUS
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
-from sentry_sdk_alpha.integrations.logging import ignore_logger
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.tracing import TransactionSource
-from sentry_sdk_alpha.utils import (
-    SENSITIVE_DATA_SUBSTITUTE,
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    event_from_exception,
-    parse_version,
-    reraise,
-)
-
-try:
-    import arq.worker
-    from arq.connections import ArqRedis
-    from arq.version import VERSION as ARQ_VERSION
-    from arq.worker import JobExecutionFailed, Retry, RetryJob, Worker
-except ImportError:
-    raise DidNotEnable("Arq is not installed")
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from typing import Any, Dict, Optional, Union
-
-    from arq.cron import CronJob
-    from arq.jobs import Job
-    from arq.typing import WorkerCoroutine
-    from arq.worker import Function
-
-    from sentry_sdk_alpha._types import Event, EventProcessor, ExcInfo, Hint
-
-ARQ_CONTROL_FLOW_EXCEPTIONS = (JobExecutionFailed, Retry, RetryJob)
-
-DEFAULT_TRANSACTION_NAME = "unknown arq task"
-
-
-class ArqIntegration(Integration):
-    identifier = "arq"
-    origin = f"auto.queue.{identifier}"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-
-        try:
-            if isinstance(ARQ_VERSION, str):
-                version = parse_version(ARQ_VERSION)
-            else:
-                version = ARQ_VERSION.version[:2]
-
-        except (TypeError, ValueError):
-            version = None
-
-        _check_minimum_version(ArqIntegration, version)
-
-        patch_enqueue_job()
-        patch_run_job()
-        patch_create_worker()
-
-        ignore_logger("arq.worker")
-
-
-def patch_enqueue_job():
-    # type: () -> None
-    old_enqueue_job = ArqRedis.enqueue_job
-    original_kwdefaults = old_enqueue_job.__kwdefaults__
-
-    async def _sentry_enqueue_job(self, function, *args, **kwargs):
-        # type: (ArqRedis, str, *Any, **Any) -> Optional[Job]
-        integration = sentry_sdk_alpha.get_client().get_integration(ArqIntegration)
-        if integration is None:
-            return await old_enqueue_job(self, function, *args, **kwargs)
-
-        with sentry_sdk_alpha.start_span(
-            op=OP.QUEUE_SUBMIT_ARQ,
-            name=function,
-            origin=ArqIntegration.origin,
-            only_if_parent=True,
-        ):
-            return await old_enqueue_job(self, function, *args, **kwargs)
-
-    _sentry_enqueue_job.__kwdefaults__ = original_kwdefaults
-    ArqRedis.enqueue_job = _sentry_enqueue_job
-
-
-def patch_run_job():
-    # type: () -> None
-    old_run_job = Worker.run_job
-
-    async def _sentry_run_job(self, job_id, score):
-        # type: (Worker, str, int) -> None
-        integration = sentry_sdk_alpha.get_client().get_integration(ArqIntegration)
-        if integration is None:
-            return await old_run_job(self, job_id, score)
-
-        with sentry_sdk_alpha.isolation_scope() as scope:
-            scope._name = "arq"
-            scope.set_transaction_name(
-                DEFAULT_TRANSACTION_NAME,
-                source=TransactionSource.TASK,
-            )
-            scope.clear_breadcrumbs()
-
-            with sentry_sdk_alpha.start_span(
-                op=OP.QUEUE_TASK_ARQ,
-                name=DEFAULT_TRANSACTION_NAME,
-                source=TransactionSource.TASK,
-                origin=ArqIntegration.origin,
-            ) as span:
-                return_value = await old_run_job(self, job_id, score)
-
-                if span.status is None:
-                    span.set_status(SPANSTATUS.OK)
-
-                return return_value
-
-    Worker.run_job = _sentry_run_job
-
-
-def _capture_exception(exc_info):
-    # type: (ExcInfo) -> None
-    scope = sentry_sdk_alpha.get_current_scope()
-
-    if scope.root_span is not None:
-        if exc_info[0] in ARQ_CONTROL_FLOW_EXCEPTIONS:
-            scope.root_span.set_status(SPANSTATUS.ABORTED)
-            return
-
-        scope.root_span.set_status(SPANSTATUS.INTERNAL_ERROR)
-
-    event, hint = event_from_exception(
-        exc_info,
-        client_options=sentry_sdk_alpha.get_client().options,
-        mechanism={"type": ArqIntegration.identifier, "handled": False},
-    )
-    sentry_sdk_alpha.capture_event(event, hint=hint)
-
-
-def _make_event_processor(ctx, *args, **kwargs):
-    # type: (Dict[Any, Any], *Any, **Any) -> EventProcessor
-    def event_processor(event, hint):
-        # type: (Event, Hint) -> Optional[Event]
-
-        with capture_internal_exceptions():
-            scope = sentry_sdk_alpha.get_current_scope()
-            if scope.root_span is not None:
-                scope.root_span.name = ctx["job_name"]
-                event["transaction"] = ctx["job_name"]
-
-            tags = event.setdefault("tags", {})
-            tags["arq_task_id"] = ctx["job_id"]
-            tags["arq_task_retry"] = ctx["job_try"] > 1
-            extra = event.setdefault("extra", {})
-            extra["arq-job"] = {
-                "task": ctx["job_name"],
-                "args": (args if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE),
-                "kwargs": (kwargs if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE),
-                "retry": ctx["job_try"],
-            }
-
-        return event
-
-    return event_processor
-
-
-def _wrap_coroutine(name, coroutine):
-    # type: (str, WorkerCoroutine) -> WorkerCoroutine
-
-    async def _sentry_coroutine(ctx, *args, **kwargs):
-        # type: (Dict[Any, Any], *Any, **Any) -> Any
-        integration = sentry_sdk_alpha.get_client().get_integration(ArqIntegration)
-        if integration is None:
-            return await coroutine(ctx, *args, **kwargs)
-
-        sentry_sdk_alpha.get_isolation_scope().add_event_processor(
-            _make_event_processor({**ctx, "job_name": name}, *args, **kwargs)
-        )
-
-        try:
-            result = await coroutine(ctx, *args, **kwargs)
-        except Exception:
-            exc_info = sys.exc_info()
-            _capture_exception(exc_info)
-            reraise(*exc_info)
-
-        return result
-
-    return _sentry_coroutine
-
-
-def patch_create_worker():
-    # type: () -> None
-    old_create_worker = arq.worker.create_worker
-
-    @ensure_integration_enabled(ArqIntegration, old_create_worker)
-    def _sentry_create_worker(*args, **kwargs):
-        # type: (*Any, **Any) -> Worker
-        settings_cls = args[0]
-
-        if isinstance(settings_cls, dict):
-            if "functions" in settings_cls:
-                settings_cls["functions"] = [
-                    _get_arq_function(func) for func in settings_cls.get("functions", [])
-                ]
-            if "cron_jobs" in settings_cls:
-                settings_cls["cron_jobs"] = [
-                    _get_arq_cron_job(cron_job) for cron_job in settings_cls.get("cron_jobs", [])
-                ]
-
-        if hasattr(settings_cls, "functions"):
-            settings_cls.functions = [_get_arq_function(func) for func in settings_cls.functions]
-        if hasattr(settings_cls, "cron_jobs"):
-            settings_cls.cron_jobs = [
-                _get_arq_cron_job(cron_job) for cron_job in settings_cls.cron_jobs
-            ]
-
-        if "functions" in kwargs:
-            kwargs["functions"] = [_get_arq_function(func) for func in kwargs.get("functions", [])]
-        if "cron_jobs" in kwargs:
-            kwargs["cron_jobs"] = [
-                _get_arq_cron_job(cron_job) for cron_job in kwargs.get("cron_jobs", [])
-            ]
-
-        return old_create_worker(*args, **kwargs)
-
-    arq.worker.create_worker = _sentry_create_worker
-
-
-def _get_arq_function(func):
-    # type: (Union[str, Function, WorkerCoroutine]) -> Function
-    arq_func = arq.worker.func(func)
-    arq_func.coroutine = _wrap_coroutine(arq_func.name, arq_func.coroutine)
-
-    return arq_func
-
-
-def _get_arq_cron_job(cron_job):
-    # type: (CronJob) -> CronJob
-    cron_job.coroutine = _wrap_coroutine(cron_job.name, cron_job.coroutine)
-
-    return cron_job
diff --git a/src/sentry_sdk_alpha/integrations/asgi.py b/src/sentry_sdk_alpha/integrations/asgi.py
deleted file mode 100644
index 537e81e070bc11..00000000000000
--- a/src/sentry_sdk_alpha/integrations/asgi.py
+++ /dev/null
@@ -1,344 +0,0 @@
-"""
-An ASGI middleware.
-
-Based on Tom Christie's `sentry-asgi <https://github.com/encode/sentry-asgi>`.
-"""
-
-import asyncio
-import inspect
-from copy import deepcopy
-from functools import partial
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP, SOURCE_FOR_STYLE, TransactionSource
-from sentry_sdk_alpha.integrations._asgi_common import (
-    _get_headers,
-    _get_query,
-    _get_request_data,
-    _get_url,
-)
-from sentry_sdk_alpha.integrations._wsgi_common import (
-    DEFAULT_HTTP_METHODS_TO_CAPTURE,
-    _request_headers_to_span_attributes,
-)
-from sentry_sdk_alpha.sessions import track_session
-from sentry_sdk_alpha.utils import (
-    CONTEXTVARS_ERROR_MESSAGE,
-    HAS_REAL_CONTEXTVARS,
-    ContextVar,
-    _get_installed_modules,
-    capture_internal_exceptions,
-    event_from_exception,
-    logger,
-    transaction_from_function,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Dict, Optional, Tuple
-
-    from sentry_sdk_alpha._types import Event, Hint
-
-
-_asgi_middleware_applied = ContextVar("sentry_asgi_middleware_applied")
-
-_DEFAULT_TRANSACTION_NAME = "generic ASGI request"
-
-TRANSACTION_STYLE_VALUES = ("endpoint", "url")
-
-ASGI_SCOPE_PROPERTY_TO_ATTRIBUTE = {
-    "http_version": "network.protocol.version",
-    "method": "http.request.method",
-    "path": "url.path",
-    "scheme": "url.scheme",
-    "type": "network.protocol.name",
-}
-
-
-def _capture_exception(exc, mechanism_type="asgi"):
-    # type: (Any, str) -> None
-
-    event, hint = event_from_exception(
-        exc,
-        client_options=sentry_sdk_alpha.get_client().options,
-        mechanism={"type": mechanism_type, "handled": False},
-    )
-    sentry_sdk_alpha.capture_event(event, hint=hint)
-
-
-def _looks_like_asgi3(app):
-    # type: (Any) -> bool
-    """
-    Try to figure out if an application object supports ASGI3.
-
-    This is how uvicorn figures out the application version as well.
-    """
-    if inspect.isclass(app):
-        return hasattr(app, "__await__")
-    elif inspect.isfunction(app):
-        return asyncio.iscoroutinefunction(app)
-    else:
-        call = getattr(app, "__call__", None)  # noqa
-        return asyncio.iscoroutinefunction(call)
-
-
-class SentryAsgiMiddleware:
-    __slots__ = (
-        "app",
-        "__call__",
-        "transaction_style",
-        "mechanism_type",
-        "span_origin",
-        "http_methods_to_capture",
-    )
-
-    def __init__(
-        self,
-        app,  # type: Any
-        unsafe_context_data=False,  # type: bool
-        transaction_style="endpoint",  # type: str
-        mechanism_type="asgi",  # type: str
-        span_origin=None,  # type: Optional[str]
-        http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE,  # type: Tuple[str, ...]
-    ):
-        # type: (...) -> None
-        """
-        Instrument an ASGI application with Sentry. Provides HTTP/websocket
-        data to sent events and basic handling for exceptions bubbling up
-        through the middleware.
-
-        :param unsafe_context_data: Disable errors when a proper contextvars installation could not be found. We do not recommend changing this from the default.
-        """
-        if not unsafe_context_data and not HAS_REAL_CONTEXTVARS:
-            # We better have contextvars or we're going to leak state between
-            # requests.
-            raise RuntimeError(
-                "The ASGI middleware for Sentry requires Python 3.7+ "
-                "or the aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE
-            )
-        if transaction_style not in TRANSACTION_STYLE_VALUES:
-            raise ValueError(
-                "Invalid value for transaction_style: %s (must be in %s)"
-                % (transaction_style, TRANSACTION_STYLE_VALUES)
-            )
-
-        asgi_middleware_while_using_starlette_or_fastapi = (
-            mechanism_type == "asgi" and "starlette" in _get_installed_modules()
-        )
-        if asgi_middleware_while_using_starlette_or_fastapi:
-            logger.warning(
-                "The Sentry Python SDK can now automatically support ASGI frameworks like Starlette and FastAPI. "
-                "Please remove 'SentryAsgiMiddleware' from your project. "
-                "See https://docs.sentry.io/platforms/python/guides/asgi/ for more information."
-            )
-
-        self.transaction_style = transaction_style
-        self.mechanism_type = mechanism_type
-        self.span_origin = span_origin
-        self.app = app
-        self.http_methods_to_capture = http_methods_to_capture
-
-        if _looks_like_asgi3(app):
-            self.__call__ = self._run_asgi3  # type: Callable[..., Any]
-        else:
-            self.__call__ = self._run_asgi2
-
-    def _run_asgi2(self, scope):
-        # type: (Any) -> Any
-        async def inner(receive, send):
-            # type: (Any, Any) -> Any
-            return await self._run_app(scope, receive, send, asgi_version=2)
-
-        return inner
-
-    async def _run_asgi3(self, scope, receive, send):
-        # type: (Any, Any, Any) -> Any
-        return await self._run_app(scope, receive, send, asgi_version=3)
-
-    async def _run_original_app(self, scope, receive, send, asgi_version):
-        # type: (Any, Any, Any, Any, int) -> Any
-        try:
-            if asgi_version == 2:
-                return await self.app(scope)(receive, send)
-            else:
-                return await self.app(scope, receive, send)
-
-        except Exception as exc:
-            _capture_exception(exc, mechanism_type=self.mechanism_type)
-            raise exc from None
-
-    async def _run_app(self, scope, receive, send, asgi_version):
-        # type: (Any, Any, Any, Any, int) -> Any
-        is_recursive_asgi_middleware = _asgi_middleware_applied.get(False)
-        is_lifespan = scope["type"] == "lifespan"
-        if is_recursive_asgi_middleware or is_lifespan:
-            return await self._run_original_app(scope, receive, send, asgi_version)
-
-        _asgi_middleware_applied.set(True)
-        try:
-            with sentry_sdk_alpha.isolation_scope() as sentry_scope:
-                (
-                    transaction_name,
-                    transaction_source,
-                ) = self._get_transaction_name_and_source(
-                    self.transaction_style,
-                    scope,
-                )
-                sentry_scope.set_transaction_name(
-                    transaction_name,
-                    source=transaction_source,
-                )
-
-                with track_session(sentry_scope, session_mode="request"):
-                    sentry_scope.clear_breadcrumbs()
-                    sentry_scope._name = "asgi"
-                    processor = partial(self.event_processor, asgi_scope=scope)
-                    sentry_scope.add_event_processor(processor)
-
-                    ty = scope["type"]
-
-                    method = scope.get("method", "").upper()
-                    should_trace = ty == "websocket" or (
-                        ty == "http" and method in self.http_methods_to_capture
-                    )
-                    if not should_trace:
-                        return await self._run_original_app(scope, receive, send, asgi_version)
-
-                    with sentry_sdk_alpha.continue_trace(_get_headers(scope)):
-                        with sentry_sdk_alpha.start_span(
-                            op=(OP.WEBSOCKET_SERVER if ty == "websocket" else OP.HTTP_SERVER),
-                            name=transaction_name,
-                            source=transaction_source,
-                            origin=self.span_origin,
-                            attributes=_prepopulate_attributes(scope),
-                        ) as span:
-                            if span is not None:
-                                logger.debug("[ASGI] Started transaction: %s", span)
-                                span.set_tag("asgi.type", ty)
-
-                            async def _sentry_wrapped_send(event):
-                                # type: (Dict[str, Any]) -> Any
-                                is_http_response = (
-                                    event.get("type") == "http.response.start"
-                                    and span is not None
-                                    and "status" in event
-                                )
-                                if is_http_response:
-                                    span.set_http_status(event["status"])
-
-                                return await send(event)
-
-                            return await self._run_original_app(
-                                scope, receive, _sentry_wrapped_send, asgi_version
-                            )
-        finally:
-            _asgi_middleware_applied.set(False)
-
-    def event_processor(self, event, hint, asgi_scope):
-        # type: (Event, Hint, Any) -> Optional[Event]
-        request_data = event.get("request", {})
-        request_data.update(_get_request_data(asgi_scope))
-        event["request"] = deepcopy(request_data)
-
-        # Only set transaction name if not already set by Starlette or FastAPI (or other frameworks)
-        transaction = event.get("transaction")
-        transaction_source = (event.get("transaction_info") or {}).get("source")
-        already_set = (
-            transaction is not None
-            and transaction != _DEFAULT_TRANSACTION_NAME
-            and transaction_source
-            in [
-                TransactionSource.COMPONENT,
-                TransactionSource.ROUTE,
-                TransactionSource.CUSTOM,
-            ]
-        )
-        if not already_set:
-            name, source = self._get_transaction_name_and_source(self.transaction_style, asgi_scope)
-            event["transaction"] = name
-            event["transaction_info"] = {"source": source}
-
-            logger.debug(
-                "[ASGI] Set transaction name and source in event_processor: '%s' / '%s'",
-                event["transaction"],
-                event["transaction_info"]["source"],
-            )
-
-        return event
-
-    # Helper functions.
-    #
-    # Note: Those functions are not public API. If you want to mutate request
-    # data to your liking it's recommended to use the `before_send` callback
-    # for that.
-
-    def _get_transaction_name_and_source(self, transaction_style, asgi_scope):
-        # type: (SentryAsgiMiddleware, str, Any) -> Tuple[str, str]
-        name = None
-        source = SOURCE_FOR_STYLE[transaction_style]
-        ty = asgi_scope.get("type")
-
-        if transaction_style == "endpoint":
-            endpoint = asgi_scope.get("endpoint")
-            # Webframeworks like Starlette mutate the ASGI env once routing is
-            # done, which is sometime after the request has started. If we have
-            # an endpoint, overwrite our generic transaction name.
-            if endpoint:
-                name = transaction_from_function(endpoint) or ""
-            else:
-                name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None)
-                source = TransactionSource.URL
-
-        elif transaction_style == "url":
-            # FastAPI includes the route object in the scope to let Sentry extract the
-            # path from it for the transaction name
-            route = asgi_scope.get("route")
-            if route:
-                path = getattr(route, "path", None)
-                if path is not None:
-                    name = path
-            else:
-                name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None)
-                source = TransactionSource.URL
-
-        if name is None:
-            name = _DEFAULT_TRANSACTION_NAME
-            source = TransactionSource.ROUTE
-            return name, source
-
-        return name, source
-
-
-def _prepopulate_attributes(scope):
-    # type: (Any) -> dict[str, Any]
-    """Unpack ASGI scope into serializable OTel attributes."""
-    scope = scope or {}
-
-    attributes = {}
-    for attr, key in ASGI_SCOPE_PROPERTY_TO_ATTRIBUTE.items():
-        if scope.get(attr):
-            attributes[key] = scope[attr]
-
-    for attr in ("client", "server"):
-        if scope.get(attr):
-            try:
-                host, port = scope[attr]
-                attributes[f"{attr}.address"] = host
-                if port is not None:
-                    attributes[f"{attr}.port"] = port
-            except Exception:
-                pass
-
-    with capture_internal_exceptions():
-        full_url = _get_url(scope)
-        query = _get_query(scope)
-        if query:
-            attributes["url.query"] = query
-            full_url = f"{full_url}?{query}"
-
-        attributes["url.full"] = full_url
-
-    attributes.update(_request_headers_to_span_attributes(_get_headers(scope)))
-
-    return attributes
diff --git a/src/sentry_sdk_alpha/integrations/asyncio.py b/src/sentry_sdk_alpha/integrations/asyncio.py
deleted file mode 100644
index f7583136ca375f..00000000000000
--- a/src/sentry_sdk_alpha/integrations/asyncio.py
+++ /dev/null
@@ -1,124 +0,0 @@
-import sys
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-from sentry_sdk_alpha.utils import event_from_exception, logger, reraise
-
-try:
-    import asyncio
-    from asyncio.tasks import Task
-except ImportError:
-    raise DidNotEnable("asyncio not available")
-
-from typing import TYPE_CHECKING, cast
-
-if TYPE_CHECKING:
-    from collections.abc import Coroutine
-    from typing import Any
-
-    from sentry_sdk_alpha._types import ExcInfo
-
-
-def get_name(coro):
-    # type: (Any) -> str
-    return (
-        getattr(coro, "__qualname__", None)
-        or getattr(coro, "__name__", None)
-        or "coroutine without __name__"
-    )
-
-
-def patch_asyncio():
-    # type: () -> None
-    orig_task_factory = None
-    try:
-        loop = asyncio.get_running_loop()
-        orig_task_factory = loop.get_task_factory()
-
-        def _sentry_task_factory(loop, coro, **kwargs):
-            # type: (asyncio.AbstractEventLoop, Coroutine[Any, Any, Any], Any) -> asyncio.Future[Any]
-
-            async def _task_with_sentry_span_creation():
-                # type: () -> Any
-                result = None
-
-                with sentry_sdk_alpha.isolation_scope():
-                    with sentry_sdk_alpha.start_span(
-                        op=OP.FUNCTION,
-                        name=get_name(coro),
-                        origin=AsyncioIntegration.origin,
-                        only_if_parent=True,
-                    ):
-                        try:
-                            result = await coro
-                        except Exception:
-                            reraise(*_capture_exception())
-
-                return result
-
-            task = None
-
-            # Trying to use user set task factory (if there is one)
-            if orig_task_factory:
-                task = orig_task_factory(loop, _task_with_sentry_span_creation(), **kwargs)
-
-            if task is None:
-                # The default task factory in `asyncio` does not have its own function
-                # but is just a couple of lines in `asyncio.base_events.create_task()`
-                # Those lines are copied here.
-
-                # WARNING:
-                # If the default behavior of the task creation in asyncio changes,
-                # this will break!
-                task = Task(_task_with_sentry_span_creation(), loop=loop, **kwargs)
-                if task._source_traceback:  # type: ignore
-                    del task._source_traceback[-1]  # type: ignore
-
-            # Set the task name to include the original coroutine's name
-            try:
-                cast("asyncio.Task[Any]", task).set_name(f"{get_name(coro)} (Sentry-wrapped)")
-            except AttributeError:
-                # set_name might not be available in all Python versions
-                pass
-
-            return task
-
-        loop.set_task_factory(_sentry_task_factory)  # type: ignore
-
-    except RuntimeError:
-        # When there is no running loop, we have nothing to patch.
-        logger.warning(
-            "There is no running asyncio loop so there is nothing Sentry can patch. "
-            "Please make sure you call sentry_sdk.init() within a running "
-            "asyncio loop for the AsyncioIntegration to work. "
-            "See https://docs.sentry.io/platforms/python/integrations/asyncio/"
-        )
-
-
-def _capture_exception():
-    # type: () -> ExcInfo
-    exc_info = sys.exc_info()
-
-    client = sentry_sdk_alpha.get_client()
-
-    integration = client.get_integration(AsyncioIntegration)
-    if integration is not None:
-        event, hint = event_from_exception(
-            exc_info,
-            client_options=client.options,
-            mechanism={"type": "asyncio", "handled": False},
-        )
-        sentry_sdk_alpha.capture_event(event, hint=hint)
-
-    return exc_info
-
-
-class AsyncioIntegration(Integration):
-    identifier = "asyncio"
-    origin = f"auto.function.{identifier}"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        patch_asyncio()
diff --git a/src/sentry_sdk_alpha/integrations/asyncpg.py b/src/sentry_sdk_alpha/integrations/asyncpg.py
deleted file mode 100644
index f89956f0e2563b..00000000000000
--- a/src/sentry_sdk_alpha/integrations/asyncpg.py
+++ /dev/null
@@ -1,221 +0,0 @@
-from __future__ import annotations
-
-import contextlib
-from collections.abc import Awaitable, Callable, Iterator
-from typing import Any, Optional, TypeVar
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP, SPANDATA
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
-from sentry_sdk_alpha.tracing import Span
-from sentry_sdk_alpha.tracing_utils import add_query_source, record_sql_queries
-from sentry_sdk_alpha.utils import (
-    _serialize_span_attribute,
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    parse_version,
-)
-
-try:
-    import asyncpg  # type: ignore[import-not-found]
-    from asyncpg.cursor import BaseCursor  # type: ignore
-
-except ImportError:
-    raise DidNotEnable("asyncpg not installed.")
-
-
-class AsyncPGIntegration(Integration):
-    identifier = "asyncpg"
-    origin = f"auto.db.{identifier}"
-    _record_params = False
-
-    def __init__(self, *, record_params: bool = False):
-        AsyncPGIntegration._record_params = record_params
-
-    @staticmethod
-    def setup_once() -> None:
-        # asyncpg.__version__ is a string containing the semantic version in the form of ".."
-        asyncpg_version = parse_version(asyncpg.__version__)
-        _check_minimum_version(AsyncPGIntegration, asyncpg_version)
-
-        asyncpg.Connection.execute = _wrap_execute(
-            asyncpg.Connection.execute,
-        )
-        asyncpg.Connection._execute = _wrap_connection_method(asyncpg.Connection._execute)
-        asyncpg.Connection._executemany = _wrap_connection_method(
-            asyncpg.Connection._executemany, executemany=True
-        )
-        asyncpg.Connection.cursor = _wrap_cursor_creation(asyncpg.Connection.cursor)
-        asyncpg.Connection.prepare = _wrap_connection_method(asyncpg.Connection.prepare)
-        asyncpg.connect_utils._connect_addr = _wrap_connect_addr(
-            asyncpg.connect_utils._connect_addr
-        )
-
-
-T = TypeVar("T")
-
-
-def _wrap_execute(f: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]:
-    async def _inner(*args: Any, **kwargs: Any) -> T:
-        if sentry_sdk_alpha.get_client().get_integration(AsyncPGIntegration) is None:
-            return await f(*args, **kwargs)
-
-        # Avoid recording calls to _execute twice.
-        # Calls to Connection.execute with args also call
-        # Connection._execute, which is recorded separately
-        # args[0] = the connection object, args[1] is the query
-        if len(args) > 2:
-            return await f(*args, **kwargs)
-
-        query = args[1]
-        with record_sql_queries(
-            cursor=None,
-            query=query,
-            params_list=None,
-            paramstyle=None,
-            executemany=False,
-            span_origin=AsyncPGIntegration.origin,
-        ) as span:
-            res = await f(*args, **kwargs)
-
-            with capture_internal_exceptions():
-                add_query_source(span)
-
-        return res
-
-    return _inner
-
-
-SubCursor = TypeVar("SubCursor", bound=BaseCursor)
-
-
-@contextlib.contextmanager
-def _record(
-    cursor: SubCursor | None,
-    query: str,
-    params_list: tuple[Any, ...] | None,
-    *,
-    executemany: bool = False,
-) -> Iterator[Span]:
-    integration = sentry_sdk_alpha.get_client().get_integration(AsyncPGIntegration)
-    if integration is not None and not integration._record_params:
-        params_list = None
-
-    param_style = "pyformat" if params_list else None
-
-    with record_sql_queries(
-        cursor=cursor,
-        query=query,
-        params_list=params_list,
-        paramstyle=param_style,
-        executemany=executemany,
-        record_cursor_repr=cursor is not None,
-        span_origin=AsyncPGIntegration.origin,
-    ) as span:
-        yield span
-
-
-def _wrap_connection_method(
-    f: Callable[..., Awaitable[T]], *, executemany: bool = False
-) -> Callable[..., Awaitable[T]]:
-    async def _inner(*args: Any, **kwargs: Any) -> T:
-        if sentry_sdk_alpha.get_client().get_integration(AsyncPGIntegration) is None:
-            return await f(*args, **kwargs)
-
-        query = args[1]
-        params_list = args[2] if len(args) > 2 else None
-
-        with _record(None, query, params_list, executemany=executemany) as span:
-            data = _get_db_data(conn=args[0])
-            _set_on_span(span, data)
-            res = await f(*args, **kwargs)
-
-        return res
-
-    return _inner
-
-
-def _wrap_cursor_creation(f: Callable[..., T]) -> Callable[..., T]:
-    @ensure_integration_enabled(AsyncPGIntegration, f)
-    def _inner(*args: Any, **kwargs: Any) -> T:  # noqa: N807
-        query = args[1]
-        params_list = args[2] if len(args) > 2 else None
-
-        with _record(
-            None,
-            query,
-            params_list,
-            executemany=False,
-        ) as span:
-            data = _get_db_data(conn=args[0])
-            _set_on_span(span, data)
-            res = f(*args, **kwargs)
-            span.set_attribute("db.cursor", _serialize_span_attribute(res))
-
-        return res
-
-    return _inner
-
-
-def _wrap_connect_addr(f: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]:
-    async def _inner(*args: Any, **kwargs: Any) -> T:
-        if sentry_sdk_alpha.get_client().get_integration(AsyncPGIntegration) is None:
-            return await f(*args, **kwargs)
-
-        with sentry_sdk_alpha.start_span(
-            op=OP.DB,
-            name="connect",
-            origin=AsyncPGIntegration.origin,
-            only_if_parent=True,
-        ) as span:
-            data = _get_db_data(
-                addr=kwargs.get("addr"),
-                database=kwargs["params"].database,
-                user=kwargs["params"].user,
-            )
-            _set_on_span(span, data)
-
-            with capture_internal_exceptions():
-                sentry_sdk_alpha.add_breadcrumb(message="connect", category="query", data=data)
-
-            res = await f(*args, **kwargs)
-
-        return res
-
-    return _inner
-
-
-def _get_db_data(
-    conn: Any = None,
-    addr: tuple[str, ...] | None = None,
-    database: str | None = None,
-    user: str | None = None,
-) -> dict[str, str]:
-    if conn is not None:
-        addr = conn._addr
-        database = conn._params.database
-        user = conn._params.user
-
-    data = {
-        SPANDATA.DB_SYSTEM: "postgresql",
-    }
-
-    if addr:
-        try:
-            data[SPANDATA.SERVER_ADDRESS] = addr[0]
-            data[SPANDATA.SERVER_PORT] = addr[1]
-        except IndexError:
-            pass
-
-    if database:
-        data[SPANDATA.DB_NAME] = database
-
-    if user:
-        data[SPANDATA.DB_USER] = user
-
-    return data
-
-
-def _set_on_span(span: Span, data: dict[str, Any]) -> None:
-    for key, value in data.items():
-        span.set_attribute(key, value)
diff --git a/src/sentry_sdk_alpha/integrations/atexit.py b/src/sentry_sdk_alpha/integrations/atexit.py
deleted file mode 100644
index 9afde4d7c75895..00000000000000
--- a/src/sentry_sdk_alpha/integrations/atexit.py
+++ /dev/null
@@ -1,56 +0,0 @@
-import atexit
-import os
-import sys
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.utils import logger
-
-if TYPE_CHECKING:
-    from typing import Any, Optional
-
-
-def default_callback(pending, timeout):
-    # type: (int, int) -> None
-    """This is the default shutdown callback that is set on the options.
-    It prints out a message to stderr that informs the user that some events
-    are still pending and the process is waiting for them to flush out.
-    """
-
-    def echo(msg):
-        # type: (str) -> None
-        sys.stderr.write(msg + "\n")
-
-    echo("Sentry is attempting to send %i pending events" % pending)
-    echo("Waiting up to %s seconds" % timeout)
-    echo("Press Ctrl-%s to quit" % (os.name == "nt" and "Break" or "C"))
-    sys.stderr.flush()
-
-
-class AtexitIntegration(Integration):
-    identifier = "atexit"
-
-    def __init__(self, callback=None):
-        # type: (Optional[Any]) -> None
-        if callback is None:
-            callback = default_callback
-        self.callback = callback
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        @atexit.register
-        def _shutdown():
-            # type: () -> None
-            client = sentry_sdk_alpha.get_client()
-            integration = client.get_integration(AtexitIntegration)
-
-            if integration is None:
-                return
-
-            logger.debug("atexit: got shutdown signal")
-            logger.debug("atexit: shutting down client")
-            sentry_sdk_alpha.get_isolation_scope().end_session()
-
-            client.close(callback=integration.callback)
diff --git a/src/sentry_sdk_alpha/integrations/aws_lambda.py b/src/sentry_sdk_alpha/integrations/aws_lambda.py
deleted file mode 100644
index 5b587a348cfc18..00000000000000
--- a/src/sentry_sdk_alpha/integrations/aws_lambda.py
+++ /dev/null
@@ -1,492 +0,0 @@
-import functools
-import json
-import re
-import sys
-from copy import deepcopy
-from datetime import datetime, timedelta, timezone
-from os import environ
-from typing import TYPE_CHECKING
-from urllib.parse import urlencode
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.integrations._wsgi_common import (
-    _filter_headers,
-    _request_headers_to_span_attributes,
-)
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.tracing import TransactionSource
-from sentry_sdk_alpha.utils import (
-    AnnotatedValue,
-    TimeoutThread,
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    event_from_exception,
-    logger,
-    reraise,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Optional, TypeVar
-
-    from sentry_sdk_alpha._types import Event, EventProcessor, Hint
-
-    F = TypeVar("F", bound=Callable[..., Any])
-
-# Constants
-TIMEOUT_WARNING_BUFFER = 1500  # Buffer time required to send timeout warning to Sentry
-MILLIS_TO_SECONDS = 1000.0
-
-
-EVENT_TO_ATTRIBUTES = {
-    "httpMethod": "http.request.method",
-    "queryStringParameters": "url.query",
-    "path": "url.path",
-}
-
-CONTEXT_TO_ATTRIBUTES = {
-    "function_name": "faas.name",
-}
-
-
-def _wrap_init_error(init_error):
-    # type: (F) -> F
-    @ensure_integration_enabled(AwsLambdaIntegration, init_error)
-    def sentry_init_error(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        client = sentry_sdk_alpha.get_client()
-
-        with capture_internal_exceptions():
-            sentry_sdk_alpha.get_isolation_scope().clear_breadcrumbs()
-
-            exc_info = sys.exc_info()
-            if exc_info and all(exc_info):
-                sentry_event, hint = event_from_exception(
-                    exc_info,
-                    client_options=client.options,
-                    mechanism={"type": "aws_lambda", "handled": False},
-                )
-                sentry_sdk_alpha.capture_event(sentry_event, hint=hint)
-
-            else:
-                # Fall back to AWS lambdas JSON representation of the error
-                error_info = args[1]
-                if isinstance(error_info, str):
-                    error_info = json.loads(error_info)
-                sentry_event = _event_from_error_json(error_info)
-                sentry_sdk_alpha.capture_event(sentry_event)
-
-        return init_error(*args, **kwargs)
-
-    return sentry_init_error  # type: ignore
-
-
-def _wrap_handler(handler):
-    # type: (F) -> F
-    @functools.wraps(handler)
-    def sentry_handler(aws_event, aws_context, *args, **kwargs):
-        # type: (Any, Any, *Any, **Any) -> Any
-
-        # Per https://docs.aws.amazon.com/lambda/latest/dg/python-handler.html,
-        # `event` here is *likely* a dictionary, but also might be a number of
-        # other types (str, int, float, None).
-        #
-        # In some cases, it is a list (if the user is batch-invoking their
-        # function, for example), in which case we'll use the first entry as a
-        # representative from which to try pulling request data. (Presumably it
-        # will be the same for all events in the list, since they're all hitting
-        # the lambda in the same request.)
-
-        client = sentry_sdk_alpha.get_client()
-        integration = client.get_integration(AwsLambdaIntegration)
-
-        if integration is None:
-            return handler(aws_event, aws_context, *args, **kwargs)
-
-        if isinstance(aws_event, list) and len(aws_event) >= 1:
-            request_data = aws_event[0]
-            batch_size = len(aws_event)
-        else:
-            request_data = aws_event
-            batch_size = 1
-
-        if not isinstance(request_data, dict):
-            # If we're not dealing with a dictionary, we won't be able to get
-            # headers, path, http method, etc in any case, so it's fine that
-            # this is empty
-            request_data = {}
-
-        configured_time = aws_context.get_remaining_time_in_millis()
-
-        with sentry_sdk_alpha.isolation_scope() as scope:
-            scope.set_transaction_name(
-                aws_context.function_name, source=TransactionSource.COMPONENT
-            )
-            timeout_thread = None
-            with capture_internal_exceptions():
-                scope.clear_breadcrumbs()
-                scope.add_event_processor(
-                    _make_request_event_processor(request_data, aws_context, configured_time)
-                )
-                scope.set_tag("aws_region", aws_context.invoked_function_arn.split(":")[3])
-                if batch_size > 1:
-                    scope.set_tag("batch_request", True)
-                    scope.set_tag("batch_size", batch_size)
-
-                # Starting the Timeout thread only if the configured time is greater than Timeout warning
-                # buffer and timeout_warning parameter is set True.
-                if integration.timeout_warning and configured_time > TIMEOUT_WARNING_BUFFER:
-                    waiting_time = (configured_time - TIMEOUT_WARNING_BUFFER) / MILLIS_TO_SECONDS
-
-                    timeout_thread = TimeoutThread(
-                        waiting_time,
-                        configured_time / MILLIS_TO_SECONDS,
-                    )
-
-                    # Starting the thread to raise timeout warning exception
-                    timeout_thread.start()
-
-            headers = request_data.get("headers", {})
-            # Some AWS Services (ie. EventBridge) set headers as a list
-            # or None, so we must ensure it is a dict
-            if not isinstance(headers, dict):
-                headers = {}
-
-            with sentry_sdk_alpha.continue_trace(headers):
-                with sentry_sdk_alpha.start_span(
-                    op=OP.FUNCTION_AWS,
-                    name=aws_context.function_name,
-                    source=TransactionSource.COMPONENT,
-                    origin=AwsLambdaIntegration.origin,
-                    attributes=_prepopulate_attributes(request_data, aws_context),
-                ):
-                    try:
-                        return handler(aws_event, aws_context, *args, **kwargs)
-                    except Exception:
-                        exc_info = sys.exc_info()
-                        sentry_event, hint = event_from_exception(
-                            exc_info,
-                            client_options=client.options,
-                            mechanism={"type": "aws_lambda", "handled": False},
-                        )
-                        sentry_sdk_alpha.capture_event(sentry_event, hint=hint)
-                        reraise(*exc_info)
-                    finally:
-                        if timeout_thread:
-                            timeout_thread.stop()
-
-    return sentry_handler  # type: ignore
-
-
-def _drain_queue():
-    # type: () -> None
-    with capture_internal_exceptions():
-        client = sentry_sdk_alpha.get_client()
-        integration = client.get_integration(AwsLambdaIntegration)
-        if integration is not None:
-            # Flush out the event queue before AWS kills the
-            # process.
-            client.flush()
-
-
-class AwsLambdaIntegration(Integration):
-    identifier = "aws_lambda"
-    origin = f"auto.function.{identifier}"
-
-    def __init__(self, timeout_warning=False):
-        # type: (bool) -> None
-        self.timeout_warning = timeout_warning
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-
-        lambda_bootstrap = get_lambda_bootstrap()
-        if not lambda_bootstrap:
-            logger.warning(
-                "Not running in AWS Lambda environment, "
-                "AwsLambdaIntegration disabled (could not find bootstrap module)"
-            )
-            return
-
-        if not hasattr(lambda_bootstrap, "handle_event_request"):
-            logger.warning(
-                "Not running in AWS Lambda environment, "
-                "AwsLambdaIntegration disabled (could not find handle_event_request)"
-            )
-            return
-
-        lambda_bootstrap.LambdaRuntimeClient.post_init_error = _wrap_init_error(
-            lambda_bootstrap.LambdaRuntimeClient.post_init_error
-        )
-
-        old_handle_event_request = lambda_bootstrap.handle_event_request
-
-        def sentry_handle_event_request(  # type: ignore
-            lambda_runtime_client, request_handler, *args, **kwargs
-        ):
-            request_handler = _wrap_handler(request_handler)
-            return old_handle_event_request(lambda_runtime_client, request_handler, *args, **kwargs)
-
-        lambda_bootstrap.handle_event_request = sentry_handle_event_request
-
-        # Patch the runtime client to drain the queue. This should work
-        # even when the SDK is initialized inside of the handler
-
-        def _wrap_post_function(f):
-            # type: (F) -> F
-            def inner(*args, **kwargs):
-                # type: (*Any, **Any) -> Any
-                _drain_queue()
-                return f(*args, **kwargs)
-
-            return inner  # type: ignore
-
-        lambda_bootstrap.LambdaRuntimeClient.post_invocation_result = _wrap_post_function(
-            lambda_bootstrap.LambdaRuntimeClient.post_invocation_result
-        )
-        lambda_bootstrap.LambdaRuntimeClient.post_invocation_error = _wrap_post_function(
-            lambda_bootstrap.LambdaRuntimeClient.post_invocation_error
-        )
-
-
-def get_lambda_bootstrap():
-    # type: () -> Optional[Any]
-
-    # Python 3.7: If the bootstrap module is *already imported*, it is the
-    # one we actually want to use (no idea what's in __main__)
-    #
-    # Python 3.8: bootstrap is also importable, but will be the same file
-    # as __main__ imported under a different name:
-    #
-    #     sys.modules['__main__'].__file__ == sys.modules['bootstrap'].__file__
-    #     sys.modules['__main__'] is not sys.modules['bootstrap']
-    #
-    # Python 3.9: bootstrap is in __main__.awslambdaricmain
-    #
-    # On container builds using the `aws-lambda-python-runtime-interface-client`
-    # (awslamdaric) module, bootstrap is located in sys.modules['__main__'].bootstrap
-    #
-    # Such a setup would then make all monkeypatches useless.
-    if "bootstrap" in sys.modules:
-        return sys.modules["bootstrap"]
-    elif "__main__" in sys.modules:
-        module = sys.modules["__main__"]
-        # python3.9 runtime
-        if hasattr(module, "awslambdaricmain") and hasattr(module.awslambdaricmain, "bootstrap"):
-            return module.awslambdaricmain.bootstrap
-        elif hasattr(module, "bootstrap"):
-            # awslambdaric python module in container builds
-            return module.bootstrap
-
-        # python3.8 runtime
-        return module
-    else:
-        return None
-
-
-def _make_request_event_processor(aws_event, aws_context, configured_timeout):
-    # type: (Any, Any, Any) -> EventProcessor
-    start_time = datetime.now(timezone.utc)
-
-    def event_processor(sentry_event, hint, start_time=start_time):
-        # type: (Event, Hint, datetime) -> Optional[Event]
-        remaining_time_in_milis = aws_context.get_remaining_time_in_millis()
-        exec_duration = configured_timeout - remaining_time_in_milis
-
-        extra = sentry_event.setdefault("extra", {})
-        extra["lambda"] = {
-            "function_name": aws_context.function_name,
-            "function_version": aws_context.function_version,
-            "invoked_function_arn": aws_context.invoked_function_arn,
-            "aws_request_id": aws_context.aws_request_id,
-            "execution_duration_in_millis": exec_duration,
-            "remaining_time_in_millis": remaining_time_in_milis,
-        }
-
-        extra["cloudwatch logs"] = {
-            "url": _get_cloudwatch_logs_url(aws_context, start_time),
-            "log_group": aws_context.log_group_name,
-            "log_stream": aws_context.log_stream_name,
-        }
-
-        request = sentry_event.get("request", {})
-
-        if "httpMethod" in aws_event:
-            request["method"] = aws_event["httpMethod"]
-
-        request["url"] = _get_url(aws_event, aws_context)
-
-        if "queryStringParameters" in aws_event:
-            request["query_string"] = urlencode(aws_event["queryStringParameters"])
-
-        if "headers" in aws_event:
-            request["headers"] = _filter_headers(aws_event["headers"])
-
-        if should_send_default_pii():
-            user_info = sentry_event.setdefault("user", {})
-
-            identity = aws_event.get("identity")
-            if identity is None:
-                identity = {}
-
-            id = identity.get("userArn")
-            if id is not None:
-                user_info.setdefault("id", id)
-
-            ip = identity.get("sourceIp")
-            if ip is not None:
-                user_info.setdefault("ip_address", ip)
-
-            if "body" in aws_event:
-                request["data"] = aws_event.get("body", "")
-        else:
-            if aws_event.get("body", None):
-                # Unfortunately couldn't find a way to get structured body from AWS
-                # event. Meaning every body is unstructured to us.
-                request["data"] = AnnotatedValue.removed_because_raw_data()
-
-        sentry_event["request"] = deepcopy(request)
-
-        return sentry_event
-
-    return event_processor
-
-
-def _get_url(aws_event, aws_context):
-    # type: (Any, Any) -> str
-    path = aws_event.get("path", None)
-
-    headers = aws_event.get("headers")
-    # Some AWS Services (ie. EventBridge) set headers as a list
-    # or None, so we must ensure it is a dict
-    if not isinstance(headers, dict):
-        headers = {}
-
-    host = headers.get("Host", None)
-    proto = headers.get("X-Forwarded-Proto", None)
-    if proto and host and path:
-        return f"{proto}://{host}{path}"
-    return f"awslambda:///{aws_context.function_name}"
-
-
-def _get_cloudwatch_logs_url(aws_context, start_time):
-    # type: (Any, datetime) -> str
-    """
-    Generates a CloudWatchLogs console URL based on the context object
-
-    Arguments:
-        aws_context {Any} -- context from lambda handler
-
-    Returns:
-        str -- AWS Console URL to logs.
-    """
-    formatstring = "%Y-%m-%dT%H:%M:%SZ"
-    region = environ.get("AWS_REGION", "")
-
-    url = (
-        "https://console.{domain}/cloudwatch/home?region={region}"
-        "#logEventViewer:group={log_group};stream={log_stream}"
-        ";start={start_time};end={end_time}"
-    ).format(
-        domain="amazonaws.cn" if region.startswith("cn-") else "aws.amazon.com",
-        region=region,
-        log_group=aws_context.log_group_name,
-        log_stream=aws_context.log_stream_name,
-        start_time=(start_time - timedelta(seconds=1)).strftime(formatstring),
-        end_time=(datetime.now(timezone.utc) + timedelta(seconds=2)).strftime(formatstring),
-    )
-
-    return url
-
-
-def _parse_formatted_traceback(formatted_tb):
-    # type: (list[str]) -> list[dict[str, Any]]
-    frames = []
-    for frame in formatted_tb:
-        match = re.match(r'File "(.+)", line (\d+), in (.+)', frame.strip())
-        if match:
-            file_name, line_number, func_name = match.groups()
-            line_number = int(line_number)
-            frames.append(
-                {
-                    "filename": file_name,
-                    "function": func_name,
-                    "lineno": line_number,
-                    "vars": None,
-                    "pre_context": None,
-                    "context_line": None,
-                    "post_context": None,
-                }
-            )
-    return frames
-
-
-def _event_from_error_json(error_json):
-    # type: (dict[str, Any]) -> Event
-    """
-    Converts the error JSON from AWS Lambda into a Sentry error event.
-    This is not a full fletched event, but better than nothing.
-
-    This is an example of where AWS creates the error JSON:
-    https://github.com/aws/aws-lambda-python-runtime-interface-client/blob/2.2.1/awslambdaric/bootstrap.py#L479
-    """
-    event = {
-        "level": "error",
-        "exception": {
-            "values": [
-                {
-                    "type": error_json.get("errorType"),
-                    "value": error_json.get("errorMessage"),
-                    "stacktrace": {
-                        "frames": _parse_formatted_traceback(error_json.get("stackTrace", [])),
-                    },
-                    "mechanism": {
-                        "type": "aws_lambda",
-                        "handled": False,
-                    },
-                }
-            ],
-        },
-    }  # type: Event
-
-    return event
-
-
-def _prepopulate_attributes(aws_event, aws_context):
-    # type: (Any, Any) -> dict[str, Any]
-    attributes = {
-        "cloud.provider": "aws",
-    }
-
-    for prop, attr in EVENT_TO_ATTRIBUTES.items():
-        if aws_event.get(prop) is not None:
-            if prop == "queryStringParameters":
-                attributes[attr] = urlencode(aws_event[prop])
-            else:
-                attributes[attr] = aws_event[prop]
-
-    for prop, attr in CONTEXT_TO_ATTRIBUTES.items():
-        if getattr(aws_context, prop, None) is not None:
-            attributes[attr] = getattr(aws_context, prop)
-
-    url = _get_url(aws_event, aws_context)
-    if url:
-        if aws_event.get("queryStringParameters"):
-            url += f"?{urlencode(aws_event['queryStringParameters'])}"
-        attributes["url.full"] = url
-
-    headers = {}
-    if aws_event.get("headers") and isinstance(aws_event["headers"], dict):
-        headers = aws_event["headers"]
-
-    if headers.get("X-Forwarded-Proto"):
-        attributes["network.protocol.name"] = headers["X-Forwarded-Proto"]
-    if headers.get("Host"):
-        attributes["server.address"] = headers["Host"]
-
-    attributes.update(_request_headers_to_span_attributes(headers))
-
-    return attributes
diff --git a/src/sentry_sdk_alpha/integrations/beam.py b/src/sentry_sdk_alpha/integrations/beam.py
deleted file mode 100644
index 63929578c99947..00000000000000
--- a/src/sentry_sdk_alpha/integrations/beam.py
+++ /dev/null
@@ -1,173 +0,0 @@
-import sys
-import types
-from functools import wraps
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.integrations.logging import ignore_logger
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    event_from_exception,
-    reraise,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Callable, Iterator
-    from typing import Any, TypeVar
-
-    from sentry_sdk_alpha._types import ExcInfo
-
-    T = TypeVar("T")
-    F = TypeVar("F", bound=Callable[..., Any])
-
-
-WRAPPED_FUNC = "_wrapped_{}_"
-INSPECT_FUNC = "_inspect_{}"  # Required format per apache_beam/transforms/core.py
-USED_FUNC = "_sentry_used_"
-
-
-class BeamIntegration(Integration):
-    identifier = "beam"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        from apache_beam.transforms.core import DoFn, ParDo  # type: ignore
-
-        ignore_logger("root")
-        ignore_logger("bundle_processor.create")
-
-        function_patches = ["process", "start_bundle", "finish_bundle", "setup"]
-        for func_name in function_patches:
-            setattr(
-                DoFn,
-                INSPECT_FUNC.format(func_name),
-                _wrap_inspect_call(DoFn, func_name),
-            )
-
-        old_init = ParDo.__init__
-
-        def sentry_init_pardo(self, fn, *args, **kwargs):
-            # type: (ParDo, Any, *Any, **Any) -> Any
-            # Do not monkey patch init twice
-            if not getattr(self, "_sentry_is_patched", False):
-                for func_name in function_patches:
-                    if not hasattr(fn, func_name):
-                        continue
-                    wrapped_func = WRAPPED_FUNC.format(func_name)
-
-                    # Check to see if inspect is set and process is not
-                    # to avoid monkey patching process twice.
-                    # Check to see if function is part of object for
-                    # backwards compatibility.
-                    process_func = getattr(fn, func_name)
-                    inspect_func = getattr(fn, INSPECT_FUNC.format(func_name))
-                    if not getattr(inspect_func, USED_FUNC, False) and not getattr(
-                        process_func, USED_FUNC, False
-                    ):
-                        setattr(fn, wrapped_func, process_func)
-                        setattr(fn, func_name, _wrap_task_call(process_func))
-
-                self._sentry_is_patched = True
-            old_init(self, fn, *args, **kwargs)
-
-        ParDo.__init__ = sentry_init_pardo
-
-
-def _wrap_inspect_call(cls, func_name):
-    # type: (Any, Any) -> Any
-
-    if not hasattr(cls, func_name):
-        return None
-
-    def _inspect(self):
-        # type: (Any) -> Any
-        """
-        Inspect function overrides the way Beam gets argspec.
-        """
-        wrapped_func = WRAPPED_FUNC.format(func_name)
-        if hasattr(self, wrapped_func):
-            process_func = getattr(self, wrapped_func)
-        else:
-            process_func = getattr(self, func_name)
-            setattr(self, func_name, _wrap_task_call(process_func))
-            setattr(self, wrapped_func, process_func)
-
-        # getfullargspec is deprecated in more recent beam versions and get_function_args_defaults
-        # (which uses Signatures internally) should be used instead.
-        try:
-            from apache_beam.transforms.core import get_function_args_defaults
-
-            return get_function_args_defaults(process_func)
-        except ImportError:
-            from apache_beam.typehints.decorators import getfullargspec  # type: ignore
-
-            return getfullargspec(process_func)
-
-    setattr(_inspect, USED_FUNC, True)
-    return _inspect
-
-
-def _wrap_task_call(func):
-    # type: (F) -> F
-    """
-    Wrap task call with a try catch to get exceptions.
-    """
-
-    @wraps(func)
-    def _inner(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        try:
-            gen = func(*args, **kwargs)
-        except Exception:
-            raise_exception()
-
-        if not isinstance(gen, types.GeneratorType):
-            return gen
-        return _wrap_generator_call(gen)
-
-    setattr(_inner, USED_FUNC, True)
-    return _inner  # type: ignore
-
-
-@ensure_integration_enabled(BeamIntegration)
-def _capture_exception(exc_info):
-    # type: (ExcInfo) -> None
-    """
-    Send Beam exception to Sentry.
-    """
-    client = sentry_sdk_alpha.get_client()
-
-    event, hint = event_from_exception(
-        exc_info,
-        client_options=client.options,
-        mechanism={"type": "beam", "handled": False},
-    )
-    sentry_sdk_alpha.capture_event(event, hint=hint)
-
-
-def raise_exception():
-    # type: () -> None
-    """
-    Raise an exception.
-    """
-    exc_info = sys.exc_info()
-    with capture_internal_exceptions():
-        _capture_exception(exc_info)
-    reraise(*exc_info)
-
-
-def _wrap_generator_call(gen):
-    # type: (Iterator[T]) -> Iterator[T]
-    """
-    Wrap the generator to handle any failures.
-    """
-    while True:
-        try:
-            yield next(gen)
-        except StopIteration:
-            break
-        except Exception:
-            raise_exception()
diff --git a/src/sentry_sdk_alpha/integrations/boto3.py b/src/sentry_sdk_alpha/integrations/boto3.py
deleted file mode 100644
index 60d87d32c27e28..00000000000000
--- a/src/sentry_sdk_alpha/integrations/boto3.py
+++ /dev/null
@@ -1,162 +0,0 @@
-from functools import partial
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP, SPANDATA
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    parse_url,
-    parse_version,
-)
-
-if TYPE_CHECKING:
-    from typing import Any, Dict, Optional, Type
-
-    from sentry_sdk_alpha.tracing import Span
-
-try:
-    from botocore import __version__ as BOTOCORE_VERSION  # type: ignore
-    from botocore.awsrequest import AWSRequest  # type: ignore
-    from botocore.client import BaseClient  # type: ignore
-    from botocore.response import StreamingBody  # type: ignore
-except ImportError:
-    raise DidNotEnable("botocore is not installed")
-
-
-class Boto3Integration(Integration):
-    identifier = "boto3"
-    origin = f"auto.http.{identifier}"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        version = parse_version(BOTOCORE_VERSION)
-        _check_minimum_version(Boto3Integration, version, "botocore")
-
-        orig_init = BaseClient.__init__
-
-        def sentry_patched_init(self, *args, **kwargs):
-            # type: (Type[BaseClient], *Any, **Any) -> None
-            orig_init(self, *args, **kwargs)
-            meta = self.meta
-            service_id = meta.service_model.service_id.hyphenize()
-            meta.events.register(
-                "request-created",
-                partial(_sentry_request_created, service_id=service_id),
-            )
-            meta.events.register("after-call", _sentry_after_call)
-            meta.events.register("after-call-error", _sentry_after_call_error)
-
-        BaseClient.__init__ = sentry_patched_init
-
-
-@ensure_integration_enabled(Boto3Integration)
-def _sentry_request_created(service_id, request, operation_name, **kwargs):
-    # type: (str, AWSRequest, str, **Any) -> None
-    description = f"aws.{service_id}.{operation_name}"
-    span = sentry_sdk_alpha.start_span(
-        op=OP.HTTP_CLIENT,
-        name=description,
-        origin=Boto3Integration.origin,
-        only_if_parent=True,
-    )
-
-    data = {
-        SPANDATA.HTTP_METHOD: request.method,
-    }
-    with capture_internal_exceptions():
-        parsed_url = parse_url(request.url, sanitize=False)
-        data["aws.request.url"] = parsed_url.url
-        data[SPANDATA.HTTP_QUERY] = parsed_url.query
-        data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment
-
-    for key, value in data.items():
-        span.set_attribute(key, value)
-
-    span.set_tag("aws.service_id", service_id)
-    span.set_tag("aws.operation_name", operation_name)
-
-    # We do it in order for subsequent http calls/retries be
-    # attached to this span.
-    span.__enter__()
-
-    # request.context is an open-ended data-structure
-    # where we can add anything useful in request life cycle.
-    request.context["_sentrysdk_span"] = span
-    request.context["_sentrysdk_span_data"] = data
-
-
-def _sentry_after_call(context, parsed, **kwargs):
-    # type: (Dict[str, Any], Dict[str, Any], **Any) -> None
-    span = context.pop("_sentrysdk_span", None)  # type: Optional[Span]
-
-    # Span could be absent if the integration is disabled.
-    if span is None:
-        return
-
-    span_data = context.pop("_sentrysdk_span_data", {})
-
-    sentry_sdk_alpha.add_breadcrumb(
-        type="http",
-        category="httplib",
-        data=span_data,
-    )
-
-    body = parsed.get("Body")
-    if not isinstance(body, StreamingBody):
-        span.__exit__(None, None, None)
-        return
-
-    streaming_span = sentry_sdk_alpha.start_span(
-        op=OP.HTTP_CLIENT_STREAM,
-        name=span.name,
-        origin=Boto3Integration.origin,
-        only_if_parent=True,
-    )
-
-    orig_read = body.read
-
-    def sentry_streaming_body_read(*args, **kwargs):
-        # type: (*Any, **Any) -> bytes
-        try:
-            ret = orig_read(*args, **kwargs)
-            if not ret:
-                streaming_span.finish()
-            return ret
-        except Exception:
-            streaming_span.finish()
-            raise
-
-    body.read = sentry_streaming_body_read
-
-    orig_close = body.close
-
-    def sentry_streaming_body_close(*args, **kwargs):
-        # type: (*Any, **Any) -> None
-        streaming_span.finish()
-        orig_close(*args, **kwargs)
-
-    body.close = sentry_streaming_body_close
-
-    span.__exit__(None, None, None)
-
-
-def _sentry_after_call_error(context, exception, **kwargs):
-    # type: (Dict[str, Any], Type[BaseException], **Any) -> None
-    span = context.pop("_sentrysdk_span", None)  # type: Optional[Span]
-
-    # Span could be absent if the integration is disabled.
-    if span is None:
-        return
-
-    span_data = context.pop("_sentrysdk_span_data", {})
-
-    sentry_sdk_alpha.add_breadcrumb(
-        type="http",
-        category="httplib",
-        data=span_data,
-    )
-
-    span.__exit__(type(exception), exception, None)
diff --git a/src/sentry_sdk_alpha/integrations/bottle.py b/src/sentry_sdk_alpha/integrations/bottle.py
deleted file mode 100644
index 1a949833d96c83..00000000000000
--- a/src/sentry_sdk_alpha/integrations/bottle.py
+++ /dev/null
@@ -1,209 +0,0 @@
-import functools
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import SOURCE_FOR_STYLE
-from sentry_sdk_alpha.integrations import (
-    _DEFAULT_FAILED_REQUEST_STATUS_CODES,
-    DidNotEnable,
-    Integration,
-    _check_minimum_version,
-)
-from sentry_sdk_alpha.integrations._wsgi_common import RequestExtractor
-from sentry_sdk_alpha.integrations.wsgi import SentryWsgiMiddleware
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    event_from_exception,
-    parse_version,
-    transaction_from_function,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Callable, Set
-    from typing import Any, Dict, Optional
-
-    from bottle import FileUpload, FormsDict, LocalRequest  # type: ignore
-
-    from sentry_sdk_alpha._types import Event, EventProcessor
-    from sentry_sdk_alpha.integrations.wsgi import _ScopedResponse
-
-try:
-    from bottle import Bottle, HTTPResponse, Route
-    from bottle import __version__ as BOTTLE_VERSION
-    from bottle import request as bottle_request
-except ImportError:
-    raise DidNotEnable("Bottle not installed")
-
-
-TRANSACTION_STYLE_VALUES = ("endpoint", "url")
-
-
-class BottleIntegration(Integration):
-    identifier = "bottle"
-    origin = f"auto.http.{identifier}"
-
-    transaction_style = ""
-
-    def __init__(
-        self,
-        transaction_style="endpoint",  # type: str
-        *,
-        failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES,  # type: Set[int]
-    ):
-        # type: (...) -> None
-
-        if transaction_style not in TRANSACTION_STYLE_VALUES:
-            raise ValueError(
-                "Invalid value for transaction_style: %s (must be in %s)"
-                % (transaction_style, TRANSACTION_STYLE_VALUES)
-            )
-        self.transaction_style = transaction_style
-        self.failed_request_status_codes = failed_request_status_codes
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        version = parse_version(BOTTLE_VERSION)
-        _check_minimum_version(BottleIntegration, version)
-
-        old_app = Bottle.__call__
-
-        @ensure_integration_enabled(BottleIntegration, old_app)
-        def sentry_patched_wsgi_app(self, environ, start_response):
-            # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
-            middleware = SentryWsgiMiddleware(
-                lambda *a, **kw: old_app(self, *a, **kw),
-                span_origin=BottleIntegration.origin,
-            )
-
-            return middleware(environ, start_response)
-
-        Bottle.__call__ = sentry_patched_wsgi_app
-
-        old_handle = Bottle._handle
-
-        @functools.wraps(old_handle)
-        def _patched_handle(self, environ):
-            # type: (Bottle, Dict[str, Any]) -> Any
-            integration = sentry_sdk_alpha.get_client().get_integration(BottleIntegration)
-            if integration is None:
-                return old_handle(self, environ)
-
-            scope = sentry_sdk_alpha.get_isolation_scope()
-            scope._name = "bottle"
-            scope.add_event_processor(
-                _make_request_event_processor(self, bottle_request, integration)
-            )
-            res = old_handle(self, environ)
-
-            return res
-
-        Bottle._handle = _patched_handle
-
-        old_make_callback = Route._make_callback
-
-        @functools.wraps(old_make_callback)
-        def patched_make_callback(self, *args, **kwargs):
-            # type: (Route, *object, **object) -> Any
-            prepared_callback = old_make_callback(self, *args, **kwargs)
-
-            integration = sentry_sdk_alpha.get_client().get_integration(BottleIntegration)
-            if integration is None:
-                return prepared_callback
-
-            def wrapped_callback(*args, **kwargs):
-                # type: (*object, **object) -> Any
-                try:
-                    res = prepared_callback(*args, **kwargs)
-                except Exception as exception:
-                    _capture_exception(exception, handled=False)
-                    raise exception
-
-                if (
-                    isinstance(res, HTTPResponse)
-                    and res.status_code in integration.failed_request_status_codes
-                ):
-                    _capture_exception(res, handled=True)
-
-                return res
-
-            return wrapped_callback
-
-        Route._make_callback = patched_make_callback
-
-
-class BottleRequestExtractor(RequestExtractor):
-    def env(self):
-        # type: () -> Dict[str, str]
-        return self.request.environ
-
-    def cookies(self):
-        # type: () -> Dict[str, str]
-        return self.request.cookies
-
-    def raw_data(self):
-        # type: () -> bytes
-        return self.request.body.read()
-
-    def form(self):
-        # type: () -> FormsDict
-        if self.is_json():
-            return None
-        return self.request.forms.decode()
-
-    def files(self):
-        # type: () -> Optional[Dict[str, str]]
-        if self.is_json():
-            return None
-
-        return self.request.files
-
-    def size_of_file(self, file):
-        # type: (FileUpload) -> int
-        return file.content_length
-
-
-def _set_transaction_name_and_source(event, transaction_style, request):
-    # type: (Event, str, Any) -> None
-    name = ""
-
-    if transaction_style == "url":
-        try:
-            name = request.route.rule or ""
-        except RuntimeError:
-            pass
-
-    elif transaction_style == "endpoint":
-        try:
-            name = request.route.name or transaction_from_function(request.route.callback) or ""
-        except RuntimeError:
-            pass
-
-    event["transaction"] = name
-    event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]}
-
-
-def _make_request_event_processor(app, request, integration):
-    # type: (Bottle, LocalRequest, BottleIntegration) -> EventProcessor
-
-    def event_processor(event, hint):
-        # type: (Event, dict[str, Any]) -> Event
-        _set_transaction_name_and_source(event, integration.transaction_style, request)
-
-        with capture_internal_exceptions():
-            BottleRequestExtractor(request).extract_into_event(event)
-
-        return event
-
-    return event_processor
-
-
-def _capture_exception(exception, handled):
-    # type: (BaseException, bool) -> None
-    event, hint = event_from_exception(
-        exception,
-        client_options=sentry_sdk_alpha.get_client().options,
-        mechanism={"type": "bottle", "handled": handled},
-    )
-    sentry_sdk_alpha.capture_event(event, hint=hint)
diff --git a/src/sentry_sdk_alpha/integrations/celery/__init__.py b/src/sentry_sdk_alpha/integrations/celery/__init__.py
deleted file mode 100644
index 07c2faaeb298f5..00000000000000
--- a/src/sentry_sdk_alpha/integrations/celery/__init__.py
+++ /dev/null
@@ -1,516 +0,0 @@
-import sys
-from collections.abc import Mapping
-from functools import wraps
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha import isolation_scope
-from sentry_sdk_alpha.consts import BAGGAGE_HEADER_NAME, OP, SPANDATA, SPANSTATUS
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
-from sentry_sdk_alpha.integrations.celery.beat import (
-    _patch_beat_apply_entry,
-    _patch_redbeat_maybe_due,
-    _setup_celery_beat_signals,
-)
-from sentry_sdk_alpha.integrations.celery.utils import _now_seconds_since_epoch
-from sentry_sdk_alpha.integrations.logging import ignore_logger
-from sentry_sdk_alpha.tracing import TransactionSource
-from sentry_sdk_alpha.tracing_utils import Baggage
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    event_from_exception,
-    reraise,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, List, Optional, TypeVar, Union
-
-    from sentry_sdk_alpha._types import Event, EventProcessor, ExcInfo, Hint
-    from sentry_sdk_alpha.tracing import Span
-
-    F = TypeVar("F", bound=Callable[..., Any])
-
-
-try:
-    from celery import VERSION as CELERY_VERSION  # type: ignore
-    from celery.app.task import Task  # type: ignore
-    from celery.app.trace import task_has_custom
-    from celery.exceptions import Ignore, Reject, Retry, SoftTimeLimitExceeded  # type: ignore
-    from kombu import Producer  # type: ignore
-except ImportError:
-    raise DidNotEnable("Celery not installed")
-
-
-CELERY_CONTROL_FLOW_EXCEPTIONS = (Retry, Ignore, Reject)
-
-
-class CeleryIntegration(Integration):
-    identifier = "celery"
-    origin = f"auto.queue.{identifier}"
-
-    def __init__(
-        self,
-        propagate_traces=True,
-        monitor_beat_tasks=False,
-        exclude_beat_tasks=None,
-    ):
-        # type: (bool, bool, Optional[List[str]]) -> None
-        self.propagate_traces = propagate_traces
-        self.monitor_beat_tasks = monitor_beat_tasks
-        self.exclude_beat_tasks = exclude_beat_tasks
-
-        _patch_beat_apply_entry()
-        _patch_redbeat_maybe_due()
-        _setup_celery_beat_signals(monitor_beat_tasks)
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        _check_minimum_version(CeleryIntegration, CELERY_VERSION)
-
-        _patch_build_tracer()
-        _patch_task_apply_async()
-        _patch_celery_send_task()
-        _patch_worker_exit()
-        _patch_producer_publish()
-
-        # This logger logs every status of every task that ran on the worker.
-        # Meaning that every task's breadcrumbs are full of stuff like "Task
-        #  raised unexpected ".
-        ignore_logger("celery.worker.job")
-        ignore_logger("celery.app.trace")
-
-        # This is stdout/err redirected to a logger, can't deal with this
-        # (need event_level=logging.WARN to reproduce)
-        ignore_logger("celery.redirected")
-
-
-def _set_status(status):
-    # type: (str) -> None
-    with capture_internal_exceptions():
-        scope = sentry_sdk_alpha.get_current_scope()
-        if scope.span is not None:
-            scope.span.set_status(status)
-
-
-def _capture_exception(task, exc_info):
-    # type: (Any, ExcInfo) -> None
-    client = sentry_sdk_alpha.get_client()
-    if client.get_integration(CeleryIntegration) is None:
-        return
-
-    if isinstance(exc_info[1], CELERY_CONTROL_FLOW_EXCEPTIONS):
-        _set_status("aborted")
-        return
-
-    _set_status("internal_error")
-
-    if hasattr(task, "throws") and isinstance(exc_info[1], task.throws):
-        return
-
-    event, hint = event_from_exception(
-        exc_info,
-        client_options=client.options,
-        mechanism={"type": "celery", "handled": False},
-    )
-
-    sentry_sdk_alpha.capture_event(event, hint=hint)
-
-
-def _make_event_processor(task, uuid, args, kwargs, request=None):
-    # type: (Any, Any, Any, Any, Optional[Any]) -> EventProcessor
-    def event_processor(event, hint):
-        # type: (Event, Hint) -> Optional[Event]
-
-        with capture_internal_exceptions():
-            tags = event.setdefault("tags", {})
-            tags["celery_task_id"] = uuid
-            extra = event.setdefault("extra", {})
-            extra["celery-job"] = {
-                "task_name": task.name,
-                "args": args,
-                "kwargs": kwargs,
-            }
-
-        if "exc_info" in hint:
-            with capture_internal_exceptions():
-                if issubclass(hint["exc_info"][0], SoftTimeLimitExceeded):
-                    event["fingerprint"] = [
-                        "celery",
-                        "SoftTimeLimitExceeded",
-                        getattr(task, "name", task),
-                    ]
-
-        return event
-
-    return event_processor
-
-
-def _update_celery_task_headers(original_headers, span, monitor_beat_tasks):
-    # type: (dict[str, Any], Optional[Span], bool) -> dict[str, Any]
-    """
-    Updates the headers of the Celery task with the tracing information
-    and eventually Sentry Crons monitoring information for beat tasks.
-    """
-    updated_headers = original_headers.copy()
-    with capture_internal_exceptions():
-        # if span is None (when the task was started by Celery Beat)
-        # this will return the trace headers from the scope.
-        headers = dict(
-            sentry_sdk_alpha.get_isolation_scope().iter_trace_propagation_headers(span=span)
-        )
-
-        if monitor_beat_tasks:
-            headers.update(
-                {
-                    "sentry-monitor-start-timestamp-s": "%.9f" % _now_seconds_since_epoch(),
-                }
-            )
-
-        # Add the time the task was enqueued to the headers
-        # This is used in the consumer to calculate the latency
-        updated_headers.update({"sentry-task-enqueued-time": _now_seconds_since_epoch()})
-
-        if headers:
-            existing_baggage = updated_headers.get(BAGGAGE_HEADER_NAME)
-            sentry_baggage = headers.get(BAGGAGE_HEADER_NAME)
-
-            combined_baggage = sentry_baggage or existing_baggage
-            if sentry_baggage and existing_baggage:
-                # Merge incoming and sentry baggage, where the sentry trace information
-                # in the incoming baggage takes precedence and the third-party items
-                # are concatenated.
-                incoming = Baggage.from_incoming_header(existing_baggage)
-                combined = Baggage.from_incoming_header(sentry_baggage)
-                combined.sentry_items.update(incoming.sentry_items)
-                combined.third_party_items = ",".join(
-                    [
-                        x
-                        for x in [
-                            combined.third_party_items,
-                            incoming.third_party_items,
-                        ]
-                        if x is not None and x != ""
-                    ]
-                )
-                combined_baggage = combined.serialize(include_third_party=True)
-
-            updated_headers.update(headers)
-            if combined_baggage:
-                updated_headers[BAGGAGE_HEADER_NAME] = combined_baggage
-
-            # https://github.com/celery/celery/issues/4875
-            #
-            # Need to setdefault the inner headers too since other
-            # tracing tools (dd-trace-py) also employ this exact
-            # workaround and we don't want to break them.
-            updated_headers.setdefault("headers", {}).update(headers)
-            if combined_baggage:
-                updated_headers["headers"][BAGGAGE_HEADER_NAME] = combined_baggage
-
-            # Add the Sentry options potentially added in `sentry_apply_entry`
-            # to the headers (done when auto-instrumenting Celery Beat tasks)
-            for key, value in updated_headers.items():
-                if key.startswith("sentry-"):
-                    updated_headers["headers"][key] = value
-
-    return updated_headers
-
-
-class NoOpMgr:
-    def __enter__(self):
-        # type: () -> None
-        return None
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        # type: (Any, Any, Any) -> None
-        return None
-
-
-def _wrap_task_run(f):
-    # type: (F) -> F
-    @wraps(f)
-    def apply_async(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        # Note: kwargs can contain headers=None, so no setdefault!
-        # Unsure which backend though.
-        integration = sentry_sdk_alpha.get_client().get_integration(CeleryIntegration)
-        if integration is None:
-            return f(*args, **kwargs)
-
-        kwarg_headers = kwargs.get("headers") or {}
-        propagate_traces = kwarg_headers.pop(
-            "sentry-propagate-traces", integration.propagate_traces
-        )
-
-        if not propagate_traces:
-            return f(*args, **kwargs)
-
-        if isinstance(args[0], Task):
-            task_name = args[0].name  # type: str
-        elif len(args) > 1 and isinstance(args[1], str):
-            task_name = args[1]
-        else:
-            task_name = ""
-
-        task_started_from_beat = sentry_sdk_alpha.get_isolation_scope()._name == "celery-beat"
-
-        span_mgr = (
-            sentry_sdk_alpha.start_span(
-                op=OP.QUEUE_SUBMIT_CELERY,
-                name=task_name,
-                origin=CeleryIntegration.origin,
-                only_if_parent=True,
-            )
-            if not task_started_from_beat
-            else NoOpMgr()
-        )  # type: Union[Span, NoOpMgr]
-
-        with span_mgr as span:
-            kwargs["headers"] = _update_celery_task_headers(
-                kwarg_headers, span, integration.monitor_beat_tasks
-            )
-            return f(*args, **kwargs)
-
-    return apply_async  # type: ignore
-
-
-def _wrap_tracer(task, f):
-    # type: (Any, F) -> F
-
-    # Need to wrap tracer for pushing the scope before prerun is sent, and
-    # popping it after postrun is sent.
-    #
-    # This is the reason we don't use signals for hooking in the first place.
-    # Also because in Celery 3, signal dispatch returns early if one handler
-    # crashes.
-    @wraps(f)
-    @ensure_integration_enabled(CeleryIntegration, f)
-    def _inner(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        with isolation_scope() as scope:
-            scope._name = "celery"
-            scope.clear_breadcrumbs()
-            scope.set_transaction_name(task.name, source=TransactionSource.TASK)
-            scope.add_event_processor(_make_event_processor(task, *args, **kwargs))
-
-            # Celery task objects are not a thing to be trusted. Even
-            # something such as attribute access can fail.
-            headers = args[3].get("headers") or {}
-
-            with sentry_sdk_alpha.continue_trace(headers):
-                with sentry_sdk_alpha.start_span(
-                    op=OP.QUEUE_TASK_CELERY,
-                    name=task.name,
-                    source=TransactionSource.TASK,
-                    origin=CeleryIntegration.origin,
-                    # for some reason, args[1] is a list if non-empty but a
-                    # tuple if empty
-                    attributes=_prepopulate_attributes(task, list(args[1]), args[2]),
-                ) as root_span:
-                    return_value = f(*args, **kwargs)
-
-                    if root_span.status is None:
-                        root_span.set_status(SPANSTATUS.OK)
-
-                    return return_value
-
-    return _inner  # type: ignore
-
-
-def _set_messaging_destination_name(task, span):
-    # type: (Any, Span) -> None
-    """Set "messaging.destination.name" tag for span"""
-    with capture_internal_exceptions():
-        delivery_info = task.request.delivery_info
-        if delivery_info:
-            routing_key = delivery_info.get("routing_key")
-            if delivery_info.get("exchange") == "" and routing_key is not None:
-                # Empty exchange indicates the default exchange, meaning the tasks
-                # are sent to the queue with the same name as the routing key.
-                span.set_attribute(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key)
-
-
-def _wrap_task_call(task, f):
-    # type: (Any, F) -> F
-
-    # Need to wrap task call because the exception is caught before we get to
-    # see it. Also celery's reported stacktrace is untrustworthy.
-
-    # functools.wraps is important here because celery-once looks at this
-    # method's name. @ensure_integration_enabled internally calls functools.wraps,
-    # but if we ever remove the @ensure_integration_enabled decorator, we need
-    # to add @functools.wraps(f) here.
-    # https://github.com/getsentry/sentry-python/issues/421
-    @ensure_integration_enabled(CeleryIntegration, f)
-    def _inner(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        try:
-            with sentry_sdk_alpha.start_span(
-                op=OP.QUEUE_PROCESS,
-                name=task.name,
-                origin=CeleryIntegration.origin,
-                only_if_parent=True,
-            ) as span:
-                _set_messaging_destination_name(task, span)
-
-                latency = None
-                with capture_internal_exceptions():
-                    if (
-                        task.request.headers is not None
-                        and "sentry-task-enqueued-time" in task.request.headers
-                    ):
-                        latency = _now_seconds_since_epoch() - task.request.headers.pop(
-                            "sentry-task-enqueued-time"
-                        )
-
-                if latency is not None:
-                    span.set_attribute(SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency)
-
-                with capture_internal_exceptions():
-                    span.set_attribute(SPANDATA.MESSAGING_MESSAGE_ID, task.request.id)
-
-                with capture_internal_exceptions():
-                    span.set_attribute(SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, task.request.retries)
-
-                with capture_internal_exceptions():
-                    span.set_attribute(
-                        SPANDATA.MESSAGING_SYSTEM,
-                        task.app.connection().transport.driver_type,
-                    )
-
-                return f(*args, **kwargs)
-
-        except Exception:
-            exc_info = sys.exc_info()
-            with capture_internal_exceptions():
-                _capture_exception(task, exc_info)
-            reraise(*exc_info)
-
-    return _inner  # type: ignore
-
-
-def _patch_build_tracer():
-    # type: () -> None
-    import celery.app.trace as trace  # type: ignore
-
-    original_build_tracer = trace.build_tracer
-
-    def sentry_build_tracer(name, task, *args, **kwargs):
-        # type: (Any, Any, *Any, **Any) -> Any
-        if not getattr(task, "_sentry_is_patched", False):
-            # determine whether Celery will use __call__ or run and patch
-            # accordingly
-            if task_has_custom(task, "__call__"):
-                type(task).__call__ = _wrap_task_call(task, type(task).__call__)
-            else:
-                task.run = _wrap_task_call(task, task.run)
-
-            # `build_tracer` is apparently called for every task
-            # invocation. Can't wrap every celery task for every invocation
-            # or we will get infinitely nested wrapper functions.
-            task._sentry_is_patched = True
-
-        return _wrap_tracer(task, original_build_tracer(name, task, *args, **kwargs))
-
-    trace.build_tracer = sentry_build_tracer
-
-
-def _patch_task_apply_async():
-    # type: () -> None
-    Task.apply_async = _wrap_task_run(Task.apply_async)
-
-
-def _patch_celery_send_task():
-    # type: () -> None
-    from celery import Celery
-
-    Celery.send_task = _wrap_task_run(Celery.send_task)
-
-
-def _patch_worker_exit():
-    # type: () -> None
-
-    # Need to flush queue before worker shutdown because a crashing worker will
-    # call os._exit
-    from billiard.pool import Worker  # type: ignore
-
-    original_workloop = Worker.workloop
-
-    def sentry_workloop(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        try:
-            return original_workloop(*args, **kwargs)
-        finally:
-            with capture_internal_exceptions():
-                if sentry_sdk_alpha.get_client().get_integration(CeleryIntegration) is not None:
-                    sentry_sdk_alpha.flush()
-
-    Worker.workloop = sentry_workloop
-
-
-def _patch_producer_publish():
-    # type: () -> None
-    original_publish = Producer.publish
-
-    @ensure_integration_enabled(CeleryIntegration, original_publish)
-    def sentry_publish(self, *args, **kwargs):
-        # type: (Producer, *Any, **Any) -> Any
-        kwargs_headers = kwargs.get("headers", {})
-        if not isinstance(kwargs_headers, Mapping):
-            # Ensure kwargs_headers is a Mapping, so we can safely call get().
-            # We don't expect this to happen, but it's better to be safe. Even
-            # if it does happen, only our instrumentation breaks. This line
-            # does not overwrite kwargs["headers"], so the original publish
-            # method will still work.
-            kwargs_headers = {}
-
-        task_name = kwargs_headers.get("task")
-        task_id = kwargs_headers.get("id")
-        retries = kwargs_headers.get("retries")
-
-        routing_key = kwargs.get("routing_key")
-        exchange = kwargs.get("exchange")
-
-        with sentry_sdk_alpha.start_span(
-            op=OP.QUEUE_PUBLISH,
-            name=task_name,
-            origin=CeleryIntegration.origin,
-            only_if_parent=True,
-        ) as span:
-            if task_id is not None:
-                span.set_attribute(SPANDATA.MESSAGING_MESSAGE_ID, task_id)
-
-            if exchange == "" and routing_key is not None:
-                # Empty exchange indicates the default exchange, meaning messages are
-                # routed to the queue with the same name as the routing key.
-                span.set_attribute(SPANDATA.MESSAGING_DESTINATION_NAME, routing_key)
-
-            if retries is not None:
-                span.set_attribute(SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, retries)
-
-            with capture_internal_exceptions():
-                span.set_attribute(SPANDATA.MESSAGING_SYSTEM, self.connection.transport.driver_type)
-
-            return original_publish(self, *args, **kwargs)
-
-    Producer.publish = sentry_publish
-
-
-def _prepopulate_attributes(task, args, kwargs):
-    # type: (Any, *Any, **Any) -> dict[str, str]
-    attributes = {
-        "celery.job.task": task.name,
-    }
-
-    for i, arg in enumerate(args):
-        with capture_internal_exceptions():
-            attributes[f"celery.job.args.{i}"] = str(arg)
-
-    for kwarg, value in kwargs.items():
-        with capture_internal_exceptions():
-            attributes[f"celery.job.kwargs.{kwarg}"] = str(value)
-
-    return attributes
diff --git a/src/sentry_sdk_alpha/integrations/celery/beat.py b/src/sentry_sdk_alpha/integrations/celery/beat.py
deleted file mode 100644
index cd95195ef9cf04..00000000000000
--- a/src/sentry_sdk_alpha/integrations/celery/beat.py
+++ /dev/null
@@ -1,277 +0,0 @@
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.crons import MonitorStatus, capture_checkin
-from sentry_sdk_alpha.integrations import DidNotEnable
-from sentry_sdk_alpha.integrations.celery.utils import (
-    _get_humanized_interval,
-    _now_seconds_since_epoch,
-)
-from sentry_sdk_alpha.utils import logger, match_regex_list
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Optional, TypeVar, Union
-
-    from sentry_sdk_alpha._types import (
-        MonitorConfig,
-        MonitorConfigScheduleType,
-        MonitorConfigScheduleUnit,
-    )
-
-    F = TypeVar("F", bound=Callable[..., Any])
-
-
-try:
-    from celery import Celery, Task  # type: ignore
-    from celery.beat import Scheduler  # type: ignore
-    from celery.schedules import crontab, schedule  # type: ignore
-    from celery.signals import task_failure, task_retry, task_success  # type: ignore
-except ImportError:
-    raise DidNotEnable("Celery not installed")
-
-try:
-    from redbeat.schedulers import RedBeatScheduler  # type: ignore
-except ImportError:
-    RedBeatScheduler = None
-
-
-def _get_headers(task):
-    # type: (Task) -> dict[str, Any]
-    headers = task.request.get("headers") or {}
-
-    # flatten nested headers
-    if "headers" in headers:
-        headers.update(headers["headers"])
-        del headers["headers"]
-
-    headers.update(task.request.get("properties") or {})
-
-    return headers
-
-
-def _get_monitor_config(celery_schedule, app, monitor_name):
-    # type: (Any, Celery, str) -> MonitorConfig
-    monitor_config = {}  # type: MonitorConfig
-    schedule_type = None  # type: Optional[MonitorConfigScheduleType]
-    schedule_value = None  # type: Optional[Union[str, int]]
-    schedule_unit = None  # type: Optional[MonitorConfigScheduleUnit]
-
-    if isinstance(celery_schedule, crontab):
-        schedule_type = "crontab"
-        schedule_value = (
-            "{0._orig_minute} "
-            "{0._orig_hour} "
-            "{0._orig_day_of_month} "
-            "{0._orig_month_of_year} "
-            "{0._orig_day_of_week}".format(celery_schedule)
-        )
-    elif isinstance(celery_schedule, schedule):
-        schedule_type = "interval"
-        (schedule_value, schedule_unit) = _get_humanized_interval(celery_schedule.seconds)
-
-        if schedule_unit == "second":
-            logger.warning(
-                "Intervals shorter than one minute are not supported by Sentry Crons. Monitor '%s' has an interval of %s seconds. Use the `exclude_beat_tasks` option in the celery integration to exclude it.",
-                monitor_name,
-                schedule_value,
-            )
-            return {}
-
-    else:
-        logger.warning(
-            "Celery schedule type '%s' not supported by Sentry Crons.",
-            type(celery_schedule),
-        )
-        return {}
-
-    monitor_config["schedule"] = {}
-    monitor_config["schedule"]["type"] = schedule_type
-    monitor_config["schedule"]["value"] = schedule_value
-
-    if schedule_unit is not None:
-        monitor_config["schedule"]["unit"] = schedule_unit
-
-    monitor_config["timezone"] = (
-        (
-            hasattr(celery_schedule, "tz")
-            and celery_schedule.tz is not None
-            and str(celery_schedule.tz)
-        )
-        or app.timezone
-        or "UTC"
-    )
-
-    return monitor_config
-
-
-def _apply_crons_data_to_schedule_entry(scheduler, schedule_entry, integration):
-    # type: (Any, Any, sentry_sdk.integrations.celery.CeleryIntegration) -> None
-    """
-    Add Sentry Crons information to the schedule_entry headers.
-    """
-    if not integration.monitor_beat_tasks:
-        return
-
-    monitor_name = schedule_entry.name
-
-    task_should_be_excluded = match_regex_list(monitor_name, integration.exclude_beat_tasks)
-    if task_should_be_excluded:
-        return
-
-    celery_schedule = schedule_entry.schedule
-    app = scheduler.app
-
-    monitor_config = _get_monitor_config(celery_schedule, app, monitor_name)
-
-    is_supported_schedule = bool(monitor_config)
-    if not is_supported_schedule:
-        return
-
-    headers = schedule_entry.options.pop("headers", {})
-    headers.update(
-        {
-            "sentry-monitor-slug": monitor_name,
-            "sentry-monitor-config": monitor_config,
-        }
-    )
-
-    check_in_id = capture_checkin(
-        monitor_slug=monitor_name,
-        monitor_config=monitor_config,
-        status=MonitorStatus.IN_PROGRESS,
-    )
-    headers.update({"sentry-monitor-check-in-id": check_in_id})
-
-    # Set the Sentry configuration in the options of the ScheduleEntry.
-    # Those will be picked up in `apply_async` and added to the headers.
-    schedule_entry.options["headers"] = headers
-
-
-def _wrap_beat_scheduler(original_function):
-    # type: (Callable[..., Any]) -> Callable[..., Any]
-    """
-    Makes sure that:
-    - a new Sentry trace is started for each task started by Celery Beat and
-      it is propagated to the task.
-    - the Sentry Crons information is set in the Celery Beat task's
-      headers so that is is monitored with Sentry Crons.
-
-    After the patched function is called,
-    Celery Beat will call apply_async to put the task in the queue.
-    """
-    # Patch only once
-    # Can't use __name__ here, because some of our tests mock original_apply_entry
-    already_patched = "sentry_patched_scheduler" in str(original_function)
-    if already_patched:
-        return original_function
-
-    from sentry_sdk_alpha.integrations.celery import CeleryIntegration
-
-    def sentry_patched_scheduler(*args, **kwargs):
-        # type: (*Any, **Any) -> None
-        integration = sentry_sdk_alpha.get_client().get_integration(CeleryIntegration)
-        if integration is None:
-            return original_function(*args, **kwargs)
-
-        # Tasks started by Celery Beat start a new Trace
-        scope = sentry_sdk_alpha.get_isolation_scope()
-        scope.set_new_propagation_context()
-        scope._name = "celery-beat"
-
-        scheduler, schedule_entry = args
-        _apply_crons_data_to_schedule_entry(scheduler, schedule_entry, integration)
-
-        return original_function(*args, **kwargs)
-
-    return sentry_patched_scheduler
-
-
-def _patch_beat_apply_entry():
-    # type: () -> None
-    Scheduler.apply_entry = _wrap_beat_scheduler(Scheduler.apply_entry)
-
-
-def _patch_redbeat_maybe_due():
-    # type: () -> None
-    if RedBeatScheduler is None:
-        return
-
-    RedBeatScheduler.maybe_due = _wrap_beat_scheduler(RedBeatScheduler.maybe_due)
-
-
-def _setup_celery_beat_signals(monitor_beat_tasks):
-    # type: (bool) -> None
-    if monitor_beat_tasks:
-        task_success.connect(crons_task_success)
-        task_failure.connect(crons_task_failure)
-        task_retry.connect(crons_task_retry)
-
-
-def crons_task_success(sender, **kwargs):
-    # type: (Task, dict[Any, Any]) -> None
-    logger.debug("celery_task_success %s", sender)
-    headers = _get_headers(sender)
-
-    if "sentry-monitor-slug" not in headers:
-        return
-
-    monitor_config = headers.get("sentry-monitor-config", {})
-
-    start_timestamp_s = headers.get("sentry-monitor-start-timestamp-s")
-
-    capture_checkin(
-        monitor_slug=headers["sentry-monitor-slug"],
-        monitor_config=monitor_config,
-        check_in_id=headers["sentry-monitor-check-in-id"],
-        duration=(
-            _now_seconds_since_epoch() - float(start_timestamp_s) if start_timestamp_s else None
-        ),
-        status=MonitorStatus.OK,
-    )
-
-
-def crons_task_failure(sender, **kwargs):
-    # type: (Task, dict[Any, Any]) -> None
-    logger.debug("celery_task_failure %s", sender)
-    headers = _get_headers(sender)
-
-    if "sentry-monitor-slug" not in headers:
-        return
-
-    monitor_config = headers.get("sentry-monitor-config", {})
-
-    start_timestamp_s = headers.get("sentry-monitor-start-timestamp-s")
-
-    capture_checkin(
-        monitor_slug=headers["sentry-monitor-slug"],
-        monitor_config=monitor_config,
-        check_in_id=headers["sentry-monitor-check-in-id"],
-        duration=(
-            _now_seconds_since_epoch() - float(start_timestamp_s) if start_timestamp_s else None
-        ),
-        status=MonitorStatus.ERROR,
-    )
-
-
-def crons_task_retry(sender, **kwargs):
-    # type: (Task, dict[Any, Any]) -> None
-    logger.debug("celery_task_retry %s", sender)
-    headers = _get_headers(sender)
-
-    if "sentry-monitor-slug" not in headers:
-        return
-
-    monitor_config = headers.get("sentry-monitor-config", {})
-
-    start_timestamp_s = headers.get("sentry-monitor-start-timestamp-s")
-
-    capture_checkin(
-        monitor_slug=headers["sentry-monitor-slug"],
-        monitor_config=monitor_config,
-        check_in_id=headers["sentry-monitor-check-in-id"],
-        duration=(
-            _now_seconds_since_epoch() - float(start_timestamp_s) if start_timestamp_s else None
-        ),
-        status=MonitorStatus.ERROR,
-    )
diff --git a/src/sentry_sdk_alpha/integrations/celery/utils.py b/src/sentry_sdk_alpha/integrations/celery/utils.py
deleted file mode 100644
index 47265db49511e3..00000000000000
--- a/src/sentry_sdk_alpha/integrations/celery/utils.py
+++ /dev/null
@@ -1,44 +0,0 @@
-import time
-from typing import TYPE_CHECKING, cast
-
-if TYPE_CHECKING:
-    from typing import Any, Tuple
-
-    from sentry_sdk_alpha._types import MonitorConfigScheduleUnit
-
-
-def _now_seconds_since_epoch():
-    # type: () -> float
-    # We cannot use `time.perf_counter()` when dealing with the duration
-    # of a Celery task, because the start of a Celery task and
-    # the end are recorded in different processes.
-    # Start happens in the Celery Beat process,
-    # the end in a Celery Worker process.
-    return time.time()
-
-
-def _get_humanized_interval(seconds):
-    # type: (float) -> Tuple[int, MonitorConfigScheduleUnit]
-    TIME_UNITS = (  # noqa: N806
-        ("day", 60 * 60 * 24.0),
-        ("hour", 60 * 60.0),
-        ("minute", 60.0),
-    )
-
-    seconds = float(seconds)
-    for unit, divider in TIME_UNITS:
-        if seconds >= divider:
-            interval = int(seconds / divider)
-            return (interval, cast("MonitorConfigScheduleUnit", unit))
-
-    return (int(seconds), "second")
-
-
-class NoOpMgr:
-    def __enter__(self):
-        # type: () -> None
-        return None
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        # type: (Any, Any, Any) -> None
-        return None
diff --git a/src/sentry_sdk_alpha/integrations/chalice.py b/src/sentry_sdk_alpha/integrations/chalice.py
deleted file mode 100644
index b3dd0f3b38f4ed..00000000000000
--- a/src/sentry_sdk_alpha/integrations/chalice.py
+++ /dev/null
@@ -1,126 +0,0 @@
-import sys
-from functools import wraps
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-from sentry_sdk_alpha.integrations.aws_lambda import _make_request_event_processor
-from sentry_sdk_alpha.tracing import TransactionSource
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    event_from_exception,
-    parse_version,
-    reraise,
-)
-
-try:
-    import chalice  # type: ignore
-    from chalice import Chalice, ChaliceViewError
-    from chalice import __version__ as CHALICE_VERSION
-    from chalice.app import EventSourceHandler as ChaliceEventSourceHandler  # type: ignore
-except ImportError:
-    raise DidNotEnable("Chalice is not installed")
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Dict, TypeVar
-
-    F = TypeVar("F", bound=Callable[..., Any])
-
-
-class EventSourceHandler(ChaliceEventSourceHandler):  # type: ignore
-    def __call__(self, event, context):
-        # type: (Any, Any) -> Any
-        client = sentry_sdk_alpha.get_client()
-
-        with sentry_sdk_alpha.isolation_scope() as scope:
-            with capture_internal_exceptions():
-                configured_time = context.get_remaining_time_in_millis()
-                scope.add_event_processor(
-                    _make_request_event_processor(event, context, configured_time)
-                )
-            try:
-                return ChaliceEventSourceHandler.__call__(self, event, context)
-            except Exception:
-                exc_info = sys.exc_info()
-                event, hint = event_from_exception(
-                    exc_info,
-                    client_options=client.options,
-                    mechanism={"type": "chalice", "handled": False},
-                )
-                sentry_sdk_alpha.capture_event(event, hint=hint)
-                client.flush()
-                reraise(*exc_info)
-
-
-def _get_view_function_response(app, view_function, function_args):
-    # type: (Any, F, Any) -> F
-    @wraps(view_function)
-    def wrapped_view_function(**function_args):
-        # type: (**Any) -> Any
-        client = sentry_sdk_alpha.get_client()
-        with sentry_sdk_alpha.isolation_scope() as scope:
-            with capture_internal_exceptions():
-                configured_time = app.lambda_context.get_remaining_time_in_millis()
-                scope.set_transaction_name(
-                    app.lambda_context.function_name,
-                    source=TransactionSource.COMPONENT,
-                )
-
-                scope.add_event_processor(
-                    _make_request_event_processor(
-                        app.current_request.to_dict(),
-                        app.lambda_context,
-                        configured_time,
-                    )
-                )
-            try:
-                return view_function(**function_args)
-            except Exception as exc:
-                if isinstance(exc, ChaliceViewError):
-                    raise
-                exc_info = sys.exc_info()
-                event, hint = event_from_exception(
-                    exc_info,
-                    client_options=client.options,
-                    mechanism={"type": "chalice", "handled": False},
-                )
-                sentry_sdk_alpha.capture_event(event, hint=hint)
-                client.flush()
-                raise
-
-    return wrapped_view_function  # type: ignore
-
-
-class ChaliceIntegration(Integration):
-    identifier = "chalice"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-
-        version = parse_version(CHALICE_VERSION)
-
-        if version is None:
-            raise DidNotEnable(f"Unparsable Chalice version: {CHALICE_VERSION}")
-
-        if version < (1, 20):
-            old_get_view_function_response = Chalice._get_view_function_response
-        else:
-            from chalice.app import RestAPIEventHandler
-
-            old_get_view_function_response = RestAPIEventHandler._get_view_function_response
-
-        def sentry_event_response(app, view_function, function_args):
-            # type: (Any, F, Dict[str, Any]) -> Any
-            wrapped_view_function = _get_view_function_response(app, view_function, function_args)
-
-            return old_get_view_function_response(app, wrapped_view_function, function_args)
-
-        if version < (1, 20):
-            Chalice._get_view_function_response = sentry_event_response
-        else:
-            RestAPIEventHandler._get_view_function_response = sentry_event_response
-        # for everything else (like events)
-        chalice.app.EventSourceHandler = EventSourceHandler
diff --git a/src/sentry_sdk_alpha/integrations/clickhouse_driver.py b/src/sentry_sdk_alpha/integrations/clickhouse_driver.py
deleted file mode 100644
index b5ffed4f14402d..00000000000000
--- a/src/sentry_sdk_alpha/integrations/clickhouse_driver.py
+++ /dev/null
@@ -1,185 +0,0 @@
-from typing import TYPE_CHECKING, Any, Dict, TypeVar, cast
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP, SPANDATA
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.tracing import Span
-from sentry_sdk_alpha.utils import (
-    _serialize_span_attribute,
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-)
-
-# Hack to get new Python features working in older versions
-# without introducing a hard dependency on `typing_extensions`
-# from: https://stackoverflow.com/a/71944042/300572
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import ParamSpec
-else:
-    # Fake ParamSpec
-    class ParamSpec:
-        def __init__(self, _):
-            self.args = None
-            self.kwargs = None
-
-    # Callable[anything] will return None
-    class _Callable:
-        def __getitem__(self, _):
-            return None
-
-    # Make instances
-    Callable = _Callable()
-
-
-try:
-    import clickhouse_driver  # type: ignore[import-not-found]
-
-except ImportError:
-    raise DidNotEnable("clickhouse-driver not installed.")
-
-
-class ClickhouseDriverIntegration(Integration):
-    identifier = "clickhouse_driver"
-    origin = f"auto.db.{identifier}"
-
-    @staticmethod
-    def setup_once() -> None:
-        _check_minimum_version(ClickhouseDriverIntegration, clickhouse_driver.VERSION)
-
-        # Every query is done using the Connection's `send_query` function
-        clickhouse_driver.connection.Connection.send_query = _wrap_start(
-            clickhouse_driver.connection.Connection.send_query
-        )
-
-        # If the query contains parameters then the send_data function is used to send those parameters to clickhouse
-        clickhouse_driver.client.Client.send_data = _wrap_send_data(
-            clickhouse_driver.client.Client.send_data
-        )
-
-        # Every query ends either with the Client's `receive_end_of_query` (no result expected)
-        # or its `receive_result` (result expected)
-        clickhouse_driver.client.Client.receive_end_of_query = _wrap_end(
-            clickhouse_driver.client.Client.receive_end_of_query
-        )
-        if hasattr(clickhouse_driver.client.Client, "receive_end_of_insert_query"):
-            # In 0.2.7, insert queries are handled separately via `receive_end_of_insert_query`
-            clickhouse_driver.client.Client.receive_end_of_insert_query = _wrap_end(
-                clickhouse_driver.client.Client.receive_end_of_insert_query
-            )
-        clickhouse_driver.client.Client.receive_result = _wrap_end(
-            clickhouse_driver.client.Client.receive_result
-        )
-
-
-P = ParamSpec("P")
-T = TypeVar("T")
-
-
-def _wrap_start(f: Callable[P, T]) -> Callable[P, T]:
-    @ensure_integration_enabled(ClickhouseDriverIntegration, f)
-    def _inner(*args: P.args, **kwargs: P.kwargs) -> T:
-        connection = args[0]
-        query = args[1]
-        query_id = args[2] if len(args) > 2 else kwargs.get("query_id")
-        params = args[3] if len(args) > 3 else kwargs.get("params")
-
-        span = sentry_sdk_alpha.start_span(
-            op=OP.DB,
-            name=query,
-            origin=ClickhouseDriverIntegration.origin,
-            only_if_parent=True,
-        )
-
-        connection._sentry_span = span  # type: ignore[attr-defined]
-
-        data = _get_db_data(connection)
-        data = cast("dict[str, Any]", data)
-        data["db.query.text"] = query
-
-        if query_id:
-            data["db.query_id"] = query_id
-
-        if params and should_send_default_pii():
-            data["db.params"] = params
-
-        connection._sentry_db_data = data  # type: ignore[attr-defined]
-        _set_on_span(span, data)
-
-        # run the original code
-        ret = f(*args, **kwargs)
-
-        return ret
-
-    return _inner
-
-
-def _wrap_end(f: Callable[P, T]) -> Callable[P, T]:
-    def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T:
-        res = f(*args, **kwargs)
-        client = cast("clickhouse_driver.client.Client", args[0])
-        connection = client.connection
-
-        span = getattr(connection, "_sentry_span", None)
-        if span is not None:
-            data = getattr(connection, "_sentry_db_data", {})
-
-            if res is not None and should_send_default_pii():
-                data["db.result"] = res
-                span.set_attribute("db.result", _serialize_span_attribute(res))
-
-            with capture_internal_exceptions():
-                query = data.pop("db.query.text", None)
-                if query:
-                    sentry_sdk_alpha.add_breadcrumb(message=query, category="query", data=data)
-
-            span.finish()
-
-            try:
-                del connection._sentry_db_data
-                del connection._sentry_span
-            except AttributeError:
-                pass
-
-        return res
-
-    return _inner_end
-
-
-def _wrap_send_data(f: Callable[P, T]) -> Callable[P, T]:
-    def _inner_send_data(*args: P.args, **kwargs: P.kwargs) -> T:
-        client = cast("clickhouse_driver.client.Client", args[0])
-        connection = client.connection
-        db_params_data = cast("list[Any]", args[2])
-        span = getattr(connection, "_sentry_span", None)
-
-        if span is not None:
-            data = _get_db_data(connection)
-            _set_on_span(span, data)
-
-            if should_send_default_pii():
-                saved_db_data = getattr(connection, "_sentry_db_data", {})  # type: dict[str, Any]
-                db_params = saved_db_data.get("db.params") or []  # type: list[Any]
-                db_params.extend(db_params_data)
-                saved_db_data["db.params"] = db_params
-                span.set_attribute("db.params", _serialize_span_attribute(db_params))
-
-        return f(*args, **kwargs)
-
-    return _inner_send_data
-
-
-def _get_db_data(connection: clickhouse_driver.connection.Connection) -> dict[str, str]:
-    return {
-        SPANDATA.DB_SYSTEM: "clickhouse",
-        SPANDATA.SERVER_ADDRESS: connection.host,
-        SPANDATA.SERVER_PORT: connection.port,
-        SPANDATA.DB_NAME: connection.database,
-        SPANDATA.DB_USER: connection.user,
-    }
-
-
-def _set_on_span(span: Span, data: dict[str, Any]) -> None:
-    for key, value in data.items():
-        span.set_attribute(key, _serialize_span_attribute(value))
diff --git a/src/sentry_sdk_alpha/integrations/cloud_resource_context.py b/src/sentry_sdk_alpha/integrations/cloud_resource_context.py
deleted file mode 100644
index 8101b9d081e5d2..00000000000000
--- a/src/sentry_sdk_alpha/integrations/cloud_resource_context.py
+++ /dev/null
@@ -1,264 +0,0 @@
-import json
-from typing import TYPE_CHECKING
-
-import urllib3
-
-from sentry_sdk_alpha.api import set_context
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.utils import logger
-
-if TYPE_CHECKING:
-    from typing import Dict
-
-
-CONTEXT_TYPE = "cloud_resource"
-
-HTTP_TIMEOUT = 2.0
-
-AWS_METADATA_HOST = "169.254.169.254"
-AWS_TOKEN_URL = f"http://{AWS_METADATA_HOST}/latest/api/token"
-AWS_METADATA_URL = f"http://{AWS_METADATA_HOST}/latest/dynamic/instance-identity/document"
-
-GCP_METADATA_HOST = "metadata.google.internal"
-GCP_METADATA_URL = f"http://{GCP_METADATA_HOST}/computeMetadata/v1/?recursive=true"
-
-
-class CLOUD_PROVIDER:  # noqa: N801
-    """
-    Name of the cloud provider.
-    see https://opentelemetry.io/docs/reference/specification/resource/semantic_conventions/cloud/
-    """
-
-    ALIBABA = "alibaba_cloud"
-    AWS = "aws"
-    AZURE = "azure"
-    GCP = "gcp"
-    IBM = "ibm_cloud"
-    TENCENT = "tencent_cloud"
-
-
-class CLOUD_PLATFORM:  # noqa: N801
-    """
-    The cloud platform.
-    see https://opentelemetry.io/docs/reference/specification/resource/semantic_conventions/cloud/
-    """
-
-    AWS_EC2 = "aws_ec2"
-    GCP_COMPUTE_ENGINE = "gcp_compute_engine"
-
-
-class CloudResourceContextIntegration(Integration):
-    """
-    Adds cloud resource context to the Senty scope
-    """
-
-    identifier = "cloudresourcecontext"
-
-    cloud_provider = ""
-
-    aws_token = ""
-    http = urllib3.PoolManager(timeout=HTTP_TIMEOUT)
-
-    gcp_metadata = None
-
-    def __init__(self, cloud_provider=""):
-        # type: (str) -> None
-        CloudResourceContextIntegration.cloud_provider = cloud_provider
-
-    @classmethod
-    def _is_aws(cls):
-        # type: () -> bool
-        try:
-            r = cls.http.request(
-                "PUT",
-                AWS_TOKEN_URL,
-                headers={"X-aws-ec2-metadata-token-ttl-seconds": "60"},
-            )
-
-            if r.status != 200:
-                return False
-
-            cls.aws_token = r.data.decode()
-            return True
-
-        except urllib3.exceptions.TimeoutError:
-            logger.debug("AWS metadata service timed out after %s seconds", HTTP_TIMEOUT)
-            return False
-        except Exception as e:
-            logger.debug("Error checking AWS metadata service: %s", str(e))
-            return False
-
-    @classmethod
-    def _get_aws_context(cls):
-        # type: () -> Dict[str, str]
-        ctx = {
-            "cloud.provider": CLOUD_PROVIDER.AWS,
-            "cloud.platform": CLOUD_PLATFORM.AWS_EC2,
-        }
-
-        try:
-            r = cls.http.request(
-                "GET",
-                AWS_METADATA_URL,
-                headers={"X-aws-ec2-metadata-token": cls.aws_token},
-            )
-
-            if r.status != 200:
-                return ctx
-
-            data = json.loads(r.data.decode("utf-8"))
-
-            try:
-                ctx["cloud.account.id"] = data["accountId"]
-            except Exception:
-                pass
-
-            try:
-                ctx["cloud.availability_zone"] = data["availabilityZone"]
-            except Exception:
-                pass
-
-            try:
-                ctx["cloud.region"] = data["region"]
-            except Exception:
-                pass
-
-            try:
-                ctx["host.id"] = data["instanceId"]
-            except Exception:
-                pass
-
-            try:
-                ctx["host.type"] = data["instanceType"]
-            except Exception:
-                pass
-
-        except urllib3.exceptions.TimeoutError:
-            logger.debug("AWS metadata service timed out after %s seconds", HTTP_TIMEOUT)
-        except Exception as e:
-            logger.debug("Error fetching AWS metadata: %s", str(e))
-
-        return ctx
-
-    @classmethod
-    def _is_gcp(cls):
-        # type: () -> bool
-        try:
-            r = cls.http.request(
-                "GET",
-                GCP_METADATA_URL,
-                headers={"Metadata-Flavor": "Google"},
-            )
-
-            if r.status != 200:
-                return False
-
-            cls.gcp_metadata = json.loads(r.data.decode("utf-8"))
-            return True
-
-        except urllib3.exceptions.TimeoutError:
-            logger.debug("GCP metadata service timed out after %s seconds", HTTP_TIMEOUT)
-            return False
-        except Exception as e:
-            logger.debug("Error checking GCP metadata service: %s", str(e))
-            return False
-
-    @classmethod
-    def _get_gcp_context(cls):
-        # type: () -> Dict[str, str]
-        ctx = {
-            "cloud.provider": CLOUD_PROVIDER.GCP,
-            "cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE,
-        }
-
-        try:
-            if cls.gcp_metadata is None:
-                r = cls.http.request(
-                    "GET",
-                    GCP_METADATA_URL,
-                    headers={"Metadata-Flavor": "Google"},
-                )
-
-                if r.status != 200:
-                    return ctx
-
-                cls.gcp_metadata = json.loads(r.data.decode("utf-8"))
-
-            try:
-                ctx["cloud.account.id"] = cls.gcp_metadata["project"]["projectId"]
-            except Exception:
-                pass
-
-            try:
-                ctx["cloud.availability_zone"] = cls.gcp_metadata["instance"]["zone"].split("/")[-1]
-            except Exception:
-                pass
-
-            try:
-                # only populated in google cloud run
-                ctx["cloud.region"] = cls.gcp_metadata["instance"]["region"].split("/")[-1]
-            except Exception:
-                pass
-
-            try:
-                ctx["host.id"] = cls.gcp_metadata["instance"]["id"]
-            except Exception:
-                pass
-
-        except urllib3.exceptions.TimeoutError:
-            logger.debug("GCP metadata service timed out after %s seconds", HTTP_TIMEOUT)
-        except Exception as e:
-            logger.debug("Error fetching GCP metadata: %s", str(e))
-
-        return ctx
-
-    @classmethod
-    def _get_cloud_provider(cls):
-        # type: () -> str
-        if cls._is_aws():
-            return CLOUD_PROVIDER.AWS
-
-        if cls._is_gcp():
-            return CLOUD_PROVIDER.GCP
-
-        return ""
-
-    @classmethod
-    def _get_cloud_resource_context(cls):
-        # type: () -> Dict[str, str]
-        cloud_provider = (
-            cls.cloud_provider
-            if cls.cloud_provider != ""
-            else CloudResourceContextIntegration._get_cloud_provider()
-        )
-        if cloud_provider in context_getters.keys():
-            return context_getters[cloud_provider]()
-
-        return {}
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        cloud_provider = CloudResourceContextIntegration.cloud_provider
-        unsupported_cloud_provider = (
-            cloud_provider != "" and cloud_provider not in context_getters.keys()
-        )
-
-        if unsupported_cloud_provider:
-            logger.warning(
-                "Invalid value for cloud_provider: %s (must be in %s). Falling back to autodetection...",
-                CloudResourceContextIntegration.cloud_provider,
-                list(context_getters.keys()),
-            )
-
-        context = CloudResourceContextIntegration._get_cloud_resource_context()
-        if context != {}:
-            set_context(CONTEXT_TYPE, context)
-
-
-# Map with the currently supported cloud providers
-# mapping to functions extracting the context
-context_getters = {
-    CLOUD_PROVIDER.AWS: CloudResourceContextIntegration._get_aws_context,
-    CLOUD_PROVIDER.GCP: CloudResourceContextIntegration._get_gcp_context,
-}
diff --git a/src/sentry_sdk_alpha/integrations/cohere.py b/src/sentry_sdk_alpha/integrations/cohere.py
deleted file mode 100644
index bbe68150cf040f..00000000000000
--- a/src/sentry_sdk_alpha/integrations/cohere.py
+++ /dev/null
@@ -1,264 +0,0 @@
-from functools import wraps
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha import consts
-from sentry_sdk_alpha.ai.monitoring import record_token_usage
-from sentry_sdk_alpha.ai.utils import set_data_normalized
-from sentry_sdk_alpha.consts import SPANDATA
-
-if TYPE_CHECKING:
-    from typing import Any
-    from collections.abc import Callable, Iterator
-    from sentry_sdk_alpha.tracing import Span
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import capture_internal_exceptions, event_from_exception
-
-try:
-    from cohere import ChatStreamEndEvent, NonStreamedChatResponse
-    from cohere.base_client import BaseCohere
-    from cohere.client import Client
-
-    if TYPE_CHECKING:
-        from cohere import StreamedChatResponse
-except ImportError:
-    raise DidNotEnable("Cohere not installed")
-
-try:
-    # cohere 5.9.3+
-    from cohere import StreamEndStreamedChatResponse
-except ImportError:
-    from cohere import StreamedChatResponse_StreamEnd as StreamEndStreamedChatResponse
-
-
-COLLECTED_CHAT_PARAMS = {
-    "model": SPANDATA.AI_MODEL_ID,
-    "k": SPANDATA.AI_TOP_K,
-    "p": SPANDATA.AI_TOP_P,
-    "seed": SPANDATA.AI_SEED,
-    "frequency_penalty": SPANDATA.AI_FREQUENCY_PENALTY,
-    "presence_penalty": SPANDATA.AI_PRESENCE_PENALTY,
-    "raw_prompting": SPANDATA.AI_RAW_PROMPTING,
-}
-
-COLLECTED_PII_CHAT_PARAMS = {
-    "tools": SPANDATA.AI_TOOLS,
-    "preamble": SPANDATA.AI_PREAMBLE,
-}
-
-COLLECTED_CHAT_RESP_ATTRS = {
-    "generation_id": SPANDATA.AI_GENERATION_ID,
-    "is_search_required": SPANDATA.AI_SEARCH_REQUIRED,
-    "finish_reason": SPANDATA.AI_FINISH_REASON,
-}
-
-COLLECTED_PII_CHAT_RESP_ATTRS = {
-    "citations": SPANDATA.AI_CITATIONS,
-    "documents": SPANDATA.AI_DOCUMENTS,
-    "search_queries": SPANDATA.AI_SEARCH_QUERIES,
-    "search_results": SPANDATA.AI_SEARCH_RESULTS,
-    "tool_calls": SPANDATA.AI_TOOL_CALLS,
-}
-
-
-class CohereIntegration(Integration):
-    identifier = "cohere"
-    origin = f"auto.ai.{identifier}"
-
-    def __init__(self, include_prompts=True):
-        # type: (CohereIntegration, bool) -> None
-        self.include_prompts = include_prompts
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        BaseCohere.chat = _wrap_chat(BaseCohere.chat, streaming=False)
-        Client.embed = _wrap_embed(Client.embed)
-        BaseCohere.chat_stream = _wrap_chat(BaseCohere.chat_stream, streaming=True)
-
-
-def _capture_exception(exc):
-    # type: (Any) -> None
-    event, hint = event_from_exception(
-        exc,
-        client_options=sentry_sdk_alpha.get_client().options,
-        mechanism={"type": "cohere", "handled": False},
-    )
-    sentry_sdk_alpha.capture_event(event, hint=hint)
-
-
-def _wrap_chat(f, streaming):
-    # type: (Callable[..., Any], bool) -> Callable[..., Any]
-
-    def collect_chat_response_fields(span, res, include_pii):
-        # type: (Span, NonStreamedChatResponse, bool) -> None
-        if include_pii:
-            if hasattr(res, "text"):
-                set_data_normalized(
-                    span,
-                    SPANDATA.AI_RESPONSES,
-                    [res.text],
-                )
-            for pii_attr in COLLECTED_PII_CHAT_RESP_ATTRS:
-                if hasattr(res, pii_attr):
-                    set_data_normalized(span, "ai." + pii_attr, getattr(res, pii_attr))
-
-        for attr in COLLECTED_CHAT_RESP_ATTRS:
-            if hasattr(res, attr):
-                set_data_normalized(span, "ai." + attr, getattr(res, attr))
-
-        if hasattr(res, "meta"):
-            if hasattr(res.meta, "billed_units"):
-                record_token_usage(
-                    span,
-                    prompt_tokens=res.meta.billed_units.input_tokens,
-                    completion_tokens=res.meta.billed_units.output_tokens,
-                )
-            elif hasattr(res.meta, "tokens"):
-                record_token_usage(
-                    span,
-                    prompt_tokens=res.meta.tokens.input_tokens,
-                    completion_tokens=res.meta.tokens.output_tokens,
-                )
-
-            if hasattr(res.meta, "warnings"):
-                set_data_normalized(span, SPANDATA.AI_WARNINGS, res.meta.warnings)
-
-    @wraps(f)
-    def new_chat(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        integration = sentry_sdk_alpha.get_client().get_integration(CohereIntegration)
-
-        if (
-            integration is None
-            or "message" not in kwargs
-            or not isinstance(kwargs.get("message"), str)
-        ):
-            return f(*args, **kwargs)
-
-        message = kwargs.get("message")
-
-        span = sentry_sdk_alpha.start_span(
-            op=consts.OP.COHERE_CHAT_COMPLETIONS_CREATE,
-            name="cohere.client.Chat",
-            origin=CohereIntegration.origin,
-            only_if_parent=True,
-        )
-        span.__enter__()
-        try:
-            res = f(*args, **kwargs)
-        except Exception as e:
-            _capture_exception(e)
-            span.__exit__(None, None, None)
-            raise e from None
-
-        with capture_internal_exceptions():
-            if should_send_default_pii() and integration.include_prompts:
-                set_data_normalized(
-                    span,
-                    SPANDATA.AI_INPUT_MESSAGES,
-                    list(
-                        map(
-                            lambda x: {
-                                "role": getattr(x, "role", "").lower(),
-                                "content": getattr(x, "message", ""),
-                            },
-                            kwargs.get("chat_history", []),
-                        )
-                    )
-                    + [{"role": "user", "content": message}],
-                )
-                for k, v in COLLECTED_PII_CHAT_PARAMS.items():
-                    if k in kwargs:
-                        set_data_normalized(span, v, kwargs[k])
-
-            for k, v in COLLECTED_CHAT_PARAMS.items():
-                if k in kwargs:
-                    set_data_normalized(span, v, kwargs[k])
-            set_data_normalized(span, SPANDATA.AI_STREAMING, False)
-
-            if streaming:
-                old_iterator = res
-
-                def new_iterator():
-                    # type: () -> Iterator[StreamedChatResponse]
-
-                    with capture_internal_exceptions():
-                        for x in old_iterator:
-                            if isinstance(x, ChatStreamEndEvent) or isinstance(
-                                x, StreamEndStreamedChatResponse
-                            ):
-                                collect_chat_response_fields(
-                                    span,
-                                    x.response,
-                                    include_pii=should_send_default_pii()
-                                    and integration.include_prompts,
-                                )
-                            yield x
-
-                    span.__exit__(None, None, None)
-
-                return new_iterator()
-            elif isinstance(res, NonStreamedChatResponse):
-                collect_chat_response_fields(
-                    span,
-                    res,
-                    include_pii=should_send_default_pii() and integration.include_prompts,
-                )
-                span.__exit__(None, None, None)
-            else:
-                set_data_normalized(span, "unknown_response", True)
-                span.__exit__(None, None, None)
-            return res
-
-    return new_chat
-
-
-def _wrap_embed(f):
-    # type: (Callable[..., Any]) -> Callable[..., Any]
-
-    @wraps(f)
-    def new_embed(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        integration = sentry_sdk_alpha.get_client().get_integration(CohereIntegration)
-        if integration is None:
-            return f(*args, **kwargs)
-
-        with sentry_sdk_alpha.start_span(
-            op=consts.OP.COHERE_EMBEDDINGS_CREATE,
-            name="Cohere Embedding Creation",
-            origin=CohereIntegration.origin,
-            only_if_parent=True,
-        ) as span:
-            if "texts" in kwargs and (should_send_default_pii() and integration.include_prompts):
-                if isinstance(kwargs["texts"], str):
-                    set_data_normalized(span, SPANDATA.AI_TEXTS, [kwargs["texts"]])
-                elif (
-                    isinstance(kwargs["texts"], list)
-                    and len(kwargs["texts"]) > 0
-                    and isinstance(kwargs["texts"][0], str)
-                ):
-                    set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, kwargs["texts"])
-
-            if "model" in kwargs:
-                set_data_normalized(span, SPANDATA.AI_MODEL_ID, kwargs["model"])
-            try:
-                res = f(*args, **kwargs)
-            except Exception as e:
-                _capture_exception(e)
-                raise e from None
-            if (
-                hasattr(res, "meta")
-                and hasattr(res.meta, "billed_units")
-                and hasattr(res.meta.billed_units, "input_tokens")
-            ):
-                record_token_usage(
-                    span,
-                    prompt_tokens=res.meta.billed_units.input_tokens,
-                    total_tokens=res.meta.billed_units.input_tokens,
-                )
-            return res
-
-    return new_embed
diff --git a/src/sentry_sdk_alpha/integrations/dedupe.py b/src/sentry_sdk_alpha/integrations/dedupe.py
deleted file mode 100644
index 02d890475bed88..00000000000000
--- a/src/sentry_sdk_alpha/integrations/dedupe.py
+++ /dev/null
@@ -1,51 +0,0 @@
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.scope import add_global_event_processor
-from sentry_sdk_alpha.utils import ContextVar
-
-if TYPE_CHECKING:
-    from typing import Optional
-
-    from sentry_sdk_alpha._types import Event, Hint
-
-
-class DedupeIntegration(Integration):
-    identifier = "dedupe"
-
-    def __init__(self):
-        # type: () -> None
-        self._last_seen = ContextVar("last-seen")
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        @add_global_event_processor
-        def processor(event, hint):
-            # type: (Event, Optional[Hint]) -> Optional[Event]
-            if hint is None:
-                return event
-
-            integration = sentry_sdk_alpha.get_client().get_integration(DedupeIntegration)
-            if integration is None:
-                return event
-
-            exc_info = hint.get("exc_info", None)
-            if exc_info is None:
-                return event
-
-            exc = exc_info[1]
-            if integration._last_seen.get(None) is exc:
-                return None
-            integration._last_seen.set(exc)
-            return event
-
-    @staticmethod
-    def reset_last_seen():
-        # type: () -> None
-        integration = sentry_sdk_alpha.get_client().get_integration(DedupeIntegration)
-        if integration is None:
-            return
-
-        integration._last_seen.set(None)
diff --git a/src/sentry_sdk_alpha/integrations/django/__init__.py b/src/sentry_sdk_alpha/integrations/django/__init__.py
deleted file mode 100644
index e6c67879f033e0..00000000000000
--- a/src/sentry_sdk_alpha/integrations/django/__init__.py
+++ /dev/null
@@ -1,713 +0,0 @@
-import functools
-import inspect
-import sys
-import threading
-import weakref
-from importlib import import_module
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP, SOURCE_FOR_STYLE, SPANDATA, TransactionSource
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
-from sentry_sdk_alpha.integrations._wsgi_common import (
-    DEFAULT_HTTP_METHODS_TO_CAPTURE,
-    RequestExtractor,
-)
-from sentry_sdk_alpha.integrations.logging import ignore_logger
-from sentry_sdk_alpha.integrations.wsgi import SentryWsgiMiddleware
-from sentry_sdk_alpha.scope import add_global_event_processor, should_send_default_pii
-from sentry_sdk_alpha.serializer import add_global_repr_processor
-from sentry_sdk_alpha.tracing_utils import add_query_source, record_sql_queries
-from sentry_sdk_alpha.utils import (
-    CONTEXTVARS_ERROR_MESSAGE,
-    HAS_REAL_CONTEXTVARS,
-    SENSITIVE_DATA_SUBSTITUTE,
-    AnnotatedValue,
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    event_from_exception,
-    logger,
-    transaction_from_function,
-    walk_exception_chain,
-)
-
-try:
-    from django import VERSION as DJANGO_VERSION
-    from django.conf import settings
-    from django.conf import settings as django_settings
-    from django.core import signals
-
-    try:
-        from django.urls import resolve
-    except ImportError:
-        from django.core.urlresolvers import resolve
-
-    try:
-        from django.urls import Resolver404
-    except ImportError:
-        from django.core.urlresolvers import Resolver404
-
-    # Only available in Django 3.0+
-    try:
-        from django.core.handlers.asgi import ASGIRequest
-    except Exception:
-        ASGIRequest = None
-
-except ImportError:
-    raise DidNotEnable("Django not installed")
-
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha.integrations.django.caching import patch_caching
-from sentry_sdk_alpha.integrations.django.middleware import patch_django_middlewares
-from sentry_sdk_alpha.integrations.django.signals_handlers import patch_signals
-from sentry_sdk_alpha.integrations.django.templates import (
-    get_template_frame_from_exception,
-    patch_templates,
-)
-from sentry_sdk_alpha.integrations.django.transactions import LEGACY_RESOLVER
-from sentry_sdk_alpha.integrations.django.views import patch_views
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Dict, List, Optional, Union
-
-    from django.core.handlers.wsgi import WSGIRequest
-    from django.http.request import QueryDict
-    from django.http.response import HttpResponse
-    from django.utils.datastructures import MultiValueDict
-
-    from sentry_sdk_alpha._types import Event, EventProcessor, Hint, NotImplementedType
-    from sentry_sdk_alpha.integrations.wsgi import _ScopedResponse
-    from sentry_sdk_alpha.tracing import Span
-
-
-TRANSACTION_STYLE_VALUES = ("function_name", "url")
-
-
-class DjangoIntegration(Integration):
-    """
-    Auto instrument a Django application.
-
-    :param transaction_style: How to derive transaction names. Either `"function_name"` or `"url"`. Defaults to `"url"`.
-    :param middleware_spans: Whether to create spans for middleware. Defaults to `True`.
-    :param signals_spans: Whether to create spans for signals. Defaults to `True`.
-    :param signals_denylist: A list of signals to ignore when creating spans.
-    :param cache_spans: Whether to create spans for cache operations. Defaults to `False`.
-    """
-
-    identifier = "django"
-    origin = f"auto.http.{identifier}"
-    origin_db = f"auto.db.{identifier}"
-
-    transaction_style = ""
-    middleware_spans = None
-    signals_spans = None
-    cache_spans = None
-    signals_denylist = []  # type: list[signals.Signal]
-
-    def __init__(
-        self,
-        transaction_style="url",  # type: str
-        middleware_spans=True,  # type: bool
-        signals_spans=True,  # type: bool
-        cache_spans=True,  # type: bool
-        signals_denylist=None,  # type: Optional[list[signals.Signal]]
-        http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE,  # type: tuple[str, ...]
-    ):
-        # type: (...) -> None
-        if transaction_style not in TRANSACTION_STYLE_VALUES:
-            raise ValueError(
-                "Invalid value for transaction_style: %s (must be in %s)"
-                % (transaction_style, TRANSACTION_STYLE_VALUES)
-            )
-        self.transaction_style = transaction_style
-        self.middleware_spans = middleware_spans
-
-        self.signals_spans = signals_spans
-        self.signals_denylist = signals_denylist or []
-
-        self.cache_spans = cache_spans
-
-        self.http_methods_to_capture = tuple(map(str.upper, http_methods_to_capture))
-
    @staticmethod
    def setup_once():
        # type: () -> None
        """One-time global installation of all Django hooks.

        Installs the SQL hook, wraps ``WSGIHandler.__call__`` (and the ASGI
        handler), patches ``get_response``, connects the request-exception
        signal, and registers global event/repr processors. Order matters:
        the version check runs first, and the patch helpers at the bottom
        build on the handlers patched above.
        """
        _check_minimum_version(DjangoIntegration, DJANGO_VERSION)

        install_sql_hook()
        # Patch in our custom middleware.

        # logs an error for every 500
        ignore_logger("django.server")
        ignore_logger("django.request")

        from django.core.handlers.wsgi import WSGIHandler

        old_app = WSGIHandler.__call__

        @ensure_integration_enabled(DjangoIntegration, old_app)
        def sentry_patched_wsgi_handler(self, environ, start_response):
            # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
            # Bind the unpatched __call__ to this handler instance before
            # wrapping it in the Sentry WSGI middleware.
            bound_old_app = old_app.__get__(self, WSGIHandler)

            # Imported here (not at module top) so Django settings are only
            # touched once a request is actually being served.
            from django.conf import settings

            use_x_forwarded_for = settings.USE_X_FORWARDED_HOST

            integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration)

            middleware = SentryWsgiMiddleware(
                bound_old_app,
                use_x_forwarded_for,
                span_origin=DjangoIntegration.origin,
                http_methods_to_capture=(
                    integration.http_methods_to_capture
                    if integration
                    else DEFAULT_HTTP_METHODS_TO_CAPTURE
                ),
            )
            return middleware(environ, start_response)

        WSGIHandler.__call__ = sentry_patched_wsgi_handler

        _patch_get_response()

        _patch_django_asgi_handler()

        signals.got_request_exception.connect(_got_request_exception)

        @add_global_event_processor
        def process_django_templates(event, hint):
            # type: (Event, Optional[Hint]) -> Optional[Event]
            """Insert the failing template frame into the exception stacktrace."""
            if hint is None:
                return event

            exc_info = hint.get("exc_info", None)

            if exc_info is None:
                return event

            exception = event.get("exception", None)

            if exception is None:
                return event

            values = exception.get("values", None)

            if values is None:
                return event

            # Walk event exceptions and the raw exception chain in lockstep
            # (both reversed relative to each other), pairing each event
            # entry with its original exception object.
            for exception, (_, exc_value, _) in zip(
                reversed(values), walk_exception_chain(exc_info)
            ):
                frame = get_template_frame_from_exception(exc_value)
                if frame is not None:
                    frames = exception.get("stacktrace", {}).get("frames", [])

                    # Place the template frame right after the innermost
                    # django.template.base parse/render frame; if none is
                    # found, append at the end.
                    for i in reversed(range(len(frames))):
                        f = frames[i]
                        if (
                            f.get("function") in ("Parser.parse", "parse", "render")
                            and f.get("module") == "django.template.base"
                        ):
                            i += 1
                            break
                    else:
                        i = len(frames)

                    frames.insert(i, frame)

            return event

        @add_global_repr_processor
        def _django_queryset_repr(value, hint):
            # type: (Any, Dict[str, Any]) -> Union[NotImplementedType, str]
            """Render unevaluated QuerySets without executing their query."""
            try:
                # Django 1.6 can fail to import `QuerySet` when Django settings
                # have not yet been initialized.
                #
                # If we fail to import, return `NotImplemented`. It's at least
                # unlikely that we have a query set in `value` when importing
                # `QuerySet` fails.
                from django.db.models.query import QuerySet
            except Exception:
                return NotImplemented

            # Already-evaluated querysets (_result_cache set) get the
            # default repr, which is then safe to show.
            if not isinstance(value, QuerySet) or value._result_cache:
                return NotImplemented

            return "<{} from {} at 0x{:x}>".format(
                value.__class__.__name__,
                value.__module__,
                id(value),
            )

        _patch_channels()
        patch_django_middlewares()
        patch_views()
        patch_templates()
        patch_signals()

        # patch_caching is None when the caching module could not be
        # imported (presumably on unsupported Django versions — set up
        # outside this view of the file).
        if patch_caching is not None:
            patch_caching()
-
-
# Module-level flag and lock implementing the double-checked locking used
# by _patch_drf() to apply the DRF patch exactly once per process.
_DRF_PATCHED = False
_DRF_PATCH_LOCK = threading.Lock()
-
-
def _patch_drf():
    # type: () -> None
    """
    Patch Django Rest Framework for more/better request data. DRF's request
    type is a wrapper around Django's request type. The attribute we're
    interested in is `request.data`, which is a cached property containing a
    parsed request body. Reading a request body from that property is more
    reliable than reading from any of Django's own properties, as those don't
    hold payloads in memory and therefore can only be accessed once.

    We patch the Django request object to include a weak backreference to the
    DRF request object, such that we can later use either in
    `DjangoRequestExtractor`.

    This function is not called directly on SDK setup, because importing almost
    any part of Django Rest Framework will try to access Django settings (where
    `sentry_sdk.init()` might be called from in the first place). Instead we
    run this function on every request and do the patching on the first
    request.
    """

    global _DRF_PATCHED

    if _DRF_PATCHED:
        # Double-checked locking
        return

    with _DRF_PATCH_LOCK:
        if _DRF_PATCHED:
            return

        # We set this regardless of whether the code below succeeds or fails.
        # There is no point in trying to patch again on the next request.
        _DRF_PATCHED = True

        with capture_internal_exceptions():
            try:
                from rest_framework.views import APIView  # type: ignore
            except ImportError:
                # DRF is not installed; nothing to patch.
                pass
            else:
                old_drf_initial = APIView.initial

                @functools.wraps(old_drf_initial)
                def sentry_patched_drf_initial(self, request, *args, **kwargs):
                    # type: (APIView, Any, *Any, **Any) -> Any
                    with capture_internal_exceptions():
                        # Weak backref so the Django request does not keep
                        # the DRF request alive.
                        request._request._sentry_drf_request_backref = weakref.ref(request)
                    return old_drf_initial(self, request, *args, **kwargs)

                APIView.initial = sentry_patched_drf_initial
-
-
def _patch_channels():
    # type: () -> None
    """Route legacy Channels 2.x HTTP handling through the ASGI patcher."""
    try:
        from channels.http import AsgiHandler  # type: ignore
    except ImportError:
        # Channels is not installed; nothing to do.
        return

    from sentry_sdk_alpha.integrations.django.asgi import patch_channels_asgi_handler_impl

    if not HAS_REAL_CONTEXTVARS:
        # Without real contextvars, scope state can leak between requests.
        # Only warn — never raise — because channels may not be used at all
        # in this process (e.g. traditional WSGI workers under
        # gunicorn+gevent with websockets served from a separate process).
        logger.warning(
            "We detected that you are using Django channels 2.0." + CONTEXTVARS_ERROR_MESSAGE
        )

    patch_channels_asgi_handler_impl(AsgiHandler)
-
-
def _patch_django_asgi_handler():
    # type: () -> None
    """Patch Django's native ASGI handler (Django 3+) when it exists."""
    try:
        from django.core.handlers.asgi import ASGIHandler
    except ImportError:
        # Pre-ASGI Django; nothing to patch.
        return

    from sentry_sdk_alpha.integrations.django.asgi import patch_django_asgi_handler_impl

    if not HAS_REAL_CONTEXTVARS:
        # Without real contextvars, scope state can leak between requests.
        # Only warn — never raise — since the ASGI code path may be unused.
        logger.warning("We detected that you are using Django 3." + CONTEXTVARS_ERROR_MESSAGE)

    patch_django_asgi_handler_impl(ASGIHandler)
-
-
def _set_transaction_name_and_source(scope, transaction_style, request):
    # type: (sentry_sdk.Scope, str, WSGIRequest) -> None
    """Derive the transaction name for *request* per the configured style.

    Falls back to the raw ``path_info`` (source URL) when resolution yields
    nothing; swallows all errors so request handling is never disturbed.
    """
    try:
        name = None

        if transaction_style == "function_name":
            view = resolve(request.path).func
            name = transaction_from_function(getattr(view, "view_class", view))
        elif transaction_style == "url":
            # Middleware may set a per-request urlconf; honor it if present.
            if hasattr(request, "urlconf"):
                name = LEGACY_RESOLVER.resolve(request.path_info, urlconf=request.urlconf)
            else:
                name = LEGACY_RESOLVER.resolve(request.path_info)

        if name is None:
            name = request.path_info
            source = TransactionSource.URL
        else:
            source = SOURCE_FOR_STYLE[transaction_style]

        scope.set_transaction_name(name, source=source)
    except Resolver404:
        # Only raised when transaction_style is "function_name", so we do
        # not re-check the style here; fall back to the project's 404
        # handler name if one is configured.
        urlconf_module = import_module(settings.ROOT_URLCONF)
        if hasattr(urlconf_module, "handler404"):
            handler = urlconf_module.handler404
            if isinstance(handler, str):
                scope.set_transaction_name(handler)
            else:
                fallback = transaction_from_function(getattr(handler, "view_class", handler))
                if isinstance(fallback, str):
                    scope.set_transaction_name(fallback)
    except Exception:
        pass
-
-
def _before_get_response(request):
    # type: (WSGIRequest) -> None
    """Per-request setup: DRF patch, transaction naming, event processor."""
    integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration)
    if integration is None:
        return

    _patch_drf()

    current_scope = sentry_sdk_alpha.get_current_scope()
    # Rely on WSGI middleware to start a trace; here we only name it.
    _set_transaction_name_and_source(current_scope, integration.transaction_style, request)
    # Weak ref so the processor does not keep the request alive.
    current_scope.add_event_processor(
        _make_wsgi_request_event_processor(weakref.ref(request), integration)
    )
-
-
def _attempt_resolve_again(request, scope, transaction_style):
    # type: (WSGIRequest, sentry_sdk.Scope, str) -> None
    """Re-resolve the transaction name when middleware set request.urlconf.

    Some Django middlewares overwrite ``request.urlconf``; we respect that
    contract by resolving the URL a second time. No-op otherwise.
    """
    if hasattr(request, "urlconf"):
        _set_transaction_name_and_source(scope, transaction_style, request)
-
-
def _after_get_response(request):
    # type: (WSGIRequest) -> None
    """After the response: re-resolve the URL-style transaction name."""
    integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration)
    if integration is None:
        return
    # Only the "url" style can be affected by a middleware-set urlconf.
    if integration.transaction_style != "url":
        return

    _attempt_resolve_again(
        request, sentry_sdk_alpha.get_current_scope(), integration.transaction_style
    )
-
-
def _patch_get_response():
    # type: () -> None
    """
    patch get_response, because at that point we have the Django request object
    """
    from django.core.handlers.base import BaseHandler

    original_get_response = BaseHandler.get_response

    @functools.wraps(original_get_response)
    def sentry_patched_get_response(self, request):
        # type: (Any, WSGIRequest) -> Union[HttpResponse, BaseException]
        _before_get_response(request)
        response = original_get_response(self, request)
        _after_get_response(request)
        return response

    BaseHandler.get_response = sentry_patched_get_response

    # Newer Django versions also expose an async code path.
    if hasattr(BaseHandler, "get_response_async"):
        from sentry_sdk_alpha.integrations.django.asgi import patch_get_response_async

        patch_get_response_async(BaseHandler, _before_get_response)
-
-
def _make_wsgi_request_event_processor(weak_request, integration):
    # type: (Callable[[], WSGIRequest], DjangoIntegration) -> EventProcessor
    """Build an event processor that enriches events with WSGI request data."""

    def wsgi_request_event_processor(event, hint):
        # type: (Event, dict[str, Any]) -> Event
        # The request may already be garbage collected — e.g. when the
        # processor was pushed away to another thread. In that case we are
        # fine not logging its data.
        request = weak_request()
        if request is None:
            return event

        # Django 3+ ASGI requests are handled by asgi_request_event_processor.
        if ASGIRequest is not None and type(request) == ASGIRequest:
            return event

        with capture_internal_exceptions():
            DjangoRequestExtractor(request).extract_into_event(event)

        if should_send_default_pii():
            with capture_internal_exceptions():
                _set_user_info(request, event)

        return event

    return wsgi_request_event_processor
-
-
def _got_request_exception(request=None, **kwargs):
    # type: (WSGIRequest, **Any) -> None
    """Signal receiver for got_request_exception: capture the active exception."""
    client = sentry_sdk_alpha.get_client()
    integration = client.get_integration(DjangoIntegration)
    if integration is None:
        return

    # A middleware may have replaced request.urlconf; re-resolve the name.
    if integration.transaction_style == "url" and request is not None:
        _attempt_resolve_again(
            request, sentry_sdk_alpha.get_current_scope(), integration.transaction_style
        )

    event, hint = event_from_exception(
        sys.exc_info(),
        client_options=client.options,
        mechanism={"type": "django", "handled": False},
    )
    sentry_sdk_alpha.capture_event(event, hint=hint)
-
-
class DjangoRequestExtractor(RequestExtractor):
    """Extract request data, preferring the DRF-wrapped request when present."""

    def __init__(self, request):
        # type: (Union[WSGIRequest, ASGIRequest]) -> None
        try:
            # _patch_drf stores a weak backref to the DRF request; prefer it
            # because DRF's request.data caches the parsed body.
            drf_request = request._sentry_drf_request_backref()
        except AttributeError:
            drf_request = None
        self.request = drf_request if drf_request is not None else request

    def env(self):
        # type: () -> Dict[str, str]
        return self.request.META

    def cookies(self):
        # type: () -> Dict[str, Union[str, AnnotatedValue]]
        # CSRF and session cookies are always scrubbed, regardless of
        # PII settings.
        privacy_cookies = [
            django_settings.CSRF_COOKIE_NAME,
            django_settings.SESSION_COOKIE_NAME,
        ]
        return {
            name: (SENSITIVE_DATA_SUBSTITUTE if name in privacy_cookies else value)
            for name, value in self.request.COOKIES.items()
        }

    def raw_data(self):
        # type: () -> bytes
        return self.request.body

    def form(self):
        # type: () -> QueryDict
        return self.request.POST

    def files(self):
        # type: () -> MultiValueDict
        return self.request.FILES

    def size_of_file(self, file):
        # type: (Any) -> int
        return file.size

    def parsed_body(self):
        # type: () -> Optional[Dict[str, Any]]
        try:
            # DRF's cached, parsed body — more reliable than Django's own
            # one-shot body properties.
            return self.request.data
        except Exception:
            return super().parsed_body()
-
-
def _set_user_info(request, event):
    # type: (WSGIRequest, Event) -> None
    """Copy id/email/username of the authenticated user into the event.

    Values already present in ``event["user"]`` win (setdefault). Anonymous
    or missing users leave the event unchanged beyond ensuring the "user"
    key exists.
    """
    user_info = event.setdefault("user", {})

    user = getattr(request, "user", None)
    if user is None or not user.is_authenticated:
        return

    # Each attribute is fetched independently and best-effort; a custom
    # user model may raise for any single one of them.
    for key, fetch in (
        ("id", lambda: str(user.pk)),
        ("email", lambda: user.email),
        ("username", lambda: user.get_username()),
    ):
        try:
            user_info.setdefault(key, fetch())
        except Exception:
            pass
-
-
def install_sql_hook():
    # type: () -> None
    """If installed this causes Django's queries to be captured."""
    try:
        from django.db.backends.utils import CursorWrapper
    except ImportError:
        # Fallback import path — presumably for older Django versions
        # where the module was named `util`; confirm against supported
        # Django range.
        from django.db.backends.util import CursorWrapper

    from django.db.backends.base.base import BaseDatabaseWrapper

    # Keep references to the unpatched methods so the wrappers below can
    # delegate to them.
    real_execute = CursorWrapper.execute
    real_executemany = CursorWrapper.executemany
    real_connect = BaseDatabaseWrapper.connect

    @ensure_integration_enabled(DjangoIntegration, real_execute)
    def execute(self, sql, params=None):
        # type: (CursorWrapper, Any, Optional[Any]) -> Any
        # Wrap a single query in a DB span and annotate it with connection
        # metadata and (best-effort) the query's code origin.
        with record_sql_queries(
            cursor=self.cursor,
            query=sql,
            params_list=params,
            paramstyle="format",
            executemany=False,
            span_origin=DjangoIntegration.origin_db,
        ) as span:
            _set_db_data(span, self)
            result = real_execute(self, sql, params)

            with capture_internal_exceptions():
                add_query_source(span)

        return result

    @ensure_integration_enabled(DjangoIntegration, real_executemany)
    def executemany(self, sql, param_list):
        # type: (CursorWrapper, Any, List[Any]) -> Any
        # Same as execute(), but for batched statements.
        with record_sql_queries(
            cursor=self.cursor,
            query=sql,
            params_list=param_list,
            paramstyle="format",
            executemany=True,
            span_origin=DjangoIntegration.origin_db,
        ) as span:
            _set_db_data(span, self)

            result = real_executemany(self, sql, param_list)

            with capture_internal_exceptions():
                add_query_source(span)

        return result

    @ensure_integration_enabled(DjangoIntegration, real_connect)
    def connect(self):
        # type: (BaseDatabaseWrapper) -> None
        # Record connection establishment both as a breadcrumb and as a span.
        with capture_internal_exceptions():
            sentry_sdk_alpha.add_breadcrumb(message="connect", category="query")

        with sentry_sdk_alpha.start_span(
            op=OP.DB,
            name="connect",
            origin=DjangoIntegration.origin_db,
            only_if_parent=True,
        ) as span:
            _set_db_data(span, self)
            return real_connect(self)

    CursorWrapper.execute = execute
    CursorWrapper.executemany = executemany
    BaseDatabaseWrapper.connect = connect
    # Django logs every failed query itself; don't double-report it.
    ignore_logger("django.db.backends")
-
-
def _set_db_data(span, cursor_or_db):
    # type: (Span, Any) -> None
    """Attach database system/name/address/port attributes to *span*."""
    db = cursor_or_db.db if hasattr(cursor_or_db, "db") else cursor_or_db
    span.set_attribute(SPANDATA.DB_SYSTEM, db.vendor)

    # Some custom backends override `__getattr__`, making it look like
    # `cursor_or_db` has a `connection` with a `get_dsn_parameters`
    # attribute, only to raise once it is actually called. Hence the
    # `inspect.isroutine` check before trusting it (psycopg2 detection).
    is_psycopg2 = (
        hasattr(cursor_or_db, "connection")
        and hasattr(cursor_or_db.connection, "get_dsn_parameters")
        and inspect.isroutine(cursor_or_db.connection.get_dsn_parameters)
    )

    if is_psycopg2:
        connection_params = cursor_or_db.connection.get_dsn_parameters()
    else:
        try:
            # psycopg3: read only the needed fields; the full
            # get_parameters() can be slow because of the extra logic that
            # filters out default values.
            info = cursor_or_db.connection.info
            connection_params = {
                "dbname": info.dbname,
                "port": info.port,
            }
            # info.host may be the absolute path of a UNIX socket directory
            # (starts with "/"); only record it when it is an actual host.
            if info.host and not info.host.startswith("/"):
                connection_params["host"] = info.host
        except Exception:
            connection_params = db.get_connection_params()

    database_name = connection_params.get("dbname") or connection_params.get("database")
    if database_name is not None:
        span.set_attribute(SPANDATA.DB_NAME, database_name)

    host = connection_params.get("host")
    if host is not None:
        span.set_attribute(SPANDATA.SERVER_ADDRESS, host)

    port = connection_params.get("port")
    if port is not None:
        span.set_attribute(SPANDATA.SERVER_PORT, str(port))

    unix_socket = connection_params.get("unix_socket")
    if unix_socket is not None:
        span.set_attribute(SPANDATA.SERVER_SOCKET_ADDRESS, unix_socket)
diff --git a/src/sentry_sdk_alpha/integrations/django/asgi.py b/src/sentry_sdk_alpha/integrations/django/asgi.py
deleted file mode 100644
index a0ada4f75607df..00000000000000
--- a/src/sentry_sdk_alpha/integrations/django/asgi.py
+++ /dev/null
@@ -1,242 +0,0 @@
-"""
-Instrumentation for Django 3.0
-
-Since this file contains `async def` it is conditionally imported in
-`sentry_sdk.integrations.django` (depending on the existence of
-`django.core.handlers.asgi`.
-"""
-
-import asyncio
-import functools
-import inspect
-from typing import TYPE_CHECKING
-
-from django.core.handlers.wsgi import WSGIRequest
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations.asgi import SentryAsgiMiddleware
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import capture_internal_exceptions, ensure_integration_enabled
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, TypeVar, Union
-
-    from django.core.handlers.asgi import ASGIRequest
-    from django.http.response import HttpResponse
-
-    from sentry_sdk_alpha._types import Event, EventProcessor
-
-    _F = TypeVar("_F", bound=Callable[..., Any])
-
-
# Python 3.12 deprecates asyncio.iscoroutinefunction() as an alias for
# inspect.iscoroutinefunction(), whilst also removing the _is_coroutine marker.
# The latter is replaced with the inspect.markcoroutinefunction decorator.
# Until 3.12 is the minimum supported Python version, provide a shim.
# This was copied from https://github.com/django/asgiref/blob/main/asgiref/sync.py
if hasattr(inspect, "markcoroutinefunction"):
    # Python 3.12+: use the official inspect API directly.
    iscoroutinefunction = inspect.iscoroutinefunction
    markcoroutinefunction = inspect.markcoroutinefunction
else:
    iscoroutinefunction = asyncio.iscoroutinefunction  # type: ignore[assignment]

    def markcoroutinefunction(func: "_F") -> "_F":
        """Pre-3.12 fallback: tag *func* with asyncio's private
        ``_is_coroutine`` marker so ``iscoroutinefunction`` recognizes it."""
        func._is_coroutine = asyncio.coroutines._is_coroutine  # type: ignore
        return func
-
-
def _make_asgi_request_event_processor(request):
    # type: (ASGIRequest) -> EventProcessor
    """Build an event processor that enriches events with ASGI request data."""

    def asgi_request_event_processor(event, hint):
        # type: (Event, dict[str, Any]) -> Event
        # Imported lazily to avoid a circular import with the parent package.
        from sentry_sdk_alpha.integrations.django import DjangoRequestExtractor, _set_user_info

        # If the request is gone we are fine not logging its data. This
        # might happen if the processor is pushed away to another thread.
        if request is None:
            return event

        # WSGI requests are handled by wsgi_request_event_processor instead.
        if type(request) == WSGIRequest:
            return event

        with capture_internal_exceptions():
            DjangoRequestExtractor(request).extract_into_event(event)

        if should_send_default_pii():
            with capture_internal_exceptions():
                _set_user_info(request, event)

        return event

    return asgi_request_event_processor
-
-
def patch_django_asgi_handler_impl(cls):
    # type: (Any) -> None
    """Wrap an ASGI handler class so requests flow through SentryAsgiMiddleware.

    Patches ``cls.__call__`` and, when available, ``cls.create_request``
    (to attach the per-request event processor).
    """

    from sentry_sdk_alpha.integrations.django import DjangoIntegration

    old_app = cls.__call__

    @functools.wraps(old_app)
    async def sentry_patched_asgi_handler(self, scope, receive, send):
        # type: (Any, Any, Any, Any) -> Any
        # Fall through to the unpatched handler when the integration is off.
        integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration)
        if integration is None:
            return await old_app(self, scope, receive, send)

        middleware = SentryAsgiMiddleware(
            old_app.__get__(self, cls),
            unsafe_context_data=True,
            span_origin=DjangoIntegration.origin,
            http_methods_to_capture=integration.http_methods_to_capture,
        )._run_asgi3

        return await middleware(scope, receive, send)

    cls.__call__ = sentry_patched_asgi_handler

    # Newer handlers expose create_request(); hook it to register the
    # ASGI request event processor on the isolation scope.
    modern_django_asgi_support = hasattr(cls, "create_request")
    if modern_django_asgi_support:
        old_create_request = cls.create_request

        @ensure_integration_enabled(DjangoIntegration, old_create_request)
        def sentry_patched_create_request(self, *args, **kwargs):
            # type: (Any, *Any, **Any) -> Any
            request, error_response = old_create_request(self, *args, **kwargs)
            scope = sentry_sdk_alpha.get_isolation_scope()
            scope.add_event_processor(_make_asgi_request_event_processor(request))

            return request, error_response

        cls.create_request = sentry_patched_create_request
-
-
def patch_get_response_async(cls, _before_get_response):
    # type: (Any, Any) -> None
    """Wrap cls.get_response_async so _before_get_response runs per request."""
    original = cls.get_response_async

    @functools.wraps(original)
    async def sentry_patched_get_response_async(self, request):
        # type: (Any, Any) -> Union[HttpResponse, BaseException]
        _before_get_response(request)
        return await original(self, request)

    cls.get_response_async = sentry_patched_get_response_async
-
-
def patch_channels_asgi_handler_impl(cls):
    # type: (Any) -> None
    """Patch a Channels ASGI handler class, branching on the Channels version.

    Channels < 3 uses a two-argument ``__call__(receive, send)`` with the
    scope on ``self.scope``; Channels >= 3 matches Django's handler
    signature and reuses the Django patcher.
    """
    import channels  # type: ignore

    from sentry_sdk_alpha.integrations.django import DjangoIntegration

    # NOTE: string comparison of version numbers — presumably fine for the
    # versions in the wild, but e.g. "10.0.0" < "3.0.0" lexically; verify.
    if channels.__version__ < "3.0.0":
        old_app = cls.__call__

        @functools.wraps(old_app)
        async def sentry_patched_asgi_handler(self, receive, send):
            # type: (Any, Any, Any) -> Any
            integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration)
            if integration is None:
                return await old_app(self, receive, send)

            # The lambda adapts the bound two-argument app to the
            # scope-first calling convention the middleware expects.
            middleware = SentryAsgiMiddleware(
                lambda _scope: old_app.__get__(self, cls),
                unsafe_context_data=True,
                span_origin=DjangoIntegration.origin,
                http_methods_to_capture=integration.http_methods_to_capture,
            )

            return await middleware(self.scope)(receive, send)

        cls.__call__ = sentry_patched_asgi_handler

    else:
        # The ASGI handler in Channels >= 3 has the same signature as
        # the Django handler.
        patch_django_asgi_handler_impl(cls)
-
-
def wrap_async_view(callback):
    # type: (Any) -> Any
    """Wrap an async Django view callback in a VIEW_RENDER span."""
    from sentry_sdk_alpha.integrations.django import DjangoIntegration

    @functools.wraps(callback)
    async def sentry_wrapped_callback(request, *args, **kwargs):
        # type: (Any, *Any, **Any) -> Any
        # Record which thread actually serves the view, both on the root
        # span and in the active profile (if any).
        root_span = sentry_sdk_alpha.get_current_scope().root_span
        if root_span is not None:
            root_span.update_active_thread()

        profile = sentry_sdk_alpha.get_isolation_scope().profile
        if profile is not None:
            profile.update_active_thread_id()

        with sentry_sdk_alpha.start_span(
            op=OP.VIEW_RENDER,
            name=request.resolver_match.view_name,
            origin=DjangoIntegration.origin,
            only_if_parent=True,
        ):
            return await callback(request, *args, **kwargs)

    return sentry_wrapped_callback
-
-
def _asgi_middleware_mixin_factory(_check_middleware_span):
    # type: (Callable[..., Any]) -> Any
    """
    Mixin class factory that generates a middleware mixin for handling requests
    in async mode.
    """

    class SentryASGIMixin:
        # Declared for type checkers only; at runtime `_inner` is expected
        # to be provided by the class this mixin is combined with — TODO
        # confirm against the middleware patcher that builds the mixin.
        if TYPE_CHECKING:
            _inner = None

        def __init__(self, get_response):
            # type: (Callable[..., Any]) -> None
            self.get_response = get_response
            # Lazily resolved in __acall__ and cached afterwards.
            self._acall_method = None
            self._async_check()

        def _async_check(self):
            # type: () -> None
            """
            If get_response is a coroutine function, turns us into async mode so
            a thread is not consumed during a whole request.
            Taken from django.utils.deprecation::MiddlewareMixin._async_check
            """
            if iscoroutinefunction(self.get_response):
                markcoroutinefunction(self)

        def async_route_check(self):
            # type: () -> bool
            """
            Function that checks if we are in async mode,
            and if we are forwards the handling of requests to __acall__
            """
            return iscoroutinefunction(self.get_response)

        async def __acall__(self, *args, **kwargs):
            # type: (*Any, **Any) -> Any
            # Resolve and cache the inner async callable: prefer the inner
            # middleware's own __acall__, fall back to calling it directly.
            f = self._acall_method
            if f is None:
                if hasattr(self._inner, "__acall__"):
                    self._acall_method = f = self._inner.__acall__  # type: ignore
                else:
                    self._acall_method = f = self._inner

            # The factory's callback decides whether this call gets a
            # middleware span; None means "no span, call through".
            middleware_span = _check_middleware_span(old_method=f)

            if middleware_span is None:
                return await f(*args, **kwargs)

            with middleware_span:
                return await f(*args, **kwargs)

    return SentryASGIMixin
diff --git a/src/sentry_sdk_alpha/integrations/django/caching.py b/src/sentry_sdk_alpha/integrations/django/caching.py
deleted file mode 100644
index 17963943d79f4f..00000000000000
--- a/src/sentry_sdk_alpha/integrations/django/caching.py
+++ /dev/null
@@ -1,171 +0,0 @@
-import functools
-from typing import TYPE_CHECKING
-
-from django import VERSION as DJANGO_VERSION
-from django.core.cache import CacheHandler
-from urllib3.util import parse_url as urlparse
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP, SPANDATA
-from sentry_sdk_alpha.integrations.redis.utils import _get_safe_key, _key_as_string
-from sentry_sdk_alpha.utils import capture_internal_exceptions, ensure_integration_enabled
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Optional
-
-
-METHODS_TO_INSTRUMENT = [
-    "set",
-    "set_many",
-    "get",
-    "get_many",
-]
-
-
-def _get_span_description(method_name, args, kwargs):
-    # type: (str, tuple[Any], dict[str, Any]) -> str
-    return _key_as_string(_get_safe_key(method_name, args, kwargs))
-
-
-def _patch_cache_method(cache, method_name, address, port):
-    # type: (CacheHandler, str, Optional[str], Optional[int]) -> None
-    from sentry_sdk_alpha.integrations.django import DjangoIntegration
-
-    original_method = getattr(cache, method_name)
-
-    @ensure_integration_enabled(DjangoIntegration, original_method)
-    def _instrument_call(cache, method_name, original_method, args, kwargs, address, port):
-        # type: (CacheHandler, str, Callable[..., Any], tuple[Any, ...], dict[str, Any], Optional[str], Optional[int]) -> Any
-        is_set_operation = method_name.startswith("set")
-        is_get_operation = not is_set_operation
-
-        op = OP.CACHE_PUT if is_set_operation else OP.CACHE_GET
-        description = _get_span_description(method_name, args, kwargs)
-
-        with sentry_sdk_alpha.start_span(
-            op=op,
-            name=description,
-            origin=DjangoIntegration.origin,
-            only_if_parent=True,
-        ) as span:
-            value = original_method(*args, **kwargs)
-
-            with capture_internal_exceptions():
-                if address is not None:
-                    span.set_attribute(SPANDATA.NETWORK_PEER_ADDRESS, address)
-
-                if port is not None:
-                    span.set_attribute(SPANDATA.NETWORK_PEER_PORT, port)
-
-                key = _get_safe_key(method_name, args, kwargs)
-                if key is not None:
-                    span.set_attribute(SPANDATA.CACHE_KEY, key)
-
-                item_size = None
-                if is_get_operation:
-                    if value:
-                        item_size = len(str(value))
-                        span.set_attribute(SPANDATA.CACHE_HIT, True)
-                    else:
-                        span.set_attribute(SPANDATA.CACHE_HIT, False)
-                else:  # TODO: We don't handle `get_or_set` which we should
-                    arg_count = len(args)
-                    if arg_count >= 2:
-                        # 'set' command
-                        item_size = len(str(args[1]))
-                    elif arg_count == 1:
-                        # 'set_many' command
-                        item_size = len(str(args[0]))
-
-                if item_size is not None:
-                    span.set_attribute(SPANDATA.CACHE_ITEM_SIZE, item_size)
-
-            return value
-
-    @functools.wraps(original_method)
-    def sentry_method(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        return _instrument_call(cache, method_name, original_method, args, kwargs, address, port)
-
-    setattr(cache, method_name, sentry_method)
-
-
-def _patch_cache(cache, address=None, port=None):
-    # type: (CacheHandler, Optional[str], Optional[int]) -> None
-    if not hasattr(cache, "_sentry_patched"):
-        for method_name in METHODS_TO_INSTRUMENT:
-            _patch_cache_method(cache, method_name, address, port)
-        cache._sentry_patched = True
-
-
-def _get_address_port(settings):
-    # type: (dict[str, Any]) -> tuple[Optional[str], Optional[int]]
-    location = settings.get("LOCATION")
-
-    # TODO: location can also be an array of locations
-    #       see: https://docs.djangoproject.com/en/5.0/topics/cache/#redis
-    #       GitHub issue: https://github.com/getsentry/sentry-python/issues/3062
-    if not isinstance(location, str):
-        return None, None
-
-    if "://" in location:
-        parsed_url = urlparse(location)
-        # remove the username and password from URL to not leak sensitive data.
-        address = "{}://{}{}".format(
-            parsed_url.scheme or "",
-            parsed_url.hostname or "",
-            parsed_url.path or "",
-        )
-        port = parsed_url.port
-    else:
-        address = location
-        port = None
-
-    return address, int(port) if port is not None else None
-
-
-def patch_caching():
-    # type: () -> None
-    from sentry_sdk_alpha.integrations.django import DjangoIntegration
-
-    if not hasattr(CacheHandler, "_sentry_patched"):
-        if DJANGO_VERSION < (3, 2):
-            original_get_item = CacheHandler.__getitem__
-
-            @functools.wraps(original_get_item)
-            def sentry_get_item(self, alias):
-                # type: (CacheHandler, str) -> Any
-                cache = original_get_item(self, alias)
-
-                integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration)
-                if integration is not None and integration.cache_spans:
-                    from django.conf import settings
-
-                    address, port = _get_address_port(settings.CACHES[alias or "default"])
-
-                    _patch_cache(cache, address, port)
-
-                return cache
-
-            CacheHandler.__getitem__ = sentry_get_item
-            CacheHandler._sentry_patched = True
-
-        else:
-            original_create_connection = CacheHandler.create_connection
-
-            @functools.wraps(original_create_connection)
-            def sentry_create_connection(self, alias):
-                # type: (CacheHandler, str) -> Any
-                cache = original_create_connection(self, alias)
-
-                integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration)
-                if integration is not None and integration.cache_spans:
-                    address, port = _get_address_port(self.settings[alias or "default"])
-
-                    _patch_cache(cache, address, port)
-
-                return cache
-
-            CacheHandler.create_connection = sentry_create_connection
-            CacheHandler._sentry_patched = True
diff --git a/src/sentry_sdk_alpha/integrations/django/middleware.py b/src/sentry_sdk_alpha/integrations/django/middleware.py
deleted file mode 100644
index af0835d8ed434c..00000000000000
--- a/src/sentry_sdk_alpha/integrations/django/middleware.py
+++ /dev/null
@@ -1,183 +0,0 @@
-"""
-Create spans from Django middleware invocations
-"""
-
-from functools import wraps
-from typing import TYPE_CHECKING
-
-from django import VERSION as DJANGO_VERSION
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.utils import (
-    ContextVar,
-    capture_internal_exceptions,
-    transaction_from_function,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Optional, TypeVar
-
-    from sentry_sdk_alpha.tracing import Span
-
-    F = TypeVar("F", bound=Callable[..., Any])
-
-_import_string_should_wrap_middleware = ContextVar("import_string_should_wrap_middleware")
-
-DJANGO_SUPPORTS_ASYNC_MIDDLEWARE = DJANGO_VERSION >= (3, 1)
-
-if not DJANGO_SUPPORTS_ASYNC_MIDDLEWARE:
-    _asgi_middleware_mixin_factory = lambda _: object
-else:
-    from .asgi import _asgi_middleware_mixin_factory
-
-
-def patch_django_middlewares():
-    # type: () -> None
-    from django.core.handlers import base
-
-    old_import_string = base.import_string
-
-    def sentry_patched_import_string(dotted_path):
-        # type: (str) -> Any
-        rv = old_import_string(dotted_path)
-
-        if _import_string_should_wrap_middleware.get(None):
-            rv = _wrap_middleware(rv, dotted_path)
-
-        return rv
-
-    base.import_string = sentry_patched_import_string
-
-    old_load_middleware = base.BaseHandler.load_middleware
-
-    def sentry_patched_load_middleware(*args, **kwargs):
-        # type: (Any, Any) -> Any
-        _import_string_should_wrap_middleware.set(True)
-        try:
-            return old_load_middleware(*args, **kwargs)
-        finally:
-            _import_string_should_wrap_middleware.set(False)
-
-    base.BaseHandler.load_middleware = sentry_patched_load_middleware
-
-
-def _wrap_middleware(middleware, middleware_name):
-    # type: (Any, str) -> Any
-    from sentry_sdk_alpha.integrations.django import DjangoIntegration
-
-    def _check_middleware_span(old_method):
-        # type: (Callable[..., Any]) -> Optional[Span]
-        integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration)
-        if integration is None or not integration.middleware_spans:
-            return None
-
-        function_name = transaction_from_function(old_method)
-
-        description = middleware_name
-        function_basename = getattr(old_method, "__name__", None)
-        if function_basename:
-            description = f"{description}.{function_basename}"
-
-        middleware_span = sentry_sdk_alpha.start_span(
-            op=OP.MIDDLEWARE_DJANGO,
-            name=description,
-            origin=DjangoIntegration.origin,
-            only_if_parent=True,
-        )
-        middleware_span.set_tag("django.function_name", function_name)
-        middleware_span.set_tag("django.middleware_name", middleware_name)
-
-        return middleware_span
-
-    def _get_wrapped_method(old_method):
-        # type: (F) -> F
-        with capture_internal_exceptions():
-
-            def sentry_wrapped_method(*args, **kwargs):
-                # type: (*Any, **Any) -> Any
-                middleware_span = _check_middleware_span(old_method)
-
-                if middleware_span is None:
-                    return old_method(*args, **kwargs)
-
-                with middleware_span:
-                    return old_method(*args, **kwargs)
-
-            try:
-                # fails for __call__ of function on Python 2 (see py2.7-django-1.11)
-                sentry_wrapped_method = wraps(old_method)(sentry_wrapped_method)
-
-                # Necessary for Django 3.1
-                sentry_wrapped_method.__self__ = old_method.__self__  # type: ignore
-            except Exception:
-                pass
-
-            return sentry_wrapped_method  # type: ignore
-
-        return old_method
-
-    class SentryWrappingMiddleware(
-        _asgi_middleware_mixin_factory(_check_middleware_span)  # type: ignore
-    ):
-        sync_capable = getattr(middleware, "sync_capable", True)
-        async_capable = DJANGO_SUPPORTS_ASYNC_MIDDLEWARE and getattr(
-            middleware, "async_capable", False
-        )
-
-        def __init__(self, get_response=None, *args, **kwargs):
-            # type: (Optional[Callable[..., Any]], *Any, **Any) -> None
-            if get_response:
-                self._inner = middleware(get_response, *args, **kwargs)
-            else:
-                self._inner = middleware(*args, **kwargs)
-            self.get_response = get_response
-            self._call_method = None
-            if self.async_capable:
-                super().__init__(get_response)
-
-        # We need correct behavior for `hasattr()`, which we can only determine
-        # when we have an instance of the middleware we're wrapping.
-        def __getattr__(self, method_name):
-            # type: (str) -> Any
-            if method_name not in (
-                "process_request",
-                "process_view",
-                "process_template_response",
-                "process_response",
-                "process_exception",
-            ):
-                raise AttributeError()
-
-            old_method = getattr(self._inner, method_name)
-            rv = _get_wrapped_method(old_method)
-            self.__dict__[method_name] = rv
-            return rv
-
-        def __call__(self, *args, **kwargs):
-            # type: (*Any, **Any) -> Any
-            if hasattr(self, "async_route_check") and self.async_route_check():
-                return self.__acall__(*args, **kwargs)
-
-            f = self._call_method
-            if f is None:
-                self._call_method = f = self._inner.__call__
-
-            middleware_span = _check_middleware_span(old_method=f)
-
-            if middleware_span is None:
-                return f(*args, **kwargs)
-
-            with middleware_span:
-                return f(*args, **kwargs)
-
-    for attr in (
-        "__name__",
-        "__module__",
-        "__qualname__",
-    ):
-        if hasattr(middleware, attr):
-            setattr(SentryWrappingMiddleware, attr, getattr(middleware, attr))
-
-    return SentryWrappingMiddleware
diff --git a/src/sentry_sdk_alpha/integrations/django/signals_handlers.py b/src/sentry_sdk_alpha/integrations/django/signals_handlers.py
deleted file mode 100644
index 8995d7fcb47f8c..00000000000000
--- a/src/sentry_sdk_alpha/integrations/django/signals_handlers.py
+++ /dev/null
@@ -1,86 +0,0 @@
-from functools import wraps
-from typing import TYPE_CHECKING
-
-from django.dispatch import Signal
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations.django import DJANGO_VERSION
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Union
-
-
-def _get_receiver_name(receiver):
-    # type: (Callable[..., Any]) -> str
-    name = ""
-
-    if hasattr(receiver, "__qualname__"):
-        name = receiver.__qualname__
-    elif hasattr(receiver, "__name__"):  # Python 2.7 has no __qualname__
-        name = receiver.__name__
-    elif hasattr(receiver, "func"):  # certain functions (like partials) dont have a name
-        if hasattr(receiver, "func") and hasattr(receiver.func, "__name__"):
-            name = "partial()"
-
-    if (
-        name == ""
-    ):  # In case nothing was found, return the string representation (this is the slowest case)
-        return str(receiver)
-
-    if hasattr(receiver, "__module__"):  # prepend with module, if there is one
-        name = receiver.__module__ + "." + name
-
-    return name
-
-
-def patch_signals():
-    # type: () -> None
-    """
-    Patch django signal receivers to create a span.
-
-    This only wraps sync receivers. Django>=5.0 introduced async receivers, but
-    since we don't create transactions for ASGI Django, we don't wrap them.
-    """
-    from sentry_sdk_alpha.integrations.django import DjangoIntegration
-
-    old_live_receivers = Signal._live_receivers
-
-    @wraps(old_live_receivers)
-    def _sentry_live_receivers(self, sender):
-        # type: (Signal, Any) -> Union[tuple[list[Callable[..., Any]], list[Callable[..., Any]]], list[Callable[..., Any]]]
-        if DJANGO_VERSION >= (5, 0):
-            sync_receivers, async_receivers = old_live_receivers(self, sender)
-        else:
-            sync_receivers = old_live_receivers(self, sender)
-            async_receivers = []
-
-        def sentry_sync_receiver_wrapper(receiver):
-            # type: (Callable[..., Any]) -> Callable[..., Any]
-            @wraps(receiver)
-            def wrapper(*args, **kwargs):
-                # type: (Any, Any) -> Any
-                signal_name = _get_receiver_name(receiver)
-                with sentry_sdk_alpha.start_span(
-                    op=OP.EVENT_DJANGO,
-                    name=signal_name,
-                    origin=DjangoIntegration.origin,
-                    only_if_parent=True,
-                ) as span:
-                    span.set_attribute("signal", signal_name)
-                    return receiver(*args, **kwargs)
-
-            return wrapper
-
-        integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration)
-        if integration and integration.signals_spans and self not in integration.signals_denylist:
-            for idx, receiver in enumerate(sync_receivers):
-                sync_receivers[idx] = sentry_sync_receiver_wrapper(receiver)
-
-        if DJANGO_VERSION >= (5, 0):
-            return sync_receivers, async_receivers
-        else:
-            return sync_receivers
-
-    Signal._live_receivers = _sentry_live_receivers
diff --git a/src/sentry_sdk_alpha/integrations/django/templates.py b/src/sentry_sdk_alpha/integrations/django/templates.py
deleted file mode 100644
index b42e4b57976f94..00000000000000
--- a/src/sentry_sdk_alpha/integrations/django/templates.py
+++ /dev/null
@@ -1,179 +0,0 @@
-import functools
-from typing import TYPE_CHECKING
-
-from django.template import TemplateSyntaxError
-from django.template.base import Origin
-from django.utils.safestring import mark_safe
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.utils import ensure_integration_enabled
-
-if TYPE_CHECKING:
-    from collections.abc import Iterator
-    from typing import Any, Dict, Optional, Tuple
-
-
-def get_template_frame_from_exception(exc_value):
-    # type: (Optional[BaseException]) -> Optional[Dict[str, Any]]
-
-    # As of Django 1.9 or so the new template debug thing showed up.
-    if hasattr(exc_value, "template_debug"):
-        return _get_template_frame_from_debug(exc_value.template_debug)  # type: ignore
-
-    # As of r16833 (Django) all exceptions may contain a
-    # ``django_template_source`` attribute (rather than the legacy
-    # ``TemplateSyntaxError.source`` check)
-    if hasattr(exc_value, "django_template_source"):
-        return _get_template_frame_from_source(exc_value.django_template_source)  # type: ignore
-
-    if isinstance(exc_value, TemplateSyntaxError) and hasattr(exc_value, "source"):
-        source = exc_value.source
-        if isinstance(source, (tuple, list)) and isinstance(source[0], Origin):
-            return _get_template_frame_from_source(source)  # type: ignore
-
-    return None
-
-
-def _get_template_name_description(template_name):
-    # type: (str) -> str
-    if isinstance(template_name, (list, tuple)):
-        if template_name:
-            return f"[{template_name[0]}, ...]"
-    else:
-        return template_name
-
-
-def patch_templates():
-    # type: () -> None
-    from django.template.response import SimpleTemplateResponse
-
-    from sentry_sdk_alpha.integrations.django import DjangoIntegration
-
-    real_rendered_content = SimpleTemplateResponse.rendered_content
-
-    @property  # type: ignore
-    @ensure_integration_enabled(DjangoIntegration, real_rendered_content.fget)
-    def rendered_content(self):
-        # type: (SimpleTemplateResponse) -> str
-        with sentry_sdk_alpha.start_span(
-            op=OP.TEMPLATE_RENDER,
-            name=_get_template_name_description(self.template_name),
-            origin=DjangoIntegration.origin,
-            only_if_parent=True,
-        ) as span:
-            if isinstance(self.context_data, dict):
-                for k, v in self.context_data.items():
-                    span.set_attribute(f"context.{k}", v)
-            return real_rendered_content.fget(self)
-
-    SimpleTemplateResponse.rendered_content = rendered_content
-
-    import django.shortcuts
-
-    real_render = django.shortcuts.render
-
-    @functools.wraps(real_render)
-    @ensure_integration_enabled(DjangoIntegration, real_render)
-    def render(request, template_name, context=None, *args, **kwargs):
-        # type: (django.http.HttpRequest, str, Optional[Dict[str, Any]], *Any, **Any) -> django.http.HttpResponse
-
-        # Inject trace meta tags into template context
-        context = context or {}
-        if "sentry_trace_meta" not in context:
-            context["sentry_trace_meta"] = mark_safe(
-                sentry_sdk_alpha.get_current_scope().trace_propagation_meta()
-            )
-
-        with sentry_sdk_alpha.start_span(
-            op=OP.TEMPLATE_RENDER,
-            name=_get_template_name_description(template_name),
-            origin=DjangoIntegration.origin,
-            only_if_parent=True,
-        ) as span:
-            for k, v in context.items():
-                span.set_attribute(f"context.{k}", v)
-            return real_render(request, template_name, context, *args, **kwargs)
-
-    django.shortcuts.render = render
-
-
-def _get_template_frame_from_debug(debug):
-    # type: (Dict[str, Any]) -> Dict[str, Any]
-    if debug is None:
-        return None
-
-    lineno = debug["line"]
-    filename = debug["name"]
-    if filename is None:
-        filename = ""
-
-    pre_context = []
-    post_context = []
-    context_line = None
-
-    for i, line in debug["source_lines"]:
-        if i < lineno:
-            pre_context.append(line)
-        elif i > lineno:
-            post_context.append(line)
-        else:
-            context_line = line
-
-    return {
-        "filename": filename,
-        "lineno": lineno,
-        "pre_context": pre_context[-5:],
-        "post_context": post_context[:5],
-        "context_line": context_line,
-        "in_app": True,
-    }
-
-
-def _linebreak_iter(template_source):
-    # type: (str) -> Iterator[int]
-    yield 0
-    p = template_source.find("\n")
-    while p >= 0:
-        yield p + 1
-        p = template_source.find("\n", p + 1)
-
-
-def _get_template_frame_from_source(source):
-    # type: (Tuple[Origin, Tuple[int, int]]) -> Optional[Dict[str, Any]]
-    if not source:
-        return None
-
-    origin, (start, end) = source
-    filename = getattr(origin, "loadname", None)
-    if filename is None:
-        filename = ""
-    template_source = origin.reload()
-    lineno = None
-    upto = 0
-    pre_context = []
-    post_context = []
-    context_line = None
-
-    for num, next in enumerate(_linebreak_iter(template_source)):
-        line = template_source[upto:next]
-        if start >= upto and end <= next:
-            lineno = num
-            context_line = line
-        elif lineno is None:
-            pre_context.append(line)
-        else:
-            post_context.append(line)
-
-        upto = next
-
-    if context_line is None or lineno is None:
-        return None
-
-    return {
-        "filename": filename,
-        "lineno": lineno,
-        "pre_context": pre_context[-5:],
-        "post_context": post_context[:5],
-        "context_line": context_line,
-    }
diff --git a/src/sentry_sdk_alpha/integrations/django/transactions.py b/src/sentry_sdk_alpha/integrations/django/transactions.py
deleted file mode 100644
index fc2d58cd4dec90..00000000000000
--- a/src/sentry_sdk_alpha/integrations/django/transactions.py
+++ /dev/null
@@ -1,153 +0,0 @@
-"""
-Copied from raven-python.
-
-Despite being called "legacy" in some places this resolver is very much still
-in use.
-"""
-
-import re
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from django.urls.resolvers import URLResolver
-    from typing import Dict
-    from typing import List
-    from typing import Optional
-    from django.urls.resolvers import URLPattern
-    from typing import Tuple
-    from typing import Union
-    from re import Pattern
-
-from django.urls.resolvers import RoutePattern
-
-try:
-    from django.urls import get_resolver
-except ImportError:
-    from django.core.urlresolvers import get_resolver
-
-
-def get_regex(resolver_or_pattern):
-    # type: (Union[URLPattern, URLResolver]) -> Pattern[str]
-    """Utility method for django's deprecated resolver.regex"""
-    try:
-        regex = resolver_or_pattern.regex
-    except AttributeError:
-        regex = resolver_or_pattern.pattern.regex
-    return regex
-
-
-class RavenResolver:
-    _new_style_group_matcher = re.compile(
-        r"<(?:([^>:]+):)?([^>]+)>"
-    )  # https://github.com/django/django/blob/21382e2743d06efbf5623e7c9b6dccf2a325669b/django/urls/resolvers.py#L245-L247
-    _optional_group_matcher = re.compile(r"\(\?\:([^\)]+)\)")
-    _named_group_matcher = re.compile(r"\(\?P<(\w+)>[^\)]+\)+")
-    _non_named_group_matcher = re.compile(r"\([^\)]+\)")
-    # [foo|bar|baz]
-    _either_option_matcher = re.compile(r"\[([^\]]+)\|([^\]]+)\]")
-    _camel_re = re.compile(r"([A-Z]+)([a-z])")
-
-    _cache = {}  # type: Dict[URLPattern, str]
-
-    def _simplify(self, pattern):
-        # type: (Union[URLPattern, URLResolver]) -> str
-        r"""
-        Clean up urlpattern regexes into something readable by humans:
-
-        From:
-        > "^(?P\w+)/athletes/(?P\w+)/$"
-
-        To:
-        > "{sport_slug}/athletes/{athlete_slug}/"
-        """
-        # "new-style" path patterns can be parsed directly without turning them
-        # into regexes first
-        if (
-            RoutePattern is not None
-            and hasattr(pattern, "pattern")
-            and isinstance(pattern.pattern, RoutePattern)
-        ):
-            return self._new_style_group_matcher.sub(
-                lambda m: "{%s}" % m.group(2), str(pattern.pattern._route)
-            )
-
-        result = get_regex(pattern).pattern
-
-        # remove optional params
-        # TODO(dcramer): it'd be nice to change these into [%s] but it currently
-        # conflicts with the other rules because we're doing regexp matches
-        # rather than parsing tokens
-        result = self._optional_group_matcher.sub(lambda m: "%s" % m.group(1), result)
-
-        # handle named groups first
-        result = self._named_group_matcher.sub(lambda m: "{%s}" % m.group(1), result)
-
-        # handle non-named groups
-        result = self._non_named_group_matcher.sub("{var}", result)
-
-        # handle optional params
-        result = self._either_option_matcher.sub(lambda m: m.group(1), result)
-
-        # clean up any outstanding regex-y characters.
-        result = (
-            result.replace("^", "")
-            .replace("$", "")
-            .replace("?", "")
-            .replace("\\A", "")
-            .replace("\\Z", "")
-            .replace("//", "/")
-            .replace("\\", "")
-        )
-
-        return result
-
-    def _resolve(self, resolver, path, parents=None):
-        # type: (URLResolver, str, Optional[List[URLResolver]]) -> Optional[str]
-
-        match = get_regex(resolver).search(path)  # Django < 2.0
-
-        if not match:
-            return None
-
-        if parents is None:
-            parents = [resolver]
-        elif resolver not in parents:
-            parents = parents + [resolver]
-
-        new_path = path[match.end() :]
-        for pattern in resolver.url_patterns:
-            # this is an include()
-            if not pattern.callback:
-                match_ = self._resolve(pattern, new_path, parents)
-                if match_:
-                    return match_
-                continue
-            elif not get_regex(pattern).search(new_path):
-                continue
-
-            try:
-                return self._cache[pattern]
-            except KeyError:
-                pass
-
-            prefix = "".join(self._simplify(p) for p in parents)
-            result = prefix + self._simplify(pattern)
-            if not result.startswith("/"):
-                result = "/" + result
-            self._cache[pattern] = result
-            return result
-
-        return None
-
-    def resolve(
-        self,
-        path,  # type: str
-        urlconf=None,  # type: Union[None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern]]
-    ):
-        # type: (...) -> Optional[str]
-        resolver = get_resolver(urlconf)
-        match = self._resolve(resolver, path)
-        return match
-
-
-LEGACY_RESOLVER = RavenResolver()
diff --git a/src/sentry_sdk_alpha/integrations/django/views.py b/src/sentry_sdk_alpha/integrations/django/views.py
deleted file mode 100644
index c9f7874bfbe0c0..00000000000000
--- a/src/sentry_sdk_alpha/integrations/django/views.py
+++ /dev/null
@@ -1,99 +0,0 @@
-import functools
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP
-
-if TYPE_CHECKING:
-    from typing import Any
-
-
-try:
-    from asyncio import iscoroutinefunction
-except ImportError:
-    iscoroutinefunction = None  # type: ignore
-
-
-try:
-    from sentry_sdk_alpha.integrations.django.asgi import wrap_async_view
-except (ImportError, SyntaxError):
-    wrap_async_view = None  # type: ignore
-
-
-def patch_views():
-    # type: () -> None
-
-    from django.core.handlers.base import BaseHandler
-    from django.template.response import SimpleTemplateResponse
-
-    from sentry_sdk_alpha.integrations.django import DjangoIntegration
-
-    old_make_view_atomic = BaseHandler.make_view_atomic
-    old_render = SimpleTemplateResponse.render
-
-    @functools.wraps(old_render)
-    def sentry_patched_render(self):
-        # type: (SimpleTemplateResponse) -> Any
-        with sentry_sdk_alpha.start_span(
-            op=OP.VIEW_RESPONSE_RENDER,
-            name="serialize response",
-            origin=DjangoIntegration.origin,
-            only_if_parent=True,
-        ):
-            return old_render(self)
-
-    @functools.wraps(old_make_view_atomic)
-    def sentry_patched_make_view_atomic(self, *args, **kwargs):
-        # type: (Any, *Any, **Any) -> Any
-        callback = old_make_view_atomic(self, *args, **kwargs)
-
-        # XXX: The wrapper function is created for every request. Find more
-        # efficient way to wrap views (or build a cache?)
-
-        integration = sentry_sdk_alpha.get_client().get_integration(DjangoIntegration)
-        if integration is not None and integration.middleware_spans:
-            is_async_view = (
-                iscoroutinefunction is not None
-                and wrap_async_view is not None
-                and iscoroutinefunction(callback)
-            )
-            if is_async_view:
-                sentry_wrapped_callback = wrap_async_view(callback)
-            else:
-                sentry_wrapped_callback = _wrap_sync_view(callback)
-
-        else:
-            sentry_wrapped_callback = callback
-
-        return sentry_wrapped_callback
-
-    SimpleTemplateResponse.render = sentry_patched_render
-    BaseHandler.make_view_atomic = sentry_patched_make_view_atomic
-
-
-def _wrap_sync_view(callback):
-    # type: (Any) -> Any
-    from sentry_sdk_alpha.integrations.django import DjangoIntegration
-
-    @functools.wraps(callback)
-    def sentry_wrapped_callback(request, *args, **kwargs):
-        # type: (Any, *Any, **Any) -> Any
-        current_scope = sentry_sdk_alpha.get_current_scope()
-        if current_scope.root_span is not None:
-            current_scope.root_span.update_active_thread()
-
-        sentry_scope = sentry_sdk_alpha.get_isolation_scope()
-        # set the active thread id to the handler thread for sync views
-        # this isn't necessary for async views since that runs on main
-        if sentry_scope.profile is not None:
-            sentry_scope.profile.update_active_thread_id()
-
-        with sentry_sdk_alpha.start_span(
-            op=OP.VIEW_RENDER,
-            name=request.resolver_match.view_name,
-            origin=DjangoIntegration.origin,
-            only_if_parent=True,
-        ):
-            return callback(request, *args, **kwargs)
-
-    return sentry_wrapped_callback
diff --git a/src/sentry_sdk_alpha/integrations/dramatiq.py b/src/sentry_sdk_alpha/integrations/dramatiq.py
deleted file mode 100644
index 19338f101e955a..00000000000000
--- a/src/sentry_sdk_alpha/integrations/dramatiq.py
+++ /dev/null
@@ -1,165 +0,0 @@
-import json
-from typing import TYPE_CHECKING
-
-from dramatiq.broker import Broker  # type: ignore
-from dramatiq.errors import Retry  # type: ignore
-from dramatiq.message import Message  # type: ignore
-from dramatiq.middleware import Middleware, default_middleware  # type: ignore
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.integrations._wsgi_common import request_body_within_bounds
-from sentry_sdk_alpha.utils import AnnotatedValue, capture_internal_exceptions, event_from_exception
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Dict, Optional, Union
-
-    from sentry_sdk_alpha._types import Event, Hint
-
-
-class DramatiqIntegration(Integration):
-    """
-    Dramatiq integration for Sentry
-
-    Please make sure that you call `sentry_sdk.init` *before* initializing
-    your broker, as it monkey patches `Broker.__init__`.
-
-    This integration was originally developed and maintained
-    by https://github.com/jacobsvante and later donated to the Sentry
-    project.
-    """
-
-    identifier = "dramatiq"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        _patch_dramatiq_broker()
-
-
-def _patch_dramatiq_broker():
-    # type: () -> None
-    original_broker__init__ = Broker.__init__
-
-    def sentry_patched_broker__init__(self, *args, **kw):
-        # type: (Broker, *Any, **Any) -> None
-        integration = sentry_sdk_alpha.get_client().get_integration(DramatiqIntegration)
-
-        try:
-            middleware = kw.pop("middleware")
-        except KeyError:
-            # Unfortunately Broker and StubBroker allows middleware to be
-            # passed in as positional arguments, whilst RabbitmqBroker and
-            # RedisBroker does not.
-            if len(args) == 1:
-                middleware = args[0]
-                args = []  # type: ignore
-            else:
-                middleware = None
-
-        if middleware is None:
-            middleware = list(m() for m in default_middleware)
-        else:
-            middleware = list(middleware)
-
-        if integration is not None:
-            middleware = [m for m in middleware if not isinstance(m, SentryMiddleware)]
-            middleware.insert(0, SentryMiddleware())
-
-        kw["middleware"] = middleware
-        original_broker__init__(self, *args, **kw)
-
-    Broker.__init__ = sentry_patched_broker__init__
-
-
-class SentryMiddleware(Middleware):  # type: ignore[misc]
-    """
-    A Dramatiq middleware that automatically captures and sends
-    exceptions to Sentry.
-
-    This is automatically added to every instantiated broker via the
-    DramatiqIntegration.
-    """
-
-    def before_process_message(self, broker, message):
-        # type: (Broker, Message) -> None
-        integration = sentry_sdk_alpha.get_client().get_integration(DramatiqIntegration)
-        if integration is None:
-            return
-
-        message._scope_manager = sentry_sdk_alpha.new_scope()
-        message._scope_manager.__enter__()
-
-        scope = sentry_sdk_alpha.get_current_scope()
-        scope.set_transaction_name(message.actor_name)
-        scope.set_extra("dramatiq_message_id", message.message_id)
-        scope.add_event_processor(_make_message_event_processor(message, integration))
-
-    def after_process_message(self, broker, message, *, result=None, exception=None):
-        # type: (Broker, Message, Any, Optional[Any], Optional[Exception]) -> None
-        integration = sentry_sdk_alpha.get_client().get_integration(DramatiqIntegration)
-        if integration is None:
-            return
-
-        actor = broker.get_actor(message.actor_name)
-        throws = message.options.get("throws") or actor.options.get("throws")
-
-        try:
-            if (
-                exception is not None
-                and not (throws and isinstance(exception, throws))
-                and not isinstance(exception, Retry)
-            ):
-                event, hint = event_from_exception(
-                    exception,
-                    client_options=sentry_sdk_alpha.get_client().options,
-                    mechanism={
-                        "type": DramatiqIntegration.identifier,
-                        "handled": False,
-                    },
-                )
-                sentry_sdk_alpha.capture_event(event, hint=hint)
-        finally:
-            message._scope_manager.__exit__(None, None, None)
-
-
-def _make_message_event_processor(message, integration):
-    # type: (Message, DramatiqIntegration) -> Callable[[Event, Hint], Optional[Event]]
-
-    def inner(event, hint):
-        # type: (Event, Hint) -> Optional[Event]
-        with capture_internal_exceptions():
-            DramatiqMessageExtractor(message).extract_into_event(event)
-
-        return event
-
-    return inner
-
-
-class DramatiqMessageExtractor:
-    def __init__(self, message):
-        # type: (Message) -> None
-        self.message_data = dict(message.asdict())
-
-    def content_length(self):
-        # type: () -> int
-        return len(json.dumps(self.message_data))
-
-    def extract_into_event(self, event):
-        # type: (Event) -> None
-        client = sentry_sdk_alpha.get_client()
-        if not client.is_active():
-            return
-
-        contexts = event.setdefault("contexts", {})
-        request_info = contexts.setdefault("dramatiq", {})
-        request_info["type"] = "dramatiq"
-
-        data = None  # type: Optional[Union[AnnotatedValue, Dict[str, Any]]]
-        if not request_body_within_bounds(client, self.content_length()):
-            data = AnnotatedValue.removed_because_over_size_limit()
-        else:
-            data = self.message_data
-
-        request_info["data"] = data
diff --git a/src/sentry_sdk_alpha/integrations/excepthook.py b/src/sentry_sdk_alpha/integrations/excepthook.py
deleted file mode 100644
index f61bcf59569982..00000000000000
--- a/src/sentry_sdk_alpha/integrations/excepthook.py
+++ /dev/null
@@ -1,73 +0,0 @@
-import sys
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.utils import capture_internal_exceptions, event_from_exception
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from types import TracebackType
-    from typing import Any, Optional, Type
-
-    Excepthook = Callable[
-        [type[BaseException], BaseException, Optional[TracebackType]],
-        Any,
-    ]
-
-
-class ExcepthookIntegration(Integration):
-    identifier = "excepthook"
-
-    always_run = False
-
-    def __init__(self, always_run=False):
-        # type: (bool) -> None
-
-        if not isinstance(always_run, bool):
-            raise ValueError(f"Invalid value for always_run: {always_run} (must be type boolean)")
-        self.always_run = always_run
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        sys.excepthook = _make_excepthook(sys.excepthook)
-
-
-def _make_excepthook(old_excepthook):
-    # type: (Excepthook) -> Excepthook
-    def sentry_sdk_excepthook(type_, value, traceback):
-        # type: (Type[BaseException], BaseException, Optional[TracebackType]) -> None
-        integration = sentry_sdk_alpha.get_client().get_integration(ExcepthookIntegration)
-
-        # Note: If  we replace this with ensure_integration_enabled then
-        # we break the exceptiongroup backport;
-        # See: https://github.com/getsentry/sentry-python/issues/3097
-        if integration is None:
-            return old_excepthook(type_, value, traceback)
-
-        if _should_send(integration.always_run):
-            with capture_internal_exceptions():
-                event, hint = event_from_exception(
-                    (type_, value, traceback),
-                    client_options=sentry_sdk_alpha.get_client().options,
-                    mechanism={"type": "excepthook", "handled": False},
-                )
-                sentry_sdk_alpha.capture_event(event, hint=hint)
-
-        return old_excepthook(type_, value, traceback)
-
-    return sentry_sdk_excepthook
-
-
-def _should_send(always_run=False):
-    # type: (bool) -> bool
-    if always_run:
-        return True
-
-    if hasattr(sys, "ps1"):
-        # Disable the excepthook for interactive Python shells, otherwise
-        # every typo gets sent to Sentry.
-        return False
-
-    return True
diff --git a/src/sentry_sdk_alpha/integrations/executing.py b/src/sentry_sdk_alpha/integrations/executing.py
deleted file mode 100644
index f17193e308cbfc..00000000000000
--- a/src/sentry_sdk_alpha/integrations/executing.py
+++ /dev/null
@@ -1,67 +0,0 @@
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-from sentry_sdk_alpha.scope import add_global_event_processor
-from sentry_sdk_alpha.utils import iter_stacks, walk_exception_chain
-
-if TYPE_CHECKING:
-    from typing import Optional
-
-    from sentry_sdk_alpha._types import Event, Hint
-
-try:
-    import executing
-except ImportError:
-    raise DidNotEnable("executing is not installed")
-
-
-class ExecutingIntegration(Integration):
-    identifier = "executing"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-
-        @add_global_event_processor
-        def add_executing_info(event, hint):
-            # type: (Event, Optional[Hint]) -> Optional[Event]
-            if sentry_sdk_alpha.get_client().get_integration(ExecutingIntegration) is None:
-                return event
-
-            if hint is None:
-                return event
-
-            exc_info = hint.get("exc_info", None)
-
-            if exc_info is None:
-                return event
-
-            exception = event.get("exception", None)
-
-            if exception is None:
-                return event
-
-            values = exception.get("values", None)
-
-            if values is None:
-                return event
-
-            for exception, (_exc_type, _exc_value, exc_tb) in zip(
-                reversed(values), walk_exception_chain(exc_info)
-            ):
-                sentry_frames = [
-                    frame
-                    for frame in exception.get("stacktrace", {}).get("frames", [])
-                    if frame.get("function")
-                ]
-                tbs = list(iter_stacks(exc_tb))
-                if len(sentry_frames) != len(tbs):
-                    continue
-
-                for sentry_frame, tb in zip(sentry_frames, tbs):
-                    frame = tb.tb_frame
-                    source = executing.Source.for_frame(frame)
-                    sentry_frame["function"] = source.code_qualname(frame.f_code)
-
-            return event
diff --git a/src/sentry_sdk_alpha/integrations/falcon.py b/src/sentry_sdk_alpha/integrations/falcon.py
deleted file mode 100644
index 7b5ef0ccd1a101..00000000000000
--- a/src/sentry_sdk_alpha/integrations/falcon.py
+++ /dev/null
@@ -1,245 +0,0 @@
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import SOURCE_FOR_STYLE
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
-from sentry_sdk_alpha.integrations._wsgi_common import RequestExtractor
-from sentry_sdk_alpha.integrations.wsgi import SentryWsgiMiddleware
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    event_from_exception,
-    parse_version,
-)
-
-if TYPE_CHECKING:
-    from typing import Any, Dict, Optional
-
-    from sentry_sdk_alpha._types import Event, EventProcessor
-
-
-try:
-    import falcon  # type: ignore
-    from falcon import __version__ as FALCON_VERSION
-except ImportError:
-    raise DidNotEnable("Falcon not installed")
-
-import falcon.app_helpers  # type: ignore
-
-falcon_helpers = falcon.app_helpers
-falcon_app_class = falcon.App
-
-
-_FALCON_UNSET = None  # type: Optional[object]
-with capture_internal_exceptions():
-    from falcon.request import _UNSET as _FALCON_UNSET  # type: ignore[import-not-found, no-redef]
-
-
-class FalconRequestExtractor(RequestExtractor):
-    def env(self):
-        # type: () -> Dict[str, Any]
-        return self.request.env
-
-    def cookies(self):
-        # type: () -> Dict[str, Any]
-        return self.request.cookies
-
-    def form(self):
-        # type: () -> None
-        return None  # No such concept in Falcon
-
-    def files(self):
-        # type: () -> None
-        return None  # No such concept in Falcon
-
-    def raw_data(self):
-        # type: () -> Optional[str]
-
-        # As request data can only be read once we won't make this available
-        # to Sentry. Just send back a dummy string in case there was a
-        # content length.
-        # TODO(jmagnusson): Figure out if there's a way to support this
-        content_length = self.content_length()
-        if content_length > 0:
-            return "[REQUEST_CONTAINING_RAW_DATA]"
-        else:
-            return None
-
-    def json(self):
-        # type: () -> Optional[Dict[str, Any]]
-        # fallback to cached_media = None if self.request._media is not available
-        cached_media = None
-        with capture_internal_exceptions():
-            # self.request._media is the cached self.request.media
-            # value. It is only available if self.request.media
-            # has already been accessed. Therefore, reading
-            # self.request._media will not exhaust the raw request
-            # stream (self.request.bounded_stream) because it has
-            # already been read if self.request._media is set.
-            cached_media = self.request._media
-
-        if cached_media is not _FALCON_UNSET:
-            return cached_media
-
-        return None
-
-
-class SentryFalconMiddleware:
-    """Captures exceptions in Falcon requests and send to Sentry"""
-
-    def process_request(self, req, resp, *args, **kwargs):
-        # type: (Any, Any, *Any, **Any) -> None
-        integration = sentry_sdk_alpha.get_client().get_integration(FalconIntegration)
-        if integration is None:
-            return
-
-        scope = sentry_sdk_alpha.get_isolation_scope()
-        scope._name = "falcon"
-        scope.add_event_processor(_make_request_event_processor(req, integration))
-
-
-TRANSACTION_STYLE_VALUES = ("uri_template", "path")
-
-
-class FalconIntegration(Integration):
-    identifier = "falcon"
-    origin = f"auto.http.{identifier}"
-
-    transaction_style = ""
-
-    def __init__(self, transaction_style="uri_template"):
-        # type: (str) -> None
-        if transaction_style not in TRANSACTION_STYLE_VALUES:
-            raise ValueError(
-                "Invalid value for transaction_style: %s (must be in %s)"
-                % (transaction_style, TRANSACTION_STYLE_VALUES)
-            )
-        self.transaction_style = transaction_style
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-
-        version = parse_version(FALCON_VERSION)
-        _check_minimum_version(FalconIntegration, version)
-
-        _patch_wsgi_app()
-        _patch_handle_exception()
-        _patch_prepare_middleware()
-
-
-def _patch_wsgi_app():
-    # type: () -> None
-    original_wsgi_app = falcon_app_class.__call__
-
-    def sentry_patched_wsgi_app(self, env, start_response):
-        # type: (falcon.API, Any, Any) -> Any
-        integration = sentry_sdk_alpha.get_client().get_integration(FalconIntegration)
-        if integration is None:
-            return original_wsgi_app(self, env, start_response)
-
-        sentry_wrapped = SentryWsgiMiddleware(
-            lambda envi, start_resp: original_wsgi_app(self, envi, start_resp),
-            span_origin=FalconIntegration.origin,
-        )
-
-        return sentry_wrapped(env, start_response)
-
-    falcon_app_class.__call__ = sentry_patched_wsgi_app
-
-
-def _patch_handle_exception():
-    # type: () -> None
-    original_handle_exception = falcon_app_class._handle_exception
-
-    @ensure_integration_enabled(FalconIntegration, original_handle_exception)
-    def sentry_patched_handle_exception(self, *args):
-        # type: (falcon.API, *Any) -> Any
-        # NOTE(jmagnusson): falcon 2.0 changed falcon.API._handle_exception
-        # method signature from `(ex, req, resp, params)` to
-        # `(req, resp, ex, params)`
-        ex = response = None
-        with capture_internal_exceptions():
-            ex = next(argument for argument in args if isinstance(argument, Exception))
-            response = next(argument for argument in args if isinstance(argument, falcon.Response))
-
-        was_handled = original_handle_exception(self, *args)
-
-        if ex is None or response is None:
-            # Both ex and response should have a non-None value at this point; otherwise,
-            # there is an error with the SDK that will have been captured in the
-            # capture_internal_exceptions block above.
-            return was_handled
-
-        if _exception_leads_to_http_5xx(ex, response):
-            event, hint = event_from_exception(
-                ex,
-                client_options=sentry_sdk_alpha.get_client().options,
-                mechanism={"type": "falcon", "handled": False},
-            )
-            sentry_sdk_alpha.capture_event(event, hint=hint)
-
-        return was_handled
-
-    falcon_app_class._handle_exception = sentry_patched_handle_exception
-
-
-def _patch_prepare_middleware():
-    # type: () -> None
-    original_prepare_middleware = falcon_helpers.prepare_middleware
-
-    def sentry_patched_prepare_middleware(
-        middleware=None, independent_middleware=False, asgi=False
-    ):
-        # type: (Any, Any, bool) -> Any
-        if asgi:
-            # We don't support ASGI Falcon apps, so we don't patch anything here
-            return original_prepare_middleware(middleware, independent_middleware, asgi)
-
-        integration = sentry_sdk_alpha.get_client().get_integration(FalconIntegration)
-        if integration is not None:
-            middleware = [SentryFalconMiddleware()] + (middleware or [])
-
-        # We intentionally omit the asgi argument here, since the default is False anyways,
-        # and this way, we remain backwards-compatible with pre-3.0.0 Falcon versions.
-        return original_prepare_middleware(middleware, independent_middleware)
-
-    falcon_helpers.prepare_middleware = sentry_patched_prepare_middleware
-
-
-def _exception_leads_to_http_5xx(ex, response):
-    # type: (Exception, falcon.Response) -> bool
-    is_server_error = isinstance(ex, falcon.HTTPError) and (ex.status or "").startswith("5")
-    is_unhandled_error = not isinstance(ex, (falcon.HTTPError, falcon.http_status.HTTPStatus))
-
-    return (is_server_error or is_unhandled_error) and _has_http_5xx_status(response)
-
-
-def _has_http_5xx_status(response):
-    # type: (falcon.Response) -> bool
-    return response.status.startswith("5")
-
-
-def _set_transaction_name_and_source(event, transaction_style, request):
-    # type: (Event, str, falcon.Request) -> None
-    name_for_style = {
-        "uri_template": request.uri_template,
-        "path": request.path,
-    }
-    event["transaction"] = name_for_style[transaction_style]
-    event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]}
-
-
-def _make_request_event_processor(req, integration):
-    # type: (falcon.Request, FalconIntegration) -> EventProcessor
-
-    def event_processor(event, hint):
-        # type: (Event, dict[str, Any]) -> Event
-        _set_transaction_name_and_source(event, integration.transaction_style, req)
-
-        with capture_internal_exceptions():
-            FalconRequestExtractor(req).extract_into_event(event)
-
-        return event
-
-    return event_processor
diff --git a/src/sentry_sdk_alpha/integrations/fastapi.py b/src/sentry_sdk_alpha/integrations/fastapi.py
deleted file mode 100644
index 9cdb3672418645..00000000000000
--- a/src/sentry_sdk_alpha/integrations/fastapi.py
+++ /dev/null
@@ -1,141 +0,0 @@
-import asyncio
-from copy import deepcopy
-from functools import wraps
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import SOURCE_FOR_STYLE, TransactionSource
-from sentry_sdk_alpha.integrations import DidNotEnable
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import logger, transaction_from_function
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Dict
-
-    from sentry_sdk_alpha._types import Event
-
-try:
-    from sentry_sdk_alpha.integrations.starlette import (
-        StarletteIntegration,
-        StarletteRequestExtractor,
-    )
-except DidNotEnable:
-    raise DidNotEnable("Starlette is not installed")
-
-try:
-    import fastapi  # type: ignore
-except ImportError:
-    raise DidNotEnable("FastAPI is not installed")
-
-
-_DEFAULT_TRANSACTION_NAME = "generic FastAPI request"
-
-
-class FastApiIntegration(StarletteIntegration):
-    identifier = "fastapi"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        patch_get_request_handler()
-
-
-def _set_transaction_name_and_source(scope, transaction_style, request):
-    # type: (sentry_sdk.Scope, str, Any) -> None
-    name = ""
-
-    if transaction_style == "endpoint":
-        endpoint = request.scope.get("endpoint")
-        if endpoint:
-            name = transaction_from_function(endpoint) or ""
-
-    elif transaction_style == "url":
-        route = request.scope.get("route")
-        if route:
-            path = getattr(route, "path", None)
-            if path is not None:
-                name = path
-
-    if not name:
-        name = _DEFAULT_TRANSACTION_NAME
-        source = TransactionSource.ROUTE
-    else:
-        source = SOURCE_FOR_STYLE[transaction_style]
-
-    scope.set_transaction_name(name, source=source)
-    logger.debug("[FastAPI] Set transaction name and source on scope: %s / %s", name, source)
-
-
-def patch_get_request_handler():
-    # type: () -> None
-    old_get_request_handler = fastapi.routing.get_request_handler
-
-    def _sentry_get_request_handler(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        dependant = kwargs.get("dependant")
-        if (
-            dependant
-            and dependant.call is not None
-            and not asyncio.iscoroutinefunction(dependant.call)
-        ):
-            old_call = dependant.call
-
-            @wraps(old_call)
-            def _sentry_call(*args, **kwargs):
-                # type: (*Any, **Any) -> Any
-                current_scope = sentry_sdk_alpha.get_current_scope()
-                if current_scope.root_span is not None:
-                    current_scope.root_span.update_active_thread()
-
-                sentry_scope = sentry_sdk_alpha.get_isolation_scope()
-                if sentry_scope.profile is not None:
-                    sentry_scope.profile.update_active_thread_id()
-
-                return old_call(*args, **kwargs)
-
-            dependant.call = _sentry_call
-
-        old_app = old_get_request_handler(*args, **kwargs)
-
-        async def _sentry_app(*args, **kwargs):
-            # type: (*Any, **Any) -> Any
-            integration = sentry_sdk_alpha.get_client().get_integration(FastApiIntegration)
-            if integration is None:
-                return await old_app(*args, **kwargs)
-
-            request = args[0]
-
-            _set_transaction_name_and_source(
-                sentry_sdk_alpha.get_current_scope(), integration.transaction_style, request
-            )
-            sentry_scope = sentry_sdk_alpha.get_isolation_scope()
-            extractor = StarletteRequestExtractor(request)
-            info = await extractor.extract_request_info()
-
-            def _make_request_event_processor(req, integration):
-                # type: (Any, Any) -> Callable[[Event, Dict[str, Any]], Event]
-                def event_processor(event, hint):
-                    # type: (Event, Dict[str, Any]) -> Event
-
-                    # Extract information from request
-                    request_info = event.get("request", {})
-                    if info:
-                        if "cookies" in info and should_send_default_pii():
-                            request_info["cookies"] = info["cookies"]
-                        if "data" in info:
-                            request_info["data"] = info["data"]
-                    event["request"] = deepcopy(request_info)
-
-                    return event
-
-                return event_processor
-
-            sentry_scope._name = FastApiIntegration.identifier
-            sentry_scope.add_event_processor(_make_request_event_processor(request, integration))
-
-            return await old_app(*args, **kwargs)
-
-        return _sentry_app
-
-    fastapi.routing.get_request_handler = _sentry_get_request_handler
diff --git a/src/sentry_sdk_alpha/integrations/flask.py b/src/sentry_sdk_alpha/integrations/flask.py
deleted file mode 100644
index 746ca1dbbca080..00000000000000
--- a/src/sentry_sdk_alpha/integrations/flask.py
+++ /dev/null
@@ -1,273 +0,0 @@
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import SOURCE_FOR_STYLE
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
-from sentry_sdk_alpha.integrations._wsgi_common import (
-    DEFAULT_HTTP_METHODS_TO_CAPTURE,
-    RequestExtractor,
-)
-from sentry_sdk_alpha.integrations.wsgi import SentryWsgiMiddleware
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    event_from_exception,
-    package_version,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Dict, Union
-
-    from werkzeug.datastructures import FileStorage, ImmutableMultiDict
-
-    from sentry_sdk_alpha._types import Event, EventProcessor
-    from sentry_sdk_alpha.integrations.wsgi import _ScopedResponse
-
-
-try:
-    import flask_login  # type: ignore
-except ImportError:
-    flask_login = None
-
-try:
-    from flask import Flask, Request  # type: ignore
-    from flask import request as flask_request
-    from flask.signals import before_render_template, got_request_exception, request_started
-    from markupsafe import Markup
-except ImportError:
-    raise DidNotEnable("Flask is not installed")
-
-try:
-    import blinker  # noqa
-except ImportError:
-    raise DidNotEnable("blinker is not installed")
-
-TRANSACTION_STYLE_VALUES = ("endpoint", "url")
-
-
-class FlaskIntegration(Integration):
-    identifier = "flask"
-    origin = f"auto.http.{identifier}"
-
-    transaction_style = ""
-
-    def __init__(
-        self,
-        transaction_style="endpoint",  # type: str
-        http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE,  # type: tuple[str, ...]
-    ):
-        # type: (...) -> None
-        if transaction_style not in TRANSACTION_STYLE_VALUES:
-            raise ValueError(
-                "Invalid value for transaction_style: %s (must be in %s)"
-                % (transaction_style, TRANSACTION_STYLE_VALUES)
-            )
-        self.transaction_style = transaction_style
-        self.http_methods_to_capture = tuple(map(str.upper, http_methods_to_capture))
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        try:
-            from quart import Quart  # type: ignore
-
-            if Flask == Quart:
-                # This is Quart masquerading as Flask, don't enable the Flask
-                # integration. See https://github.com/getsentry/sentry-python/issues/2709
-                raise DidNotEnable(
-                    "This is not a Flask app but rather Quart pretending to be Flask"
-                )
-        except ImportError:
-            pass
-
-        version = package_version("flask")
-        _check_minimum_version(FlaskIntegration, version)
-
-        before_render_template.connect(_add_sentry_trace)
-        request_started.connect(_request_started)
-        got_request_exception.connect(_capture_exception)
-
-        old_app = Flask.__call__
-
-        def sentry_patched_wsgi_app(self, environ, start_response):
-            # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
-            if sentry_sdk_alpha.get_client().get_integration(FlaskIntegration) is None:
-                return old_app(self, environ, start_response)
-
-            integration = sentry_sdk_alpha.get_client().get_integration(FlaskIntegration)
-
-            middleware = SentryWsgiMiddleware(
-                lambda *a, **kw: old_app(self, *a, **kw),
-                span_origin=FlaskIntegration.origin,
-                http_methods_to_capture=(
-                    integration.http_methods_to_capture
-                    if integration
-                    else DEFAULT_HTTP_METHODS_TO_CAPTURE
-                ),
-            )
-            return middleware(environ, start_response)
-
-        Flask.__call__ = sentry_patched_wsgi_app
-
-
-def _add_sentry_trace(sender, template, context, **extra):
-    # type: (Flask, Any, Dict[str, Any], **Any) -> None
-    if "sentry_trace" in context:
-        return
-
-    scope = sentry_sdk_alpha.get_current_scope()
-    trace_meta = Markup(scope.trace_propagation_meta())
-    context["sentry_trace"] = trace_meta  # for backwards compatibility
-    context["sentry_trace_meta"] = trace_meta
-
-
-def _set_transaction_name_and_source(scope, transaction_style, request):
-    # type: (sentry_sdk.Scope, str, Request) -> None
-    try:
-        name_for_style = {
-            "url": request.url_rule.rule,
-            "endpoint": request.url_rule.endpoint,
-        }
-        scope.set_transaction_name(
-            name_for_style[transaction_style],
-            source=SOURCE_FOR_STYLE[transaction_style],
-        )
-    except Exception:
-        pass
-
-
-def _request_started(app, **kwargs):
-    # type: (Flask, **Any) -> None
-    integration = sentry_sdk_alpha.get_client().get_integration(FlaskIntegration)
-    if integration is None:
-        return
-
-    request = flask_request._get_current_object()
-
-    # Set the transaction name and source here,
-    # but rely on WSGI middleware to actually start the transaction
-    _set_transaction_name_and_source(
-        sentry_sdk_alpha.get_current_scope(), integration.transaction_style, request
-    )
-
-    scope = sentry_sdk_alpha.get_isolation_scope()
-    evt_processor = _make_request_event_processor(app, request, integration)
-    scope.add_event_processor(evt_processor)
-
-
-class FlaskRequestExtractor(RequestExtractor):
-    def env(self):
-        # type: () -> Dict[str, str]
-        return self.request.environ
-
-    def cookies(self):
-        # type: () -> Dict[Any, Any]
-        return {
-            k: v[0] if isinstance(v, list) and len(v) == 1 else v
-            for k, v in self.request.cookies.items()
-        }
-
-    def raw_data(self):
-        # type: () -> bytes
-        return self.request.get_data()
-
-    def form(self):
-        # type: () -> ImmutableMultiDict[str, Any]
-        return self.request.form
-
-    def files(self):
-        # type: () -> ImmutableMultiDict[str, Any]
-        return self.request.files
-
-    def is_json(self):
-        # type: () -> bool
-        return self.request.is_json
-
-    def json(self):
-        # type: () -> Any
-        return self.request.get_json(silent=True)
-
-    def size_of_file(self, file):
-        # type: (FileStorage) -> int
-        return file.content_length
-
-
-def _make_request_event_processor(app, request, integration):
-    # type: (Flask, Callable[[], Request], FlaskIntegration) -> EventProcessor
-
-    def inner(event, hint):
-        # type: (Event, dict[str, Any]) -> Event
-
-        # if the request is gone we are fine not logging the data from
-        # it.  This might happen if the processor is pushed away to
-        # another thread.
-        if request is None:
-            return event
-
-        with capture_internal_exceptions():
-            FlaskRequestExtractor(request).extract_into_event(event)
-
-        if should_send_default_pii():
-            with capture_internal_exceptions():
-                _add_user_to_event(event)
-
-        return event
-
-    return inner
-
-
-@ensure_integration_enabled(FlaskIntegration)
-def _capture_exception(sender, exception, **kwargs):
-    # type: (Flask, Union[ValueError, BaseException], **Any) -> None
-    event, hint = event_from_exception(
-        exception,
-        client_options=sentry_sdk_alpha.get_client().options,
-        mechanism={"type": "flask", "handled": False},
-    )
-
-    sentry_sdk_alpha.capture_event(event, hint=hint)
-
-
-def _add_user_to_event(event):
-    # type: (Event) -> None
-    if flask_login is None:
-        return
-
-    user = flask_login.current_user
-    if user is None:
-        return
-
-    with capture_internal_exceptions():
-        # Access this object as late as possible as accessing the user
-        # is relatively costly
-
-        user_info = event.setdefault("user", {})
-
-        try:
-            user_info.setdefault("id", user.get_id())
-            # TODO: more configurable user attrs here
-        except AttributeError:
-            # might happen if:
-            # - flask_login could not be imported
-            # - flask_login is not configured
-            # - no user is logged in
-            pass
-
-        # The following attribute accesses are ineffective for the general
-        # Flask-Login case, because the User interface of Flask-Login does not
-        # care about anything but the ID. However, Flask-User (based on
-        # Flask-Login) documents a few optional extra attributes.
-        #
-        # https://github.com/lingthio/Flask-User/blob/a379fa0a281789618c484b459cb41236779b95b1/docs/source/data_models.rst#fixed-data-model-property-names
-
-        try:
-            user_info.setdefault("email", user.email)
-        except Exception:
-            pass
-
-        try:
-            user_info.setdefault("username", user.username)
-        except Exception:
-            pass
diff --git a/src/sentry_sdk_alpha/integrations/gcp.py b/src/sentry_sdk_alpha/integrations/gcp.py
deleted file mode 100644
index 911b67aeb0480c..00000000000000
--- a/src/sentry_sdk_alpha/integrations/gcp.py
+++ /dev/null
@@ -1,249 +0,0 @@
-import functools
-import sys
-from copy import deepcopy
-from datetime import datetime, timedelta, timezone
-from os import environ
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.integrations._wsgi_common import (
-    _filter_headers,
-    _request_headers_to_span_attributes,
-)
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.tracing import TransactionSource
-from sentry_sdk_alpha.utils import (
-    AnnotatedValue,
-    TimeoutThread,
-    capture_internal_exceptions,
-    event_from_exception,
-    logger,
-    reraise,
-)
-
-# Constants
-TIMEOUT_WARNING_BUFFER = 1.5  # Buffer time required to send timeout warning to Sentry
-MILLIS_TO_SECONDS = 1000.0
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Optional, TypeVar
-
-    from sentry_sdk_alpha._types import Event, EventProcessor, Hint
-
-    F = TypeVar("F", bound=Callable[..., Any])
-
-
-def _wrap_func(func):
-    # type: (F) -> F
-    @functools.wraps(func)
-    def sentry_func(functionhandler, gcp_event, *args, **kwargs):
-        # type: (Any, Any, *Any, **Any) -> Any
-        client = sentry_sdk_alpha.get_client()
-
-        integration = client.get_integration(GcpIntegration)
-        if integration is None:
-            return func(functionhandler, gcp_event, *args, **kwargs)
-
-        configured_time = environ.get("FUNCTION_TIMEOUT_SEC")
-        if not configured_time:
-            logger.debug(
-                "The configured timeout could not be fetched from Cloud Functions configuration."
-            )
-            return func(functionhandler, gcp_event, *args, **kwargs)
-
-        configured_time = int(configured_time)
-
-        initial_time = datetime.now(timezone.utc)
-
-        with sentry_sdk_alpha.isolation_scope() as scope:
-            with capture_internal_exceptions():
-                scope.clear_breadcrumbs()
-                scope.add_event_processor(
-                    _make_request_event_processor(gcp_event, configured_time, initial_time)
-                )
-                scope.set_tag("gcp_region", environ.get("FUNCTION_REGION"))
-                timeout_thread = None
-                if integration.timeout_warning and configured_time > TIMEOUT_WARNING_BUFFER:
-                    waiting_time = configured_time - TIMEOUT_WARNING_BUFFER
-
-                    timeout_thread = TimeoutThread(waiting_time, configured_time)
-
-                    # Starting the thread to raise timeout warning exception
-                    timeout_thread.start()
-
-            headers = {}
-            if hasattr(gcp_event, "headers"):
-                headers = gcp_event.headers
-
-            with sentry_sdk_alpha.continue_trace(headers):
-                with sentry_sdk_alpha.start_span(
-                    op=OP.FUNCTION_GCP,
-                    name=environ.get("FUNCTION_NAME", ""),
-                    source=TransactionSource.COMPONENT,
-                    origin=GcpIntegration.origin,
-                    attributes=_prepopulate_attributes(gcp_event),
-                ):
-                    try:
-                        return func(functionhandler, gcp_event, *args, **kwargs)
-                    except Exception:
-                        exc_info = sys.exc_info()
-                        sentry_event, hint = event_from_exception(
-                            exc_info,
-                            client_options=client.options,
-                            mechanism={"type": "gcp", "handled": False},
-                        )
-                        sentry_sdk_alpha.capture_event(sentry_event, hint=hint)
-                        reraise(*exc_info)
-                    finally:
-                        if timeout_thread:
-                            timeout_thread.stop()
-                        # Flush out the event queue
-                        client.flush()
-
-    return sentry_func  # type: ignore
-
-
-class GcpIntegration(Integration):
-    identifier = "gcp"
-    origin = f"auto.function.{identifier}"
-
-    def __init__(self, timeout_warning=False):
-        # type: (bool) -> None
-        self.timeout_warning = timeout_warning
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        import __main__ as gcp_functions
-
-        if not hasattr(gcp_functions, "worker_v1"):
-            logger.warning("GcpIntegration currently supports only Python 3.7 runtime environment.")
-            return
-
-        worker1 = gcp_functions.worker_v1
-
-        worker1.FunctionHandler.invoke_user_function = _wrap_func(
-            worker1.FunctionHandler.invoke_user_function
-        )
-
-
-def _make_request_event_processor(gcp_event, configured_timeout, initial_time):
-    # type: (Any, Any, Any) -> EventProcessor
-
-    def event_processor(event, hint):
-        # type: (Event, Hint) -> Optional[Event]
-
-        final_time = datetime.now(timezone.utc)
-        time_diff = final_time - initial_time
-
-        execution_duration_in_millis = time_diff / timedelta(milliseconds=1)
-
-        extra = event.setdefault("extra", {})
-        extra["google cloud functions"] = {
-            "function_name": environ.get("FUNCTION_NAME"),
-            "function_entry_point": environ.get("ENTRY_POINT"),
-            "function_identity": environ.get("FUNCTION_IDENTITY"),
-            "function_region": environ.get("FUNCTION_REGION"),
-            "function_project": environ.get("GCP_PROJECT"),
-            "execution_duration_in_millis": execution_duration_in_millis,
-            "configured_timeout_in_seconds": configured_timeout,
-        }
-
-        extra["google cloud logs"] = {
-            "url": _get_google_cloud_logs_url(final_time),
-        }
-
-        request = event.get("request", {})
-
-        request["url"] = "gcp:///{}".format(environ.get("FUNCTION_NAME"))
-
-        if hasattr(gcp_event, "method"):
-            request["method"] = gcp_event.method
-
-        if hasattr(gcp_event, "query_string"):
-            request["query_string"] = gcp_event.query_string.decode("utf-8")
-
-        if hasattr(gcp_event, "headers"):
-            request["headers"] = _filter_headers(gcp_event.headers)
-
-        if should_send_default_pii():
-            if hasattr(gcp_event, "data"):
-                request["data"] = gcp_event.data
-        else:
-            if hasattr(gcp_event, "data"):
-                # Unfortunately couldn't find a way to get structured body from GCP
-                # event. Meaning every body is unstructured to us.
-                request["data"] = AnnotatedValue.removed_because_raw_data()
-
-        event["request"] = deepcopy(request)
-
-        return event
-
-    return event_processor
-
-
-def _get_google_cloud_logs_url(final_time):
-    # type: (datetime) -> str
-    """
-    Generates a Google Cloud Logs console URL based on the environment variables
-    Arguments:
-        final_time {datetime} -- Final time
-    Returns:
-        str -- Google Cloud Logs Console URL to logs.
-    """
-    hour_ago = final_time - timedelta(hours=1)
-    formatstring = "%Y-%m-%dT%H:%M:%SZ"
-
-    url = (
-        "https://console.cloud.google.com/logs/viewer?project={project}&resource=cloud_function"
-        "%2Ffunction_name%2F{function_name}%2Fregion%2F{region}&minLogLevel=0&expandAll=false"
-        "×tamp={timestamp_end}&customFacets=&limitCustomFacetWidth=true"
-        "&dateRangeStart={timestamp_start}&dateRangeEnd={timestamp_end}"
-        "&interval=PT1H&scrollTimestamp={timestamp_end}"
-    ).format(
-        project=environ.get("GCP_PROJECT"),
-        function_name=environ.get("FUNCTION_NAME"),
-        region=environ.get("FUNCTION_REGION"),
-        timestamp_end=final_time.strftime(formatstring),
-        timestamp_start=hour_ago.strftime(formatstring),
-    )
-
-    return url
-
-
-ENV_TO_ATTRIBUTE = {
-    "FUNCTION_NAME": "faas.name",
-    "ENTRY_POINT": "gcp.function.entry_point",
-    "FUNCTION_IDENTITY": "gcp.function.identity",
-    "FUNCTION_REGION": "faas.region",
-    "GCP_PROJECT": "gcp.function.project",
-}
-
-EVENT_TO_ATTRIBUTE = {
-    "method": "http.request.method",
-    "query_string": "url.query",
-}
-
-
-def _prepopulate_attributes(gcp_event):
-    # type: (Any) -> dict[str, Any]
-    attributes = {
-        "cloud.provider": "gcp",
-    }
-
-    for key, attr in ENV_TO_ATTRIBUTE.items():
-        if environ.get(key):
-            attributes[attr] = environ[key]
-
-    for key, attr in EVENT_TO_ATTRIBUTE.items():
-        if getattr(gcp_event, key, None):
-            attributes[attr] = getattr(gcp_event, key)
-
-    if hasattr(gcp_event, "headers"):
-        headers = gcp_event.headers
-        attributes.update(_request_headers_to_span_attributes(headers))
-
-    return attributes
diff --git a/src/sentry_sdk_alpha/integrations/gnu_backtrace.py b/src/sentry_sdk_alpha/integrations/gnu_backtrace.py
deleted file mode 100644
index 36daacb4303e02..00000000000000
--- a/src/sentry_sdk_alpha/integrations/gnu_backtrace.py
+++ /dev/null
@@ -1,107 +0,0 @@
-import re
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.scope import add_global_event_processor
-from sentry_sdk_alpha.utils import capture_internal_exceptions
-
-if TYPE_CHECKING:
-    from typing import Any
-
-    from sentry_sdk_alpha._types import Event
-
-
-MODULE_RE = r"[a-zA-Z0-9/._:\\-]+"
-TYPE_RE = r"[a-zA-Z0-9._:<>,-]+"
-HEXVAL_RE = r"[A-Fa-f0-9]+"
-
-
-FRAME_RE = r"""
-^(?P\d+)\.\s
-(?P{MODULE_RE})\(
-  (?P{TYPE_RE}\ )?
-  ((?P{TYPE_RE})
-    (?P\(.*\))?
-  )?
-  ((?P\ const)?\+0x(?P{HEXVAL_RE}))?
-\)\s
-\[0x(?P{HEXVAL_RE})\]$
-""".format(
-    MODULE_RE=MODULE_RE, HEXVAL_RE=HEXVAL_RE, TYPE_RE=TYPE_RE
-)
-
-FRAME_RE = re.compile(FRAME_RE, re.MULTILINE | re.VERBOSE)
-
-
-class GnuBacktraceIntegration(Integration):
-    identifier = "gnu_backtrace"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        @add_global_event_processor
-        def process_gnu_backtrace(event, hint):
-            # type: (Event, dict[str, Any]) -> Event
-            with capture_internal_exceptions():
-                return _process_gnu_backtrace(event, hint)
-
-
-def _process_gnu_backtrace(event, hint):
-    # type: (Event, dict[str, Any]) -> Event
-    if sentry_sdk_alpha.get_client().get_integration(GnuBacktraceIntegration) is None:
-        return event
-
-    exc_info = hint.get("exc_info", None)
-
-    if exc_info is None:
-        return event
-
-    exception = event.get("exception", None)
-
-    if exception is None:
-        return event
-
-    values = exception.get("values", None)
-
-    if values is None:
-        return event
-
-    for exception in values:
-        frames = exception.get("stacktrace", {}).get("frames", [])
-        if not frames:
-            continue
-
-        msg = exception.get("value", None)
-        if not msg:
-            continue
-
-        additional_frames = []
-        new_msg = []
-
-        for line in msg.splitlines():
-            match = FRAME_RE.match(line)
-            if match:
-                additional_frames.append(
-                    (
-                        int(match.group("index")),
-                        {
-                            "package": match.group("package") or None,
-                            "function": match.group("function") or None,
-                            "platform": "native",
-                        },
-                    )
-                )
-            else:
-                # Put garbage lines back into message, not sure what to do with them.
-                new_msg.append(line)
-
-        if additional_frames:
-            additional_frames.sort(key=lambda x: -x[0])
-            for _, frame in additional_frames:
-                frames.append(frame)
-
-            new_msg.append("")
-            exception["value"] = "\n".join(new_msg)
-
-    return event
diff --git a/src/sentry_sdk_alpha/integrations/gql.py b/src/sentry_sdk_alpha/integrations/gql.py
deleted file mode 100644
index 67960c99c067e1..00000000000000
--- a/src/sentry_sdk_alpha/integrations/gql.py
+++ /dev/null
@@ -1,136 +0,0 @@
-import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import ensure_integration_enabled, event_from_exception, parse_version
-
-try:
-    import gql  # type: ignore[import-not-found]
-    from gql.transport import AsyncTransport, Transport  # type: ignore[import-not-found]
-    from gql.transport.exceptions import TransportQueryError  # type: ignore[import-not-found]
-    from graphql import DocumentNode, VariableDefinitionNode, get_operation_ast, print_ast
-except ImportError:
-    raise DidNotEnable("gql is not installed")
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from typing import Any, Dict, Tuple, Union
-
-    from sentry_sdk_alpha._types import Event, EventProcessor
-
-    EventDataType = dict[str, Union[str, tuple[VariableDefinitionNode, ...]]]
-
-
-class GQLIntegration(Integration):
-    identifier = "gql"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        gql_version = parse_version(gql.__version__)
-        _check_minimum_version(GQLIntegration, gql_version)
-
-        _patch_execute()
-
-
-def _data_from_document(document):
-    # type: (DocumentNode) -> EventDataType
-    try:
-        operation_ast = get_operation_ast(document)
-        data = {"query": print_ast(document)}  # type: EventDataType
-
-        if operation_ast is not None:
-            data["variables"] = operation_ast.variable_definitions
-            if operation_ast.name is not None:
-                data["operationName"] = operation_ast.name.value
-
-        return data
-    except (AttributeError, TypeError):
-        return dict()
-
-
-def _transport_method(transport):
-    # type: (Union[Transport, AsyncTransport]) -> str
-    """
-    The RequestsHTTPTransport allows defining the HTTP method; all
-    other transports use POST.
-    """
-    try:
-        return transport.method
-    except AttributeError:
-        return "POST"
-
-
-def _request_info_from_transport(transport):
-    # type: (Union[Transport, AsyncTransport, None]) -> Dict[str, str]
-    if transport is None:
-        return {}
-
-    request_info = {
-        "method": _transport_method(transport),
-    }
-
-    try:
-        request_info["url"] = transport.url
-    except AttributeError:
-        pass
-
-    return request_info
-
-
-def _patch_execute():
-    # type: () -> None
-    real_execute = gql.Client.execute
-
-    @ensure_integration_enabled(GQLIntegration, real_execute)
-    def sentry_patched_execute(self, document, *args, **kwargs):
-        # type: (gql.Client, DocumentNode, Any, Any) -> Any
-        scope = sentry_sdk_alpha.get_isolation_scope()
-        scope.add_event_processor(_make_gql_event_processor(self, document))
-
-        try:
-            return real_execute(self, document, *args, **kwargs)
-        except TransportQueryError as e:
-            event, hint = event_from_exception(
-                e,
-                client_options=sentry_sdk_alpha.get_client().options,
-                mechanism={"type": "gql", "handled": False},
-            )
-
-            sentry_sdk_alpha.capture_event(event, hint)
-            raise e
-
-    gql.Client.execute = sentry_patched_execute
-
-
-def _make_gql_event_processor(client, document):
-    # type: (gql.Client, DocumentNode) -> EventProcessor
-    def processor(event, hint):
-        # type: (Event, dict[str, Any]) -> Event
-        try:
-            errors = hint["exc_info"][1].errors
-        except (AttributeError, KeyError):
-            errors = None
-
-        request = event.setdefault("request", {})
-        request.update(
-            {
-                "api_target": "graphql",
-                **_request_info_from_transport(client.transport),
-            }
-        )
-
-        if should_send_default_pii():
-            request["data"] = _data_from_document(document)
-            contexts = event.setdefault("contexts", {})
-            response = contexts.setdefault("response", {})
-            response.update(
-                {
-                    "data": {"errors": errors},
-                    "type": response,
-                }
-            )
-
-        return event
-
-    return processor
diff --git a/src/sentry_sdk_alpha/integrations/graphene.py b/src/sentry_sdk_alpha/integrations/graphene.py
deleted file mode 100644
index af5ed6e88e2cc5..00000000000000
--- a/src/sentry_sdk_alpha/integrations/graphene.py
+++ /dev/null
@@ -1,146 +0,0 @@
-from contextlib import contextmanager
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    event_from_exception,
-    package_version,
-)
-
-try:
-    from graphene.types import schema as graphene_schema  # type: ignore
-except ImportError:
-    raise DidNotEnable("graphene is not installed")
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from collections.abc import Generator
-    from typing import Any, Dict, Union
-
-    from graphene.language.source import Source  # type: ignore
-    from graphql.execution import ExecutionResult
-    from graphql.type import GraphQLSchema
-
-    from sentry_sdk_alpha._types import Event
-
-
-class GrapheneIntegration(Integration):
-    identifier = "graphene"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        version = package_version("graphene")
-        _check_minimum_version(GrapheneIntegration, version)
-
-        _patch_graphql()
-
-
-def _patch_graphql():
-    # type: () -> None
-    old_graphql_sync = graphene_schema.graphql_sync
-    old_graphql_async = graphene_schema.graphql
-
-    @ensure_integration_enabled(GrapheneIntegration, old_graphql_sync)
-    def _sentry_patched_graphql_sync(schema, source, *args, **kwargs):
-        # type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult
-        scope = sentry_sdk_alpha.get_isolation_scope()
-        scope.add_event_processor(_event_processor)
-
-        with graphql_span(schema, source, kwargs):
-            result = old_graphql_sync(schema, source, *args, **kwargs)
-
-        with capture_internal_exceptions():
-            client = sentry_sdk_alpha.get_client()
-            for error in result.errors or []:
-                event, hint = event_from_exception(
-                    error,
-                    client_options=client.options,
-                    mechanism={
-                        "type": GrapheneIntegration.identifier,
-                        "handled": False,
-                    },
-                )
-                sentry_sdk_alpha.capture_event(event, hint=hint)
-
-        return result
-
-    async def _sentry_patched_graphql_async(schema, source, *args, **kwargs):
-        # type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult
-        integration = sentry_sdk_alpha.get_client().get_integration(GrapheneIntegration)
-        if integration is None:
-            return await old_graphql_async(schema, source, *args, **kwargs)
-
-        scope = sentry_sdk_alpha.get_isolation_scope()
-        scope.add_event_processor(_event_processor)
-
-        with graphql_span(schema, source, kwargs):
-            result = await old_graphql_async(schema, source, *args, **kwargs)
-
-        with capture_internal_exceptions():
-            client = sentry_sdk_alpha.get_client()
-            for error in result.errors or []:
-                event, hint = event_from_exception(
-                    error,
-                    client_options=client.options,
-                    mechanism={
-                        "type": GrapheneIntegration.identifier,
-                        "handled": False,
-                    },
-                )
-                sentry_sdk_alpha.capture_event(event, hint=hint)
-
-        return result
-
-    graphene_schema.graphql_sync = _sentry_patched_graphql_sync
-    graphene_schema.graphql = _sentry_patched_graphql_async
-
-
-def _event_processor(event, hint):
-    # type: (Event, Dict[str, Any]) -> Event
-    if should_send_default_pii():
-        request_info = event.setdefault("request", {})
-        request_info["api_target"] = "graphql"
-
-    elif event.get("request", {}).get("data"):
-        del event["request"]["data"]
-
-    return event
-
-
-@contextmanager
-def graphql_span(schema, source, kwargs):
-    # type: (GraphQLSchema, Union[str, Source], Dict[str, Any]) -> Generator[None, None, None]
-    operation_name = kwargs.get("operation_name")
-
-    operation_type = "query"
-    op = OP.GRAPHQL_QUERY
-    if source.strip().startswith("mutation"):
-        operation_type = "mutation"
-        op = OP.GRAPHQL_MUTATION
-    elif source.strip().startswith("subscription"):
-        operation_type = "subscription"
-        op = OP.GRAPHQL_SUBSCRIPTION
-
-    sentry_sdk_alpha.add_breadcrumb(
-        crumb={
-            "data": {
-                "operation_name": operation_name,
-                "operation_type": operation_type,
-            },
-            "category": "graphql.operation",
-        },
-    )
-
-    with sentry_sdk_alpha.start_span(
-        op=op, name=operation_name, only_if_parent=True
-    ) as graphql_span:
-        graphql_span.set_attribute("graphql.document", source)
-        graphql_span.set_attribute("graphql.operation.name", operation_name)
-        graphql_span.set_attribute("graphql.operation.type", operation_type)
-        yield
diff --git a/src/sentry_sdk_alpha/integrations/grpc/__init__.py b/src/sentry_sdk_alpha/integrations/grpc/__init__.py
deleted file mode 100644
index a5b00477bd6539..00000000000000
--- a/src/sentry_sdk_alpha/integrations/grpc/__init__.py
+++ /dev/null
@@ -1,148 +0,0 @@
-from collections.abc import Sequence
-from functools import wraps
-from typing import TYPE_CHECKING, Any, Optional
-
-import grpc
-from grpc import Channel, Server, intercept_channel
-from grpc.aio import Channel as AsyncChannel
-from grpc.aio import Server as AsyncServer
-
-from sentry_sdk_alpha.integrations import Integration
-
-from .aio.client import SentryUnaryStreamClientInterceptor as AsyncUnaryStreamClientIntercetor
-from .aio.client import SentryUnaryUnaryClientInterceptor as AsyncUnaryUnaryClientInterceptor
-from .aio.server import ServerInterceptor as AsyncServerInterceptor
-from .client import ClientInterceptor
-from .server import ServerInterceptor
-
-# Hack to get new Python features working in older versions
-# without introducing a hard dependency on `typing_extensions`
-# from: https://stackoverflow.com/a/71944042/300572
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import ParamSpec
-else:
-    # Fake ParamSpec
-    class ParamSpec:
-        def __init__(self, _):
-            self.args = None
-            self.kwargs = None
-
-    # Callable[anything] will return None
-    class _Callable:
-        def __getitem__(self, _):
-            return None
-
-    # Make instances
-    Callable = _Callable()
-
-P = ParamSpec("P")
-
-
-def _wrap_channel_sync(func: Callable[P, Channel]) -> Callable[P, Channel]:
-    "Wrapper for synchronous secure and insecure channel."
-
-    @wraps(func)
-    def patched_channel(*args: Any, **kwargs: Any) -> Channel:
-        channel = func(*args, **kwargs)
-        if not ClientInterceptor._is_intercepted:
-            ClientInterceptor._is_intercepted = True
-            return intercept_channel(channel, ClientInterceptor())
-        else:
-            return channel
-
-    return patched_channel
-
-
-def _wrap_intercept_channel(func: Callable[P, Channel]) -> Callable[P, Channel]:
-    @wraps(func)
-    def patched_intercept_channel(
-        channel: Channel, *interceptors: grpc.ServerInterceptor
-    ) -> Channel:
-        if ClientInterceptor._is_intercepted:
-            interceptors = tuple(
-                [
-                    interceptor
-                    for interceptor in interceptors
-                    if not isinstance(interceptor, ClientInterceptor)
-                ]
-            )
-        else:
-            interceptors = interceptors
-        return intercept_channel(channel, *interceptors)
-
-    return patched_intercept_channel  # type: ignore
-
-
-def _wrap_channel_async(func: Callable[P, AsyncChannel]) -> Callable[P, AsyncChannel]:
-    "Wrapper for asynchronous secure and insecure channel."
-
-    @wraps(func)
-    def patched_channel(  # type: ignore
-        *args: P.args,
-        interceptors: Sequence[grpc.aio.ClientInterceptor] | None = None,
-        **kwargs: P.kwargs,
-    ) -> Channel:
-        sentry_interceptors = [
-            AsyncUnaryUnaryClientInterceptor(),
-            AsyncUnaryStreamClientIntercetor(),
-        ]
-        interceptors = [*sentry_interceptors, *(interceptors or [])]
-        return func(*args, interceptors=interceptors, **kwargs)  # type: ignore
-
-    return patched_channel  # type: ignore
-
-
-def _wrap_sync_server(func: Callable[P, Server]) -> Callable[P, Server]:
-    """Wrapper for synchronous server."""
-
-    @wraps(func)
-    def patched_server(  # type: ignore
-        *args: P.args,
-        interceptors: Sequence[grpc.ServerInterceptor] | None = None,
-        **kwargs: P.kwargs,
-    ) -> Server:
-        interceptors = [
-            interceptor
-            for interceptor in interceptors or []
-            if not isinstance(interceptor, ServerInterceptor)
-        ]
-        server_interceptor = ServerInterceptor()
-        interceptors = [server_interceptor, *(interceptors or [])]
-        return func(*args, interceptors=interceptors, **kwargs)  # type: ignore
-
-    return patched_server  # type: ignore
-
-
-def _wrap_async_server(func: Callable[P, AsyncServer]) -> Callable[P, AsyncServer]:
-    """Wrapper for asynchronous server."""
-
-    @wraps(func)
-    def patched_aio_server(  # type: ignore
-        *args: P.args,
-        interceptors: Sequence[grpc.ServerInterceptor] | None = None,
-        **kwargs: P.kwargs,
-    ) -> Server:
-        server_interceptor = AsyncServerInterceptor()
-        interceptors = (server_interceptor, *(interceptors or []))
-        return func(*args, interceptors=interceptors, **kwargs)  # type: ignore
-
-    return patched_aio_server  # type: ignore
-
-
-class GRPCIntegration(Integration):
-    identifier = "grpc"
-
-    @staticmethod
-    def setup_once() -> None:
-        import grpc
-
-        grpc.insecure_channel = _wrap_channel_sync(grpc.insecure_channel)
-        grpc.secure_channel = _wrap_channel_sync(grpc.secure_channel)
-        grpc.intercept_channel = _wrap_intercept_channel(grpc.intercept_channel)
-
-        grpc.aio.insecure_channel = _wrap_channel_async(grpc.aio.insecure_channel)
-        grpc.aio.secure_channel = _wrap_channel_async(grpc.aio.secure_channel)
-
-        grpc.server = _wrap_sync_server(grpc.server)
-        grpc.aio.server = _wrap_async_server(grpc.aio.server)
diff --git a/src/sentry_sdk_alpha/integrations/grpc/aio/__init__.py b/src/sentry_sdk_alpha/integrations/grpc/aio/__init__.py
deleted file mode 100644
index 4d218152548e67..00000000000000
--- a/src/sentry_sdk_alpha/integrations/grpc/aio/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from .client import ClientInterceptor
-from .server import ServerInterceptor
-
-__all__ = [
-    "ClientInterceptor",
-    "ServerInterceptor",
-]
diff --git a/src/sentry_sdk_alpha/integrations/grpc/aio/client.py b/src/sentry_sdk_alpha/integrations/grpc/aio/client.py
deleted file mode 100644
index 34617a4c76d63d..00000000000000
--- a/src/sentry_sdk_alpha/integrations/grpc/aio/client.py
+++ /dev/null
@@ -1,101 +0,0 @@
-from collections.abc import AsyncIterable, Callable
-from typing import Any, Union
-
-from google.protobuf.message import Message
-from grpc.aio import (
-    ClientCallDetails,
-    Metadata,
-    UnaryStreamCall,
-    UnaryStreamClientInterceptor,
-    UnaryUnaryCall,
-    UnaryUnaryClientInterceptor,
-)
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations.grpc.consts import SPAN_ORIGIN
-
-
-class ClientInterceptor:
-    @staticmethod
-    def _update_client_call_details_metadata_from_scope(
-        client_call_details: ClientCallDetails,
-    ) -> ClientCallDetails:
-        if client_call_details.metadata is None:
-            client_call_details = client_call_details._replace(metadata=Metadata())
-        elif not isinstance(client_call_details.metadata, Metadata):
-            # This is a workaround for a GRPC bug, which was fixed in grpcio v1.60.0
-            # See https://github.com/grpc/grpc/issues/34298.
-            client_call_details = client_call_details._replace(
-                metadata=Metadata.from_tuple(client_call_details.metadata)
-            )
-        for (
-            key,
-            value,
-        ) in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers():
-            client_call_details.metadata.add(key, value)
-        return client_call_details
-
-
-class SentryUnaryUnaryClientInterceptor(ClientInterceptor, UnaryUnaryClientInterceptor):  # type: ignore
-    async def intercept_unary_unary(
-        self,
-        continuation: Callable[[ClientCallDetails, Message], UnaryUnaryCall],
-        client_call_details: ClientCallDetails,
-        request: Message,
-    ) -> UnaryUnaryCall | Message:
-        method = client_call_details.method
-        if isinstance(method, bytes):
-            method = method.decode()
-
-        with sentry_sdk_alpha.start_span(
-            op=OP.GRPC_CLIENT,
-            name="unary unary call to %s" % method,
-            origin=SPAN_ORIGIN,
-            only_if_parent=True,
-        ) as span:
-            span.set_attribute("type", "unary unary")
-            span.set_attribute("method", method)
-
-            client_call_details = self._update_client_call_details_metadata_from_scope(
-                client_call_details
-            )
-
-            response = await continuation(client_call_details, request)
-            status_code = await response.code()
-            span.set_attribute("code", status_code.name)
-
-            return response
-
-
-class SentryUnaryStreamClientInterceptor(
-    ClientInterceptor, UnaryStreamClientInterceptor  # type: ignore
-):
-    async def intercept_unary_stream(
-        self,
-        continuation: Callable[[ClientCallDetails, Message], UnaryStreamCall],
-        client_call_details: ClientCallDetails,
-        request: Message,
-    ) -> AsyncIterable[Any] | UnaryStreamCall:
-        method = client_call_details.method
-        if isinstance(method, bytes):
-            method = method.decode()
-
-        with sentry_sdk_alpha.start_span(
-            op=OP.GRPC_CLIENT,
-            name="unary stream call to %s" % method,
-            origin=SPAN_ORIGIN,
-            only_if_parent=True,
-        ) as span:
-            span.set_attribute("type", "unary stream")
-            span.set_attribute("method", method)
-
-            client_call_details = self._update_client_call_details_metadata_from_scope(
-                client_call_details
-            )
-
-            response = await continuation(client_call_details, request)
-            # status_code = await response.code()
-            # span.set_attribute("code", status_code)
-
-            return response
diff --git a/src/sentry_sdk_alpha/integrations/grpc/aio/server.py b/src/sentry_sdk_alpha/integrations/grpc/aio/server.py
deleted file mode 100644
index 8daf0a5f74e6cf..00000000000000
--- a/src/sentry_sdk_alpha/integrations/grpc/aio/server.py
+++ /dev/null
@@ -1,98 +0,0 @@
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations import DidNotEnable
-from sentry_sdk_alpha.integrations.grpc.consts import SPAN_ORIGIN
-from sentry_sdk_alpha.tracing import TransactionSource
-from sentry_sdk_alpha.utils import event_from_exception
-
-if TYPE_CHECKING:
-    from collections.abc import Awaitable, Callable
-    from typing import Any, Optional
-
-
-try:
-    import grpc
-    from grpc import HandlerCallDetails, RpcMethodHandler
-    from grpc.aio import AbortError, ServicerContext
-except ImportError:
-    raise DidNotEnable("grpcio is not installed")
-
-
-class ServerInterceptor(grpc.aio.ServerInterceptor):  # type: ignore
-    def __init__(self, find_name=None):
-        # type: (ServerInterceptor, Callable[[ServicerContext], str] | None) -> None
-        self._find_method_name = find_name or self._find_name
-
-        super().__init__()
-
-    async def intercept_service(self, continuation, handler_call_details):
-        # type: (ServerInterceptor, Callable[[HandlerCallDetails], Awaitable[RpcMethodHandler]], HandlerCallDetails) -> Optional[Awaitable[RpcMethodHandler]]
-        self._handler_call_details = handler_call_details
-        handler = await continuation(handler_call_details)
-        if handler is None:
-            return None
-
-        if not handler.request_streaming and not handler.response_streaming:
-            handler_factory = grpc.unary_unary_rpc_method_handler
-
-            async def wrapped(request, context):
-                # type: (Any, ServicerContext) -> Any
-                name = self._find_method_name(context)
-                if not name:
-                    return await handler(request, context)
-
-                # What if the headers are empty?
-                with sentry_sdk_alpha.continue_trace(dict(context.invocation_metadata())):
-                    with sentry_sdk_alpha.start_span(
-                        op=OP.GRPC_SERVER,
-                        name=name,
-                        source=TransactionSource.CUSTOM,
-                        origin=SPAN_ORIGIN,
-                    ):
-                        try:
-                            return await handler.unary_unary(request, context)
-                        except AbortError:
-                            raise
-                        except Exception as exc:
-                            event, hint = event_from_exception(
-                                exc,
-                                mechanism={"type": "grpc", "handled": False},
-                            )
-                            sentry_sdk_alpha.capture_event(event, hint=hint)
-                            raise
-
-        elif not handler.request_streaming and handler.response_streaming:
-            handler_factory = grpc.unary_stream_rpc_method_handler
-
-            async def wrapped(request, context):  # type: ignore
-                # type: (Any, ServicerContext) -> Any
-                async for r in handler.unary_stream(request, context):
-                    yield r
-
-        elif handler.request_streaming and not handler.response_streaming:
-            handler_factory = grpc.stream_unary_rpc_method_handler
-
-            async def wrapped(request, context):
-                # type: (Any, ServicerContext) -> Any
-                response = handler.stream_unary(request, context)
-                return await response
-
-        elif handler.request_streaming and handler.response_streaming:
-            handler_factory = grpc.stream_stream_rpc_method_handler
-
-            async def wrapped(request, context):  # type: ignore
-                # type: (Any, ServicerContext) -> Any
-                async for r in handler.stream_stream(request, context):
-                    yield r
-
-        return handler_factory(
-            wrapped,
-            request_deserializer=handler.request_deserializer,
-            response_serializer=handler.response_serializer,
-        )
-
-    def _find_name(self, context):
-        # type: (ServicerContext) -> str
-        return self._handler_call_details.method
diff --git a/src/sentry_sdk_alpha/integrations/grpc/client.py b/src/sentry_sdk_alpha/integrations/grpc/client.py
deleted file mode 100644
index 130a5c06b2afb7..00000000000000
--- a/src/sentry_sdk_alpha/integrations/grpc/client.py
+++ /dev/null
@@ -1,91 +0,0 @@
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations import DidNotEnable
-from sentry_sdk_alpha.integrations.grpc.consts import SPAN_ORIGIN
-
-if TYPE_CHECKING:
-    from collections.abc import Callable, Iterable, Iterator
-    from typing import Any, Union
-
-try:
-    import grpc
-    from google.protobuf.message import Message
-    from grpc import Call, ClientCallDetails
-    from grpc._interceptor import _UnaryOutcome
-    from grpc.aio._interceptor import UnaryStreamCall
-except ImportError:
-    raise DidNotEnable("grpcio is not installed")
-
-
-class ClientInterceptor(
-    grpc.UnaryUnaryClientInterceptor, grpc.UnaryStreamClientInterceptor  # type: ignore
-):
-    _is_intercepted = False
-
-    def intercept_unary_unary(self, continuation, client_call_details, request):
-        # type: (ClientInterceptor, Callable[[ClientCallDetails, Message], _UnaryOutcome], ClientCallDetails, Message) -> _UnaryOutcome
-        method = client_call_details.method
-
-        with sentry_sdk_alpha.start_span(
-            op=OP.GRPC_CLIENT,
-            name="unary unary call to %s" % method,
-            origin=SPAN_ORIGIN,
-            only_if_parent=True,
-        ) as span:
-            span.set_attribute("type", "unary unary")
-            span.set_attribute("method", method)
-
-            client_call_details = self._update_client_call_details_metadata_from_scope(
-                client_call_details
-            )
-
-            response = continuation(client_call_details, request)
-            span.set_attribute("code", response.code().name)
-
-            return response
-
-    def intercept_unary_stream(self, continuation, client_call_details, request):
-        # type: (ClientInterceptor, Callable[[ClientCallDetails, Message], Union[Iterable[Any], UnaryStreamCall]], ClientCallDetails, Message) -> Union[Iterator[Message], Call]
-        method = client_call_details.method
-
-        with sentry_sdk_alpha.start_span(
-            op=OP.GRPC_CLIENT,
-            name="unary stream call to %s" % method,
-            origin=SPAN_ORIGIN,
-            only_if_parent=True,
-        ) as span:
-            span.set_attribute("type", "unary stream")
-            span.set_attribute("method", method)
-
-            client_call_details = self._update_client_call_details_metadata_from_scope(
-                client_call_details
-            )
-
-            response = continuation(client_call_details, request)  # type: UnaryStreamCall
-            # Setting code on unary-stream leads to execution getting stuck
-            # span.set_attribute("code", response.code().name)
-
-            return response
-
-    @staticmethod
-    def _update_client_call_details_metadata_from_scope(client_call_details):
-        # type: (ClientCallDetails) -> ClientCallDetails
-        metadata = list(client_call_details.metadata) if client_call_details.metadata else []
-        for (
-            key,
-            value,
-        ) in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers():
-            metadata.append((key, value))
-
-        client_call_details = grpc._interceptor._ClientCallDetails(
-            method=client_call_details.method,
-            timeout=client_call_details.timeout,
-            metadata=metadata,
-            credentials=client_call_details.credentials,
-            wait_for_ready=client_call_details.wait_for_ready,
-            compression=client_call_details.compression,
-        )
-
-        return client_call_details
diff --git a/src/sentry_sdk_alpha/integrations/grpc/consts.py b/src/sentry_sdk_alpha/integrations/grpc/consts.py
deleted file mode 100644
index 9fdb975caf4cb4..00000000000000
--- a/src/sentry_sdk_alpha/integrations/grpc/consts.py
+++ /dev/null
@@ -1 +0,0 @@
-SPAN_ORIGIN = "auto.grpc.grpc"
diff --git a/src/sentry_sdk_alpha/integrations/grpc/server.py b/src/sentry_sdk_alpha/integrations/grpc/server.py
deleted file mode 100644
index ad91bb077c70cb..00000000000000
--- a/src/sentry_sdk_alpha/integrations/grpc/server.py
+++ /dev/null
@@ -1,66 +0,0 @@
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations import DidNotEnable
-from sentry_sdk_alpha.integrations.grpc.consts import SPAN_ORIGIN
-from sentry_sdk_alpha.tracing import TransactionSource
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Optional
-
-    from google.protobuf.message import Message
-
-try:
-    import grpc
-    from grpc import HandlerCallDetails, RpcMethodHandler, ServicerContext
-except ImportError:
-    raise DidNotEnable("grpcio is not installed")
-
-
-class ServerInterceptor(grpc.ServerInterceptor):  # type: ignore
-    def __init__(self, find_name=None):
-        # type: (ServerInterceptor, Optional[Callable[[ServicerContext], str]]) -> None
-        self._find_method_name = find_name or ServerInterceptor._find_name
-
-        super().__init__()
-
-    def intercept_service(self, continuation, handler_call_details):
-        # type: (ServerInterceptor, Callable[[HandlerCallDetails], RpcMethodHandler], HandlerCallDetails) -> RpcMethodHandler
-        handler = continuation(handler_call_details)
-        if not handler or not handler.unary_unary:
-            return handler
-
-        def behavior(request, context):
-            # type: (Message, ServicerContext) -> Message
-            with sentry_sdk_alpha.isolation_scope():
-                name = self._find_method_name(context)
-
-                if name:
-                    metadata = dict(context.invocation_metadata())
-
-                    with sentry_sdk_alpha.continue_trace(metadata):
-                        with sentry_sdk_alpha.start_span(
-                            op=OP.GRPC_SERVER,
-                            name=name,
-                            source=TransactionSource.CUSTOM,
-                            origin=SPAN_ORIGIN,
-                        ):
-                            try:
-                                return handler.unary_unary(request, context)
-                            except BaseException as e:
-                                raise e
-                else:
-                    return handler.unary_unary(request, context)
-
-        return grpc.unary_unary_rpc_method_handler(
-            behavior,
-            request_deserializer=handler.request_deserializer,
-            response_serializer=handler.response_serializer,
-        )
-
-    @staticmethod
-    def _find_name(context):
-        # type: (ServicerContext) -> str
-        return context._rpc_event.call_details.method.decode()
diff --git a/src/sentry_sdk_alpha/integrations/httpx.py b/src/sentry_sdk_alpha/integrations/httpx.py
deleted file mode 100644
index cd522348b5f173..00000000000000
--- a/src/sentry_sdk_alpha/integrations/httpx.py
+++ /dev/null
@@ -1,198 +0,0 @@
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import BAGGAGE_HEADER_NAME, OP, SPANDATA
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-from sentry_sdk_alpha.tracing_utils import Baggage, should_propagate_trace
-from sentry_sdk_alpha.utils import (
-    SENSITIVE_DATA_SUBSTITUTE,
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    http_client_status_to_breadcrumb_level,
-    logger,
-    parse_url,
-    set_thread_info_from_span,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import MutableMapping
-    from typing import Any
-
-
-try:
-    from httpx import AsyncClient, Client, Request, Response  # type: ignore
-except ImportError:
-    raise DidNotEnable("httpx is not installed")
-
-__all__ = ["HttpxIntegration"]
-
-
-class HttpxIntegration(Integration):
-    identifier = "httpx"
-    origin = f"auto.http.{identifier}"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        """
-        httpx has its own transport layer and can be customized when needed,
-        so patch Client.send and AsyncClient.send to support both synchronous and async interfaces.
-        """
-        _install_httpx_client()
-        _install_httpx_async_client()
-
-
-def _install_httpx_client():
-    # type: () -> None
-    real_send = Client.send
-
-    @ensure_integration_enabled(HttpxIntegration, real_send)
-    def send(self, request, **kwargs):
-        # type: (Client, Request, **Any) -> Response
-        parsed_url = None
-        with capture_internal_exceptions():
-            parsed_url = parse_url(str(request.url), sanitize=False)
-
-        with sentry_sdk_alpha.start_span(
-            op=OP.HTTP_CLIENT,
-            name="%s %s"
-            % (
-                request.method,
-                parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE,
-            ),
-            origin=HttpxIntegration.origin,
-            only_if_parent=True,
-        ) as span:
-            data = {
-                SPANDATA.HTTP_METHOD: request.method,
-            }
-            set_thread_info_from_span(data, span)
-
-            if parsed_url is not None:
-                data["url"] = parsed_url.url
-                data[SPANDATA.HTTP_QUERY] = parsed_url.query
-                data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment
-
-            for key, value in data.items():
-                span.set_attribute(key, value)
-
-            if should_propagate_trace(sentry_sdk_alpha.get_client(), str(request.url)):
-                for (
-                    key,
-                    value,
-                ) in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers():
-                    logger.debug(
-                        "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
-                            key=key, value=value, url=request.url
-                        )
-                    )
-
-                    if key == BAGGAGE_HEADER_NAME:
-                        _add_sentry_baggage_to_headers(request.headers, value)
-                    else:
-                        request.headers[key] = value
-
-            rv = real_send(self, request, **kwargs)
-
-            span.set_http_status(rv.status_code)
-            span.set_attribute("reason", rv.reason_phrase)
-
-            data[SPANDATA.HTTP_STATUS_CODE] = rv.status_code
-            data["reason"] = rv.reason_phrase
-
-            sentry_sdk_alpha.add_breadcrumb(
-                type="http",
-                category="httplib",
-                data=data,
-                level=http_client_status_to_breadcrumb_level(rv.status_code),
-            )
-
-            return rv
-
-    Client.send = send
-
-
-def _install_httpx_async_client():
-    # type: () -> None
-    real_send = AsyncClient.send
-
-    async def send(self, request, **kwargs):
-        # type: (AsyncClient, Request, **Any) -> Response
-        if sentry_sdk_alpha.get_client().get_integration(HttpxIntegration) is None:
-            return await real_send(self, request, **kwargs)
-
-        parsed_url = None
-        with capture_internal_exceptions():
-            parsed_url = parse_url(str(request.url), sanitize=False)
-
-        with sentry_sdk_alpha.start_span(
-            op=OP.HTTP_CLIENT,
-            name="%s %s"
-            % (
-                request.method,
-                parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE,
-            ),
-            origin=HttpxIntegration.origin,
-            only_if_parent=True,
-        ) as span:
-            data = {
-                SPANDATA.HTTP_METHOD: request.method,
-            }
-            if parsed_url is not None:
-                data["url"] = parsed_url.url
-                data[SPANDATA.HTTP_QUERY] = parsed_url.query
-                data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment
-
-            for key, value in data.items():
-                span.set_attribute(key, value)
-
-            if should_propagate_trace(sentry_sdk_alpha.get_client(), str(request.url)):
-                for (
-                    key,
-                    value,
-                ) in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers():
-                    logger.debug(
-                        "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
-                            key=key, value=value, url=request.url
-                        )
-                    )
-                    if key == BAGGAGE_HEADER_NAME and request.headers.get(BAGGAGE_HEADER_NAME):
-                        # do not overwrite any existing baggage, just append to it
-                        request.headers[key] += "," + value
-                    else:
-                        request.headers[key] = value
-
-            rv = await real_send(self, request, **kwargs)
-
-            span.set_http_status(rv.status_code)
-            span.set_attribute("reason", rv.reason_phrase)
-
-            data[SPANDATA.HTTP_STATUS_CODE] = rv.status_code
-            data["reason"] = rv.reason_phrase
-
-            sentry_sdk_alpha.add_breadcrumb(
-                type="http",
-                category="httplib",
-                data=data,
-                level=http_client_status_to_breadcrumb_level(rv.status_code),
-            )
-
-            return rv
-
-    AsyncClient.send = send
-
-
-def _add_sentry_baggage_to_headers(headers, sentry_baggage):
-    # type: (MutableMapping[str, str], str) -> None
-    """Add the Sentry baggage to the headers.
-
-    This function directly mutates the provided headers. The provided sentry_baggage
-    is appended to the existing baggage. If the baggage already contains Sentry items,
-    they are stripped out first.
-    """
-    existing_baggage = headers.get(BAGGAGE_HEADER_NAME, "")
-    stripped_existing_baggage = Baggage.strip_sentry_baggage(existing_baggage)
-
-    separator = "," if len(stripped_existing_baggage) > 0 else ""
-
-    headers[BAGGAGE_HEADER_NAME] = stripped_existing_baggage + separator + sentry_baggage
diff --git a/src/sentry_sdk_alpha/integrations/huey.py b/src/sentry_sdk_alpha/integrations/huey.py
deleted file mode 100644
index 46f8b66d8ad793..00000000000000
--- a/src/sentry_sdk_alpha/integrations/huey.py
+++ /dev/null
@@ -1,170 +0,0 @@
-import sys
-from datetime import datetime
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.api import get_baggage, get_traceparent
-from sentry_sdk_alpha.consts import (
-    BAGGAGE_HEADER_NAME,
-    OP,
-    SENTRY_TRACE_HEADER_NAME,
-    SPANSTATUS,
-    TransactionSource,
-)
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import (
-    SENSITIVE_DATA_SUBSTITUTE,
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    event_from_exception,
-    reraise,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Optional, TypeVar, Union
-
-    from sentry_sdk_alpha._types import Event, EventProcessor, Hint
-    from sentry_sdk_alpha.utils import ExcInfo
-
-    F = TypeVar("F", bound=Callable[..., Any])
-
-try:
-    from huey.api import Huey, PeriodicTask, Result, ResultGroup, Task
-    from huey.exceptions import CancelExecution, RetryTask, TaskLockedException
-except ImportError:
-    raise DidNotEnable("Huey is not installed")
-
-
-HUEY_CONTROL_FLOW_EXCEPTIONS = (CancelExecution, RetryTask, TaskLockedException)
-
-
-class HueyIntegration(Integration):
-    identifier = "huey"
-    origin = f"auto.queue.{identifier}"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        patch_enqueue()
-        patch_execute()
-
-
-def patch_enqueue():
-    # type: () -> None
-    old_enqueue = Huey.enqueue
-
-    @ensure_integration_enabled(HueyIntegration, old_enqueue)
-    def _sentry_enqueue(self, task):
-        # type: (Huey, Task) -> Optional[Union[Result, ResultGroup]]
-        with sentry_sdk_alpha.start_span(
-            op=OP.QUEUE_SUBMIT_HUEY,
-            name=task.name,
-            origin=HueyIntegration.origin,
-            only_if_parent=True,
-        ):
-            if not isinstance(task, PeriodicTask):
-                # Attach trace propagation data to task kwargs. We do
-                # not do this for periodic tasks, as these don't
-                # really have an originating transaction.
-                task.kwargs["sentry_headers"] = {
-                    BAGGAGE_HEADER_NAME: get_baggage(),
-                    SENTRY_TRACE_HEADER_NAME: get_traceparent(),
-                }
-            return old_enqueue(self, task)
-
-    Huey.enqueue = _sentry_enqueue
-
-
-def _make_event_processor(task):
-    # type: (Any) -> EventProcessor
-    def event_processor(event, hint):
-        # type: (Event, Hint) -> Optional[Event]
-
-        with capture_internal_exceptions():
-            tags = event.setdefault("tags", {})
-            tags["huey_task_id"] = task.id
-            tags["huey_task_retry"] = task.default_retries > task.retries
-            extra = event.setdefault("extra", {})
-            extra["huey-job"] = {
-                "task": task.name,
-                "args": (task.args if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE),
-                "kwargs": (task.kwargs if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE),
-                "retry": (task.default_retries or 0) - task.retries,
-            }
-
-        return event
-
-    return event_processor
-
-
-def _capture_exception(exc_info):
-    # type: (ExcInfo) -> None
-    scope = sentry_sdk_alpha.get_current_scope()
-
-    if scope.root_span is not None:
-        if exc_info[0] in HUEY_CONTROL_FLOW_EXCEPTIONS:
-            scope.root_span.set_status(SPANSTATUS.ABORTED)
-            return
-
-        scope.root_span.set_status(SPANSTATUS.INTERNAL_ERROR)
-
-    event, hint = event_from_exception(
-        exc_info,
-        client_options=sentry_sdk_alpha.get_client().options,
-        mechanism={"type": HueyIntegration.identifier, "handled": False},
-    )
-    scope.capture_event(event, hint=hint)
-
-
-def _wrap_task_execute(func):
-    # type: (F) -> F
-
-    @ensure_integration_enabled(HueyIntegration, func)
-    def _sentry_execute(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        try:
-            result = func(*args, **kwargs)
-        except Exception:
-            exc_info = sys.exc_info()
-            _capture_exception(exc_info)
-            reraise(*exc_info)
-
-        root_span = sentry_sdk_alpha.get_current_scope().root_span
-        if root_span is not None:
-            root_span.set_status(SPANSTATUS.OK)
-
-        return result
-
-    return _sentry_execute  # type: ignore
-
-
-def patch_execute():
-    # type: () -> None
-    old_execute = Huey._execute
-
-    @ensure_integration_enabled(HueyIntegration, old_execute)
-    def _sentry_execute(self, task, timestamp=None):
-        # type: (Huey, Task, Optional[datetime]) -> Any
-        with sentry_sdk_alpha.isolation_scope() as scope:
-            with capture_internal_exceptions():
-                scope._name = "huey"
-                scope.clear_breadcrumbs()
-                scope.add_event_processor(_make_event_processor(task))
-
-            if not getattr(task, "_sentry_is_patched", False):
-                task.execute = _wrap_task_execute(task.execute)
-                task._sentry_is_patched = True
-
-            sentry_headers = task.kwargs.pop("sentry_headers", {})
-            with sentry_sdk_alpha.continue_trace(sentry_headers):
-                with sentry_sdk_alpha.start_span(
-                    name=task.name,
-                    op=OP.QUEUE_TASK_HUEY,
-                    source=TransactionSource.TASK,
-                    origin=HueyIntegration.origin,
-                ):
-                    return old_execute(self, task, timestamp)
-
-    Huey._execute = _sentry_execute
diff --git a/src/sentry_sdk_alpha/integrations/huggingface_hub.py b/src/sentry_sdk_alpha/integrations/huggingface_hub.py
deleted file mode 100644
index 135bfde34ce922..00000000000000
--- a/src/sentry_sdk_alpha/integrations/huggingface_hub.py
+++ /dev/null
@@ -1,163 +0,0 @@
-from collections.abc import Callable, Iterable
-from functools import wraps
-from typing import Any
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha import consts
-from sentry_sdk_alpha.ai.monitoring import record_token_usage
-from sentry_sdk_alpha.ai.utils import set_data_normalized
-from sentry_sdk_alpha.consts import SPANDATA
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import capture_internal_exceptions, event_from_exception
-
-try:
-    import huggingface_hub.inference._client
-    from huggingface_hub import ChatCompletionStreamOutput, TextGenerationOutput
-except ImportError:
-    raise DidNotEnable("Huggingface not installed")
-
-
-class HuggingfaceHubIntegration(Integration):
-    identifier = "huggingface_hub"
-    origin = f"auto.ai.{identifier}"
-
-    def __init__(self, include_prompts=True):
-        # type: (HuggingfaceHubIntegration, bool) -> None
-        self.include_prompts = include_prompts
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        huggingface_hub.inference._client.InferenceClient.text_generation = _wrap_text_generation(
-            huggingface_hub.inference._client.InferenceClient.text_generation
-        )
-
-
-def _capture_exception(exc):
-    # type: (Any) -> None
-    event, hint = event_from_exception(
-        exc,
-        client_options=sentry_sdk_alpha.get_client().options,
-        mechanism={"type": "huggingface_hub", "handled": False},
-    )
-    sentry_sdk_alpha.capture_event(event, hint=hint)
-
-
-def _wrap_text_generation(f):
-    # type: (Callable[..., Any]) -> Callable[..., Any]
-    @wraps(f)
-    def new_text_generation(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        integration = sentry_sdk_alpha.get_client().get_integration(HuggingfaceHubIntegration)
-        if integration is None:
-            return f(*args, **kwargs)
-
-        if "prompt" in kwargs:
-            prompt = kwargs["prompt"]
-        elif len(args) >= 2:
-            kwargs["prompt"] = args[1]
-            prompt = kwargs["prompt"]
-            args = (args[0],) + args[2:]
-        else:
-            # invalid call, let it return error
-            return f(*args, **kwargs)
-
-        model = kwargs.get("model")
-        streaming = kwargs.get("stream")
-
-        span = sentry_sdk_alpha.start_span(
-            op=consts.OP.HUGGINGFACE_HUB_CHAT_COMPLETIONS_CREATE,
-            name="Text Generation",
-            origin=HuggingfaceHubIntegration.origin,
-            only_if_parent=True,
-        )
-        span.__enter__()
-        try:
-            res = f(*args, **kwargs)
-        except Exception as e:
-            _capture_exception(e)
-            span.__exit__(None, None, None)
-            raise e from None
-
-        with capture_internal_exceptions():
-            if should_send_default_pii() and integration.include_prompts:
-                set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, prompt)
-
-            set_data_normalized(span, SPANDATA.AI_MODEL_ID, model)
-            set_data_normalized(span, SPANDATA.AI_STREAMING, streaming)
-
-            if isinstance(res, str):
-                if should_send_default_pii() and integration.include_prompts:
-                    set_data_normalized(
-                        span,
-                        SPANDATA.AI_RESPONSES,
-                        [res],
-                    )
-                span.__exit__(None, None, None)
-                return res
-
-            if isinstance(res, TextGenerationOutput):
-                if should_send_default_pii() and integration.include_prompts:
-                    set_data_normalized(
-                        span,
-                        SPANDATA.AI_RESPONSES,
-                        [res.generated_text],
-                    )
-                if res.details is not None and res.details.generated_tokens > 0:
-                    record_token_usage(span, total_tokens=res.details.generated_tokens)
-                span.__exit__(None, None, None)
-                return res
-
-            if not isinstance(res, Iterable):
-                # we only know how to deal with strings and iterables, ignore
-                set_data_normalized(span, "unknown_response", True)
-                span.__exit__(None, None, None)
-                return res
-
-            if kwargs.get("details", False):
-                # res is Iterable[TextGenerationStreamOutput]
-                def new_details_iterator():
-                    # type: () -> Iterable[ChatCompletionStreamOutput]
-                    with capture_internal_exceptions():
-                        tokens_used = 0
-                        data_buf: list[str] = []
-                        for x in res:
-                            if hasattr(x, "token") and hasattr(x.token, "text"):
-                                data_buf.append(x.token.text)
-                            if hasattr(x, "details") and hasattr(x.details, "generated_tokens"):
-                                tokens_used = x.details.generated_tokens
-                            yield x
-                        if (
-                            len(data_buf) > 0
-                            and should_send_default_pii()
-                            and integration.include_prompts
-                        ):
-                            set_data_normalized(span, SPANDATA.AI_RESPONSES, "".join(data_buf))
-                        if tokens_used > 0:
-                            record_token_usage(span, total_tokens=tokens_used)
-                    span.__exit__(None, None, None)
-
-                return new_details_iterator()
-            else:
-                # res is Iterable[str]
-
-                def new_iterator():
-                    # type: () -> Iterable[str]
-                    data_buf: list[str] = []
-                    with capture_internal_exceptions():
-                        for s in res:
-                            if isinstance(s, str):
-                                data_buf.append(s)
-                            yield s
-                        if (
-                            len(data_buf) > 0
-                            and should_send_default_pii()
-                            and integration.include_prompts
-                        ):
-                            set_data_normalized(span, SPANDATA.AI_RESPONSES, "".join(data_buf))
-                        span.__exit__(None, None, None)
-
-                return new_iterator()
-
-    return new_text_generation
diff --git a/src/sentry_sdk_alpha/integrations/langchain.py b/src/sentry_sdk_alpha/integrations/langchain.py
deleted file mode 100644
index 38cf11d6ebd784..00000000000000
--- a/src/sentry_sdk_alpha/integrations/langchain.py
+++ /dev/null
@@ -1,462 +0,0 @@
-from collections import OrderedDict
-from functools import wraps
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.ai.monitoring import record_token_usage, set_ai_pipeline_name
-from sentry_sdk_alpha.ai.utils import set_data_normalized
-from sentry_sdk_alpha.consts import OP, SPANDATA, SPANSTATUS
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.tracing import Span
-from sentry_sdk_alpha.utils import capture_internal_exceptions, logger
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Dict, List, Optional, Union
-    from uuid import UUID
-
-try:
-    from langchain_core.agents import AgentAction, AgentFinish
-    from langchain_core.callbacks import BaseCallbackHandler, manager
-    from langchain_core.messages import BaseMessage
-    from langchain_core.outputs import LLMResult
-except ImportError:
-    raise DidNotEnable("langchain not installed")
-
-
-DATA_FIELDS = {
-    "temperature": SPANDATA.AI_TEMPERATURE,
-    "top_p": SPANDATA.AI_TOP_P,
-    "top_k": SPANDATA.AI_TOP_K,
-    "function_call": SPANDATA.AI_FUNCTION_CALL,
-    "tool_calls": SPANDATA.AI_TOOL_CALLS,
-    "tools": SPANDATA.AI_TOOLS,
-    "response_format": SPANDATA.AI_RESPONSE_FORMAT,
-    "logit_bias": SPANDATA.AI_LOGIT_BIAS,
-    "tags": SPANDATA.AI_TAGS,
-}
-
-# To avoid double collecting tokens, we do *not* measure
-# token counts for models for which we have an explicit integration
-NO_COLLECT_TOKEN_MODELS = [
-    "openai-chat",
-    "anthropic-chat",
-    "cohere-chat",
-    "huggingface_endpoint",
-]
-
-
-class LangchainIntegration(Integration):
-    identifier = "langchain"
-    origin = f"auto.ai.{identifier}"
-
-    # The most number of spans (e.g., LLM calls) that can be processed at the same time.
-    max_spans = 1024
-
-    def __init__(self, include_prompts=True, max_spans=1024, tiktoken_encoding_name=None):
-        # type: (LangchainIntegration, bool, int, Optional[str]) -> None
-        self.include_prompts = include_prompts
-        self.max_spans = max_spans
-        self.tiktoken_encoding_name = tiktoken_encoding_name
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        manager._configure = _wrap_configure(manager._configure)
-
-
-class WatchedSpan:
-    num_completion_tokens = 0  # type: int
-    num_prompt_tokens = 0  # type: int
-    no_collect_tokens = False  # type: bool
-    children = []  # type: List[WatchedSpan]
-    is_pipeline = False  # type: bool
-
-    def __init__(self, span):
-        # type: (Span) -> None
-        self.span = span
-
-
-class SentryLangchainCallback(BaseCallbackHandler):  # type: ignore[misc]
-    """Base callback handler that can be used to handle callbacks from langchain."""
-
-    span_map = OrderedDict()  # type: OrderedDict[UUID, WatchedSpan]
-
-    max_span_map_size = 0
-
-    def __init__(self, max_span_map_size, include_prompts, tiktoken_encoding_name=None):
-        # type: (int, bool, Optional[str]) -> None
-        self.max_span_map_size = max_span_map_size
-        self.include_prompts = include_prompts
-
-        self.tiktoken_encoding = None
-        if tiktoken_encoding_name is not None:
-            import tiktoken  # type: ignore
-
-            self.tiktoken_encoding = tiktoken.get_encoding(tiktoken_encoding_name)
-
-    def count_tokens(self, s):
-        # type: (str) -> int
-        if self.tiktoken_encoding is not None:
-            return len(self.tiktoken_encoding.encode_ordinary(s))
-        return 0
-
-    def gc_span_map(self):
-        # type: () -> None
-
-        while len(self.span_map) > self.max_span_map_size:
-            run_id, watched_span = self.span_map.popitem(last=False)
-            self._exit_span(watched_span, run_id)
-
-    def _handle_error(self, run_id, error):
-        # type: (UUID, Any) -> None
-        if not run_id or run_id not in self.span_map:
-            return
-
-        span_data = self.span_map[run_id]
-        if not span_data:
-            return
-        sentry_sdk_alpha.capture_exception(error)
-        span_data.span.set_status(SPANSTATUS.INTERNAL_ERROR)
-        span_data.span.finish()
-        del self.span_map[run_id]
-
-    def _normalize_langchain_message(self, message):
-        # type: (BaseMessage) -> Any
-        parsed = {"content": message.content, "role": message.type}
-        parsed.update(message.additional_kwargs)
-        return parsed
-
-    def _create_span(self, run_id, parent_id, **kwargs):
-        # type: (SentryLangchainCallback, UUID, Optional[Any], Any) -> WatchedSpan
-
-        parent_watched_span = self.span_map.get(parent_id) if parent_id else None
-        sentry_span = sentry_sdk_alpha.start_span(
-            parent_span=parent_watched_span.span if parent_watched_span else None,
-            only_if_parent=True,
-            **kwargs,
-        )
-        watched_span = WatchedSpan(sentry_span)
-        if parent_watched_span:
-            parent_watched_span.children.append(watched_span)
-
-        if kwargs.get("op", "").startswith("ai.pipeline."):
-            if kwargs.get("name"):
-                set_ai_pipeline_name(kwargs.get("name"))
-            watched_span.is_pipeline = True
-
-        # the same run_id is reused for the pipeline it seems
-        # so we need to end the older span to avoid orphan spans
-        existing_span_data = self.span_map.get(run_id)
-        if existing_span_data is not None:
-            self._exit_span(existing_span_data, run_id)
-
-        self.span_map[run_id] = watched_span
-        self.gc_span_map()
-        return watched_span
-
-    def _exit_span(self, span_data, run_id):
-        # type: (SentryLangchainCallback, WatchedSpan, UUID) -> None
-
-        if span_data.is_pipeline:
-            set_ai_pipeline_name(None)
-
-        span_data.span.set_status(SPANSTATUS.OK)
-        span_data.span.finish()
-        del self.span_map[run_id]
-
-    def on_llm_start(
-        self,
-        serialized,
-        prompts,
-        *,
-        run_id,
-        tags=None,
-        parent_run_id=None,
-        metadata=None,
-        **kwargs,
-    ):
-        # type: (SentryLangchainCallback, Dict[str, Any], List[str], UUID, Optional[List[str]], Optional[UUID], Optional[Dict[str, Any]], Any) -> Any
-        """Run when LLM starts running."""
-        with capture_internal_exceptions():
-            if not run_id:
-                return
-            all_params = kwargs.get("invocation_params", {})
-            all_params.update(serialized.get("kwargs", {}))
-            watched_span = self._create_span(
-                run_id,
-                kwargs.get("parent_run_id"),
-                op=OP.LANGCHAIN_RUN,
-                name=kwargs.get("name") or "Langchain LLM call",
-                origin=LangchainIntegration.origin,
-            )
-            span = watched_span.span
-            if should_send_default_pii() and self.include_prompts:
-                set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, prompts)
-            for k, v in DATA_FIELDS.items():
-                if k in all_params:
-                    set_data_normalized(span, v, all_params[k])
-
-    def on_chat_model_start(self, serialized, messages, *, run_id, **kwargs):
-        # type: (SentryLangchainCallback, Dict[str, Any], List[List[BaseMessage]], UUID, Any) -> Any
-        """Run when Chat Model starts running."""
-        with capture_internal_exceptions():
-            if not run_id:
-                return
-            all_params = kwargs.get("invocation_params", {})
-            all_params.update(serialized.get("kwargs", {}))
-            watched_span = self._create_span(
-                run_id,
-                kwargs.get("parent_run_id"),
-                op=OP.LANGCHAIN_CHAT_COMPLETIONS_CREATE,
-                name=kwargs.get("name") or "Langchain Chat Model",
-                origin=LangchainIntegration.origin,
-            )
-            span = watched_span.span
-            model = all_params.get(
-                "model", all_params.get("model_name", all_params.get("model_id"))
-            )
-            watched_span.no_collect_tokens = any(
-                x in all_params.get("_type", "") for x in NO_COLLECT_TOKEN_MODELS
-            )
-
-            if not model and "anthropic" in all_params.get("_type"):
-                model = "claude-2"
-            if model:
-                span.set_attribute(SPANDATA.AI_MODEL_ID, model)
-            if should_send_default_pii() and self.include_prompts:
-                set_data_normalized(
-                    span,
-                    SPANDATA.AI_INPUT_MESSAGES,
-                    [[self._normalize_langchain_message(x) for x in list_] for list_ in messages],
-                )
-            for k, v in DATA_FIELDS.items():
-                if k in all_params:
-                    set_data_normalized(span, v, all_params[k])
-            if not watched_span.no_collect_tokens:
-                for list_ in messages:
-                    for message in list_:
-                        self.span_map[run_id].num_prompt_tokens += self.count_tokens(
-                            message.content
-                        ) + self.count_tokens(message.type)
-
-    def on_llm_new_token(self, token, *, run_id, **kwargs):
-        # type: (SentryLangchainCallback, str, UUID, Any) -> Any
-        """Run on new LLM token. Only available when streaming is enabled."""
-        with capture_internal_exceptions():
-            if not run_id or run_id not in self.span_map:
-                return
-            span_data = self.span_map[run_id]
-            if not span_data or span_data.no_collect_tokens:
-                return
-            span_data.num_completion_tokens += self.count_tokens(token)
-
-    def on_llm_end(self, response, *, run_id, **kwargs):
-        # type: (SentryLangchainCallback, LLMResult, UUID, Any) -> Any
-        """Run when LLM ends running."""
-        with capture_internal_exceptions():
-            if not run_id:
-                return
-
-            token_usage = response.llm_output.get("token_usage") if response.llm_output else None
-
-            span_data = self.span_map[run_id]
-            if not span_data:
-                return
-
-            if should_send_default_pii() and self.include_prompts:
-                set_data_normalized(
-                    span_data.span,
-                    SPANDATA.AI_RESPONSES,
-                    [[x.text for x in list_] for list_ in response.generations],
-                )
-
-            if not span_data.no_collect_tokens:
-                if token_usage:
-                    record_token_usage(
-                        span_data.span,
-                        token_usage.get("prompt_tokens"),
-                        token_usage.get("completion_tokens"),
-                        token_usage.get("total_tokens"),
-                    )
-                else:
-                    record_token_usage(
-                        span_data.span,
-                        span_data.num_prompt_tokens,
-                        span_data.num_completion_tokens,
-                    )
-
-            self._exit_span(span_data, run_id)
-
-    def on_llm_error(self, error, *, run_id, **kwargs):
-        # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any
-        """Run when LLM errors."""
-        with capture_internal_exceptions():
-            self._handle_error(run_id, error)
-
-    def on_chain_start(self, serialized, inputs, *, run_id, **kwargs):
-        # type: (SentryLangchainCallback, Dict[str, Any], Dict[str, Any], UUID, Any) -> Any
-        """Run when chain starts running."""
-        with capture_internal_exceptions():
-            if not run_id:
-                return
-            watched_span = self._create_span(
-                run_id,
-                kwargs.get("parent_run_id"),
-                op=(
-                    OP.LANGCHAIN_RUN
-                    if kwargs.get("parent_run_id") is not None
-                    else OP.LANGCHAIN_PIPELINE
-                ),
-                name=kwargs.get("name") or "Chain execution",
-                origin=LangchainIntegration.origin,
-            )
-            metadata = kwargs.get("metadata")
-            if metadata:
-                set_data_normalized(watched_span.span, SPANDATA.AI_METADATA, metadata)
-
-    def on_chain_end(self, outputs, *, run_id, **kwargs):
-        # type: (SentryLangchainCallback, Dict[str, Any], UUID, Any) -> Any
-        """Run when chain ends running."""
-        with capture_internal_exceptions():
-            if not run_id or run_id not in self.span_map:
-                return
-
-            span_data = self.span_map[run_id]
-            if not span_data:
-                return
-            self._exit_span(span_data, run_id)
-
-    def on_chain_error(self, error, *, run_id, **kwargs):
-        # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any
-        """Run when chain errors."""
-        self._handle_error(run_id, error)
-
-    def on_agent_action(self, action, *, run_id, **kwargs):
-        # type: (SentryLangchainCallback, AgentAction, UUID, Any) -> Any
-        with capture_internal_exceptions():
-            if not run_id:
-                return
-            watched_span = self._create_span(
-                run_id,
-                kwargs.get("parent_run_id"),
-                op=OP.LANGCHAIN_AGENT,
-                name=action.tool or "AI tool usage",
-                origin=LangchainIntegration.origin,
-            )
-            if action.tool_input and should_send_default_pii() and self.include_prompts:
-                set_data_normalized(
-                    watched_span.span, SPANDATA.AI_INPUT_MESSAGES, action.tool_input
-                )
-
-    def on_agent_finish(self, finish, *, run_id, **kwargs):
-        # type: (SentryLangchainCallback, AgentFinish, UUID, Any) -> Any
-        with capture_internal_exceptions():
-            if not run_id:
-                return
-
-            span_data = self.span_map[run_id]
-            if not span_data:
-                return
-            if should_send_default_pii() and self.include_prompts:
-                set_data_normalized(
-                    span_data.span, SPANDATA.AI_RESPONSES, finish.return_values.items()
-                )
-            self._exit_span(span_data, run_id)
-
-    def on_tool_start(self, serialized, input_str, *, run_id, **kwargs):
-        # type: (SentryLangchainCallback, Dict[str, Any], str, UUID, Any) -> Any
-        """Run when tool starts running."""
-        with capture_internal_exceptions():
-            if not run_id:
-                return
-            watched_span = self._create_span(
-                run_id,
-                kwargs.get("parent_run_id"),
-                op=OP.LANGCHAIN_TOOL,
-                name=serialized.get("name") or kwargs.get("name") or "AI tool usage",
-                origin=LangchainIntegration.origin,
-            )
-            if should_send_default_pii() and self.include_prompts:
-                set_data_normalized(
-                    watched_span.span,
-                    SPANDATA.AI_INPUT_MESSAGES,
-                    kwargs.get("inputs", [input_str]),
-                )
-                if kwargs.get("metadata"):
-                    set_data_normalized(
-                        watched_span.span, SPANDATA.AI_METADATA, kwargs.get("metadata")
-                    )
-
-    def on_tool_end(self, output, *, run_id, **kwargs):
-        # type: (SentryLangchainCallback, str, UUID, Any) -> Any
-        """Run when tool ends running."""
-        with capture_internal_exceptions():
-            if not run_id or run_id not in self.span_map:
-                return
-
-            span_data = self.span_map[run_id]
-            if not span_data:
-                return
-            if should_send_default_pii() and self.include_prompts:
-                set_data_normalized(span_data.span, SPANDATA.AI_RESPONSES, output)
-            self._exit_span(span_data, run_id)
-
-    def on_tool_error(self, error, *args, run_id, **kwargs):
-        # type: (SentryLangchainCallback, Union[Exception, KeyboardInterrupt], UUID, Any) -> Any
-        """Run when tool errors."""
-        self._handle_error(run_id, error)
-
-
-def _wrap_configure(f):
-    # type: (Callable[..., Any]) -> Callable[..., Any]
-
-    @wraps(f)
-    def new_configure(*args, **kwargs):
-        # type: (Any, Any) -> Any
-
-        integration = sentry_sdk_alpha.get_client().get_integration(LangchainIntegration)
-        if integration is None:
-            return f(*args, **kwargs)
-
-        with capture_internal_exceptions():
-            new_callbacks = []  # type: List[BaseCallbackHandler]
-            if "local_callbacks" in kwargs:
-                existing_callbacks = kwargs["local_callbacks"]
-                kwargs["local_callbacks"] = new_callbacks
-            elif len(args) > 2:
-                existing_callbacks = args[2]
-                args = (
-                    args[0],
-                    args[1],
-                    new_callbacks,
-                ) + args[3:]
-            else:
-                existing_callbacks = []
-
-            if existing_callbacks:
-                if isinstance(existing_callbacks, list):
-                    for cb in existing_callbacks:
-                        new_callbacks.append(cb)
-                elif isinstance(existing_callbacks, BaseCallbackHandler):
-                    new_callbacks.append(existing_callbacks)
-                else:
-                    logger.debug("Unknown callback type: %s", existing_callbacks)
-
-            already_added = False
-            for callback in new_callbacks:
-                if isinstance(callback, SentryLangchainCallback):
-                    already_added = True
-
-            if not already_added:
-                new_callbacks.append(
-                    SentryLangchainCallback(
-                        integration.max_spans,
-                        integration.include_prompts,
-                        integration.tiktoken_encoding_name,
-                    )
-                )
-        return f(*args, **kwargs)
-
-    return new_configure
diff --git a/src/sentry_sdk_alpha/integrations/launchdarkly.py b/src/sentry_sdk_alpha/integrations/launchdarkly.py
deleted file mode 100644
index 424c0605ad16ed..00000000000000
--- a/src/sentry_sdk_alpha/integrations/launchdarkly.py
+++ /dev/null
@@ -1,62 +0,0 @@
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha.feature_flags import add_feature_flag
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-
-try:
-    import ldclient
-    from ldclient.hook import Hook, Metadata
-
-    if TYPE_CHECKING:
-        from typing import Any
-
-        from ldclient import LDClient
-        from ldclient.evaluation import EvaluationDetail
-        from ldclient.hook import EvaluationSeriesContext
-except ImportError:
-    raise DidNotEnable("LaunchDarkly is not installed")
-
-
-class LaunchDarklyIntegration(Integration):
-    identifier = "launchdarkly"
-
-    def __init__(self, ld_client=None):
-        # type: (LDClient | None) -> None
-        """
-        :param client: An initialized LDClient instance. If a client is not provided, this
-            integration will attempt to use the shared global instance.
-        """
-        try:
-            client = ld_client or ldclient.get()
-        except Exception as exc:
-            raise DidNotEnable("Error getting LaunchDarkly client. " + repr(exc))
-
-        if not client.is_initialized():
-            raise DidNotEnable("LaunchDarkly client is not initialized.")
-
-        # Register the flag collection hook with the LD client.
-        client.add_hook(LaunchDarklyHook())
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        pass
-
-
-class LaunchDarklyHook(Hook):
-
-    @property
-    def metadata(self):
-        # type: () -> Metadata
-        return Metadata(name="sentry-flag-auditor")
-
-    def after_evaluation(self, series_context, data, detail):
-        # type: (EvaluationSeriesContext, dict[Any, Any], EvaluationDetail) -> dict[Any, Any]
-        if isinstance(detail.value, bool):
-            add_feature_flag(series_context.key, detail.value)
-
-        return data
-
-    def before_evaluation(self, series_context, data):
-        # type: (EvaluationSeriesContext, dict[Any, Any]) -> dict[Any, Any]
-        return data  # No-op.
diff --git a/src/sentry_sdk_alpha/integrations/litestar.py b/src/sentry_sdk_alpha/integrations/litestar.py
deleted file mode 100644
index 81e04eb31f82c5..00000000000000
--- a/src/sentry_sdk_alpha/integrations/litestar.py
+++ /dev/null
@@ -1,302 +0,0 @@
-from collections.abc import Set
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP, SOURCE_FOR_STYLE, TransactionSource
-from sentry_sdk_alpha.integrations import (
-    _DEFAULT_FAILED_REQUEST_STATUS_CODES,
-    DidNotEnable,
-    Integration,
-)
-from sentry_sdk_alpha.integrations.asgi import SentryAsgiMiddleware
-from sentry_sdk_alpha.integrations.logging import ignore_logger
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import (
-    ensure_integration_enabled,
-    event_from_exception,
-    transaction_from_function,
-)
-
-try:
-    from litestar import Litestar, Request  # type: ignore
-    from litestar.data_extractors import ConnectionDataExtractor  # type: ignore
-    from litestar.exceptions import HTTPException  # type: ignore
-    from litestar.handlers.base import BaseRouteHandler  # type: ignore
-    from litestar.middleware import DefineMiddleware  # type: ignore
-    from litestar.routes.http import HTTPRoute  # type: ignore
-except ImportError:
-    raise DidNotEnable("Litestar is not installed")
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from typing import Any, Optional, Union
-
-    from litestar.middleware import MiddlewareProtocol
-    from litestar.types import HTTPReceiveMessage, HTTPScope, Message, Middleware, Receive
-    from litestar.types import Scope as LitestarScope  # type: ignore
-    from litestar.types import Send, WebSocketReceiveMessage
-    from litestar.types.asgi_types import ASGIApp  # type: ignore
-
-    from sentry_sdk_alpha._types import Event, Hint
-
-_DEFAULT_TRANSACTION_NAME = "generic Litestar request"
-
-
-class LitestarIntegration(Integration):
-    identifier = "litestar"
-    origin = f"auto.http.{identifier}"
-
-    def __init__(
-        self,
-        failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES,  # type: Set[int]
-    ) -> None:
-        self.failed_request_status_codes = failed_request_status_codes
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        patch_app_init()
-        patch_middlewares()
-        patch_http_route_handle()
-
-        # The following line follows the pattern found in other integrations such as `DjangoIntegration.setup_once`.
-        # The Litestar `ExceptionHandlerMiddleware.__call__` catches exceptions and does the following
-        # (among other things):
-        #   1. Logs them, some at least (such as 500s) as errors
-        #   2. Calls after_exception hooks
-        # The `LitestarIntegration`` provides an after_exception hook (see `patch_app_init` below) to create a Sentry event
-        # from an exception, which ends up being called during step 2 above. However, the Sentry `LoggingIntegration` will
-        # by default create a Sentry event from error logs made in step 1 if we do not prevent it from doing so.
-        ignore_logger("litestar")
-
-
-class SentryLitestarASGIMiddleware(SentryAsgiMiddleware):
-    def __init__(self, app, span_origin=LitestarIntegration.origin):
-        # type: (ASGIApp, str) -> None
-
-        super().__init__(
-            app=app,
-            unsafe_context_data=False,
-            transaction_style="endpoint",
-            mechanism_type="asgi",
-            span_origin=span_origin,
-        )
-
-
-def patch_app_init():
-    # type: () -> None
-    """
-    Replaces the Litestar class's `__init__` function in order to inject `after_exception` handlers and set the
-    `SentryLitestarASGIMiddleware` as the outmost middleware in the stack.
-    See:
-    - https://docs.litestar.dev/2/usage/applications.html#after-exception
-    - https://docs.litestar.dev/2/usage/middleware/using-middleware.html
-    """
-    old__init__ = Litestar.__init__
-
-    @ensure_integration_enabled(LitestarIntegration, old__init__)
-    def injection_wrapper(self, *args, **kwargs):
-        # type: (Litestar, *Any, **Any) -> None
-        kwargs["after_exception"] = [
-            exception_handler,
-            *(kwargs.get("after_exception") or []),
-        ]
-
-        SentryLitestarASGIMiddleware.__call__ = SentryLitestarASGIMiddleware._run_asgi3  # type: ignore
-        middleware = kwargs.get("middleware") or []
-        kwargs["middleware"] = [SentryLitestarASGIMiddleware, *middleware]
-        old__init__(self, *args, **kwargs)
-
-    Litestar.__init__ = injection_wrapper
-
-
-def patch_middlewares():
-    # type: () -> None
-    old_resolve_middleware_stack = BaseRouteHandler.resolve_middleware
-
-    @ensure_integration_enabled(LitestarIntegration, old_resolve_middleware_stack)
-    def resolve_middleware_wrapper(self):
-        # type: (BaseRouteHandler) -> list[Middleware]
-        return [
-            enable_span_for_middleware(middleware)
-            for middleware in old_resolve_middleware_stack(self)
-        ]
-
-    BaseRouteHandler.resolve_middleware = resolve_middleware_wrapper
-
-
-def enable_span_for_middleware(middleware):
-    # type: (Middleware) -> Middleware
-    if (
-        not hasattr(middleware, "__call__")  # noqa: B004
-        or middleware is SentryLitestarASGIMiddleware
-    ):
-        return middleware
-
-    if isinstance(middleware, DefineMiddleware):
-        old_call = middleware.middleware.__call__  # type: ASGIApp
-    else:
-        old_call = middleware.__call__
-
-    async def _create_span_call(self, scope, receive, send):
-        # type: (MiddlewareProtocol, LitestarScope, Receive, Send) -> None
-        if sentry_sdk_alpha.get_client().get_integration(LitestarIntegration) is None:
-            return await old_call(self, scope, receive, send)
-
-        middleware_name = self.__class__.__name__
-        with sentry_sdk_alpha.start_span(
-            op=OP.MIDDLEWARE_LITESTAR,
-            name=middleware_name,
-            origin=LitestarIntegration.origin,
-            only_if_parent=True,
-        ) as middleware_span:
-            middleware_span.set_tag("litestar.middleware_name", middleware_name)
-
-            # Creating spans for the "receive" callback
-            async def _sentry_receive(*args, **kwargs):
-                # type: (*Any, **Any) -> Union[HTTPReceiveMessage, WebSocketReceiveMessage]
-                if sentry_sdk_alpha.get_client().get_integration(LitestarIntegration) is None:
-                    return await receive(*args, **kwargs)
-                with sentry_sdk_alpha.start_span(
-                    op=OP.MIDDLEWARE_LITESTAR_RECEIVE,
-                    name=getattr(receive, "__qualname__", str(receive)),
-                    origin=LitestarIntegration.origin,
-                    only_if_parent=True,
-                ) as span:
-                    span.set_tag("litestar.middleware_name", middleware_name)
-                    return await receive(*args, **kwargs)
-
-            receive_name = getattr(receive, "__name__", str(receive))
-            receive_patched = receive_name == "_sentry_receive"
-            new_receive = _sentry_receive if not receive_patched else receive
-
-            # Creating spans for the "send" callback
-            async def _sentry_send(message):
-                # type: (Message) -> None
-                if sentry_sdk_alpha.get_client().get_integration(LitestarIntegration) is None:
-                    return await send(message)
-                with sentry_sdk_alpha.start_span(
-                    op=OP.MIDDLEWARE_LITESTAR_SEND,
-                    name=getattr(send, "__qualname__", str(send)),
-                    origin=LitestarIntegration.origin,
-                    only_if_parent=True,
-                ) as span:
-                    span.set_tag("litestar.middleware_name", middleware_name)
-                    return await send(message)
-
-            send_name = getattr(send, "__name__", str(send))
-            send_patched = send_name == "_sentry_send"
-            new_send = _sentry_send if not send_patched else send
-
-            return await old_call(self, scope, new_receive, new_send)
-
-    not_yet_patched = old_call.__name__ not in ["_create_span_call"]
-
-    if not_yet_patched:
-        if isinstance(middleware, DefineMiddleware):
-            middleware.middleware.__call__ = _create_span_call
-        else:
-            middleware.__call__ = _create_span_call
-
-    return middleware
-
-
-def patch_http_route_handle():
-    # type: () -> None
-    old_handle = HTTPRoute.handle
-
-    async def handle_wrapper(self, scope, receive, send):
-        # type: (HTTPRoute, HTTPScope, Receive, Send) -> None
-        if sentry_sdk_alpha.get_client().get_integration(LitestarIntegration) is None:
-            return await old_handle(self, scope, receive, send)
-
-        sentry_scope = sentry_sdk_alpha.get_isolation_scope()
-        request = scope["app"].request_class(
-            scope=scope, receive=receive, send=send
-        )  # type: Request[Any, Any]
-        extracted_request_data = ConnectionDataExtractor(parse_body=True, parse_query=True)(request)
-        body = extracted_request_data.pop("body")
-
-        request_data = await body
-
-        def event_processor(event, _):
-            # type: (Event, Hint) -> Event
-            route_handler = scope.get("route_handler")
-
-            request_info = event.get("request", {})
-            request_info["content_length"] = len(scope.get("_body", b""))
-            if should_send_default_pii():
-                request_info["cookies"] = extracted_request_data["cookies"]
-            if request_data is not None:
-                request_info["data"] = request_data
-
-            func = None
-            if route_handler.name is not None:
-                tx_name = route_handler.name
-            # Accounts for use of type `Ref` in earlier versions of litestar without the need to reference it as a type
-            elif hasattr(route_handler.fn, "value"):
-                func = route_handler.fn.value
-            else:
-                func = route_handler.fn
-            if func is not None:
-                tx_name = transaction_from_function(func)
-
-            tx_info = {"source": SOURCE_FOR_STYLE["endpoint"]}
-
-            if not tx_name:
-                tx_name = _DEFAULT_TRANSACTION_NAME
-                tx_info = {"source": TransactionSource.ROUTE}
-
-            event.update(
-                {
-                    "request": request_info,
-                    "transaction": tx_name,
-                    "transaction_info": tx_info,
-                }
-            )
-            return event
-
-        sentry_scope._name = LitestarIntegration.identifier
-        sentry_scope.add_event_processor(event_processor)
-
-        return await old_handle(self, scope, receive, send)
-
-    HTTPRoute.handle = handle_wrapper
-
-
-def retrieve_user_from_scope(scope):
-    # type: (LitestarScope) -> Optional[dict[str, Any]]
-    scope_user = scope.get("user")
-    if isinstance(scope_user, dict):
-        return scope_user
-    if hasattr(scope_user, "asdict"):  # dataclasses
-        return scope_user.asdict()
-
-    return None
-
-
-@ensure_integration_enabled(LitestarIntegration)
-def exception_handler(exc, scope):
-    # type: (Exception, LitestarScope) -> None
-    user_info = None  # type: Optional[dict[str, Any]]
-    if should_send_default_pii():
-        user_info = retrieve_user_from_scope(scope)
-    if user_info and isinstance(user_info, dict):
-        sentry_scope = sentry_sdk_alpha.get_isolation_scope()
-        sentry_scope.set_user(user_info)
-
-    if isinstance(exc, HTTPException):
-        integration = sentry_sdk_alpha.get_client().get_integration(LitestarIntegration)
-        if (
-            integration is not None
-            and exc.status_code not in integration.failed_request_status_codes
-        ):
-            return
-
-    event, hint = event_from_exception(
-        exc,
-        client_options=sentry_sdk_alpha.get_client().options,
-        mechanism={"type": LitestarIntegration.identifier, "handled": False},
-    )
-
-    sentry_sdk_alpha.capture_event(event, hint=hint)
diff --git a/src/sentry_sdk_alpha/integrations/logging.py b/src/sentry_sdk_alpha/integrations/logging.py
deleted file mode 100644
index 577c83f0c6493e..00000000000000
--- a/src/sentry_sdk_alpha/integrations/logging.py
+++ /dev/null
@@ -1,400 +0,0 @@
-import logging
-import sys
-from datetime import datetime, timezone
-from fnmatch import fnmatch
-from typing import TYPE_CHECKING, Tuple
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.client import BaseClient
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    current_stacktrace,
-    event_from_exception,
-    safe_repr,
-    to_string,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import MutableMapping
-    from logging import LogRecord
-    from typing import Any, Dict, Optional
-
-DEFAULT_LEVEL = logging.INFO
-DEFAULT_EVENT_LEVEL = None  # None means no events are captured
-LOGGING_TO_EVENT_LEVEL = {
-    logging.NOTSET: "notset",
-    logging.DEBUG: "debug",
-    logging.INFO: "info",
-    logging.WARN: "warning",  # WARN is same a WARNING
-    logging.WARNING: "warning",
-    logging.ERROR: "error",
-    logging.FATAL: "fatal",
-    logging.CRITICAL: "fatal",  # CRITICAL is same as FATAL
-}
-
-# Capturing events from those loggers causes recursion errors. We cannot allow
-# the user to unconditionally create events from those loggers under any
-# circumstances.
-#
-# Note: Ignoring by logger name here is better than mucking with thread-locals.
-# We do not necessarily know whether thread-locals work 100% correctly in the user's environment.
-_IGNORED_LOGGERS = {
-    "sentry_sdk.errors",
-    "urllib3.connectionpool",
-    "urllib3.connection",
-    "opentelemetry.*",
-}
-
-
-def ignore_logger(
-    name,  # type: str
-):
-    # type: (...) -> None
-    """This disables recording (both in breadcrumbs and as events) calls to
-    a logger of a specific name.  Among other uses, many of our integrations
-    use this to prevent their actions being recorded as breadcrumbs. Exposed
-    to users as a way to quiet spammy loggers.
-
-    :param name: The name of the logger to ignore (same string you would pass to ``logging.getLogger``).
-    """
-    _IGNORED_LOGGERS.add(name)
-
-
-class LoggingIntegration(Integration):
-    identifier = "logging"
-
-    def __init__(
-        self,
-        level=DEFAULT_LEVEL,
-        event_level=DEFAULT_EVENT_LEVEL,
-        sentry_logs_level=DEFAULT_LEVEL,
-    ):
-        # type: (Optional[int], Optional[int], Optional[int]) -> None
-        self._handler = None
-        self._breadcrumb_handler = None
-        self._sentry_logs_handler = None
-
-        if level is not None:
-            self._breadcrumb_handler = BreadcrumbHandler(level=level)
-
-        if sentry_logs_level is not None:
-            self._sentry_logs_handler = SentryLogsHandler(level=sentry_logs_level)
-
-        if event_level is not None:
-            self._handler = EventHandler(level=event_level)
-
-    def _handle_record(self, record):
-        # type: (LogRecord) -> None
-        if self._handler is not None and record.levelno >= self._handler.level:
-            self._handler.handle(record)
-
-        if (
-            self._breadcrumb_handler is not None
-            and record.levelno >= self._breadcrumb_handler.level
-        ):
-            self._breadcrumb_handler.handle(record)
-
-        if (
-            self._sentry_logs_handler is not None
-            and record.levelno >= self._sentry_logs_handler.level
-        ):
-            self._sentry_logs_handler.handle(record)
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        old_callhandlers = logging.Logger.callHandlers
-
-        def sentry_patched_callhandlers(self, record):
-            # type: (Any, LogRecord) -> Any
-            # keeping a local reference because the
-            # global might be discarded on shutdown
-            ignored_loggers = _IGNORED_LOGGERS
-
-            try:
-                return old_callhandlers(self, record)
-            finally:
-                # This check is done twice, once also here before we even get
-                # the integration.  Otherwise we have a high chance of getting
-                # into a recursion error when the integration is resolved
-                # (this also is slower).
-                if ignored_loggers is not None and record.name not in ignored_loggers:
-                    integration = sentry_sdk_alpha.get_client().get_integration(LoggingIntegration)
-                    if integration is not None:
-                        integration._handle_record(record)
-
-        logging.Logger.callHandlers = sentry_patched_callhandlers  # type: ignore
-
-
-class _BaseHandler(logging.Handler):
-    COMMON_RECORD_ATTRS = frozenset(
-        (
-            "args",
-            "created",
-            "exc_info",
-            "exc_text",
-            "filename",
-            "funcName",
-            "levelname",
-            "levelno",
-            "linenno",
-            "lineno",
-            "message",
-            "module",
-            "msecs",
-            "msg",
-            "name",
-            "pathname",
-            "process",
-            "processName",
-            "relativeCreated",
-            "stack",
-            "tags",
-            "taskName",
-            "thread",
-            "threadName",
-            "stack_info",
-        )
-    )
-
-    def _can_record(self, record):
-        # type: (LogRecord) -> bool
-        """Prevents ignored loggers from recording"""
-        for logger in _IGNORED_LOGGERS:
-            if fnmatch(record.name, logger):
-                return False
-        return True
-
-    def _logging_to_event_level(self, record):
-        # type: (LogRecord) -> str
-        return LOGGING_TO_EVENT_LEVEL.get(
-            record.levelno, record.levelname.lower() if record.levelname else ""
-        )
-
-    def _extra_from_record(self, record):
-        # type: (LogRecord) -> MutableMapping[str, object]
-        return {
-            k: v
-            for k, v in vars(record).items()
-            if k not in self.COMMON_RECORD_ATTRS
-            and (not isinstance(k, str) or not k.startswith("_"))
-        }
-
-
-class EventHandler(_BaseHandler):
-    """
-    A logging handler that emits Sentry events for each log record
-
-    Note that you do not have to use this class if the logging integration is enabled, which it is by default.
-    """
-
-    def emit(self, record):
-        # type: (LogRecord) -> Any
-        with capture_internal_exceptions():
-            self.format(record)
-            return self._emit(record)
-
-    def _emit(self, record):
-        # type: (LogRecord) -> None
-        if not self._can_record(record):
-            return
-
-        client = sentry_sdk_alpha.get_client()
-        if not client.is_active():
-            return
-
-        client_options = client.options
-
-        # exc_info might be None or (None, None, None)
-        #
-        # exc_info may also be any falsy value due to Python stdlib being
-        # liberal with what it receives and Celery's billiard being "liberal"
-        # with what it sends. See
-        # https://github.com/getsentry/sentry-python/issues/904
-        if record.exc_info and record.exc_info[0] is not None:
-            event, hint = event_from_exception(
-                record.exc_info,
-                client_options=client_options,
-                mechanism={"type": "logging", "handled": True},
-            )
-        elif (record.exc_info and record.exc_info[0] is None) or record.stack_info:
-            event = {}
-            hint = {}
-            with capture_internal_exceptions():
-                event["threads"] = {
-                    "values": [
-                        {
-                            "stacktrace": current_stacktrace(
-                                include_local_variables=client_options["include_local_variables"],
-                                max_value_length=client_options["max_value_length"],
-                            ),
-                            "crashed": False,
-                            "current": True,
-                        }
-                    ]
-                }
-        else:
-            event = {}
-            hint = {}
-
-        hint["log_record"] = record
-
-        level = self._logging_to_event_level(record)
-        if level in {"debug", "info", "warning", "error", "critical", "fatal"}:
-            event["level"] = level  # type: ignore[typeddict-item]
-        event["logger"] = record.name
-
-        if sys.version_info < (3, 11) and record.name == "py.warnings" and record.msg == "%s":
-            # warnings module on Python 3.10 and below sets record.msg to "%s"
-            # and record.args[0] to the actual warning message.
-            # This was fixed in https://github.com/python/cpython/pull/30975.
-            message = record.args[0]
-            params = ()
-        else:
-            message = record.msg
-            params = record.args
-
-        event["logentry"] = {
-            "message": to_string(message),
-            "formatted": record.getMessage(),
-            "params": params,
-        }
-
-        event["extra"] = self._extra_from_record(record)
-
-        sentry_sdk_alpha.capture_event(event, hint=hint)
-
-
-# Legacy name
-SentryHandler = EventHandler
-
-
-class BreadcrumbHandler(_BaseHandler):
-    """
-    A logging handler that records breadcrumbs for each log record.
-
-    Note that you do not have to use this class if the logging integration is enabled, which it is by default.
-    """
-
-    def emit(self, record):
-        # type: (LogRecord) -> Any
-        with capture_internal_exceptions():
-            self.format(record)
-            return self._emit(record)
-
-    def _emit(self, record):
-        # type: (LogRecord) -> None
-        if not self._can_record(record):
-            return
-
-        sentry_sdk_alpha.add_breadcrumb(
-            self._breadcrumb_from_record(record), hint={"log_record": record}
-        )
-
-    def _breadcrumb_from_record(self, record):
-        # type: (LogRecord) -> Dict[str, Any]
-        return {
-            "type": "log",
-            "level": self._logging_to_event_level(record),
-            "category": record.name,
-            "message": record.message,
-            "timestamp": datetime.fromtimestamp(record.created, timezone.utc),
-            "data": self._extra_from_record(record),
-        }
-
-
-def _python_level_to_otel(record_level):
-    # type: (int) -> Tuple[int, str]
-    for py_level, otel_severity_number, otel_severity_text in [
-        (50, 21, "fatal"),
-        (40, 17, "error"),
-        (30, 13, "warn"),
-        (20, 9, "info"),
-        (10, 5, "debug"),
-        (5, 1, "trace"),
-    ]:
-        if record_level >= py_level:
-            return otel_severity_number, otel_severity_text
-    return 0, "default"
-
-
-class SentryLogsHandler(_BaseHandler):
-    """
-    A logging handler that records Sentry logs for each Python log record.
-
-    Note that you do not have to use this class if the logging integration is enabled, which it is by default.
-    """
-
-    def emit(self, record):
-        # type: (LogRecord) -> Any
-        with capture_internal_exceptions():
-            self.format(record)
-            if not self._can_record(record):
-                return
-
-            client = sentry_sdk_alpha.get_client()
-            if not client.is_active():
-                return
-
-            if not client.options["_experiments"].get("enable_logs", False):
-                return
-
-            SentryLogsHandler._capture_log_from_record(client, record)
-
-    @staticmethod
-    def _capture_log_from_record(client, record):
-        # type: (BaseClient, LogRecord) -> None
-        scope = sentry_sdk_alpha.get_current_scope()
-        otel_severity_number, otel_severity_text = _python_level_to_otel(record.levelno)
-        project_root = client.options["project_root"]
-        attrs = {
-            "sentry.origin": "auto.logger.log",
-        }  # type: dict[str, str | bool | float | int]
-        if isinstance(record.msg, str):
-            attrs["sentry.message.template"] = record.msg
-        if record.args is not None:
-            if isinstance(record.args, tuple):
-                for i, arg in enumerate(record.args):
-                    attrs[f"sentry.message.parameters.{i}"] = (
-                        arg
-                        if isinstance(arg, str)
-                        or isinstance(arg, float)
-                        or isinstance(arg, int)
-                        or isinstance(arg, bool)
-                        else safe_repr(arg)
-                    )
-        if record.lineno:
-            attrs["code.line.number"] = record.lineno
-        if record.pathname:
-            if project_root is not None and record.pathname.startswith(project_root):
-                attrs["code.file.path"] = record.pathname[len(project_root) + 1 :]
-            else:
-                attrs["code.file.path"] = record.pathname
-        if record.funcName:
-            attrs["code.function.name"] = record.funcName
-
-        if record.thread:
-            attrs["thread.id"] = record.thread
-        if record.threadName:
-            attrs["thread.name"] = record.threadName
-
-        if record.process:
-            attrs["process.pid"] = record.process
-        if record.processName:
-            attrs["process.executable.name"] = record.processName
-        if record.name:
-            attrs["logger.name"] = record.name
-
-        # noinspection PyProtectedMember
-        client._capture_experimental_log(
-            scope,
-            {
-                "severity_text": otel_severity_text,
-                "severity_number": otel_severity_number,
-                "body": record.message,
-                "attributes": attrs,
-                "time_unix_nano": int(record.created * 1e9),
-                "trace_id": None,
-            },
-        )
diff --git a/src/sentry_sdk_alpha/integrations/loguru.py b/src/sentry_sdk_alpha/integrations/loguru.py
deleted file mode 100644
index 9f93efc36c1558..00000000000000
--- a/src/sentry_sdk_alpha/integrations/loguru.py
+++ /dev/null
@@ -1,123 +0,0 @@
-import enum
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-from sentry_sdk_alpha.integrations.logging import BreadcrumbHandler, EventHandler, _BaseHandler
-
-if TYPE_CHECKING:
-    from logging import LogRecord
-    from typing import Any, Optional, Tuple
-
-try:
-    import loguru
-    from loguru import logger
-    from loguru._defaults import LOGURU_FORMAT as DEFAULT_FORMAT
-except ImportError:
-    raise DidNotEnable("LOGURU is not installed")
-
-
-class LoggingLevels(enum.IntEnum):
-    TRACE = 5
-    DEBUG = 10
-    INFO = 20
-    SUCCESS = 25
-    WARNING = 30
-    ERROR = 40
-    CRITICAL = 50
-
-
-SENTRY_LEVEL_FROM_LOGURU_LEVEL = {
-    "TRACE": "DEBUG",
-    "DEBUG": "DEBUG",
-    "INFO": "INFO",
-    "SUCCESS": "INFO",
-    "WARNING": "WARNING",
-    "ERROR": "ERROR",
-    "CRITICAL": "CRITICAL",
-}
-
-DEFAULT_LEVEL = LoggingLevels.INFO.value
-DEFAULT_EVENT_LEVEL = LoggingLevels.ERROR.value
-# We need to save the handlers to be able to remove them later
-# in tests (they call `LoguruIntegration.__init__` multiple times,
-# and we can't use `setup_once` because it's called before
-# than we get configuration).
-_ADDED_HANDLERS = (None, None)  # type: Tuple[Optional[int], Optional[int]]
-
-
-class LoguruIntegration(Integration):
-    identifier = "loguru"
-
-    def __init__(
-        self,
-        level=DEFAULT_LEVEL,
-        event_level=DEFAULT_EVENT_LEVEL,
-        breadcrumb_format=DEFAULT_FORMAT,
-        event_format=DEFAULT_FORMAT,
-    ):
-        # type: (Optional[int], Optional[int], str | loguru.FormatFunction, str | loguru.FormatFunction) -> None
-        global _ADDED_HANDLERS
-        breadcrumb_handler, event_handler = _ADDED_HANDLERS
-
-        if breadcrumb_handler is not None:
-            logger.remove(breadcrumb_handler)
-            breadcrumb_handler = None
-        if event_handler is not None:
-            logger.remove(event_handler)
-            event_handler = None
-
-        if level is not None:
-            breadcrumb_handler = logger.add(
-                LoguruBreadcrumbHandler(level=level),
-                level=level,
-                format=breadcrumb_format,
-            )
-
-        if event_level is not None:
-            event_handler = logger.add(
-                LoguruEventHandler(level=event_level),
-                level=event_level,
-                format=event_format,
-            )
-
-        _ADDED_HANDLERS = (breadcrumb_handler, event_handler)
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        pass  # we do everything in __init__
-
-
-class _LoguruBaseHandler(_BaseHandler):
-    def _logging_to_event_level(self, record):
-        # type: (LogRecord) -> str
-        try:
-            return SENTRY_LEVEL_FROM_LOGURU_LEVEL[LoggingLevels(record.levelno).name].lower()
-        except (ValueError, KeyError):
-            return record.levelname.lower() if record.levelname else ""
-
-
-class LoguruEventHandler(_LoguruBaseHandler, EventHandler):
-    """Modified version of :class:`sentry_sdk.integrations.logging.EventHandler` to use loguru's level names."""
-
-    def __init__(self, *args, **kwargs):
-        # type: (*Any, **Any) -> None
-        if kwargs.get("level"):
-            kwargs["level"] = SENTRY_LEVEL_FROM_LOGURU_LEVEL.get(
-                kwargs.get("level", ""), DEFAULT_LEVEL
-            )
-
-        super().__init__(*args, **kwargs)
-
-
-class LoguruBreadcrumbHandler(_LoguruBaseHandler, BreadcrumbHandler):
-    """Modified version of :class:`sentry_sdk.integrations.logging.BreadcrumbHandler` to use loguru's level names."""
-
-    def __init__(self, *args, **kwargs):
-        # type: (*Any, **Any) -> None
-        if kwargs.get("level"):
-            kwargs["level"] = SENTRY_LEVEL_FROM_LOGURU_LEVEL.get(
-                kwargs.get("level", ""), DEFAULT_LEVEL
-            )
-
-        super().__init__(*args, **kwargs)
diff --git a/src/sentry_sdk_alpha/integrations/modules.py b/src/sentry_sdk_alpha/integrations/modules.py
deleted file mode 100644
index fbd5efbd7bfd83..00000000000000
--- a/src/sentry_sdk_alpha/integrations/modules.py
+++ /dev/null
@@ -1,30 +0,0 @@
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.scope import add_global_event_processor
-from sentry_sdk_alpha.utils import _get_installed_modules
-
-if TYPE_CHECKING:
-    from typing import Any
-
-    from sentry_sdk_alpha._types import Event
-
-
-class ModulesIntegration(Integration):
-    identifier = "modules"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        @add_global_event_processor
-        def processor(event, hint):
-            # type: (Event, Any) -> Event
-            if event.get("type") == "transaction":
-                return event
-
-            if sentry_sdk_alpha.get_client().get_integration(ModulesIntegration) is None:
-                return event
-
-            event["modules"] = _get_installed_modules()
-            return event
diff --git a/src/sentry_sdk_alpha/integrations/openai.py b/src/sentry_sdk_alpha/integrations/openai.py
deleted file mode 100644
index d51e5c2da8c2e7..00000000000000
--- a/src/sentry_sdk_alpha/integrations/openai.py
+++ /dev/null
@@ -1,409 +0,0 @@
-from functools import wraps
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha import consts
-from sentry_sdk_alpha.ai.monitoring import record_token_usage
-from sentry_sdk_alpha.ai.utils import set_data_normalized
-from sentry_sdk_alpha.consts import SPANDATA
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import capture_internal_exceptions, event_from_exception
-
-if TYPE_CHECKING:
-    from collections.abc import AsyncIterator, Callable, Iterable, Iterator
-    from typing import Any, List, Optional
-
-    from sentry_sdk_alpha.tracing import Span
-
-try:
-    from openai.resources import AsyncEmbeddings, Embeddings
-    from openai.resources.chat.completions import AsyncCompletions, Completions
-
-    if TYPE_CHECKING:
-        from openai.types.chat import ChatCompletionChunk, ChatCompletionMessageParam
-except ImportError:
-    raise DidNotEnable("OpenAI not installed")
-
-
-class OpenAIIntegration(Integration):
-    identifier = "openai"
-    origin = f"auto.ai.{identifier}"
-
-    def __init__(self, include_prompts=True, tiktoken_encoding_name=None):
-        # type: (OpenAIIntegration, bool, Optional[str]) -> None
-        self.include_prompts = include_prompts
-
-        self.tiktoken_encoding = None
-        if tiktoken_encoding_name is not None:
-            import tiktoken  # type: ignore
-
-            self.tiktoken_encoding = tiktoken.get_encoding(tiktoken_encoding_name)
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        Completions.create = _wrap_chat_completion_create(Completions.create)
-        Embeddings.create = _wrap_embeddings_create(Embeddings.create)
-
-        AsyncCompletions.create = _wrap_async_chat_completion_create(AsyncCompletions.create)
-        AsyncEmbeddings.create = _wrap_async_embeddings_create(AsyncEmbeddings.create)
-
-    def count_tokens(self, s):
-        # type: (OpenAIIntegration, str) -> int
-        if self.tiktoken_encoding is not None:
-            return len(self.tiktoken_encoding.encode_ordinary(s))
-        return 0
-
-
-def _capture_exception(exc):
-    # type: (Any) -> None
-    event, hint = event_from_exception(
-        exc,
-        client_options=sentry_sdk_alpha.get_client().options,
-        mechanism={"type": "openai", "handled": False},
-    )
-    sentry_sdk_alpha.capture_event(event, hint=hint)
-
-
-def _calculate_chat_completion_usage(
-    messages, response, span, streaming_message_responses, count_tokens
-):
-    # type: (Iterable[ChatCompletionMessageParam], Any, Span, Optional[List[str]], Callable[..., Any]) -> None
-    completion_tokens = 0  # type: Optional[int]
-    prompt_tokens = 0  # type: Optional[int]
-    total_tokens = 0  # type: Optional[int]
-    if hasattr(response, "usage"):
-        if hasattr(response.usage, "completion_tokens") and isinstance(
-            response.usage.completion_tokens, int
-        ):
-            completion_tokens = response.usage.completion_tokens
-        if hasattr(response.usage, "prompt_tokens") and isinstance(
-            response.usage.prompt_tokens, int
-        ):
-            prompt_tokens = response.usage.prompt_tokens
-        if hasattr(response.usage, "total_tokens") and isinstance(response.usage.total_tokens, int):
-            total_tokens = response.usage.total_tokens
-
-    if prompt_tokens == 0:
-        for message in messages:
-            if "content" in message:
-                prompt_tokens += count_tokens(message["content"])
-
-    if completion_tokens == 0:
-        if streaming_message_responses is not None:
-            for message in streaming_message_responses:
-                completion_tokens += count_tokens(message)
-        elif hasattr(response, "choices"):
-            for choice in response.choices:
-                if hasattr(choice, "message"):
-                    completion_tokens += count_tokens(choice.message)
-
-    if prompt_tokens == 0:
-        prompt_tokens = None
-    if completion_tokens == 0:
-        completion_tokens = None
-    if total_tokens == 0:
-        total_tokens = None
-    record_token_usage(span, prompt_tokens, completion_tokens, total_tokens)
-
-
-def _new_chat_completion_common(f, *args, **kwargs):
-    # type: (Any, *Any, **Any) -> Any
-    integration = sentry_sdk_alpha.get_client().get_integration(OpenAIIntegration)
-    if integration is None:
-        return f(*args, **kwargs)
-
-    if "messages" not in kwargs:
-        # invalid call (in all versions of openai), let it return error
-        return f(*args, **kwargs)
-
-    try:
-        iter(kwargs["messages"])
-    except TypeError:
-        # invalid call (in all versions), messages must be iterable
-        return f(*args, **kwargs)
-
-    kwargs["messages"] = list(kwargs["messages"])
-    messages = kwargs["messages"]
-    model = kwargs.get("model")
-    streaming = kwargs.get("stream")
-
-    span = sentry_sdk_alpha.start_span(
-        op=consts.OP.OPENAI_CHAT_COMPLETIONS_CREATE,
-        name="Chat Completion",
-        origin=OpenAIIntegration.origin,
-        only_if_parent=True,
-    )
-    span.__enter__()
-
-    res = yield f, args, kwargs
-
-    with capture_internal_exceptions():
-        if should_send_default_pii() and integration.include_prompts:
-            set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, messages)
-
-        set_data_normalized(span, SPANDATA.AI_MODEL_ID, model)
-        set_data_normalized(span, SPANDATA.AI_STREAMING, streaming)
-
-        if hasattr(res, "choices"):
-            if should_send_default_pii() and integration.include_prompts:
-                set_data_normalized(
-                    span,
-                    SPANDATA.AI_RESPONSES,
-                    list(map(lambda x: x.message, res.choices)),
-                )
-            _calculate_chat_completion_usage(messages, res, span, None, integration.count_tokens)
-            span.__exit__(None, None, None)
-        elif hasattr(res, "_iterator"):
-            data_buf: list[list[str]] = []  # one for each choice
-
-            old_iterator = res._iterator
-
-            def new_iterator():
-                # type: () -> Iterator[ChatCompletionChunk]
-                with capture_internal_exceptions():
-                    for x in old_iterator:
-                        if hasattr(x, "choices"):
-                            choice_index = 0
-                            for choice in x.choices:
-                                if hasattr(choice, "delta") and hasattr(choice.delta, "content"):
-                                    content = choice.delta.content
-                                    if len(data_buf) <= choice_index:
-                                        data_buf.append([])
-                                    data_buf[choice_index].append(content or "")
-                                choice_index += 1
-                        yield x
-                    if len(data_buf) > 0:
-                        all_responses = list(map(lambda chunk: "".join(chunk), data_buf))
-                        if should_send_default_pii() and integration.include_prompts:
-                            set_data_normalized(span, SPANDATA.AI_RESPONSES, all_responses)
-                        _calculate_chat_completion_usage(
-                            messages,
-                            res,
-                            span,
-                            all_responses,
-                            integration.count_tokens,
-                        )
-                span.__exit__(None, None, None)
-
-            async def new_iterator_async():
-                # type: () -> AsyncIterator[ChatCompletionChunk]
-                with capture_internal_exceptions():
-                    async for x in old_iterator:
-                        if hasattr(x, "choices"):
-                            choice_index = 0
-                            for choice in x.choices:
-                                if hasattr(choice, "delta") and hasattr(choice.delta, "content"):
-                                    content = choice.delta.content
-                                    if len(data_buf) <= choice_index:
-                                        data_buf.append([])
-                                    data_buf[choice_index].append(content or "")
-                                choice_index += 1
-                        yield x
-                    if len(data_buf) > 0:
-                        all_responses = list(map(lambda chunk: "".join(chunk), data_buf))
-                        if should_send_default_pii() and integration.include_prompts:
-                            set_data_normalized(span, SPANDATA.AI_RESPONSES, all_responses)
-                        _calculate_chat_completion_usage(
-                            messages,
-                            res,
-                            span,
-                            all_responses,
-                            integration.count_tokens,
-                        )
-                span.__exit__(None, None, None)
-
-            if str(type(res._iterator)) == "":
-                res._iterator = new_iterator_async()
-            else:
-                res._iterator = new_iterator()
-
-        else:
-            set_data_normalized(span, "unknown_response", True)
-            span.__exit__(None, None, None)
-    return res
-
-
-def _wrap_chat_completion_create(f):
-    # type: (Callable[..., Any]) -> Callable[..., Any]
-    def _execute_sync(f, *args, **kwargs):
-        # type: (Any, *Any, **Any) -> Any
-        gen = _new_chat_completion_common(f, *args, **kwargs)
-
-        try:
-            f, args, kwargs = next(gen)
-        except StopIteration as e:
-            return e.value
-
-        try:
-            try:
-                result = f(*args, **kwargs)
-            except Exception as e:
-                _capture_exception(e)
-                raise e from None
-
-            return gen.send(result)
-        except StopIteration as e:
-            return e.value
-
-    @wraps(f)
-    def _sentry_patched_create_sync(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        integration = sentry_sdk_alpha.get_client().get_integration(OpenAIIntegration)
-        if integration is None or "messages" not in kwargs:
-            # no "messages" means invalid call (in all versions of openai), let it return error
-            return f(*args, **kwargs)
-
-        return _execute_sync(f, *args, **kwargs)
-
-    return _sentry_patched_create_sync
-
-
-def _wrap_async_chat_completion_create(f):
-    # type: (Callable[..., Any]) -> Callable[..., Any]
-    async def _execute_async(f, *args, **kwargs):
-        # type: (Any, *Any, **Any) -> Any
-        gen = _new_chat_completion_common(f, *args, **kwargs)
-
-        try:
-            f, args, kwargs = next(gen)
-        except StopIteration as e:
-            return await e.value
-
-        try:
-            try:
-                result = await f(*args, **kwargs)
-            except Exception as e:
-                _capture_exception(e)
-                raise e from None
-
-            return gen.send(result)
-        except StopIteration as e:
-            return e.value
-
-    @wraps(f)
-    async def _sentry_patched_create_async(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        integration = sentry_sdk_alpha.get_client().get_integration(OpenAIIntegration)
-        if integration is None or "messages" not in kwargs:
-            # no "messages" means invalid call (in all versions of openai), let it return error
-            return await f(*args, **kwargs)
-
-        return await _execute_async(f, *args, **kwargs)
-
-    return _sentry_patched_create_async
-
-
-def _new_embeddings_create_common(f, *args, **kwargs):
-    # type: (Any, *Any, **Any) -> Any
-    integration = sentry_sdk_alpha.get_client().get_integration(OpenAIIntegration)
-    if integration is None:
-        return f(*args, **kwargs)
-
-    with sentry_sdk_alpha.start_span(
-        op=consts.OP.OPENAI_EMBEDDINGS_CREATE,
-        description="OpenAI Embedding Creation",
-        origin=OpenAIIntegration.origin,
-        only_if_parent=True,
-    ) as span:
-        if "input" in kwargs and (should_send_default_pii() and integration.include_prompts):
-            if isinstance(kwargs["input"], str):
-                set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, [kwargs["input"]])
-            elif (
-                isinstance(kwargs["input"], list)
-                and len(kwargs["input"]) > 0
-                and isinstance(kwargs["input"][0], str)
-            ):
-                set_data_normalized(span, SPANDATA.AI_INPUT_MESSAGES, kwargs["input"])
-        if "model" in kwargs:
-            set_data_normalized(span, SPANDATA.AI_MODEL_ID, kwargs["model"])
-
-        response = yield f, args, kwargs
-
-        prompt_tokens = 0
-        total_tokens = 0
-        if hasattr(response, "usage"):
-            if hasattr(response.usage, "prompt_tokens") and isinstance(
-                response.usage.prompt_tokens, int
-            ):
-                prompt_tokens = response.usage.prompt_tokens
-            if hasattr(response.usage, "total_tokens") and isinstance(
-                response.usage.total_tokens, int
-            ):
-                total_tokens = response.usage.total_tokens
-
-        if prompt_tokens == 0:
-            prompt_tokens = integration.count_tokens(kwargs["input"] or "")
-
-        record_token_usage(span, prompt_tokens, None, total_tokens or prompt_tokens)
-
-        return response
-
-
-def _wrap_embeddings_create(f):
-    # type: (Any) -> Any
-    def _execute_sync(f, *args, **kwargs):
-        # type: (Any, *Any, **Any) -> Any
-        gen = _new_embeddings_create_common(f, *args, **kwargs)
-
-        try:
-            f, args, kwargs = next(gen)
-        except StopIteration as e:
-            return e.value
-
-        try:
-            try:
-                result = f(*args, **kwargs)
-            except Exception as e:
-                _capture_exception(e)
-                raise e from None
-
-            return gen.send(result)
-        except StopIteration as e:
-            return e.value
-
-    @wraps(f)
-    def _sentry_patched_create_sync(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        integration = sentry_sdk_alpha.get_client().get_integration(OpenAIIntegration)
-        if integration is None:
-            return f(*args, **kwargs)
-
-        return _execute_sync(f, *args, **kwargs)
-
-    return _sentry_patched_create_sync
-
-
-def _wrap_async_embeddings_create(f):
-    # type: (Any) -> Any
-    async def _execute_async(f, *args, **kwargs):
-        # type: (Any, *Any, **Any) -> Any
-        gen = _new_embeddings_create_common(f, *args, **kwargs)
-
-        try:
-            f, args, kwargs = next(gen)
-        except StopIteration as e:
-            return await e.value
-
-        try:
-            try:
-                result = await f(*args, **kwargs)
-            except Exception as e:
-                _capture_exception(e)
-                raise e from None
-
-            return gen.send(result)
-        except StopIteration as e:
-            return e.value
-
-    @wraps(f)
-    async def _sentry_patched_create_async(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        integration = sentry_sdk_alpha.get_client().get_integration(OpenAIIntegration)
-        if integration is None:
-            return await f(*args, **kwargs)
-
-        return await _execute_async(f, *args, **kwargs)
-
-    return _sentry_patched_create_async
diff --git a/src/sentry_sdk_alpha/integrations/openfeature.py b/src/sentry_sdk_alpha/integrations/openfeature.py
deleted file mode 100644
index 26716fea32c5cd..00000000000000
--- a/src/sentry_sdk_alpha/integrations/openfeature.py
+++ /dev/null
@@ -1,37 +0,0 @@
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha.feature_flags import add_feature_flag
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-
-try:
-    from openfeature import api
-    from openfeature.hook import Hook
-
-    if TYPE_CHECKING:
-        from openfeature.flag_evaluation import FlagEvaluationDetails
-        from openfeature.hook import HookContext, HookHints
-except ImportError:
-    raise DidNotEnable("OpenFeature is not installed")
-
-
-class OpenFeatureIntegration(Integration):
-    identifier = "openfeature"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        # Register the hook within the global openfeature hooks list.
-        api.add_hooks(hooks=[OpenFeatureHook()])
-
-
-class OpenFeatureHook(Hook):
-
-    def after(self, hook_context, details, hints):
-        # type: (HookContext, FlagEvaluationDetails[bool], HookHints) -> None
-        if isinstance(details.value, bool):
-            add_feature_flag(details.flag_key, details.value)
-
-    def error(self, hook_context, exception, hints):
-        # type: (HookContext, Exception, HookHints) -> None
-        if isinstance(hook_context.default_value, bool):
-            add_feature_flag(hook_context.flag_key, hook_context.default_value)
diff --git a/src/sentry_sdk_alpha/integrations/pure_eval.py b/src/sentry_sdk_alpha/integrations/pure_eval.py
deleted file mode 100644
index bb8c2269099a8b..00000000000000
--- a/src/sentry_sdk_alpha/integrations/pure_eval.py
+++ /dev/null
@@ -1,136 +0,0 @@
-import ast
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha import serializer
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-from sentry_sdk_alpha.scope import add_global_event_processor
-from sentry_sdk_alpha.utils import iter_stacks, walk_exception_chain
-
-if TYPE_CHECKING:
-    from types import FrameType
-    from typing import Any, Dict, List, Optional, Tuple
-
-    from sentry_sdk_alpha._types import Event, Hint
-
-try:
-    import executing
-except ImportError:
-    raise DidNotEnable("executing is not installed")
-
-try:
-    import pure_eval
-except ImportError:
-    raise DidNotEnable("pure_eval is not installed")
-
-try:
-    # Used implicitly, just testing it's available
-    import asttokens  # noqa
-except ImportError:
-    raise DidNotEnable("asttokens is not installed")
-
-
-class PureEvalIntegration(Integration):
-    identifier = "pure_eval"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-
-        @add_global_event_processor
-        def add_executing_info(event, hint):
-            # type: (Event, Optional[Hint]) -> Optional[Event]
-            if sentry_sdk_alpha.get_client().get_integration(PureEvalIntegration) is None:
-                return event
-
-            if hint is None:
-                return event
-
-            exc_info = hint.get("exc_info", None)
-
-            if exc_info is None:
-                return event
-
-            exception = event.get("exception", None)
-
-            if exception is None:
-                return event
-
-            values = exception.get("values", None)
-
-            if values is None:
-                return event
-
-            for exception, (_exc_type, _exc_value, exc_tb) in zip(
-                reversed(values), walk_exception_chain(exc_info)
-            ):
-                sentry_frames = [
-                    frame
-                    for frame in exception.get("stacktrace", {}).get("frames", [])
-                    if frame.get("function")
-                ]
-                tbs = list(iter_stacks(exc_tb))
-                if len(sentry_frames) != len(tbs):
-                    continue
-
-                for sentry_frame, tb in zip(sentry_frames, tbs):
-                    sentry_frame["vars"] = pure_eval_frame(tb.tb_frame) or sentry_frame["vars"]
-            return event
-
-
-def pure_eval_frame(frame):
-    # type: (FrameType) -> Dict[str, Any]
-    source = executing.Source.for_frame(frame)
-    if not source.tree:
-        return {}
-
-    statements = source.statements_at_line(frame.f_lineno)
-    if not statements:
-        return {}
-
-    scope = stmt = list(statements)[0]
-    while True:
-        # Get the parent first in case the original statement is already
-        # a function definition, e.g. if we're calling a decorator
-        # In that case we still want the surrounding scope, not that function
-        scope = scope.parent
-        if isinstance(scope, (ast.FunctionDef, ast.ClassDef, ast.Module)):
-            break
-
-    evaluator = pure_eval.Evaluator.from_frame(frame)
-    expressions = evaluator.interesting_expressions_grouped(scope)
-
-    def closeness(expression):
-        # type: (Tuple[List[Any], Any]) -> Tuple[int, int]
-        # Prioritise expressions with a node closer to the statement executed
-        # without being after that statement
-        # A higher return value is better - the expression will appear
-        # earlier in the list of values and is less likely to be trimmed
-        nodes, _value = expression
-
-        def start(n):
-            # type: (ast.expr) -> Tuple[int, int]
-            return (n.lineno, n.col_offset)
-
-        nodes_before_stmt = [
-            node for node in nodes if start(node) < stmt.last_token.end  # type: ignore
-        ]
-        if nodes_before_stmt:
-            # The position of the last node before or in the statement
-            return max(start(node) for node in nodes_before_stmt)
-        else:
-            # The position of the first node after the statement
-            # Negative means it's always lower priority than nodes that come before
-            # Less negative means closer to the statement and higher priority
-            lineno, col_offset = min(start(node) for node in nodes)
-            return (-lineno, -col_offset)
-
-    # This adds the first_token and last_token attributes to nodes
-    atok = source.asttokens()
-
-    expressions.sort(key=closeness, reverse=True)
-    vars = {
-        atok.get_text(nodes[0]): value
-        for nodes, value in expressions[: serializer.MAX_DATABAG_BREADTH]
-    }
-    return serializer.serialize(vars, is_vars=True)
diff --git a/src/sentry_sdk_alpha/integrations/pymongo.py b/src/sentry_sdk_alpha/integrations/pymongo.py
deleted file mode 100644
index 27d343cce97749..00000000000000
--- a/src/sentry_sdk_alpha/integrations/pymongo.py
+++ /dev/null
@@ -1,203 +0,0 @@
-import copy
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP, SPANDATA, SPANSTATUS
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.tracing import Span
-from sentry_sdk_alpha.utils import _serialize_span_attribute, capture_internal_exceptions
-
-try:
-    from pymongo import monitoring
-except ImportError:
-    raise DidNotEnable("Pymongo not installed")
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from typing import Any, Dict, Union
-
-    from pymongo.monitoring import CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent
-
-
-SAFE_COMMAND_ATTRIBUTES = [
-    "insert",
-    "ordered",
-    "find",
-    "limit",
-    "singleBatch",
-    "aggregate",
-    "createIndexes",
-    "indexes",
-    "delete",
-    "findAndModify",
-    "renameCollection",
-    "to",
-    "drop",
-]
-
-
-def _strip_pii(command):
-    # type: (Dict[str, Any]) -> Dict[str, Any]
-    for key in command:
-        is_safe_field = key in SAFE_COMMAND_ATTRIBUTES
-        if is_safe_field:
-            # Skip if safe key
-            continue
-
-        update_db_command = key == "update" and "findAndModify" not in command
-        if update_db_command:
-            # Also skip "update" db command because it is save.
-            # There is also an "update" key in the "findAndModify" command, which is NOT safe!
-            continue
-
-        # Special stripping for documents
-        is_document = key == "documents"
-        if is_document:
-            for doc in command[key]:
-                for doc_key in doc:
-                    doc[doc_key] = "%s"
-            continue
-
-        # Special stripping for dict style fields
-        is_dict_field = key in ["filter", "query", "update"]
-        if is_dict_field:
-            for item_key in command[key]:
-                command[key][item_key] = "%s"
-            continue
-
-        # For pipeline fields strip the `$match` dict
-        is_pipeline_field = key == "pipeline"
-        if is_pipeline_field:
-            for pipeline in command[key]:
-                for match_key in pipeline["$match"] if "$match" in pipeline else []:
-                    pipeline["$match"][match_key] = "%s"
-            continue
-
-        # Default stripping
-        command[key] = "%s"
-
-    return command
-
-
-def _get_db_data(event):
-    # type: (Any) -> Dict[str, Any]
-    data = {}
-
-    data[SPANDATA.DB_SYSTEM] = "mongodb"
-
-    db_name = event.database_name
-    if db_name is not None:
-        data[SPANDATA.DB_NAME] = db_name
-
-    server_address = event.connection_id[0]
-    if server_address is not None:
-        data[SPANDATA.SERVER_ADDRESS] = server_address
-
-    server_port = event.connection_id[1]
-    if server_port is not None:
-        data[SPANDATA.SERVER_PORT] = server_port
-
-    return data
-
-
-class CommandTracer(monitoring.CommandListener):
-    def __init__(self):
-        # type: () -> None
-        self._ongoing_operations = {}  # type: Dict[int, Span]
-
-    def _operation_key(self, event):
-        # type: (Union[CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent]) -> int
-        return event.request_id
-
-    def started(self, event):
-        # type: (CommandStartedEvent) -> None
-        if sentry_sdk_alpha.get_client().get_integration(PyMongoIntegration) is None:
-            return
-
-        with capture_internal_exceptions():
-            command = dict(copy.deepcopy(event.command))
-
-            command.pop("$db", None)
-            command.pop("$clusterTime", None)
-            command.pop("$signature", None)
-
-            data = {
-                SPANDATA.DB_NAME: event.database_name,
-                SPANDATA.DB_SYSTEM: "mongodb",
-                SPANDATA.DB_OPERATION: event.command_name,
-                SPANDATA.DB_MONGODB_COLLECTION: command.get(event.command_name),
-            }
-
-            try:
-                data["net.peer.name"] = event.connection_id[0]
-                data["net.peer.port"] = str(event.connection_id[1])
-            except TypeError:
-                pass
-
-            try:
-                lsid = command.pop("lsid")["id"]
-                data["session_id"] = str(lsid)
-            except KeyError:
-                pass
-
-            if not should_send_default_pii():
-                command = _strip_pii(command)
-
-            query = _serialize_span_attribute(command)
-            span = sentry_sdk_alpha.start_span(
-                op=OP.DB,
-                name=query,
-                origin=PyMongoIntegration.origin,
-                only_if_parent=True,
-            )
-
-            with capture_internal_exceptions():
-                sentry_sdk_alpha.add_breadcrumb(
-                    message=query, category="query", type=OP.DB, data=data
-                )
-
-            for key, value in data.items():
-                span.set_attribute(key, value)
-
-            for key, value in _get_db_data(event).items():
-                span.set_attribute(key, value)
-
-            span.set_attribute("operation_id", event.operation_id)
-            span.set_attribute("request_id", event.request_id)
-
-            self._ongoing_operations[self._operation_key(event)] = span.__enter__()
-
-    def failed(self, event):
-        # type: (CommandFailedEvent) -> None
-        if sentry_sdk_alpha.get_client().get_integration(PyMongoIntegration) is None:
-            return
-
-        try:
-            span = self._ongoing_operations.pop(self._operation_key(event))
-            span.set_status(SPANSTATUS.INTERNAL_ERROR)
-            span.__exit__(None, None, None)
-        except KeyError:
-            return
-
-    def succeeded(self, event):
-        # type: (CommandSucceededEvent) -> None
-        if sentry_sdk_alpha.get_client().get_integration(PyMongoIntegration) is None:
-            return
-
-        try:
-            span = self._ongoing_operations.pop(self._operation_key(event))
-            span.set_status(SPANSTATUS.OK)
-            span.__exit__(None, None, None)
-        except KeyError:
-            pass
-
-
-class PyMongoIntegration(Integration):
-    identifier = "pymongo"
-    origin = f"auto.db.{identifier}"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        monitoring.register(CommandTracer())
diff --git a/src/sentry_sdk_alpha/integrations/pyramid.py b/src/sentry_sdk_alpha/integrations/pyramid.py
deleted file mode 100644
index 38da5cf723fa45..00000000000000
--- a/src/sentry_sdk_alpha/integrations/pyramid.py
+++ /dev/null
@@ -1,226 +0,0 @@
-import functools
-import os
-import sys
-import weakref
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import SOURCE_FOR_STYLE
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-from sentry_sdk_alpha.integrations._wsgi_common import RequestExtractor
-from sentry_sdk_alpha.integrations.wsgi import SentryWsgiMiddleware
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    event_from_exception,
-    reraise,
-)
-
-try:
-    from pyramid.httpexceptions import HTTPException
-    from pyramid.request import Request
-except ImportError:
-    raise DidNotEnable("Pyramid not installed")
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Dict, Optional
-
-    from pyramid.response import Response
-    from webob.cookies import RequestCookies
-    from webob.request import _FieldStorageWithFile
-
-    from sentry_sdk_alpha._types import Event, EventProcessor
-    from sentry_sdk_alpha.integrations.wsgi import _ScopedResponse
-    from sentry_sdk_alpha.utils import ExcInfo
-
-
-if getattr(Request, "authenticated_userid", None):
-
-    def authenticated_userid(request):
-        # type: (Request) -> Optional[Any]
-        return request.authenticated_userid
-
-else:
-    # bw-compat for pyramid < 1.5
-    from pyramid.security import authenticated_userid  # type: ignore
-
-
-TRANSACTION_STYLE_VALUES = ("route_name", "route_pattern")
-
-
-class PyramidIntegration(Integration):
-    identifier = "pyramid"
-    origin = f"auto.http.{identifier}"
-
-    transaction_style = ""
-
-    def __init__(self, transaction_style="route_name"):
-        # type: (str) -> None
-        if transaction_style not in TRANSACTION_STYLE_VALUES:
-            raise ValueError(
-                "Invalid value for transaction_style: %s (must be in %s)"
-                % (transaction_style, TRANSACTION_STYLE_VALUES)
-            )
-        self.transaction_style = transaction_style
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        from pyramid import router
-
-        old_call_view = router._call_view
-
-        @functools.wraps(old_call_view)
-        def sentry_patched_call_view(registry, request, *args, **kwargs):
-            # type: (Any, Request, *Any, **Any) -> Response
-            integration = sentry_sdk_alpha.get_client().get_integration(PyramidIntegration)
-            if integration is None:
-                return old_call_view(registry, request, *args, **kwargs)
-
-            _set_transaction_name_and_source(
-                sentry_sdk_alpha.get_current_scope(), integration.transaction_style, request
-            )
-            scope = sentry_sdk_alpha.get_isolation_scope()
-            scope.add_event_processor(_make_event_processor(weakref.ref(request), integration))
-
-            return old_call_view(registry, request, *args, **kwargs)
-
-        router._call_view = sentry_patched_call_view
-
-        if hasattr(Request, "invoke_exception_view"):
-            old_invoke_exception_view = Request.invoke_exception_view
-
-            def sentry_patched_invoke_exception_view(self, *args, **kwargs):
-                # type: (Request, *Any, **Any) -> Any
-                rv = old_invoke_exception_view(self, *args, **kwargs)
-
-                if (
-                    self.exc_info
-                    and all(self.exc_info)
-                    and rv.status_int == 500
-                    and sentry_sdk_alpha.get_client().get_integration(PyramidIntegration)
-                    is not None
-                ):
-                    _capture_exception(self.exc_info)
-
-                return rv
-
-            Request.invoke_exception_view = sentry_patched_invoke_exception_view
-
-        old_wsgi_call = router.Router.__call__
-
-        @ensure_integration_enabled(PyramidIntegration, old_wsgi_call)
-        def sentry_patched_wsgi_call(self, environ, start_response):
-            # type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
-            def sentry_patched_inner_wsgi_call(environ, start_response):
-                # type: (Dict[str, Any], Callable[..., Any]) -> Any
-                try:
-                    return old_wsgi_call(self, environ, start_response)
-                except Exception:
-                    einfo = sys.exc_info()
-                    _capture_exception(einfo)
-                    reraise(*einfo)
-
-            middleware = SentryWsgiMiddleware(
-                sentry_patched_inner_wsgi_call,
-                span_origin=PyramidIntegration.origin,
-            )
-            return middleware(environ, start_response)
-
-        router.Router.__call__ = sentry_patched_wsgi_call
-
-
-@ensure_integration_enabled(PyramidIntegration)
-def _capture_exception(exc_info):
-    # type: (ExcInfo) -> None
-    if exc_info[0] is None or issubclass(exc_info[0], HTTPException):
-        return
-
-    event, hint = event_from_exception(
-        exc_info,
-        client_options=sentry_sdk_alpha.get_client().options,
-        mechanism={"type": "pyramid", "handled": False},
-    )
-
-    sentry_sdk_alpha.capture_event(event, hint=hint)
-
-
-def _set_transaction_name_and_source(scope, transaction_style, request):
-    # type: (sentry_sdk.Scope, str, Request) -> None
-    try:
-        name_for_style = {
-            "route_name": request.matched_route.name,
-            "route_pattern": request.matched_route.pattern,
-        }
-        scope.set_transaction_name(
-            name_for_style[transaction_style],
-            source=SOURCE_FOR_STYLE[transaction_style],
-        )
-    except Exception:
-        pass
-
-
-class PyramidRequestExtractor(RequestExtractor):
-    def url(self):
-        # type: () -> str
-        return self.request.path_url
-
-    def env(self):
-        # type: () -> Dict[str, str]
-        return self.request.environ
-
-    def cookies(self):
-        # type: () -> RequestCookies
-        return self.request.cookies
-
-    def raw_data(self):
-        # type: () -> str
-        return self.request.text
-
-    def form(self):
-        # type: () -> Dict[str, str]
-        return {
-            key: value
-            for key, value in self.request.POST.items()
-            if not getattr(value, "filename", None)
-        }
-
-    def files(self):
-        # type: () -> Dict[str, _FieldStorageWithFile]
-        return {
-            key: value
-            for key, value in self.request.POST.items()
-            if getattr(value, "filename", None)
-        }
-
-    def size_of_file(self, postdata):
-        # type: (_FieldStorageWithFile) -> int
-        file = postdata.file
-        try:
-            return os.fstat(file.fileno()).st_size
-        except Exception:
-            return 0
-
-
-def _make_event_processor(weak_request, integration):
-    # type: (Callable[[], Request], PyramidIntegration) -> EventProcessor
-    def pyramid_event_processor(event, hint):
-        # type: (Event, Dict[str, Any]) -> Event
-        request = weak_request()
-        if request is None:
-            return event
-
-        with capture_internal_exceptions():
-            PyramidRequestExtractor(request).extract_into_event(event)
-
-        if should_send_default_pii():
-            with capture_internal_exceptions():
-                user_info = event.setdefault("user", {})
-                user_info.setdefault("id", authenticated_userid(request))
-
-        return event
-
-    return pyramid_event_processor
diff --git a/src/sentry_sdk_alpha/integrations/quart.py b/src/sentry_sdk_alpha/integrations/quart.py
deleted file mode 100644
index b42eb3d0e97af1..00000000000000
--- a/src/sentry_sdk_alpha/integrations/quart.py
+++ /dev/null
@@ -1,234 +0,0 @@
-import asyncio
-import inspect
-from functools import wraps
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import SOURCE_FOR_STYLE
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-from sentry_sdk_alpha.integrations._wsgi_common import _filter_headers
-from sentry_sdk_alpha.integrations.asgi import SentryAsgiMiddleware
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    event_from_exception,
-)
-
-if TYPE_CHECKING:
-    from typing import Any, Union
-
-    from sentry_sdk_alpha._types import Event, EventProcessor
-
-try:
-    import quart_auth  # type: ignore
-except ImportError:
-    quart_auth = None
-
-try:
-    from quart import (  # type: ignore
-        Quart,
-        Request,
-        has_request_context,
-        has_websocket_context,
-        request,
-        websocket,
-    )
-    from quart.signals import (  # type: ignore
-        got_background_exception,
-        got_request_exception,
-        got_websocket_exception,
-        request_started,
-        websocket_started,
-    )
-except ImportError:
-    raise DidNotEnable("Quart is not installed")
-else:
-    # Quart 0.19 is based on Flask and hence no longer has a Scaffold
-    try:
-        from quart.scaffold import Scaffold  # type: ignore
-    except ImportError:
-        from flask.sansio.scaffold import Scaffold  # type: ignore
-
-TRANSACTION_STYLE_VALUES = ("endpoint", "url")
-
-
-class QuartIntegration(Integration):
-    identifier = "quart"
-    origin = f"auto.http.{identifier}"
-
-    transaction_style = ""
-
-    def __init__(self, transaction_style="endpoint"):
-        # type: (str) -> None
-        if transaction_style not in TRANSACTION_STYLE_VALUES:
-            raise ValueError(
-                "Invalid value for transaction_style: %s (must be in %s)"
-                % (transaction_style, TRANSACTION_STYLE_VALUES)
-            )
-        self.transaction_style = transaction_style
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-
-        request_started.connect(_request_websocket_started)
-        websocket_started.connect(_request_websocket_started)
-        got_background_exception.connect(_capture_exception)
-        got_request_exception.connect(_capture_exception)
-        got_websocket_exception.connect(_capture_exception)
-
-        patch_asgi_app()
-        patch_scaffold_route()
-
-
-def patch_asgi_app():
-    # type: () -> None
-    old_app = Quart.__call__
-
-    async def sentry_patched_asgi_app(self, scope, receive, send):
-        # type: (Any, Any, Any, Any) -> Any
-        if sentry_sdk_alpha.get_client().get_integration(QuartIntegration) is None:
-            return await old_app(self, scope, receive, send)
-
-        middleware = SentryAsgiMiddleware(
-            lambda *a, **kw: old_app(self, *a, **kw),
-            span_origin=QuartIntegration.origin,
-        )
-        middleware.__call__ = middleware._run_asgi3
-        return await middleware(scope, receive, send)
-
-    Quart.__call__ = sentry_patched_asgi_app
-
-
-def patch_scaffold_route():
-    # type: () -> None
-    old_route = Scaffold.route
-
-    def _sentry_route(*args, **kwargs):
-        # type: (*Any, **Any) -> Any
-        old_decorator = old_route(*args, **kwargs)
-
-        def decorator(old_func):
-            # type: (Any) -> Any
-
-            if inspect.isfunction(old_func) and not asyncio.iscoroutinefunction(old_func):
-
-                @wraps(old_func)
-                @ensure_integration_enabled(QuartIntegration, old_func)
-                def _sentry_func(*args, **kwargs):
-                    # type: (*Any, **Any) -> Any
-                    current_scope = sentry_sdk_alpha.get_current_scope()
-                    if current_scope.root_span is not None:
-                        current_scope.root_span.update_active_thread()
-
-                    sentry_scope = sentry_sdk_alpha.get_isolation_scope()
-                    if sentry_scope.profile is not None:
-                        sentry_scope.profile.update_active_thread_id()
-
-                    return old_func(*args, **kwargs)
-
-                return old_decorator(_sentry_func)
-
-            return old_decorator(old_func)
-
-        return decorator
-
-    Scaffold.route = _sentry_route
-
-
-def _set_transaction_name_and_source(scope, transaction_style, request):
-    # type: (sentry_sdk.Scope, str, Request) -> None
-
-    try:
-        name_for_style = {
-            "url": request.url_rule.rule,
-            "endpoint": request.url_rule.endpoint,
-        }
-        scope.set_transaction_name(
-            name_for_style[transaction_style],
-            source=SOURCE_FOR_STYLE[transaction_style],
-        )
-    except Exception:
-        pass
-
-
-async def _request_websocket_started(app, **kwargs):
-    # type: (Quart, **Any) -> None
-    integration = sentry_sdk_alpha.get_client().get_integration(QuartIntegration)
-    if integration is None:
-        return
-
-    if has_request_context():
-        request_websocket = request._get_current_object()
-    if has_websocket_context():
-        request_websocket = websocket._get_current_object()
-
-    # Set the transaction name here, but rely on ASGI middleware
-    # to actually start the transaction
-    _set_transaction_name_and_source(
-        sentry_sdk_alpha.get_current_scope(), integration.transaction_style, request_websocket
-    )
-
-    scope = sentry_sdk_alpha.get_isolation_scope()
-    evt_processor = _make_request_event_processor(app, request_websocket, integration)
-    scope.add_event_processor(evt_processor)
-
-
-def _make_request_event_processor(app, request, integration):
-    # type: (Quart, Request, QuartIntegration) -> EventProcessor
-    def inner(event, hint):
-        # type: (Event, dict[str, Any]) -> Event
-        # if the request is gone we are fine not logging the data from
-        # it.  This might happen if the processor is pushed away to
-        # another thread.
-        if request is None:
-            return event
-
-        with capture_internal_exceptions():
-            # TODO: Figure out what to do with request body. Methods on request
-            # are async, but event processors are not.
-
-            request_info = event.setdefault("request", {})
-            request_info["url"] = request.url
-            request_info["query_string"] = request.query_string
-            request_info["method"] = request.method
-            request_info["headers"] = _filter_headers(dict(request.headers))
-
-            if should_send_default_pii():
-                request_info["env"] = {"REMOTE_ADDR": request.access_route[0]}
-                _add_user_to_event(event)
-
-        return event
-
-    return inner
-
-
-async def _capture_exception(sender, exception, **kwargs):
-    # type: (Quart, Union[ValueError, BaseException], **Any) -> None
-    integration = sentry_sdk_alpha.get_client().get_integration(QuartIntegration)
-    if integration is None:
-        return
-
-    event, hint = event_from_exception(
-        exception,
-        client_options=sentry_sdk_alpha.get_client().options,
-        mechanism={"type": "quart", "handled": False},
-    )
-
-    sentry_sdk_alpha.capture_event(event, hint=hint)
-
-
-def _add_user_to_event(event):
-    # type: (Event) -> None
-    if quart_auth is None:
-        return
-
-    user = quart_auth.current_user
-    if user is None:
-        return
-
-    with capture_internal_exceptions():
-        user_info = event.setdefault("user", {})
-
-        user_info["id"] = quart_auth.current_user._auth_id
diff --git a/src/sentry_sdk_alpha/integrations/ray.py b/src/sentry_sdk_alpha/integrations/ray.py
deleted file mode 100644
index c6400071544f9d..00000000000000
--- a/src/sentry_sdk_alpha/integrations/ray.py
+++ /dev/null
@@ -1,147 +0,0 @@
-import inspect
-import sys
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP, SPANSTATUS
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
-from sentry_sdk_alpha.tracing import TransactionSource
-from sentry_sdk_alpha.utils import (
-    event_from_exception,
-    logger,
-    package_version,
-    qualname_from_function,
-    reraise,
-)
-
-try:
-    import ray  # type: ignore[import-not-found]
-except ImportError:
-    raise DidNotEnable("Ray not installed.")
-import functools
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Optional
-
-    from sentry_sdk_alpha.utils import ExcInfo
-
-DEFAULT_TRANSACTION_NAME = "unknown Ray function"
-
-
-def _check_sentry_initialized():
-    # type: () -> None
-    if sentry_sdk_alpha.get_client().is_active():
-        return
-
-    logger.debug(
-        "[Tracing] Sentry not initialized in ray cluster worker, performance data will be discarded."
-    )
-
-
-def _patch_ray_remote():
-    # type: () -> None
-    old_remote = ray.remote
-
-    @functools.wraps(old_remote)
-    def new_remote(f, *args, **kwargs):
-        # type: (Callable[..., Any], *Any, **Any) -> Callable[..., Any]
-        if inspect.isclass(f):
-            # Ray Actors
-            # (https://docs.ray.io/en/latest/ray-core/actors.html)
-            # are not supported
-            # (Only Ray Tasks are supported)
-            return old_remote(f, *args, *kwargs)
-
-        def _f(*f_args, _tracing=None, **f_kwargs):
-            # type: (Any, Optional[dict[str, Any]],  Any) -> Any
-            """
-            Ray Worker
-            """
-            _check_sentry_initialized()
-
-            root_span_name = qualname_from_function(f) or DEFAULT_TRANSACTION_NAME
-            sentry_sdk_alpha.get_current_scope().set_transaction_name(
-                root_span_name,
-                source=TransactionSource.TASK,
-            )
-            with sentry_sdk_alpha.continue_trace(_tracing or {}):
-                with sentry_sdk_alpha.start_span(
-                    op=OP.QUEUE_TASK_RAY,
-                    name=root_span_name,
-                    origin=RayIntegration.origin,
-                    source=TransactionSource.TASK,
-                ) as root_span:
-                    try:
-                        result = f(*f_args, **f_kwargs)
-                        root_span.set_status(SPANSTATUS.OK)
-                    except Exception:
-                        root_span.set_status(SPANSTATUS.INTERNAL_ERROR)
-                        exc_info = sys.exc_info()
-                        _capture_exception(exc_info)
-                        reraise(*exc_info)
-
-                    return result
-
-        rv = old_remote(_f, *args, *kwargs)
-        old_remote_method = rv.remote
-
-        def _remote_method_with_header_propagation(*args, **kwargs):
-            # type: (*Any, **Any) -> Any
-            """
-            Ray Client
-            """
-            with sentry_sdk_alpha.start_span(
-                op=OP.QUEUE_SUBMIT_RAY,
-                name=qualname_from_function(f),
-                origin=RayIntegration.origin,
-                only_if_parent=True,
-            ) as span:
-                tracing = {
-                    k: v
-                    for k, v in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers()
-                }
-                try:
-                    result = old_remote_method(*args, **kwargs, _tracing=tracing)
-                    span.set_status(SPANSTATUS.OK)
-                except Exception:
-                    span.set_status(SPANSTATUS.INTERNAL_ERROR)
-                    exc_info = sys.exc_info()
-                    _capture_exception(exc_info)
-                    reraise(*exc_info)
-
-                return result
-
-        rv.remote = _remote_method_with_header_propagation
-
-        return rv
-
-    ray.remote = new_remote
-
-
-def _capture_exception(exc_info, **kwargs):
-    # type: (ExcInfo, **Any) -> None
-    client = sentry_sdk_alpha.get_client()
-
-    event, hint = event_from_exception(
-        exc_info,
-        client_options=client.options,
-        mechanism={
-            "handled": False,
-            "type": RayIntegration.identifier,
-        },
-    )
-    sentry_sdk_alpha.capture_event(event, hint=hint)
-
-
-class RayIntegration(Integration):
-    identifier = "ray"
-    origin = f"auto.queue.{identifier}"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        version = package_version("ray")
-        _check_minimum_version(RayIntegration, version)
-
-        _patch_ray_remote()
diff --git a/src/sentry_sdk_alpha/integrations/redis/__init__.py b/src/sentry_sdk_alpha/integrations/redis/__init__.py
deleted file mode 100644
index f69b8e926404e8..00000000000000
--- a/src/sentry_sdk_alpha/integrations/redis/__init__.py
+++ /dev/null
@@ -1,38 +0,0 @@
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-from sentry_sdk_alpha.integrations.redis.consts import _DEFAULT_MAX_DATA_SIZE
-from sentry_sdk_alpha.integrations.redis.rb import _patch_rb
-from sentry_sdk_alpha.integrations.redis.redis import _patch_redis
-from sentry_sdk_alpha.integrations.redis.redis_cluster import _patch_redis_cluster
-from sentry_sdk_alpha.integrations.redis.redis_py_cluster_legacy import _patch_rediscluster
-from sentry_sdk_alpha.utils import logger
-
-if TYPE_CHECKING:
-    from typing import Optional
-
-
-class RedisIntegration(Integration):
-    identifier = "redis"
-
-    def __init__(self, max_data_size=_DEFAULT_MAX_DATA_SIZE, cache_prefixes=None):
-        # type: (int, Optional[list[str]]) -> None
-        self.max_data_size = max_data_size
-        self.cache_prefixes = cache_prefixes if cache_prefixes is not None else []
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        try:
-            from redis import StrictRedis, client
-        except ImportError:
-            raise DidNotEnable("Redis client not installed")
-
-        _patch_redis(StrictRedis, client)
-        _patch_redis_cluster()
-        _patch_rb()
-
-        try:
-            _patch_rediscluster()
-        except Exception:
-            logger.exception("Error occurred while patching `rediscluster` library")
diff --git a/src/sentry_sdk_alpha/integrations/redis/_async_common.py b/src/sentry_sdk_alpha/integrations/redis/_async_common.py
deleted file mode 100644
index 8b6f9bb462a675..00000000000000
--- a/src/sentry_sdk_alpha/integrations/redis/_async_common.py
+++ /dev/null
@@ -1,115 +0,0 @@
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations.redis.consts import SPAN_ORIGIN
-from sentry_sdk_alpha.integrations.redis.modules.caches import (
-    _compile_cache_span_properties,
-    _get_cache_data,
-)
-from sentry_sdk_alpha.integrations.redis.modules.queries import _compile_db_span_properties
-from sentry_sdk_alpha.integrations.redis.utils import (
-    _create_breadcrumb,
-    _get_client_data,
-    _get_pipeline_data,
-    _update_span,
-)
-from sentry_sdk_alpha.utils import capture_internal_exceptions
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Union
-
-    from redis.asyncio.client import Pipeline, StrictRedis
-    from redis.asyncio.cluster import ClusterPipeline, RedisCluster
-
-
-def patch_redis_async_pipeline(pipeline_cls, is_cluster, get_command_args_fn, get_db_data_fn):
-    # type: (Union[type[Pipeline[Any]], type[ClusterPipeline[Any]]], bool, Any, Callable[[Any], dict[str, Any]]) -> None
-    old_execute = pipeline_cls.execute
-
-    from sentry_sdk_alpha.integrations.redis import RedisIntegration
-
-    async def _sentry_execute(self, *args, **kwargs):
-        # type: (Any, *Any, **Any) -> Any
-        if sentry_sdk_alpha.get_client().get_integration(RedisIntegration) is None:
-            return await old_execute(self, *args, **kwargs)
-
-        with sentry_sdk_alpha.start_span(
-            op=OP.DB_REDIS,
-            name="redis.pipeline.execute",
-            origin=SPAN_ORIGIN,
-            only_if_parent=True,
-        ) as span:
-            with capture_internal_exceptions():
-                span_data = get_db_data_fn(self)
-                pipeline_data = _get_pipeline_data(
-                    is_cluster=is_cluster,
-                    get_command_args_fn=get_command_args_fn,
-                    is_transaction=False if is_cluster else self.is_transaction,
-                    command_stack=(self._command_stack if is_cluster else self.command_stack),
-                )
-                _update_span(span, span_data, pipeline_data)
-                _create_breadcrumb("redis.pipeline.execute", span_data, pipeline_data)
-
-            return await old_execute(self, *args, **kwargs)
-
-    pipeline_cls.execute = _sentry_execute  # type: ignore
-
-
-def patch_redis_async_client(cls, is_cluster, get_db_data_fn):
-    # type: (Union[type[StrictRedis[Any]], type[RedisCluster[Any]]], bool, Callable[[Any], dict[str, Any]]) -> None
-    old_execute_command = cls.execute_command
-
-    from sentry_sdk_alpha.integrations.redis import RedisIntegration
-
-    async def _sentry_execute_command(self, name, *args, **kwargs):
-        # type: (Any, str, *Any, **Any) -> Any
-        integration = sentry_sdk_alpha.get_client().get_integration(RedisIntegration)
-        if integration is None:
-            return await old_execute_command(self, name, *args, **kwargs)
-
-        cache_properties = _compile_cache_span_properties(
-            name,
-            args,
-            kwargs,
-            integration,
-        )
-
-        cache_span = None
-        if cache_properties["is_cache_key"] and cache_properties["op"] is not None:
-            cache_span = sentry_sdk_alpha.start_span(
-                op=cache_properties["op"],
-                name=cache_properties["description"],
-                origin=SPAN_ORIGIN,
-                only_if_parent=True,
-            )
-            cache_span.__enter__()
-
-        db_properties = _compile_db_span_properties(integration, name, args)
-
-        db_span = sentry_sdk_alpha.start_span(
-            op=db_properties["op"],
-            name=db_properties["description"],
-            origin=SPAN_ORIGIN,
-            only_if_parent=True,
-        )
-        db_span.__enter__()
-
-        db_span_data = get_db_data_fn(self)
-        db_client_span_data = _get_client_data(is_cluster, name, *args)
-        _update_span(db_span, db_span_data, db_client_span_data)
-        _create_breadcrumb(db_properties["description"], db_span_data, db_client_span_data)
-
-        value = await old_execute_command(self, name, *args, **kwargs)
-
-        db_span.__exit__(None, None, None)
-
-        if cache_span:
-            cache_span_data = _get_cache_data(self, cache_properties, value)
-            _update_span(cache_span, cache_span_data)
-            cache_span.__exit__(None, None, None)
-
-        return value
-
-    cls.execute_command = _sentry_execute_command  # type: ignore
diff --git a/src/sentry_sdk_alpha/integrations/redis/_sync_common.py b/src/sentry_sdk_alpha/integrations/redis/_sync_common.py
deleted file mode 100644
index d9ea72b9aff661..00000000000000
--- a/src/sentry_sdk_alpha/integrations/redis/_sync_common.py
+++ /dev/null
@@ -1,121 +0,0 @@
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations.redis.consts import SPAN_ORIGIN
-from sentry_sdk_alpha.integrations.redis.modules.caches import (
-    _compile_cache_span_properties,
-    _get_cache_data,
-)
-from sentry_sdk_alpha.integrations.redis.modules.queries import _compile_db_span_properties
-from sentry_sdk_alpha.integrations.redis.utils import (
-    _create_breadcrumb,
-    _get_client_data,
-    _get_pipeline_data,
-    _update_span,
-)
-from sentry_sdk_alpha.utils import capture_internal_exceptions
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any
-
-
-def patch_redis_pipeline(
-    pipeline_cls,
-    is_cluster,
-    get_command_args_fn,
-    get_db_data_fn,
-):
-    # type: (Any, bool, Any, Callable[[Any], dict[str, Any]]) -> None
-    old_execute = pipeline_cls.execute
-
-    from sentry_sdk_alpha.integrations.redis import RedisIntegration
-
-    def sentry_patched_execute(self, *args, **kwargs):
-        # type: (Any, *Any, **Any) -> Any
-        if sentry_sdk_alpha.get_client().get_integration(RedisIntegration) is None:
-            return old_execute(self, *args, **kwargs)
-
-        with sentry_sdk_alpha.start_span(
-            op=OP.DB_REDIS,
-            name="redis.pipeline.execute",
-            origin=SPAN_ORIGIN,
-            only_if_parent=True,
-        ) as span:
-            with capture_internal_exceptions():
-                span_data = get_db_data_fn(self)
-                pipeline_data = _get_pipeline_data(
-                    is_cluster=is_cluster,
-                    get_command_args_fn=get_command_args_fn,
-                    is_transaction=False if is_cluster else self.transaction,
-                    command_stack=self.command_stack,
-                )
-                _update_span(span, span_data, pipeline_data)
-                _create_breadcrumb("redis.pipeline.execute", span_data, pipeline_data)
-
-            return old_execute(self, *args, **kwargs)
-
-    pipeline_cls.execute = sentry_patched_execute
-
-
-def patch_redis_client(cls, is_cluster, get_db_data_fn):
-    # type: (Any, bool, Callable[[Any], dict[str, Any]]) -> None
-    """
-    This function can be used to instrument custom redis client classes or
-    subclasses.
-    """
-    old_execute_command = cls.execute_command
-
-    from sentry_sdk_alpha.integrations.redis import RedisIntegration
-
-    def sentry_patched_execute_command(self, name, *args, **kwargs):
-        # type: (Any, str, *Any, **Any) -> Any
-        integration = sentry_sdk_alpha.get_client().get_integration(RedisIntegration)
-        if integration is None:
-            return old_execute_command(self, name, *args, **kwargs)
-
-        cache_properties = _compile_cache_span_properties(
-            name,
-            args,
-            kwargs,
-            integration,
-        )
-
-        cache_span = None
-        if cache_properties["is_cache_key"] and cache_properties["op"] is not None:
-            cache_span = sentry_sdk_alpha.start_span(
-                op=cache_properties["op"],
-                name=cache_properties["description"],
-                origin=SPAN_ORIGIN,
-                only_if_parent=True,
-            )
-            cache_span.__enter__()
-
-        db_properties = _compile_db_span_properties(integration, name, args)
-
-        db_span = sentry_sdk_alpha.start_span(
-            op=db_properties["op"],
-            name=db_properties["description"],
-            origin=SPAN_ORIGIN,
-            only_if_parent=True,
-        )
-        db_span.__enter__()
-
-        db_span_data = get_db_data_fn(self)
-        db_client_span_data = _get_client_data(is_cluster, name, *args)
-        _update_span(db_span, db_span_data, db_client_span_data)
-        _create_breadcrumb(db_properties["description"], db_span_data, db_client_span_data)
-
-        value = old_execute_command(self, name, *args, **kwargs)
-
-        db_span.__exit__(None, None, None)
-
-        if cache_span:
-            cache_span_data = _get_cache_data(self, cache_properties, value)
-            _update_span(cache_span, cache_span_data)
-            cache_span.__exit__(None, None, None)
-
-        return value
-
-    cls.execute_command = sentry_patched_execute_command
diff --git a/src/sentry_sdk_alpha/integrations/redis/consts.py b/src/sentry_sdk_alpha/integrations/redis/consts.py
deleted file mode 100644
index 737e8297352158..00000000000000
--- a/src/sentry_sdk_alpha/integrations/redis/consts.py
+++ /dev/null
@@ -1,19 +0,0 @@
-SPAN_ORIGIN = "auto.db.redis"
-
-_SINGLE_KEY_COMMANDS = frozenset(
-    ["decr", "decrby", "get", "incr", "incrby", "pttl", "set", "setex", "setnx", "ttl"],
-)
-_MULTI_KEY_COMMANDS = frozenset(
-    [
-        "del",
-        "touch",
-        "unlink",
-        "mget",
-    ],
-)
-_COMMANDS_INCLUDING_SENSITIVE_DATA = [
-    "auth",
-]
-_MAX_NUM_ARGS = 10  # Trim argument lists to this many values
-_MAX_NUM_COMMANDS = 10  # Trim command lists to this many values
-_DEFAULT_MAX_DATA_SIZE = 1024
diff --git a/src/sentry_sdk_alpha/integrations/redis/modules/__init__.py b/src/sentry_sdk_alpha/integrations/redis/modules/__init__.py
deleted file mode 100644
index e69de29bb2d1d6..00000000000000
diff --git a/src/sentry_sdk_alpha/integrations/redis/modules/caches.py b/src/sentry_sdk_alpha/integrations/redis/modules/caches.py
deleted file mode 100644
index 89607c14c2f8c3..00000000000000
--- a/src/sentry_sdk_alpha/integrations/redis/modules/caches.py
+++ /dev/null
@@ -1,123 +0,0 @@
-"""
-Code used for the Caches module in Sentry
-"""
-
-from sentry_sdk_alpha.consts import OP, SPANDATA
-from sentry_sdk_alpha.integrations.redis.utils import _get_safe_key, _key_as_string
-from sentry_sdk_alpha.utils import capture_internal_exceptions
-
-GET_COMMANDS = ("get", "mget")
-SET_COMMANDS = ("set", "setex")
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from typing import Any, Optional
-
-    from sentry_sdk_alpha.integrations.redis import RedisIntegration
-
-
-def _get_op(name):
-    # type: (str) -> Optional[str]
-    op = None
-    if name.lower() in GET_COMMANDS:
-        op = OP.CACHE_GET
-    elif name.lower() in SET_COMMANDS:
-        op = OP.CACHE_PUT
-
-    return op
-
-
-def _compile_cache_span_properties(redis_command, args, kwargs, integration):
-    # type: (str, tuple[Any, ...], dict[str, Any], RedisIntegration) -> dict[str, Any]
-    key = _get_safe_key(redis_command, args, kwargs)
-    key_as_string = _key_as_string(key)
-    keys_as_string = key_as_string.split(", ")
-
-    is_cache_key = False
-    for prefix in integration.cache_prefixes:
-        for kee in keys_as_string:
-            if kee.startswith(prefix):
-                is_cache_key = True
-                break
-        if is_cache_key:
-            break
-
-    value = None
-    if redis_command.lower() in SET_COMMANDS:
-        value = args[-1]
-
-    properties = {
-        "op": _get_op(redis_command),
-        "description": _get_cache_span_description(redis_command, args, kwargs, integration),
-        "key": key,
-        "key_as_string": key_as_string,
-        "redis_command": redis_command.lower(),
-        "is_cache_key": is_cache_key,
-        "value": value,
-    }
-
-    return properties
-
-
-def _get_cache_span_description(redis_command, args, kwargs, integration):
-    # type: (str, tuple[Any, ...], dict[str, Any], RedisIntegration) -> str
-    description = _key_as_string(_get_safe_key(redis_command, args, kwargs))
-
-    data_should_be_truncated = (
-        integration.max_data_size and len(description) > integration.max_data_size
-    )
-    if data_should_be_truncated:
-        description = description[: integration.max_data_size - len("...")] + "..."
-
-    return description
-
-
-def _get_cache_data(redis_client, properties, return_value):
-    # type: (Any, dict[str, Any], Optional[Any]) -> dict[str, Any]
-    data = {}
-
-    with capture_internal_exceptions():
-        data[SPANDATA.CACHE_KEY] = properties["key"]
-
-        if properties["redis_command"] in GET_COMMANDS:
-            if return_value is not None:
-                data[SPANDATA.CACHE_HIT] = True
-                size = (
-                    len(str(return_value).encode("utf-8"))
-                    if not isinstance(return_value, bytes)
-                    else len(return_value)
-                )
-                data[SPANDATA.CACHE_ITEM_SIZE] = size
-            else:
-                data[SPANDATA.CACHE_HIT] = False
-
-        elif properties["redis_command"] in SET_COMMANDS:
-            if properties["value"] is not None:
-                size = (
-                    len(properties["value"].encode("utf-8"))
-                    if not isinstance(properties["value"], bytes)
-                    else len(properties["value"])
-                )
-                data[SPANDATA.CACHE_ITEM_SIZE] = size
-
-        try:
-            connection_params = redis_client.connection_pool.connection_kwargs
-        except AttributeError:
-            # If it is a cluster, there is no connection_pool attribute so we
-            # need to get the default node from the cluster instance
-            default_node = redis_client.get_default_node()
-            connection_params = {
-                "host": default_node.host,
-                "port": default_node.port,
-            }
-
-        host = connection_params.get("host")
-        if host is not None:
-            data[SPANDATA.NETWORK_PEER_ADDRESS] = host
-
-        port = connection_params.get("port")
-        if port is not None:
-            data[SPANDATA.NETWORK_PEER_PORT] = port
-
-    return data
diff --git a/src/sentry_sdk_alpha/integrations/redis/modules/queries.py b/src/sentry_sdk_alpha/integrations/redis/modules/queries.py
deleted file mode 100644
index 91bb11f9ce15c8..00000000000000
--- a/src/sentry_sdk_alpha/integrations/redis/modules/queries.py
+++ /dev/null
@@ -1,73 +0,0 @@
-"""
-Code used for the Queries module in Sentry
-"""
-
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha.consts import OP, SPANDATA
-from sentry_sdk_alpha.integrations.redis.utils import _get_safe_command
-from sentry_sdk_alpha.utils import capture_internal_exceptions
-
-if TYPE_CHECKING:
-    from typing import Any
-
-    from redis import Redis
-
-    from sentry_sdk_alpha.integrations.redis import RedisIntegration
-
-
-def _compile_db_span_properties(integration, redis_command, args):
-    # type: (RedisIntegration, str, tuple[Any, ...]) -> dict[str, Any]
-    description = _get_db_span_description(integration, redis_command, args)
-
-    properties = {
-        "op": OP.DB_REDIS,
-        "description": description,
-    }
-
-    return properties
-
-
-def _get_db_span_description(integration, command_name, args):
-    # type: (RedisIntegration, str, tuple[Any, ...]) -> str
-    description = command_name
-
-    with capture_internal_exceptions():
-        description = _get_safe_command(command_name, args)
-
-    data_should_be_truncated = (
-        integration.max_data_size and len(description) > integration.max_data_size
-    )
-    if data_should_be_truncated:
-        description = description[: integration.max_data_size - len("...")] + "..."
-
-    return description
-
-
-def _get_connection_data(connection_params):
-    # type: (dict[str, Any]) -> dict[str, Any]
-    data = {
-        SPANDATA.DB_SYSTEM: "redis",
-    }
-
-    db = connection_params.get("db")
-    if db is not None:
-        data[SPANDATA.DB_NAME] = str(db)
-
-    host = connection_params.get("host")
-    if host is not None:
-        data[SPANDATA.SERVER_ADDRESS] = host
-
-    port = connection_params.get("port")
-    if port is not None:
-        data[SPANDATA.SERVER_PORT] = port
-
-    return data
-
-
-def _get_db_data(redis_instance):
-    # type: (Redis[Any]) -> dict[str, Any]
-    try:
-        return _get_connection_data(redis_instance.connection_pool.connection_kwargs)
-    except AttributeError:
-        return {}  # connections_kwargs may be missing in some cases
diff --git a/src/sentry_sdk_alpha/integrations/redis/rb.py b/src/sentry_sdk_alpha/integrations/redis/rb.py
deleted file mode 100644
index 9a8e9af2283b50..00000000000000
--- a/src/sentry_sdk_alpha/integrations/redis/rb.py
+++ /dev/null
@@ -1,32 +0,0 @@
-"""
-Instrumentation for Redis Blaster (rb)
-
-https://github.com/getsentry/rb
-"""
-
-from sentry_sdk_alpha.integrations.redis._sync_common import patch_redis_client
-from sentry_sdk_alpha.integrations.redis.modules.queries import _get_db_data
-
-
-def _patch_rb():
-    # type: () -> None
-    try:
-        import rb.clients  # type: ignore
-    except ImportError:
-        pass
-    else:
-        patch_redis_client(
-            rb.clients.FanoutClient,
-            is_cluster=False,
-            get_db_data_fn=_get_db_data,
-        )
-        patch_redis_client(
-            rb.clients.MappingClient,
-            is_cluster=False,
-            get_db_data_fn=_get_db_data,
-        )
-        patch_redis_client(
-            rb.clients.RoutingClient,
-            is_cluster=False,
-            get_db_data_fn=_get_db_data,
-        )
diff --git a/src/sentry_sdk_alpha/integrations/redis/redis.py b/src/sentry_sdk_alpha/integrations/redis/redis.py
deleted file mode 100644
index ddd4ec507fd894..00000000000000
--- a/src/sentry_sdk_alpha/integrations/redis/redis.py
+++ /dev/null
@@ -1,70 +0,0 @@
-"""
-Instrumentation for Redis
-
-https://github.com/redis/redis-py
-"""
-
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha.integrations.redis._sync_common import (
-    patch_redis_client,
-    patch_redis_pipeline,
-)
-from sentry_sdk_alpha.integrations.redis.modules.queries import _get_db_data
-
-if TYPE_CHECKING:
-    from collections.abc import Sequence
-    from typing import Any
-
-
-def _get_redis_command_args(command):
-    # type: (Any) -> Sequence[Any]
-    return command[0]
-
-
-def _patch_redis(StrictRedis, client):  # noqa: N803
-    # type: (Any, Any) -> None
-    patch_redis_client(
-        StrictRedis,
-        is_cluster=False,
-        get_db_data_fn=_get_db_data,
-    )
-    patch_redis_pipeline(
-        client.Pipeline,
-        is_cluster=False,
-        get_command_args_fn=_get_redis_command_args,
-        get_db_data_fn=_get_db_data,
-    )
-    try:
-        strict_pipeline = client.StrictPipeline
-    except AttributeError:
-        pass
-    else:
-        patch_redis_pipeline(
-            strict_pipeline,
-            is_cluster=False,
-            get_command_args_fn=_get_redis_command_args,
-            get_db_data_fn=_get_db_data,
-        )
-
-    try:
-        import redis.asyncio
-    except ImportError:
-        pass
-    else:
-        from sentry_sdk_alpha.integrations.redis._async_common import (
-            patch_redis_async_client,
-            patch_redis_async_pipeline,
-        )
-
-        patch_redis_async_client(
-            redis.asyncio.client.StrictRedis,
-            is_cluster=False,
-            get_db_data_fn=_get_db_data,
-        )
-        patch_redis_async_pipeline(
-            redis.asyncio.client.Pipeline,
-            False,
-            _get_redis_command_args,
-            get_db_data_fn=_get_db_data,
-        )
diff --git a/src/sentry_sdk_alpha/integrations/redis/redis_cluster.py b/src/sentry_sdk_alpha/integrations/redis/redis_cluster.py
deleted file mode 100644
index 9f7375b4d38515..00000000000000
--- a/src/sentry_sdk_alpha/integrations/redis/redis_cluster.py
+++ /dev/null
@@ -1,99 +0,0 @@
-"""
-Instrumentation for RedisCluster
-This is part of the main redis-py client.
-
-https://github.com/redis/redis-py/blob/master/redis/cluster.py
-"""
-
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha.integrations.redis._sync_common import (
-    patch_redis_client,
-    patch_redis_pipeline,
-)
-from sentry_sdk_alpha.integrations.redis.modules.queries import _get_connection_data
-from sentry_sdk_alpha.integrations.redis.utils import _parse_rediscluster_command
-from sentry_sdk_alpha.utils import capture_internal_exceptions
-
-if TYPE_CHECKING:
-    from typing import Any
-
-    from redis import RedisCluster
-    from redis.asyncio.cluster import ClusterPipeline as AsyncClusterPipeline
-    from redis.asyncio.cluster import RedisCluster as AsyncRedisCluster
-
-
-def _get_async_cluster_db_data(async_redis_cluster_instance):
-    # type: (AsyncRedisCluster[Any]) -> dict[str, Any]
-    default_node = async_redis_cluster_instance.get_default_node()
-    if default_node is not None and default_node.connection_kwargs is not None:
-        return _get_connection_data(default_node.connection_kwargs)
-    else:
-        return {}
-
-
-def _get_async_cluster_pipeline_db_data(async_redis_cluster_pipeline_instance):
-    # type: (AsyncClusterPipeline[Any]) -> dict[str, Any]
-    with capture_internal_exceptions():
-        return _get_async_cluster_db_data(
-            # the AsyncClusterPipeline has always had a `_client` attr but it is private so potentially problematic and mypy
-            # does not recognize it - see https://github.com/redis/redis-py/blame/v5.0.0/redis/asyncio/cluster.py#L1386
-            async_redis_cluster_pipeline_instance._client,  # type: ignore[attr-defined]
-        )
-
-
-def _get_cluster_db_data(redis_cluster_instance):
-    # type: (RedisCluster[Any]) -> dict[str, Any]
-    default_node = redis_cluster_instance.get_default_node()
-
-    if default_node is not None:
-        connection_params = {
-            "host": default_node.host,
-            "port": default_node.port,
-        }
-        return _get_connection_data(connection_params)
-    else:
-        return {}
-
-
-def _patch_redis_cluster():
-    # type: () -> None
-    """Patches the cluster module on redis SDK (as opposed to rediscluster library)"""
-    try:
-        from redis import RedisCluster, cluster
-    except ImportError:
-        pass
-    else:
-        patch_redis_client(
-            RedisCluster,
-            is_cluster=True,
-            get_db_data_fn=_get_cluster_db_data,
-        )
-        patch_redis_pipeline(
-            cluster.ClusterPipeline,
-            is_cluster=True,
-            get_command_args_fn=_parse_rediscluster_command,
-            get_db_data_fn=_get_cluster_db_data,
-        )
-
-    try:
-        from redis.asyncio import cluster as async_cluster
-    except ImportError:
-        pass
-    else:
-        from sentry_sdk_alpha.integrations.redis._async_common import (
-            patch_redis_async_client,
-            patch_redis_async_pipeline,
-        )
-
-        patch_redis_async_client(
-            async_cluster.RedisCluster,
-            is_cluster=True,
-            get_db_data_fn=_get_async_cluster_db_data,
-        )
-        patch_redis_async_pipeline(
-            async_cluster.ClusterPipeline,
-            is_cluster=True,
-            get_command_args_fn=_parse_rediscluster_command,
-            get_db_data_fn=_get_async_cluster_pipeline_db_data,
-        )
diff --git a/src/sentry_sdk_alpha/integrations/redis/redis_py_cluster_legacy.py b/src/sentry_sdk_alpha/integrations/redis/redis_py_cluster_legacy.py
deleted file mode 100644
index 5380b3d03a56eb..00000000000000
--- a/src/sentry_sdk_alpha/integrations/redis/redis_py_cluster_legacy.py
+++ /dev/null
@@ -1,50 +0,0 @@
-"""
-Instrumentation for redis-py-cluster
-The project redis-py-cluster is EOL and was integrated into redis-py starting from version 4.1.0 (Dec 26, 2021).
-
-https://github.com/grokzen/redis-py-cluster
-"""
-
-from sentry_sdk_alpha.integrations.redis._sync_common import (
-    patch_redis_client,
-    patch_redis_pipeline,
-)
-from sentry_sdk_alpha.integrations.redis.modules.queries import _get_db_data
-from sentry_sdk_alpha.integrations.redis.utils import _parse_rediscluster_command
-
-
-def _patch_rediscluster():
-    # type: () -> None
-    try:
-        import rediscluster  # type: ignore
-    except ImportError:
-        return
-
-    patch_redis_client(
-        rediscluster.RedisCluster,
-        is_cluster=True,
-        get_db_data_fn=_get_db_data,
-    )
-
-    # up to v1.3.6, __version__ attribute is a tuple
-    # from v2.0.0, __version__ is a string and VERSION a tuple
-    version = getattr(rediscluster, "VERSION", rediscluster.__version__)
-
-    # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0
-    # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst
-    if (0, 2, 0) < version < (2, 0, 0):
-        pipeline_cls = rediscluster.pipeline.StrictClusterPipeline
-        patch_redis_client(
-            rediscluster.StrictRedisCluster,
-            is_cluster=True,
-            get_db_data_fn=_get_db_data,
-        )
-    else:
-        pipeline_cls = rediscluster.pipeline.ClusterPipeline
-
-    patch_redis_pipeline(
-        pipeline_cls,
-        is_cluster=True,
-        get_command_args_fn=_parse_rediscluster_command,
-        get_db_data_fn=_get_db_data,
-    )
diff --git a/src/sentry_sdk_alpha/integrations/redis/utils.py b/src/sentry_sdk_alpha/integrations/redis/utils.py
deleted file mode 100644
index 2b50fa893d3e91..00000000000000
--- a/src/sentry_sdk_alpha/integrations/redis/utils.py
+++ /dev/null
@@ -1,190 +0,0 @@
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import SPANDATA
-from sentry_sdk_alpha.integrations.redis.consts import (
-    _COMMANDS_INCLUDING_SENSITIVE_DATA,
-    _MAX_NUM_ARGS,
-    _MAX_NUM_COMMANDS,
-    _MULTI_KEY_COMMANDS,
-    _SINGLE_KEY_COMMANDS,
-)
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import SENSITIVE_DATA_SUBSTITUTE
-
-if TYPE_CHECKING:
-    from collections.abc import Sequence
-    from typing import Any, Optional
-
-    from sentry_sdk_alpha.tracing import Span
-
-
-TAG_KEYS = [
-    "redis.command",
-    "redis.is_cluster",
-    "redis.key",
-    "redis.transaction",
-    SPANDATA.DB_OPERATION,
-]
-
-
-def _update_span(span, *data_bags):
-    # type: (Span, *dict[str, Any]) -> None
-    """
-    Set tags and data on the given span to data from the given data bags.
-    """
-    for data in data_bags:
-        for key, value in data.items():
-            if key in TAG_KEYS:
-                span.set_tag(key, value)
-            else:
-                span.set_attribute(key, value)
-
-
-def _create_breadcrumb(message, *data_bags):
-    # type: (str, *dict[str, Any]) -> None
-    """
-    Create a breadcrumb containing the tags data from the given data bags.
-    """
-    data = {}
-    for data in data_bags:
-        for key, value in data.items():
-            if key in TAG_KEYS:
-                data[key] = value
-
-    sentry_sdk_alpha.add_breadcrumb(
-        message=message,
-        type="redis",
-        category="redis",
-        data=data,
-    )
-
-
-def _get_safe_command(name, args):
-    # type: (str, Sequence[Any]) -> str
-    command_parts = [name]
-
-    for i, arg in enumerate(args):
-        if i > _MAX_NUM_ARGS:
-            break
-
-        name_low = name.lower()
-
-        if name_low in _COMMANDS_INCLUDING_SENSITIVE_DATA:
-            command_parts.append(SENSITIVE_DATA_SUBSTITUTE)
-            continue
-
-        arg_is_the_key = i == 0
-        if arg_is_the_key:
-            command_parts.append(repr(arg))
-
-        else:
-            if should_send_default_pii():
-                command_parts.append(repr(arg))
-            else:
-                command_parts.append(SENSITIVE_DATA_SUBSTITUTE)
-
-    command = " ".join(command_parts)
-    return command
-
-
-def _safe_decode(key):
-    # type: (Any) -> str
-    if isinstance(key, bytes):
-        try:
-            return key.decode()
-        except UnicodeDecodeError:
-            return ""
-
-    return str(key)
-
-
-def _key_as_string(key):
-    # type: (Any) -> str
-    if isinstance(key, (dict, list, tuple)):
-        key = ", ".join(_safe_decode(x) for x in key)
-    elif isinstance(key, bytes):
-        key = _safe_decode(key)
-    elif key is None:
-        key = ""
-    else:
-        key = str(key)
-
-    return key
-
-
-def _get_safe_key(method_name, args, kwargs):
-    # type: (str, Optional[tuple[Any, ...]], Optional[dict[str, Any]]) -> Optional[tuple[str, ...]]
-    """
-    Gets the key (or keys) from the given method_name.
-    The method_name could be a redis command or a django caching command
-    """
-    key = None
-
-    if args is not None and method_name.lower() in _MULTI_KEY_COMMANDS:
-        # for example redis "mget"
-        key = tuple(args)
-
-    elif args is not None and len(args) >= 1:
-        # for example django "set_many/get_many" or redis "get"
-        if isinstance(args[0], (dict, list, tuple)):
-            key = tuple(args[0])
-        else:
-            key = (args[0],)
-
-    elif kwargs is not None and "key" in kwargs:
-        # this is a legacy case for older versions of Django
-        if isinstance(kwargs["key"], (list, tuple)):
-            if len(kwargs["key"]) > 0:
-                key = tuple(kwargs["key"])
-        else:
-            if kwargs["key"] is not None:
-                key = (kwargs["key"],)
-
-    return key
-
-
-def _parse_rediscluster_command(command):
-    # type: (Any) -> Sequence[Any]
-    return command.args
-
-
-def _get_pipeline_data(is_cluster, get_command_args_fn, is_transaction, command_stack):
-    # type: (bool, Any, bool, Sequence[Any]) -> dict[str, Any]
-    data = {
-        "redis.is_cluster": is_cluster,
-        "redis.transaction": is_transaction,
-    }  # type: dict[str, Any]
-
-    commands = []
-    for i, arg in enumerate(command_stack):
-        if i >= _MAX_NUM_COMMANDS:
-            break
-
-        command = get_command_args_fn(arg)
-        commands.append(_get_safe_command(command[0], command[1:]))
-
-    data["redis.commands.count"] = len(command_stack)
-    data["redis.commands.first_ten"] = commands
-
-    return data
-
-
-def _get_client_data(is_cluster, name, *args):
-    # type: (bool, str, *Any) -> dict[str, Any]
-    data = {
-        "redis.is_cluster": is_cluster,
-    }  # type: dict[str, Any]
-
-    if name:
-        data["redis.command"] = name
-        data[SPANDATA.DB_OPERATION] = name
-
-    if name and args:
-        name_low = name.lower()
-        if (name_low in _SINGLE_KEY_COMMANDS) or (
-            name_low in _MULTI_KEY_COMMANDS and len(args) == 1
-        ):
-            data["redis.key"] = args[0]
-
-    return data
diff --git a/src/sentry_sdk_alpha/integrations/rq.py b/src/sentry_sdk_alpha/integrations/rq.py
deleted file mode 100644
index f9142e61aee402..00000000000000
--- a/src/sentry_sdk_alpha/integrations/rq.py
+++ /dev/null
@@ -1,199 +0,0 @@
-import weakref
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
-from sentry_sdk_alpha.integrations.logging import ignore_logger
-from sentry_sdk_alpha.tracing import TransactionSource
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    event_from_exception,
-    format_timestamp,
-    parse_version,
-)
-
-try:
-    from rq.job import JobStatus
-    from rq.queue import Queue
-    from rq.timeouts import JobTimeoutException
-    from rq.version import VERSION as RQ_VERSION
-    from rq.worker import Worker
-except ImportError:
-    raise DidNotEnable("RQ not installed")
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any
-
-    from rq.job import Job
-
-    from sentry_sdk_alpha._types import Event, EventProcessor
-    from sentry_sdk_alpha.utils import ExcInfo
-
-DEFAULT_TRANSACTION_NAME = "unknown RQ task"
-
-
-JOB_PROPERTY_TO_ATTRIBUTE = {
-    "id": "messaging.message.id",
-}
-
-QUEUE_PROPERTY_TO_ATTRIBUTE = {
-    "name": "messaging.destination.name",
-}
-
-
-class RqIntegration(Integration):
-    identifier = "rq"
-    origin = f"auto.queue.{identifier}"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        version = parse_version(RQ_VERSION)
-        _check_minimum_version(RqIntegration, version)
-
-        old_perform_job = Worker.perform_job
-
-        @ensure_integration_enabled(RqIntegration, old_perform_job)
-        def sentry_patched_perform_job(self, job, queue, *args, **kwargs):
-            # type: (Any, Job, Queue, *Any, **Any) -> bool
-            with sentry_sdk_alpha.new_scope() as scope:
-                try:
-                    transaction_name = job.func_name or DEFAULT_TRANSACTION_NAME
-                except AttributeError:
-                    transaction_name = DEFAULT_TRANSACTION_NAME
-
-                scope.set_transaction_name(transaction_name, source=TransactionSource.TASK)
-                scope.clear_breadcrumbs()
-                scope.add_event_processor(_make_event_processor(weakref.ref(job)))
-
-                with sentry_sdk_alpha.continue_trace(job.meta.get("_sentry_trace_headers") or {}):
-                    with sentry_sdk_alpha.start_span(
-                        op=OP.QUEUE_TASK_RQ,
-                        name=transaction_name,
-                        source=TransactionSource.TASK,
-                        origin=RqIntegration.origin,
-                        attributes=_prepopulate_attributes(job, queue),
-                    ):
-                        rv = old_perform_job(self, job, queue, *args, **kwargs)
-
-            if self.is_horse:
-                # We're inside of a forked process and RQ is
-                # about to call `os._exit`. Make sure that our
-                # events get sent out.
-                sentry_sdk_alpha.get_client().flush()
-
-            return rv
-
-        Worker.perform_job = sentry_patched_perform_job
-
-        old_handle_exception = Worker.handle_exception
-
-        def sentry_patched_handle_exception(self, job, *exc_info, **kwargs):
-            # type: (Worker, Any, *Any, **Any) -> Any
-            retry = hasattr(job, "retries_left") and job.retries_left and job.retries_left > 0
-            failed = job._status == JobStatus.FAILED or job.is_failed
-            if failed and not retry:
-                _capture_exception(exc_info)
-
-            return old_handle_exception(self, job, *exc_info, **kwargs)
-
-        Worker.handle_exception = sentry_patched_handle_exception
-
-        old_enqueue_job = Queue.enqueue_job
-
-        @ensure_integration_enabled(RqIntegration, old_enqueue_job)
-        def sentry_patched_enqueue_job(self, job, **kwargs):
-            # type: (Queue, Any, **Any) -> Any
-            job.meta["_sentry_trace_headers"] = dict(
-                sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers()
-            )
-
-            return old_enqueue_job(self, job, **kwargs)
-
-        Queue.enqueue_job = sentry_patched_enqueue_job
-
-        ignore_logger("rq.worker")
-
-
-def _make_event_processor(weak_job):
-    # type: (Callable[[], Job]) -> EventProcessor
-    def event_processor(event, hint):
-        # type: (Event, dict[str, Any]) -> Event
-        job = weak_job()
-        if job is not None:
-            with capture_internal_exceptions():
-                extra = event.setdefault("extra", {})
-                rq_job = {
-                    "job_id": job.id,
-                    "func": job.func_name,
-                    "args": job.args,
-                    "kwargs": job.kwargs,
-                    "description": job.description,
-                }
-
-                if job.enqueued_at:
-                    rq_job["enqueued_at"] = format_timestamp(job.enqueued_at)
-                if job.started_at:
-                    rq_job["started_at"] = format_timestamp(job.started_at)
-
-                extra["rq-job"] = rq_job
-
-        if "exc_info" in hint:
-            with capture_internal_exceptions():
-                if issubclass(hint["exc_info"][0], JobTimeoutException):
-                    event["fingerprint"] = ["rq", "JobTimeoutException", job.func_name]
-
-        return event
-
-    return event_processor
-
-
-def _capture_exception(exc_info, **kwargs):
-    # type: (ExcInfo, **Any) -> None
-    client = sentry_sdk_alpha.get_client()
-
-    event, hint = event_from_exception(
-        exc_info,
-        client_options=client.options,
-        mechanism={"type": "rq", "handled": False},
-    )
-
-    sentry_sdk_alpha.capture_event(event, hint=hint)
-
-
-def _prepopulate_attributes(job, queue):
-    # type: (Job, Queue) -> dict[str, Any]
-    attributes = {
-        "messaging.system": "rq",
-        "rq.job.id": job.id,
-    }
-
-    for prop, attr in JOB_PROPERTY_TO_ATTRIBUTE.items():
-        if getattr(job, prop, None) is not None:
-            attributes[attr] = getattr(job, prop)
-
-    for prop, attr in QUEUE_PROPERTY_TO_ATTRIBUTE.items():
-        if getattr(queue, prop, None) is not None:
-            attributes[attr] = getattr(queue, prop)
-
-    if getattr(job, "args", None):
-        for i, arg in enumerate(job.args):
-            with capture_internal_exceptions():
-                attributes[f"rq.job.args.{i}"] = str(arg)
-
-    if getattr(job, "kwargs", None):
-        for kwarg, value in job.kwargs.items():
-            with capture_internal_exceptions():
-                attributes[f"rq.job.kwargs.{kwarg}"] = str(value)
-
-    func = job.func
-    if callable(func):
-        func = func.__name__
-
-    attributes["rq.job.func"] = str(func)
-
-    return attributes
diff --git a/src/sentry_sdk_alpha/integrations/rust_tracing.py b/src/sentry_sdk_alpha/integrations/rust_tracing.py
deleted file mode 100644
index bbae321b27c1e8..00000000000000
--- a/src/sentry_sdk_alpha/integrations/rust_tracing.py
+++ /dev/null
@@ -1,270 +0,0 @@
-"""
-This integration ingests tracing data from native extensions written in Rust.
-
-Using it requires additional setup on the Rust side to accept a
-`RustTracingLayer` Python object and register it with the `tracing-subscriber`
-using an adapter from the `pyo3-python-tracing-subscriber` crate. For example:
-```rust
-#[pyfunction]
-pub fn initialize_tracing(py_impl: Bound<'_, PyAny>) {
-    tracing_subscriber::registry()
-        .with(pyo3_python_tracing_subscriber::PythonCallbackLayerBridge::new(py_impl))
-        .init();
-}
-```
-
-Usage in Python would then look like:
-```
-sentry_sdk.init(
-    dsn=sentry_dsn,
-    integrations=[
-        RustTracingIntegration(
-            "demo_rust_extension",
-            demo_rust_extension.initialize_tracing,
-            event_type_mapping=event_type_mapping,
-        )
-    ],
-)
-```
-
-Each native extension requires its own integration.
-"""
-
-import json
-from collections.abc import Callable
-from enum import Enum, auto
-from typing import Any, Dict, Optional
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.tracing import Span
-from sentry_sdk_alpha.utils import SENSITIVE_DATA_SUBSTITUTE
-
-
-class RustTracingLevel(Enum):
-    Trace = "TRACE"
-    Debug = "DEBUG"
-    Info = "INFO"
-    Warn = "WARN"
-    Error = "ERROR"
-
-
-class EventTypeMapping(Enum):
-    Ignore = auto()
-    Exc = auto()
-    Breadcrumb = auto()
-    Event = auto()
-
-
-def tracing_level_to_sentry_level(level):
-    # type: (str) -> sentry_sdk._types.LogLevelStr
-    level = RustTracingLevel(level)
-    if level in (RustTracingLevel.Trace, RustTracingLevel.Debug):
-        return "debug"
-    elif level == RustTracingLevel.Info:
-        return "info"
-    elif level == RustTracingLevel.Warn:
-        return "warning"
-    elif level == RustTracingLevel.Error:
-        return "error"
-    else:
-        # Better this than crashing
-        return "info"
-
-
-def extract_contexts(event: dict[str, Any]) -> dict[str, Any]:
-    metadata = event.get("metadata", {})
-    contexts = {}
-
-    location = {}
-    for field in ["module_path", "file", "line"]:
-        if field in metadata:
-            location[field] = metadata[field]
-    if len(location) > 0:
-        contexts["rust_tracing_location"] = location
-
-    fields = {}
-    for field in metadata.get("fields", []):
-        fields[field] = event.get(field)
-    if len(fields) > 0:
-        contexts["rust_tracing_fields"] = fields
-
-    return contexts
-
-
-def process_event(event: dict[str, Any]) -> None:
-    metadata = event.get("metadata", {})
-
-    logger = metadata.get("target")
-    level = tracing_level_to_sentry_level(metadata.get("level"))
-    message = event.get("message")  # type: sentry_sdk._types.Any
-    contexts = extract_contexts(event)
-
-    sentry_event = {
-        "logger": logger,
-        "level": level,
-        "message": message,
-        "contexts": contexts,
-    }  # type: sentry_sdk._types.Event
-
-    sentry_sdk_alpha.capture_event(sentry_event)
-
-
-def process_exception(event: dict[str, Any]) -> None:
-    process_event(event)
-
-
-def process_breadcrumb(event: dict[str, Any]) -> None:
-    level = tracing_level_to_sentry_level(event.get("metadata", {}).get("level"))
-    message = event.get("message")
-
-    sentry_sdk_alpha.add_breadcrumb(level=level, message=message)
-
-
-def default_span_filter(metadata: dict[str, Any]) -> bool:
-    return RustTracingLevel(metadata.get("level")) in (
-        RustTracingLevel.Error,
-        RustTracingLevel.Warn,
-        RustTracingLevel.Info,
-    )
-
-
-def default_event_type_mapping(metadata: dict[str, Any]) -> EventTypeMapping:
-    level = RustTracingLevel(metadata.get("level"))
-    if level == RustTracingLevel.Error:
-        return EventTypeMapping.Exc
-    elif level in (RustTracingLevel.Warn, RustTracingLevel.Info):
-        return EventTypeMapping.Breadcrumb
-    elif level in (RustTracingLevel.Debug, RustTracingLevel.Trace):
-        return EventTypeMapping.Ignore
-    else:
-        return EventTypeMapping.Ignore
-
-
-class RustTracingLayer:
-    def __init__(
-        self,
-        origin: str,
-        event_type_mapping: Callable[
-            [dict[str, Any]], EventTypeMapping
-        ] = default_event_type_mapping,
-        span_filter: Callable[[dict[str, Any]], bool] = default_span_filter,
-        include_tracing_fields: bool | None = None,
-    ):
-        self.origin = origin
-        self.event_type_mapping = event_type_mapping
-        self.span_filter = span_filter
-        self.include_tracing_fields = include_tracing_fields
-
-    def _include_tracing_fields(self) -> bool:
-        """
-        By default, the values of tracing fields are not included in case they
-        contain PII. A user may override that by passing `True` for the
-        `include_tracing_fields` keyword argument of this integration or by
-        setting `send_default_pii` to `True` in their Sentry client options.
-        """
-        return (
-            should_send_default_pii()
-            if self.include_tracing_fields is None
-            else self.include_tracing_fields
-        )
-
-    def on_event(self, event: str, _span_state: Span | None) -> None:
-        deserialized_event = json.loads(event)
-        metadata = deserialized_event.get("metadata", {})
-
-        event_type = self.event_type_mapping(metadata)
-        if event_type == EventTypeMapping.Ignore:
-            return
-        elif event_type == EventTypeMapping.Exc:
-            process_exception(deserialized_event)
-        elif event_type == EventTypeMapping.Breadcrumb:
-            process_breadcrumb(deserialized_event)
-        elif event_type == EventTypeMapping.Event:
-            process_event(deserialized_event)
-
-    def on_new_span(self, attrs: str, span_id: str) -> Span | None:
-        attrs = json.loads(attrs)
-        metadata = attrs.get("metadata", {})
-
-        if not self.span_filter(metadata):
-            return None
-
-        module_path = metadata.get("module_path")
-        name = metadata.get("name")
-        message = attrs.get("message")
-
-        if message is not None:
-            sentry_span_name = message
-        elif module_path is not None and name is not None:
-            sentry_span_name = f"{module_path}::{name}"  # noqa: E231
-        elif name is not None:
-            sentry_span_name = name
-        else:
-            sentry_span_name = ""
-
-        span = sentry_sdk_alpha.start_span(
-            op="function",
-            name=sentry_span_name,
-            origin=self.origin,
-            only_if_parent=True,
-        )
-        span.__enter__()
-
-        fields = metadata.get("fields", [])
-        for field in fields:
-            if self._include_tracing_fields():
-                span.set_attribute(field, attrs.get(field))
-            else:
-                span.set_attribute(field, SENSITIVE_DATA_SUBSTITUTE)
-
-        return span
-
-    def on_close(self, span_id: str, span: Span | None) -> None:
-        if span is not None:
-            span.__exit__(None, None, None)
-
-    def on_record(self, span_id: str, values: str, span: Span | None) -> None:
-        if span is not None:
-            deserialized_values = json.loads(values)
-            for key, value in deserialized_values.items():
-                if self._include_tracing_fields():
-                    span.set_attribute(key, value)
-                else:
-                    span.set_attribute(key, SENSITIVE_DATA_SUBSTITUTE)
-
-
-class RustTracingIntegration(Integration):
-    """
-    Ingests tracing data from a Rust native extension's `tracing` instrumentation.
-
-    If a project uses more than one Rust native extension, each one will need
-    its own instance of `RustTracingIntegration` with an initializer function
-    specific to that extension.
-
-    Since all of the setup for this integration requires instance-specific state
-    which is not available in `setup_once()`, setup instead happens in `__init__()`.
-    """
-
-    def __init__(
-        self,
-        identifier: str,
-        initializer: Callable[[RustTracingLayer], None],
-        event_type_mapping: Callable[
-            [dict[str, Any]], EventTypeMapping
-        ] = default_event_type_mapping,
-        span_filter: Callable[[dict[str, Any]], bool] = default_span_filter,
-        include_tracing_fields: bool | None = None,
-    ):
-        self.identifier = identifier
-        origin = f"auto.function.rust_tracing.{identifier}"
-        self.tracing_layer = RustTracingLayer(
-            origin, event_type_mapping, span_filter, include_tracing_fields
-        )
-
-        initializer(self.tracing_layer)
-
-    @staticmethod
-    def setup_once() -> None:
-        pass
diff --git a/src/sentry_sdk_alpha/integrations/sanic.py b/src/sentry_sdk_alpha/integrations/sanic.py
deleted file mode 100644
index 65f2d1d0da1a11..00000000000000
--- a/src/sentry_sdk_alpha/integrations/sanic.py
+++ /dev/null
@@ -1,366 +0,0 @@
-import sys
-import weakref
-from inspect import isawaitable
-from typing import TYPE_CHECKING
-from urllib.parse import urlsplit
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
-from sentry_sdk_alpha.integrations._wsgi_common import RequestExtractor, _filter_headers
-from sentry_sdk_alpha.integrations.logging import ignore_logger
-from sentry_sdk_alpha.tracing import TransactionSource
-from sentry_sdk_alpha.utils import (
-    CONTEXTVARS_ERROR_MESSAGE,
-    HAS_REAL_CONTEXTVARS,
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    event_from_exception,
-    parse_version,
-    reraise,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Callable, Container
-    from typing import Any, Dict, Optional, Union
-
-    from sanic.request import Request, RequestParameters
-    from sanic.response import BaseHTTPResponse
-    from sanic.router import Route
-
-    from sentry_sdk_alpha._types import Event, EventProcessor, ExcInfo, Hint
-
-try:
-    from sanic import Sanic
-    from sanic import __version__ as SANIC_VERSION
-    from sanic.exceptions import SanicException
-    from sanic.handlers import ErrorHandler
-    from sanic.router import Router
-except ImportError:
-    raise DidNotEnable("Sanic not installed")
-
-old_error_handler_lookup = ErrorHandler.lookup
-old_handle_request = Sanic.handle_request
-old_router_get = Router.get
-
-try:
-    # This method was introduced in Sanic v21.9
-    old_startup = Sanic._startup
-except AttributeError:
-    pass
-
-
-class SanicIntegration(Integration):
-    identifier = "sanic"
-    origin = f"auto.http.{identifier}"
-    version = None
-
-    def __init__(self, unsampled_statuses=frozenset({404})):
-        # type: (Optional[Container[int]]) -> None
-        """
-        The unsampled_statuses parameter can be used to specify for which HTTP statuses the
-        transactions should not be sent to Sentry. By default, transactions are sent for all
-        HTTP statuses, except 404. Set unsampled_statuses to None to send transactions for all
-        HTTP statuses, including 404.
-        """
-        self._unsampled_statuses = unsampled_statuses or set()
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        SanicIntegration.version = parse_version(SANIC_VERSION)
-        _check_minimum_version(SanicIntegration, SanicIntegration.version)
-
-        if not HAS_REAL_CONTEXTVARS:
-            # We better have contextvars or we're going to leak state between
-            # requests.
-            raise DidNotEnable(
-                "The sanic integration for Sentry requires Python 3.7+ "
-                " or the aiocontextvars package." + CONTEXTVARS_ERROR_MESSAGE
-            )
-
-        if SANIC_VERSION.startswith("0.8."):
-            # Sanic 0.8 and older creates a logger named "root" and puts a
-            # stringified version of every exception in there (without exc_info),
-            # which our error deduplication can't detect.
-            #
-            # We explicitly check the version here because it is a very
-            # invasive step to ignore this logger and not necessary in newer
-            # versions at all.
-            #
-            # https://github.com/huge-success/sanic/issues/1332
-            ignore_logger("root")
-
-        if SanicIntegration.version is not None and SanicIntegration.version < (21, 9):
-            _setup_legacy_sanic()
-            return
-
-        _setup_sanic()
-
-
-class SanicRequestExtractor(RequestExtractor):
-    def content_length(self):
-        # type: () -> int
-        if self.request.body is None:
-            return 0
-        return len(self.request.body)
-
-    def cookies(self):
-        # type: () -> Dict[str, str]
-        return dict(self.request.cookies)
-
-    def raw_data(self):
-        # type: () -> bytes
-        return self.request.body
-
-    def form(self):
-        # type: () -> RequestParameters
-        return self.request.form
-
-    def is_json(self):
-        # type: () -> bool
-        raise NotImplementedError()
-
-    def json(self):
-        # type: () -> Optional[Any]
-        return self.request.json
-
-    def files(self):
-        # type: () -> RequestParameters
-        return self.request.files
-
-    def size_of_file(self, file):
-        # type: (Any) -> int
-        return len(file.body or ())
-
-
-def _setup_sanic():
-    # type: () -> None
-    Sanic._startup = _startup
-    ErrorHandler.lookup = _sentry_error_handler_lookup
-
-
-def _setup_legacy_sanic():
-    # type: () -> None
-    Sanic.handle_request = _legacy_handle_request
-    Router.get = _legacy_router_get
-    ErrorHandler.lookup = _sentry_error_handler_lookup
-
-
-async def _startup(self):
-    # type: (Sanic) -> None
-    # This happens about as early in the lifecycle as possible, just after the
-    # Request object is created. The body has not yet been consumed.
-    self.signal("http.lifecycle.request")(_context_enter)
-
-    # This happens after the handler is complete. In v21.9 this signal is not
-    # dispatched when there is an exception. Therefore we need to close out
-    # and call _context_exit from the custom exception handler as well.
-    # See https://github.com/sanic-org/sanic/issues/2297
-    self.signal("http.lifecycle.response")(_context_exit)
-
-    # This happens inside of request handling immediately after the route
-    # has been identified by the router.
-    self.signal("http.routing.after")(_set_transaction)
-
-    # The above signals need to be declared before this can be called.
-    await old_startup(self)
-
-
-async def _context_enter(request):
-    # type: (Request) -> None
-    request.ctx._sentry_do_integration = (
-        sentry_sdk_alpha.get_client().get_integration(SanicIntegration) is not None
-    )
-
-    if not request.ctx._sentry_do_integration:
-        return
-
-    weak_request = weakref.ref(request)
-    request.ctx._sentry_scope_manager = sentry_sdk_alpha.isolation_scope()
-    scope = request.ctx._sentry_scope_manager.__enter__()
-    request.ctx._sentry_scope = scope
-
-    scope.set_transaction_name(request.path, TransactionSource.URL)
-    scope.clear_breadcrumbs()
-    scope.add_event_processor(_make_request_processor(weak_request))
-
-    # TODO-neel-potel test if this works
-    request.ctx._sentry_continue_trace = sentry_sdk_alpha.continue_trace(dict(request.headers))
-    request.ctx._sentry_continue_trace.__enter__()
-    request.ctx._sentry_transaction = sentry_sdk_alpha.start_span(
-        op=OP.HTTP_SERVER,
-        # Unless the request results in a 404 error, the name and source will get overwritten in _set_transaction
-        name=request.path,
-        source=TransactionSource.URL,
-        origin=SanicIntegration.origin,
-    ).__enter__()
-
-
-async def _context_exit(request, response=None):
-    # type: (Request, Optional[BaseHTTPResponse]) -> None
-    with capture_internal_exceptions():
-        if not request.ctx._sentry_do_integration:
-            return
-
-        integration = sentry_sdk_alpha.get_client().get_integration(SanicIntegration)
-
-        response_status = None if response is None else response.status
-
-        # This capture_internal_exceptions block has been intentionally nested here, so that in case an exception
-        # happens while trying to end the transaction, we still attempt to exit the scope.
-        with capture_internal_exceptions():
-            request.ctx._sentry_transaction.set_http_status(response_status)
-
-            if (
-                isinstance(integration, SanicIntegration)
-                and response_status in integration._unsampled_statuses
-            ):
-                # drop the event in an event processor
-                request.ctx._sentry_scope.add_event_processor(lambda _event, _hint: None)
-
-            request.ctx._sentry_transaction.__exit__(None, None, None)
-            request.ctx._sentry_continue_trace.__exit__(None, None, None)
-
-        request.ctx._sentry_scope_manager.__exit__(None, None, None)
-
-
-async def _set_transaction(request, route, **_):
-    # type: (Request, Route, **Any) -> None
-    if request.ctx._sentry_do_integration:
-        with capture_internal_exceptions():
-            scope = sentry_sdk_alpha.get_current_scope()
-            route_name = route.name.replace(request.app.name, "").strip(".")
-            scope.set_transaction_name(route_name, source=TransactionSource.COMPONENT)
-
-
-def _sentry_error_handler_lookup(self, exception, *args, **kwargs):
-    # type: (Any, Exception, *Any, **Any) -> Optional[object]
-    _capture_exception(exception)
-    old_error_handler = old_error_handler_lookup(self, exception, *args, **kwargs)
-
-    if old_error_handler is None:
-        return None
-
-    if sentry_sdk_alpha.get_client().get_integration(SanicIntegration) is None:
-        return old_error_handler
-
-    async def sentry_wrapped_error_handler(request, exception):
-        # type: (Request, Exception) -> Any
-        try:
-            response = old_error_handler(request, exception)
-            if isawaitable(response):
-                response = await response
-            return response
-        except Exception:
-            # Report errors that occur in Sanic error handler. These
-            # exceptions will not even show up in Sanic's
-            # `sanic.exceptions` logger.
-            exc_info = sys.exc_info()
-            _capture_exception(exc_info)
-            reraise(*exc_info)
-        finally:
-            # As mentioned in previous comment in _startup, this can be removed
-            # after https://github.com/sanic-org/sanic/issues/2297 is resolved
-            if SanicIntegration.version and SanicIntegration.version == (21, 9):
-                await _context_exit(request)
-
-    return sentry_wrapped_error_handler
-
-
-async def _legacy_handle_request(self, request, *args, **kwargs):
-    # type: (Any, Request, *Any, **Any) -> Any
-    if sentry_sdk_alpha.get_client().get_integration(SanicIntegration) is None:
-        return await old_handle_request(self, request, *args, **kwargs)
-
-    weak_request = weakref.ref(request)
-
-    with sentry_sdk_alpha.isolation_scope() as scope:
-        scope.clear_breadcrumbs()
-        scope.add_event_processor(_make_request_processor(weak_request))
-
-        response = old_handle_request(self, request, *args, **kwargs)
-        if isawaitable(response):
-            response = await response
-
-        return response
-
-
-def _legacy_router_get(self, *args):
-    # type: (Any, Union[Any, Request]) -> Any
-    rv = old_router_get(self, *args)
-    if sentry_sdk_alpha.get_client().get_integration(SanicIntegration) is not None:
-        with capture_internal_exceptions():
-            scope = sentry_sdk_alpha.get_isolation_scope()
-            if SanicIntegration.version and SanicIntegration.version >= (21, 3):
-                # Sanic versions above and including 21.3 append the app name to the
-                # route name, and so we need to remove it from Route name so the
-                # transaction name is consistent across all versions
-                sanic_app_name = self.ctx.app.name
-                sanic_route = rv[0].name
-
-                if sanic_route.startswith("%s." % sanic_app_name):
-                    # We add a 1 to the len of the sanic_app_name because there is a dot
-                    # that joins app name and the route name
-                    # Format: app_name.route_name
-                    sanic_route = sanic_route[len(sanic_app_name) + 1 :]
-
-                scope.set_transaction_name(sanic_route, source=TransactionSource.COMPONENT)
-            else:
-                scope.set_transaction_name(rv[0].__name__, source=TransactionSource.COMPONENT)
-
-    return rv
-
-
-@ensure_integration_enabled(SanicIntegration)
-def _capture_exception(exception):
-    # type: (Union[ExcInfo, BaseException]) -> None
-    with capture_internal_exceptions():
-        event, hint = event_from_exception(
-            exception,
-            client_options=sentry_sdk_alpha.get_client().options,
-            mechanism={"type": "sanic", "handled": False},
-        )
-
-        if hint and hasattr(hint["exc_info"][0], "quiet") and hint["exc_info"][0].quiet:
-            return
-
-        sentry_sdk_alpha.capture_event(event, hint=hint)
-
-
-def _make_request_processor(weak_request):
-    # type: (Callable[[], Request]) -> EventProcessor
-    def sanic_processor(event, hint):
-        # type: (Event, Optional[Hint]) -> Optional[Event]
-
-        try:
-            if hint and issubclass(hint["exc_info"][0], SanicException):
-                return None
-        except KeyError:
-            pass
-
-        request = weak_request()
-        if request is None:
-            return event
-
-        with capture_internal_exceptions():
-            extractor = SanicRequestExtractor(request)
-            extractor.extract_into_event(event)
-
-            request_info = event["request"]
-            urlparts = urlsplit(request.url)
-
-            request_info["url"] = "{}://{}{}".format(
-                urlparts.scheme,
-                urlparts.netloc,
-                urlparts.path,
-            )
-
-            request_info["query_string"] = urlparts.query
-            request_info["method"] = request.method
-            request_info["env"] = {"REMOTE_ADDR": request.remote_addr}
-            request_info["headers"] = _filter_headers(dict(request.headers))
-
-        return event
-
-    return sanic_processor
diff --git a/src/sentry_sdk_alpha/integrations/serverless.py b/src/sentry_sdk_alpha/integrations/serverless.py
deleted file mode 100644
index 50239d307d36ee..00000000000000
--- a/src/sentry_sdk_alpha/integrations/serverless.py
+++ /dev/null
@@ -1,71 +0,0 @@
-import sys
-from functools import wraps
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.utils import event_from_exception, reraise
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Optional, TypeVar, Union, overload
-
-    F = TypeVar("F", bound=Callable[..., Any])
-
-else:
-
-    def overload(x):
-        # type: (F) -> F
-        return x
-
-
-@overload
-def serverless_function(f, flush=True):
-    # type: (F, bool) -> F
-    pass
-
-
-@overload
-def serverless_function(f=None, flush=True):  # noqa: F811
-    # type: (None, bool) -> Callable[[F], F]
-    pass
-
-
-def serverless_function(f=None, flush=True):  # noqa
-    # type: (Optional[F], bool) -> Union[F, Callable[[F], F]]
-    def wrapper(f):
-        # type: (F) -> F
-        @wraps(f)
-        def inner(*args, **kwargs):
-            # type: (*Any, **Any) -> Any
-            with sentry_sdk_alpha.isolation_scope() as scope:
-                scope.clear_breadcrumbs()
-
-                try:
-                    return f(*args, **kwargs)
-                except Exception:
-                    _capture_and_reraise()
-                finally:
-                    if flush:
-                        sentry_sdk_alpha.flush()
-
-        return inner  # type: ignore
-
-    if f is None:
-        return wrapper
-    else:
-        return wrapper(f)
-
-
-def _capture_and_reraise():
-    # type: () -> None
-    exc_info = sys.exc_info()
-    client = sentry_sdk_alpha.get_client()
-    if client.is_active():
-        event, hint = event_from_exception(
-            exc_info,
-            client_options=client.options,
-            mechanism={"type": "serverless", "handled": False},
-        )
-        sentry_sdk_alpha.capture_event(event, hint=hint)
-
-    reraise(*exc_info)
diff --git a/src/sentry_sdk_alpha/integrations/socket.py b/src/sentry_sdk_alpha/integrations/socket.py
deleted file mode 100644
index ee1950e3dd9f92..00000000000000
--- a/src/sentry_sdk_alpha/integrations/socket.py
+++ /dev/null
@@ -1,100 +0,0 @@
-import socket
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha._types import MYPY
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations import Integration
-
-if MYPY:
-    from socket import AddressFamily, SocketKind
-    from typing import List, Optional, Tuple, Union
-
-__all__ = ["SocketIntegration"]
-
-
-class SocketIntegration(Integration):
-    identifier = "socket"
-    origin = f"auto.socket.{identifier}"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        """
-        patches two of the most used functions of socket: create_connection and getaddrinfo(dns resolver)
-        """
-        _patch_create_connection()
-        _patch_getaddrinfo()
-
-
-def _get_span_description(host, port):
-    # type: (Union[bytes, str, None], Union[bytes, str, int, None]) -> str
-
-    try:
-        host = host.decode()  # type: ignore
-    except (UnicodeDecodeError, AttributeError):
-        pass
-
-    try:
-        port = port.decode()  # type: ignore
-    except (UnicodeDecodeError, AttributeError):
-        pass
-
-    description = f"{host}:{port}"  # type: ignore
-    return description
-
-
-def _patch_create_connection():
-    # type: () -> None
-    real_create_connection = socket.create_connection
-
-    def create_connection(
-        address,
-        timeout=socket._GLOBAL_DEFAULT_TIMEOUT,  # type: ignore
-        source_address=None,
-    ):
-        # type: (Tuple[Optional[str], int], Optional[float], Optional[Tuple[Union[bytearray, bytes, str], int]])-> socket.socket
-        integration = sentry_sdk_alpha.get_client().get_integration(SocketIntegration)
-        if integration is None:
-            return real_create_connection(address, timeout, source_address)
-
-        with sentry_sdk_alpha.start_span(
-            op=OP.SOCKET_CONNECTION,
-            name=_get_span_description(address[0], address[1]),
-            origin=SocketIntegration.origin,
-            only_if_parent=True,
-        ) as span:
-            host, port = address
-            span.set_attribute("address.host", host)
-            span.set_attribute("address.port", port)
-            span.set_attribute("timeout", timeout)
-            span.set_attribute("source_address", source_address)
-
-            return real_create_connection(
-                address=address, timeout=timeout, source_address=source_address
-            )
-
-    socket.create_connection = create_connection  # type: ignore
-
-
-def _patch_getaddrinfo():
-    # type: () -> None
-    real_getaddrinfo = socket.getaddrinfo
-
-    def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0):
-        # type: (Union[bytes, str, None], Union[bytes, str, int, None], int, int, int, int) -> List[Tuple[AddressFamily, SocketKind, int, str, Union[Tuple[str, int], Tuple[str, int, int, int], Tuple[int, bytes]]]]
-        integration = sentry_sdk_alpha.get_client().get_integration(SocketIntegration)
-        if integration is None:
-            return real_getaddrinfo(host, port, family, type, proto, flags)
-
-        with sentry_sdk_alpha.start_span(
-            op=OP.SOCKET_DNS,
-            name=_get_span_description(host, port),
-            origin=SocketIntegration.origin,
-            only_if_parent=True,
-        ) as span:
-            span.set_attribute("host", host)
-            span.set_attribute("port", port)
-
-            return real_getaddrinfo(host, port, family, type, proto, flags)
-
-    socket.getaddrinfo = getaddrinfo
diff --git a/src/sentry_sdk_alpha/integrations/spark/__init__.py b/src/sentry_sdk_alpha/integrations/spark/__init__.py
deleted file mode 100644
index 6f573a5e012a32..00000000000000
--- a/src/sentry_sdk_alpha/integrations/spark/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from sentry_sdk_alpha.integrations.spark.spark_driver import SparkIntegration
-from sentry_sdk_alpha.integrations.spark.spark_worker import SparkWorkerIntegration
-
-__all__ = ["SparkIntegration", "SparkWorkerIntegration"]
diff --git a/src/sentry_sdk_alpha/integrations/spark/spark_driver.py b/src/sentry_sdk_alpha/integrations/spark/spark_driver.py
deleted file mode 100644
index d7cba5d0065b77..00000000000000
--- a/src/sentry_sdk_alpha/integrations/spark/spark_driver.py
+++ /dev/null
@@ -1,315 +0,0 @@
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.utils import capture_internal_exceptions, ensure_integration_enabled
-
-if TYPE_CHECKING:
-    from typing import Any, Optional
-
-    from pyspark import SparkContext
-
-    from sentry_sdk_alpha._types import Event, Hint
-
-
-class SparkIntegration(Integration):
-    identifier = "spark"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        _setup_sentry_tracing()
-
-
-def _set_app_properties():
-    # type: () -> None
-    """
-    Set properties in driver that propagate to worker processes, allowing for workers to have access to those properties.
-    This allows worker integration to have access to app_name and application_id.
-    """
-    from pyspark import SparkContext
-
-    spark_context = SparkContext._active_spark_context
-    if spark_context:
-        spark_context.setLocalProperty(
-            "sentry_app_name",
-            spark_context.appName,
-        )
-        spark_context.setLocalProperty(
-            "sentry_application_id",
-            spark_context.applicationId,
-        )
-
-
-def _start_sentry_listener(sc):
-    # type: (SparkContext) -> None
-    """
-    Start java gateway server to add custom `SparkListener`
-    """
-    from pyspark.java_gateway import ensure_callback_server_started
-
-    gw = sc._gateway
-    ensure_callback_server_started(gw)
-    listener = SentryListener()
-    sc._jsc.sc().addSparkListener(listener)
-
-
-def _add_event_processor(sc):
-    # type: (SparkContext) -> None
-    scope = sentry_sdk_alpha.get_isolation_scope()
-
-    @scope.add_event_processor
-    def process_event(event, hint):
-        # type: (Event, Hint) -> Optional[Event]
-        with capture_internal_exceptions():
-            if sentry_sdk_alpha.get_client().get_integration(SparkIntegration) is None:
-                return event
-
-            if sc._active_spark_context is None:
-                return event
-
-            event.setdefault("user", {}).setdefault("id", sc.sparkUser())
-
-            event.setdefault("tags", {}).setdefault(
-                "executor.id", sc._conf.get("spark.executor.id")
-            )
-            event["tags"].setdefault(
-                "spark-submit.deployMode",
-                sc._conf.get("spark.submit.deployMode"),
-            )
-            event["tags"].setdefault("driver.host", sc._conf.get("spark.driver.host"))
-            event["tags"].setdefault("driver.port", sc._conf.get("spark.driver.port"))
-            event["tags"].setdefault("spark_version", sc.version)
-            event["tags"].setdefault("app_name", sc.appName)
-            event["tags"].setdefault("application_id", sc.applicationId)
-            event["tags"].setdefault("master", sc.master)
-            event["tags"].setdefault("spark_home", sc.sparkHome)
-
-            event.setdefault("extra", {}).setdefault("web_url", sc.uiWebUrl)
-
-        return event
-
-
-def _activate_integration(sc):
-    # type: (SparkContext) -> None
-
-    _start_sentry_listener(sc)
-    _set_app_properties()
-    _add_event_processor(sc)
-
-
-def _patch_spark_context_init():
-    # type: () -> None
-    from pyspark import SparkContext
-
-    spark_context_init = SparkContext._do_init
-
-    @ensure_integration_enabled(SparkIntegration, spark_context_init)
-    def _sentry_patched_spark_context_init(self, *args, **kwargs):
-        # type: (SparkContext, *Any, **Any) -> Optional[Any]
-        rv = spark_context_init(self, *args, **kwargs)
-        _activate_integration(self)
-        return rv
-
-    SparkContext._do_init = _sentry_patched_spark_context_init
-
-
-def _setup_sentry_tracing():
-    # type: () -> None
-    from pyspark import SparkContext
-
-    if SparkContext._active_spark_context is not None:
-        _activate_integration(SparkContext._active_spark_context)
-        return
-    _patch_spark_context_init()
-
-
-class SparkListener:
-    def onApplicationEnd(self, applicationEnd):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onApplicationStart(self, applicationStart):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onBlockManagerAdded(self, blockManagerAdded):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onBlockManagerRemoved(self, blockManagerRemoved):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onBlockUpdated(self, blockUpdated):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onEnvironmentUpdate(self, environmentUpdate):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onExecutorAdded(self, executorAdded):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onExecutorBlacklisted(self, executorBlacklisted):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onExecutorBlacklistedForStage(  # noqa: N802
-        self, executorBlacklistedForStage  # noqa: N803
-    ):
-        # type: (Any) -> None
-        pass
-
-    def onExecutorMetricsUpdate(self, executorMetricsUpdate):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onExecutorRemoved(self, executorRemoved):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onJobEnd(self, jobEnd):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onJobStart(self, jobStart):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onNodeBlacklisted(self, nodeBlacklisted):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onNodeBlacklistedForStage(self, nodeBlacklistedForStage):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onNodeUnblacklisted(self, nodeUnblacklisted):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onOtherEvent(self, event):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onSpeculativeTaskSubmitted(self, speculativeTask):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onStageCompleted(self, stageCompleted):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onStageSubmitted(self, stageSubmitted):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onTaskEnd(self, taskEnd):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onTaskGettingResult(self, taskGettingResult):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onTaskStart(self, taskStart):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    def onUnpersistRDD(self, unpersistRDD):  # noqa: N802,N803
-        # type: (Any) -> None
-        pass
-
-    class Java:
-        implements = ["org.apache.spark.scheduler.SparkListenerInterface"]
-
-
-class SentryListener(SparkListener):
-    def _add_breadcrumb(
-        self,
-        level,  # type: str
-        message,  # type: str
-        data=None,  # type: Optional[dict[str, Any]]
-    ):
-        # type: (...) -> None
-        sentry_sdk_alpha.get_isolation_scope().add_breadcrumb(
-            level=level, message=message, data=data
-        )
-
-    def onJobStart(self, jobStart):  # noqa: N802,N803
-        # type: (Any) -> None
-        sentry_sdk_alpha.get_isolation_scope().clear_breadcrumbs()
-
-        message = f"Job {jobStart.jobId()} Started"
-        self._add_breadcrumb(level="info", message=message)
-        _set_app_properties()
-
-    def onJobEnd(self, jobEnd):  # noqa: N802,N803
-        # type: (Any) -> None
-        level = ""
-        message = ""
-        data = {"result": jobEnd.jobResult().toString()}
-
-        if jobEnd.jobResult().toString() == "JobSucceeded":
-            level = "info"
-            message = f"Job {jobEnd.jobId()} Ended"
-        else:
-            level = "warning"
-            message = f"Job {jobEnd.jobId()} Failed"
-
-        self._add_breadcrumb(level=level, message=message, data=data)
-
-    def onStageSubmitted(self, stageSubmitted):  # noqa: N802,N803
-        # type: (Any) -> None
-        stage_info = stageSubmitted.stageInfo()
-        message = f"Stage {stage_info.stageId()} Submitted"
-
-        data = {"name": stage_info.name()}
-        attempt_id = _get_attempt_id(stage_info)
-        if attempt_id is not None:
-            data["attemptId"] = attempt_id
-
-        self._add_breadcrumb(level="info", message=message, data=data)
-        _set_app_properties()
-
-    def onStageCompleted(self, stageCompleted):  # noqa: N802,N803
-        # type: (Any) -> None
-        from py4j.protocol import Py4JJavaError  # type: ignore
-
-        stage_info = stageCompleted.stageInfo()
-        message = ""
-        level = ""
-
-        data = {"name": stage_info.name()}
-        attempt_id = _get_attempt_id(stage_info)
-        if attempt_id is not None:
-            data["attemptId"] = attempt_id
-
-        # Have to Try Except because stageInfo.failureReason() is typed with Scala Option
-        try:
-            data["reason"] = stage_info.failureReason().get()
-            message = f"Stage {stage_info.stageId()} Failed"
-            level = "warning"
-        except Py4JJavaError:
-            message = f"Stage {stage_info.stageId()} Completed"
-            level = "info"
-
-        self._add_breadcrumb(level=level, message=message, data=data)
-
-
-def _get_attempt_id(stage_info):
-    # type: (Any) -> Optional[int]
-    try:
-        return stage_info.attemptId()
-    except Exception:
-        pass
-
-    try:
-        return stage_info.attemptNumber()
-    except Exception:
-        pass
-
-    return None
diff --git a/src/sentry_sdk_alpha/integrations/spark/spark_worker.py b/src/sentry_sdk_alpha/integrations/spark/spark_worker.py
deleted file mode 100644
index 6c68f3435e785c..00000000000000
--- a/src/sentry_sdk_alpha/integrations/spark/spark_worker.py
+++ /dev/null
@@ -1,110 +0,0 @@
-import sys
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    event_hint_with_exc_info,
-    exc_info_from_error,
-    single_exception_from_error_tuple,
-    walk_exception_chain,
-)
-
-if TYPE_CHECKING:
-    from typing import Any, Optional
-
-    from sentry_sdk_alpha._types import Event, ExcInfo, Hint
-
-
-class SparkWorkerIntegration(Integration):
-    identifier = "spark_worker"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        import pyspark.daemon as original_daemon
-
-        original_daemon.worker_main = _sentry_worker_main
-
-
-def _capture_exception(exc_info):
-    # type: (ExcInfo) -> None
-    client = sentry_sdk_alpha.get_client()
-
-    mechanism = {"type": "spark", "handled": False}
-
-    exc_info = exc_info_from_error(exc_info)
-
-    exc_type, exc_value, tb = exc_info
-    rv = []
-
-    # On Exception worker will call sys.exit(-1), so we can ignore SystemExit and similar errors
-    for exc_type, exc_value, tb in walk_exception_chain(exc_info):
-        if exc_type not in (SystemExit, EOFError, ConnectionResetError):
-            rv.append(
-                single_exception_from_error_tuple(
-                    exc_type, exc_value, tb, client.options, mechanism
-                )
-            )
-
-    if rv:
-        rv.reverse()
-        hint = event_hint_with_exc_info(exc_info)
-        event = {"level": "error", "exception": {"values": rv}}  # type: Event
-
-        _tag_task_context()
-
-        sentry_sdk_alpha.capture_event(event, hint=hint)
-
-
-def _tag_task_context():
-    # type: () -> None
-    from pyspark.taskcontext import TaskContext
-
-    scope = sentry_sdk_alpha.get_isolation_scope()
-
-    @scope.add_event_processor
-    def process_event(event, hint):
-        # type: (Event, Hint) -> Optional[Event]
-        with capture_internal_exceptions():
-            integration = sentry_sdk_alpha.get_client().get_integration(SparkWorkerIntegration)
-            task_context = TaskContext.get()
-
-            if integration is None or task_context is None:
-                return event
-
-            event.setdefault("tags", {}).setdefault("stageId", str(task_context.stageId()))
-            event["tags"].setdefault("partitionId", str(task_context.partitionId()))
-            event["tags"].setdefault("attemptNumber", str(task_context.attemptNumber()))
-            event["tags"].setdefault("taskAttemptId", str(task_context.taskAttemptId()))
-
-            if task_context._localProperties:
-                if "sentry_app_name" in task_context._localProperties:
-                    event["tags"].setdefault(
-                        "app_name", task_context._localProperties["sentry_app_name"]
-                    )
-                    event["tags"].setdefault(
-                        "application_id",
-                        task_context._localProperties["sentry_application_id"],
-                    )
-
-                if "callSite.short" in task_context._localProperties:
-                    event.setdefault("extra", {}).setdefault(
-                        "callSite", task_context._localProperties["callSite.short"]
-                    )
-
-        return event
-
-
-def _sentry_worker_main(*args, **kwargs):
-    # type: (*Optional[Any], **Optional[Any]) -> None
-    import pyspark.worker as original_worker
-
-    try:
-        original_worker.main(*args, **kwargs)
-    except SystemExit:
-        if sentry_sdk_alpha.get_client().get_integration(SparkWorkerIntegration) is not None:
-            exc_info = sys.exc_info()
-            with capture_internal_exceptions():
-                _capture_exception(exc_info)
diff --git a/src/sentry_sdk_alpha/integrations/sqlalchemy.py b/src/sentry_sdk_alpha/integrations/sqlalchemy.py
deleted file mode 100644
index cec206a8387e41..00000000000000
--- a/src/sentry_sdk_alpha/integrations/sqlalchemy.py
+++ /dev/null
@@ -1,142 +0,0 @@
-from sentry_sdk_alpha.consts import SPANDATA, SPANSTATUS
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
-from sentry_sdk_alpha.tracing_utils import add_query_source, record_sql_queries
-from sentry_sdk_alpha.utils import (
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    parse_version,
-)
-
-try:
-    from sqlalchemy import __version__ as SQLALCHEMY_VERSION  # type: ignore
-    from sqlalchemy.engine import Engine  # type: ignore
-    from sqlalchemy.event import listen  # type: ignore
-except ImportError:
-    raise DidNotEnable("SQLAlchemy not installed.")
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from typing import Any, ContextManager, Optional
-
-    from sentry_sdk_alpha.tracing import Span
-
-
-class SqlalchemyIntegration(Integration):
-    identifier = "sqlalchemy"
-    origin = f"auto.db.{identifier}"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        version = parse_version(SQLALCHEMY_VERSION)
-        _check_minimum_version(SqlalchemyIntegration, version)
-
-        listen(Engine, "before_cursor_execute", _before_cursor_execute)
-        listen(Engine, "after_cursor_execute", _after_cursor_execute)
-        listen(Engine, "handle_error", _handle_error)
-
-
-@ensure_integration_enabled(SqlalchemyIntegration)
-def _before_cursor_execute(conn, cursor, statement, parameters, context, executemany, *args):
-    # type: (Any, Any, Any, Any, Any, bool, *Any) -> None
-    ctx_mgr = record_sql_queries(
-        cursor,
-        statement,
-        parameters,
-        paramstyle=context and context.dialect and context.dialect.paramstyle or None,
-        executemany=executemany,
-        span_origin=SqlalchemyIntegration.origin,
-    )
-    context._sentry_sql_span_manager = ctx_mgr
-
-    span = ctx_mgr.__enter__()
-
-    if span is not None:
-        _set_db_data(span, conn)
-        context._sentry_sql_span = span
-
-
-@ensure_integration_enabled(SqlalchemyIntegration)
-def _after_cursor_execute(conn, cursor, statement, parameters, context, *args):
-    # type: (Any, Any, Any, Any, Any, *Any) -> None
-    ctx_mgr = getattr(
-        context, "_sentry_sql_span_manager", None
-    )  # type: Optional[ContextManager[Any]]
-
-    span = getattr(context, "_sentry_sql_span", None)  # type: Optional[Span]
-    if span is not None:
-        with capture_internal_exceptions():
-            add_query_source(span)
-
-    if ctx_mgr is not None:
-        context._sentry_sql_span_manager = None
-        ctx_mgr.__exit__(None, None, None)
-
-
-def _handle_error(context, *args):
-    # type: (Any, *Any) -> None
-    execution_context = context.execution_context
-    if execution_context is None:
-        return
-
-    span = getattr(execution_context, "_sentry_sql_span", None)  # type: Optional[Span]
-
-    if span is not None:
-        span.set_status(SPANSTATUS.INTERNAL_ERROR)
-
-    # _after_cursor_execute does not get called for crashing SQL stmts. Judging
-    # from SQLAlchemy codebase it does seem like any error coming into this
-    # handler is going to be fatal.
-    ctx_mgr = getattr(
-        execution_context, "_sentry_sql_span_manager", None
-    )  # type: Optional[ContextManager[Any]]
-
-    if ctx_mgr is not None:
-        execution_context._sentry_sql_span_manager = None
-        ctx_mgr.__exit__(None, None, None)
-
-
-# See: https://docs.sqlalchemy.org/en/20/dialects/index.html
-def _get_db_system(name):
-    # type: (str) -> Optional[str]
-    name = str(name)
-
-    if "sqlite" in name:
-        return "sqlite"
-
-    if "postgres" in name:
-        return "postgresql"
-
-    if "mariadb" in name:
-        return "mariadb"
-
-    if "mysql" in name:
-        return "mysql"
-
-    if "oracle" in name:
-        return "oracle"
-
-    return None
-
-
-def _set_db_data(span, conn):
-    # type: (Span, Any) -> None
-    db_system = _get_db_system(conn.engine.name)
-    if db_system is not None:
-        span.set_attribute(SPANDATA.DB_SYSTEM, db_system)
-
-    if conn.engine.url is None:
-        return
-
-    db_name = conn.engine.url.database
-    if db_name is not None:
-        span.set_attribute(SPANDATA.DB_NAME, db_name)
-
-    server_address = conn.engine.url.host
-    if server_address is not None:
-        span.set_attribute(SPANDATA.SERVER_ADDRESS, server_address)
-
-    server_port = conn.engine.url.port
-    if server_port is not None:
-        span.set_attribute(SPANDATA.SERVER_PORT, server_port)
diff --git a/src/sentry_sdk_alpha/integrations/starlette.py b/src/sentry_sdk_alpha/integrations/starlette.py
deleted file mode 100644
index e4ac77d80b1f24..00000000000000
--- a/src/sentry_sdk_alpha/integrations/starlette.py
+++ /dev/null
@@ -1,705 +0,0 @@
-import asyncio
-import functools
-from collections.abc import Set
-from copy import deepcopy
-from json import JSONDecodeError
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP, SOURCE_FOR_STYLE, TransactionSource
-from sentry_sdk_alpha.integrations import (
-    _DEFAULT_FAILED_REQUEST_STATUS_CODES,
-    DidNotEnable,
-    Integration,
-)
-from sentry_sdk_alpha.integrations._wsgi_common import (
-    DEFAULT_HTTP_METHODS_TO_CAPTURE,
-    _is_json_content_type,
-    request_body_within_bounds,
-)
-from sentry_sdk_alpha.integrations.asgi import SentryAsgiMiddleware
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import (
-    AnnotatedValue,
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    event_from_exception,
-    logger,
-    parse_version,
-    transaction_from_function,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Awaitable, Callable
-    from typing import Any, Dict, Optional, Tuple
-
-    from sentry_sdk_alpha._types import Event
-
-try:
-    import starlette  # type: ignore
-    from starlette import __version__ as STARLETTE_VERSION
-    from starlette.applications import Starlette  # type: ignore
-    from starlette.datastructures import UploadFile  # type: ignore
-    from starlette.middleware import Middleware  # type: ignore
-    from starlette.middleware.authentication import AuthenticationMiddleware  # type: ignore
-    from starlette.requests import Request  # type: ignore
-    from starlette.routing import Match  # type: ignore
-    from starlette.types import ASGIApp, Receive
-    from starlette.types import Scope as StarletteScope  # type: ignore
-    from starlette.types import Send
-except ImportError:
-    raise DidNotEnable("Starlette is not installed")
-
-try:
-    # Starlette 0.20
-    from starlette.middleware.exceptions import ExceptionMiddleware  # type: ignore
-except ImportError:
-    # Startlette 0.19.1
-    from starlette.exceptions import ExceptionMiddleware  # type: ignore
-
-try:
-    # Optional dependency of Starlette to parse form data.
-    try:
-        # python-multipart 0.0.13 and later
-        import python_multipart as multipart  # type: ignore
-    except ImportError:
-        # python-multipart 0.0.12 and earlier
-        import multipart  # type: ignore
-except ImportError:
-    multipart = None
-
-
-_DEFAULT_TRANSACTION_NAME = "generic Starlette request"
-
-TRANSACTION_STYLE_VALUES = ("endpoint", "url")
-
-
-class StarletteIntegration(Integration):
-    identifier = "starlette"
-    origin = f"auto.http.{identifier}"
-
-    transaction_style = ""
-
-    def __init__(
-        self,
-        transaction_style="url",  # type: str
-        failed_request_status_codes=_DEFAULT_FAILED_REQUEST_STATUS_CODES,  # type: Set[int]
-        middleware_spans=True,  # type: bool
-        http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE,  # type: tuple[str, ...]
-    ):
-        # type: (...) -> None
-        if transaction_style not in TRANSACTION_STYLE_VALUES:
-            raise ValueError(
-                "Invalid value for transaction_style: %s (must be in %s)"
-                % (transaction_style, TRANSACTION_STYLE_VALUES)
-            )
-        self.transaction_style = transaction_style
-        self.middleware_spans = middleware_spans
-        self.http_methods_to_capture = tuple(map(str.upper, http_methods_to_capture))
-
-        self.failed_request_status_codes = failed_request_status_codes
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        version = parse_version(STARLETTE_VERSION)
-
-        if version is None:
-            raise DidNotEnable(f"Unparsable Starlette version: {STARLETTE_VERSION}")
-
-        patch_middlewares()
-        patch_asgi_app()
-        patch_request_response()
-
-        if version >= (0, 24):
-            patch_templates()
-
-
-def _enable_span_for_middleware(middleware_class):
-    # type: (Any) -> type
-    old_call = middleware_class.__call__
-
-    async def _create_span_call(app, scope, receive, send, **kwargs):
-        # type: (Any, Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]], Any) -> None
-        integration = sentry_sdk_alpha.get_client().get_integration(StarletteIntegration)
-        if integration is None or not integration.middleware_spans:
-            return await old_call(app, scope, receive, send, **kwargs)
-
-        middleware_name = app.__class__.__name__
-
-        # Update transaction name with middleware name
-        name, source = _get_transaction_from_middleware(app, scope, integration)
-        if name is not None:
-            sentry_sdk_alpha.get_current_scope().set_transaction_name(
-                name,
-                source=source,
-            )
-
-        with sentry_sdk_alpha.start_span(
-            op=OP.MIDDLEWARE_STARLETTE,
-            name=middleware_name,
-            origin=StarletteIntegration.origin,
-            only_if_parent=True,
-        ) as middleware_span:
-            middleware_span.set_tag("starlette.middleware_name", middleware_name)
-
-            # Creating spans for the "receive" callback
-            async def _sentry_receive(*args, **kwargs):
-                # type: (*Any, **Any) -> Any
-                with sentry_sdk_alpha.start_span(
-                    op=OP.MIDDLEWARE_STARLETTE_RECEIVE,
-                    name=getattr(receive, "__qualname__", str(receive)),
-                    origin=StarletteIntegration.origin,
-                    only_if_parent=True,
-                ) as span:
-                    span.set_tag("starlette.middleware_name", middleware_name)
-                    return await receive(*args, **kwargs)
-
-            receive_name = getattr(receive, "__name__", str(receive))
-            receive_patched = receive_name == "_sentry_receive"
-            new_receive = _sentry_receive if not receive_patched else receive
-
-            # Creating spans for the "send" callback
-            async def _sentry_send(*args, **kwargs):
-                # type: (*Any, **Any) -> Any
-                with sentry_sdk_alpha.start_span(
-                    op=OP.MIDDLEWARE_STARLETTE_SEND,
-                    name=getattr(send, "__qualname__", str(send)),
-                    origin=StarletteIntegration.origin,
-                    only_if_parent=True,
-                ) as span:
-                    span.set_tag("starlette.middleware_name", middleware_name)
-                    return await send(*args, **kwargs)
-
-            send_name = getattr(send, "__name__", str(send))
-            send_patched = send_name == "_sentry_send"
-            new_send = _sentry_send if not send_patched else send
-
-            return await old_call(app, scope, new_receive, new_send, **kwargs)
-
-    not_yet_patched = old_call.__name__ not in [
-        "_create_span_call",
-        "_sentry_authenticationmiddleware_call",
-        "_sentry_exceptionmiddleware_call",
-    ]
-
-    if not_yet_patched:
-        middleware_class.__call__ = _create_span_call
-
-    return middleware_class
-
-
-@ensure_integration_enabled(StarletteIntegration)
-def _capture_exception(exception, handled=False):
-    # type: (BaseException, **Any) -> None
-    event, hint = event_from_exception(
-        exception,
-        client_options=sentry_sdk_alpha.get_client().options,
-        mechanism={"type": StarletteIntegration.identifier, "handled": handled},
-    )
-
-    sentry_sdk_alpha.capture_event(event, hint=hint)
-
-
-def patch_exception_middleware(middleware_class):
-    # type: (Any) -> None
-    """
-    Capture all exceptions in Starlette app and
-    also extract user information.
-    """
-    old_middleware_init = middleware_class.__init__
-
-    not_yet_patched = "_sentry_middleware_init" not in str(old_middleware_init)
-
-    if not_yet_patched:
-
-        def _sentry_middleware_init(self, *args, **kwargs):
-            # type: (Any, Any, Any) -> None
-            old_middleware_init(self, *args, **kwargs)
-
-            # Patch existing exception handlers
-            old_handlers = self._exception_handlers.copy()
-
-            async def _sentry_patched_exception_handler(self, *args, **kwargs):
-                # type: (Any, Any, Any) -> None
-                integration = sentry_sdk_alpha.get_client().get_integration(StarletteIntegration)
-
-                exp = args[0]
-
-                if integration is not None:
-                    is_http_server_error = (
-                        hasattr(exp, "status_code")
-                        and isinstance(exp.status_code, int)
-                        and exp.status_code in integration.failed_request_status_codes
-                    )
-                    if is_http_server_error:
-                        _capture_exception(exp, handled=True)
-
-                # Find a matching handler
-                old_handler = None
-                for cls in type(exp).__mro__:
-                    if cls in old_handlers:
-                        old_handler = old_handlers[cls]
-                        break
-
-                if old_handler is None:
-                    return
-
-                if _is_async_callable(old_handler):
-                    return await old_handler(self, *args, **kwargs)
-                else:
-                    return old_handler(self, *args, **kwargs)
-
-            for key in self._exception_handlers.keys():
-                self._exception_handlers[key] = _sentry_patched_exception_handler
-
-        middleware_class.__init__ = _sentry_middleware_init
-
-        old_call = middleware_class.__call__
-
-        async def _sentry_exceptionmiddleware_call(self, scope, receive, send):
-            # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None
-            # Also add the user (that was eventually set by be Authentication middle
-            # that was called before this middleware). This is done because the authentication
-            # middleware sets the user in the scope and then (in the same function)
-            # calls this exception middelware. In case there is no exception (or no handler
-            # for the type of exception occuring) then the exception bubbles up and setting the
-            # user information into the sentry scope is done in auth middleware and the
-            # ASGI middleware will then send everything to Sentry and this is fine.
-            # But if there is an exception happening that the exception middleware here
-            # has a handler for, it will send the exception directly to Sentry, so we need
-            # the user information right now.
-            # This is why we do it here.
-            _add_user_to_sentry_scope(scope)
-            await old_call(self, scope, receive, send)
-
-        middleware_class.__call__ = _sentry_exceptionmiddleware_call
-
-
-@ensure_integration_enabled(StarletteIntegration)
-def _add_user_to_sentry_scope(scope):
-    # type: (Dict[str, Any]) -> None
-    """
-    Extracts user information from the ASGI scope and
-    adds it to Sentry's scope.
-    """
-    if "user" not in scope:
-        return
-
-    if not should_send_default_pii():
-        return
-
-    user_info = {}  # type: Dict[str, Any]
-    starlette_user = scope["user"]
-
-    username = getattr(starlette_user, "username", None)
-    if username:
-        user_info.setdefault("username", starlette_user.username)
-
-    user_id = getattr(starlette_user, "id", None)
-    if user_id:
-        user_info.setdefault("id", starlette_user.id)
-
-    email = getattr(starlette_user, "email", None)
-    if email:
-        user_info.setdefault("email", starlette_user.email)
-
-    sentry_scope = sentry_sdk_alpha.get_isolation_scope()
-    sentry_scope.set_user(user_info)
-
-
-def patch_authentication_middleware(middleware_class):
-    # type: (Any) -> None
-    """
-    Add user information to Sentry scope.
-    """
-    old_call = middleware_class.__call__
-
-    not_yet_patched = "_sentry_authenticationmiddleware_call" not in str(old_call)
-
-    if not_yet_patched:
-
-        async def _sentry_authenticationmiddleware_call(self, scope, receive, send):
-            # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None
-            await old_call(self, scope, receive, send)
-            _add_user_to_sentry_scope(scope)
-
-        middleware_class.__call__ = _sentry_authenticationmiddleware_call
-
-
-def patch_middlewares():
-    # type: () -> None
-    """
-    Patches Starlettes `Middleware` class to record
-    spans for every middleware invoked.
-    """
-    old_middleware_init = Middleware.__init__
-
-    not_yet_patched = "_sentry_middleware_init" not in str(old_middleware_init)
-
-    if not_yet_patched:
-
-        def _sentry_middleware_init(self, cls, *args, **kwargs):
-            # type: (Any, Any, Any, Any) -> None
-            if cls == SentryAsgiMiddleware:
-                return old_middleware_init(self, cls, *args, **kwargs)
-
-            span_enabled_cls = _enable_span_for_middleware(cls)
-            old_middleware_init(self, span_enabled_cls, *args, **kwargs)
-
-            if cls == AuthenticationMiddleware:
-                patch_authentication_middleware(cls)
-
-            if cls == ExceptionMiddleware:
-                patch_exception_middleware(cls)
-
-        Middleware.__init__ = _sentry_middleware_init
-
-
-def patch_asgi_app():
-    # type: () -> None
-    """
-    Instrument Starlette ASGI app using the SentryAsgiMiddleware.
-    """
-    old_app = Starlette.__call__
-
-    async def _sentry_patched_asgi_app(self, scope, receive, send):
-        # type: (Starlette, StarletteScope, Receive, Send) -> None
-        integration = sentry_sdk_alpha.get_client().get_integration(StarletteIntegration)
-        if integration is None:
-            return await old_app(self, scope, receive, send)
-
-        middleware = SentryAsgiMiddleware(
-            lambda *a, **kw: old_app(self, *a, **kw),
-            mechanism_type=StarletteIntegration.identifier,
-            transaction_style=integration.transaction_style,
-            span_origin=StarletteIntegration.origin,
-            http_methods_to_capture=(
-                integration.http_methods_to_capture
-                if integration
-                else DEFAULT_HTTP_METHODS_TO_CAPTURE
-            ),
-        )
-
-        middleware.__call__ = middleware._run_asgi3
-        return await middleware(scope, receive, send)
-
-    Starlette.__call__ = _sentry_patched_asgi_app
-
-
-# This was vendored in from Starlette to support Starlette 0.19.1 because
-# this function was only introduced in 0.20.x
-def _is_async_callable(obj):
-    # type: (Any) -> bool
-    while isinstance(obj, functools.partial):
-        obj = obj.func
-
-    return asyncio.iscoroutinefunction(obj) or (
-        callable(obj) and asyncio.iscoroutinefunction(obj.__call__)
-    )
-
-
-def patch_request_response():
-    # type: () -> None
-    old_request_response = starlette.routing.request_response
-
-    def _sentry_request_response(func):
-        # type: (Callable[[Any], Any]) -> ASGIApp
-        old_func = func
-
-        is_coroutine = _is_async_callable(old_func)
-        if is_coroutine:
-
-            async def _sentry_async_func(*args, **kwargs):
-                # type: (*Any, **Any) -> Any
-                integration = sentry_sdk_alpha.get_client().get_integration(StarletteIntegration)
-                if integration is None:
-                    return await old_func(*args, **kwargs)
-
-                request = args[0]
-
-                _set_transaction_name_and_source(
-                    sentry_sdk_alpha.get_current_scope(),
-                    integration.transaction_style,
-                    request,
-                )
-
-                sentry_scope = sentry_sdk_alpha.get_isolation_scope()
-                extractor = StarletteRequestExtractor(request)
-                info = await extractor.extract_request_info()
-
-                def _make_request_event_processor(req, integration):
-                    # type: (Any, Any) -> Callable[[Event, dict[str, Any]], Event]
-                    def event_processor(event, hint):
-                        # type: (Event, Dict[str, Any]) -> Event
-
-                        # Add info from request to event
-                        request_info = event.get("request", {})
-                        if info:
-                            if "cookies" in info:
-                                request_info["cookies"] = info["cookies"]
-                            if "data" in info:
-                                request_info["data"] = info["data"]
-                        event["request"] = deepcopy(request_info)
-
-                        return event
-
-                    return event_processor
-
-                sentry_scope._name = StarletteIntegration.identifier
-                sentry_scope.add_event_processor(
-                    _make_request_event_processor(request, integration)
-                )
-
-                return await old_func(*args, **kwargs)
-
-            func = _sentry_async_func
-
-        else:
-
-            @functools.wraps(old_func)
-            def _sentry_sync_func(*args, **kwargs):
-                # type: (*Any, **Any) -> Any
-                integration = sentry_sdk_alpha.get_client().get_integration(StarletteIntegration)
-                if integration is None:
-                    return old_func(*args, **kwargs)
-
-                current_scope = sentry_sdk_alpha.get_current_scope()
-                if current_scope.root_span is not None:
-                    current_scope.root_span.update_active_thread()
-
-                sentry_scope = sentry_sdk_alpha.get_isolation_scope()
-                if sentry_scope.profile is not None:
-                    sentry_scope.profile.update_active_thread_id()
-
-                request = args[0]
-
-                _set_transaction_name_and_source(
-                    sentry_scope, integration.transaction_style, request
-                )
-
-                extractor = StarletteRequestExtractor(request)
-                cookies = extractor.extract_cookies_from_request()
-
-                def _make_request_event_processor(req, integration):
-                    # type: (Any, Any) -> Callable[[Event, dict[str, Any]], Event]
-                    def event_processor(event, hint):
-                        # type: (Event, dict[str, Any]) -> Event
-
-                        # Extract information from request
-                        request_info = event.get("request", {})
-                        if cookies:
-                            request_info["cookies"] = cookies
-
-                        event["request"] = deepcopy(request_info)
-
-                        return event
-
-                    return event_processor
-
-                sentry_scope._name = StarletteIntegration.identifier
-                sentry_scope.add_event_processor(
-                    _make_request_event_processor(request, integration)
-                )
-
-                return old_func(*args, **kwargs)
-
-            func = _sentry_sync_func
-
-        return old_request_response(func)
-
-    starlette.routing.request_response = _sentry_request_response
-
-
-def patch_templates():
-    # type: () -> None
-
-    # If markupsafe is not installed, then Jinja2 is not installed
-    # (markupsafe is a dependency of Jinja2)
-    # In this case we do not need to patch the Jinja2Templates class
-    try:
-        from markupsafe import Markup
-    except ImportError:
-        return  # Nothing to do
-
-    from starlette.templating import Jinja2Templates  # type: ignore
-
-    old_jinja2templates_init = Jinja2Templates.__init__
-
-    not_yet_patched = "_sentry_jinja2templates_init" not in str(old_jinja2templates_init)
-
-    if not_yet_patched:
-
-        def _sentry_jinja2templates_init(self, *args, **kwargs):
-            # type: (Jinja2Templates, *Any, **Any) -> None
-            def add_sentry_trace_meta(request):
-                # type: (Request) -> Dict[str, Any]
-                trace_meta = Markup(sentry_sdk_alpha.get_current_scope().trace_propagation_meta())
-                return {
-                    "sentry_trace_meta": trace_meta,
-                }
-
-            kwargs.setdefault("context_processors", [])
-
-            if add_sentry_trace_meta not in kwargs["context_processors"]:
-                kwargs["context_processors"].append(add_sentry_trace_meta)
-
-            return old_jinja2templates_init(self, *args, **kwargs)
-
-        Jinja2Templates.__init__ = _sentry_jinja2templates_init
-
-
-class StarletteRequestExtractor:
-    """
-    Extracts useful information from the Starlette request
-    (like form data or cookies) and adds it to the Sentry event.
-    """
-
-    request = None  # type: Request
-
-    def __init__(self, request):
-        # type: (StarletteRequestExtractor, Request) -> None
-        self.request = request
-
-    def extract_cookies_from_request(self):
-        # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
-        cookies = None  # type: Optional[Dict[str, Any]]
-        if should_send_default_pii():
-            cookies = self.cookies()
-
-        return cookies
-
-    async def extract_request_info(self):
-        # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
-        client = sentry_sdk_alpha.get_client()
-
-        request_info = {}  # type: Dict[str, Any]
-
-        with capture_internal_exceptions():
-            # Add cookies
-            if should_send_default_pii():
-                request_info["cookies"] = self.cookies()
-
-            # If there is no body, just return the cookies
-            content_length = await self.content_length()
-            if not content_length:
-                return request_info
-
-            # Add annotation if body is too big
-            if content_length and not request_body_within_bounds(client, content_length):
-                request_info["data"] = AnnotatedValue.removed_because_over_size_limit()
-                return request_info
-
-            # Add JSON body, if it is a JSON request
-            json = await self.json()
-            if json:
-                request_info["data"] = json
-                return request_info
-
-            # Add form as key/value pairs, if request has form data
-            form = await self.form()
-            if form:
-                form_data = {}
-                for key, val in form.items():
-                    is_file = isinstance(val, UploadFile)
-                    form_data[key] = (
-                        val if not is_file else AnnotatedValue.removed_because_raw_data()
-                    )
-
-                request_info["data"] = form_data
-                return request_info
-
-            # Raw data, do not add body just an annotation
-            request_info["data"] = AnnotatedValue.removed_because_raw_data()
-            return request_info
-
-    async def content_length(self):
-        # type: (StarletteRequestExtractor) -> Optional[int]
-        if "content-length" in self.request.headers:
-            return int(self.request.headers["content-length"])
-
-        return None
-
-    def cookies(self):
-        # type: (StarletteRequestExtractor) -> Dict[str, Any]
-        return self.request.cookies
-
-    async def form(self):
-        # type: (StarletteRequestExtractor) -> Any
-        if multipart is None:
-            return None
-
-        # Parse the body first to get it cached, as Starlette does not cache form() as it
-        # does with body() and json() https://github.com/encode/starlette/discussions/1933
-        # Calling `.form()` without calling `.body()` first will
-        # potentially break the users project.
-        await self.request.body()
-
-        return await self.request.form()
-
-    def is_json(self):
-        # type: (StarletteRequestExtractor) -> bool
-        return _is_json_content_type(self.request.headers.get("content-type"))
-
-    async def json(self):
-        # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
-        if not self.is_json():
-            return None
-        try:
-            return await self.request.json()
-        except JSONDecodeError:
-            return None
-
-
-def _transaction_name_from_router(scope):
-    # type: (StarletteScope) -> Optional[str]
-    router = scope.get("router")
-    if not router:
-        return None
-
-    for route in router.routes:
-        match = route.matches(scope)
-        if match[0] == Match.FULL:
-            try:
-                return route.path
-            except AttributeError:
-                # routes added via app.host() won't have a path attribute
-                return scope.get("path")
-
-    return None
-
-
-def _set_transaction_name_and_source(scope, transaction_style, request):
-    # type: (sentry_sdk.Scope, str, Any) -> None
-    name = None
-    source = SOURCE_FOR_STYLE[transaction_style]
-
-    if transaction_style == "endpoint":
-        endpoint = request.scope.get("endpoint")
-        if endpoint:
-            name = transaction_from_function(endpoint) or None
-
-    elif transaction_style == "url":
-        name = _transaction_name_from_router(request.scope)
-
-    if name is None:
-        name = _DEFAULT_TRANSACTION_NAME
-        source = TransactionSource.ROUTE
-
-    scope.set_transaction_name(name, source=source)
-    logger.debug("[Starlette] Set transaction name and source on scope: %s / %s", name, source)
-
-
-def _get_transaction_from_middleware(app, asgi_scope, integration):
-    # type: (Any, Dict[str, Any], StarletteIntegration) -> Tuple[Optional[str], Optional[str]]
-    name = None
-    source = None
-
-    if integration.transaction_style == "endpoint":
-        name = transaction_from_function(app.__class__)
-        source = TransactionSource.COMPONENT
-    elif integration.transaction_style == "url":
-        name = _transaction_name_from_router(asgi_scope)
-        source = TransactionSource.ROUTE
-
-    return name, source
diff --git a/src/sentry_sdk_alpha/integrations/starlite.py b/src/sentry_sdk_alpha/integrations/starlite.py
deleted file mode 100644
index 11dc09e4cde925..00000000000000
--- a/src/sentry_sdk_alpha/integrations/starlite.py
+++ /dev/null
@@ -1,289 +0,0 @@
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP, SOURCE_FOR_STYLE, TransactionSource
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-from sentry_sdk_alpha.integrations.asgi import SentryAsgiMiddleware
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.utils import (
-    ensure_integration_enabled,
-    event_from_exception,
-    transaction_from_function,
-)
-
-try:
-    from pydantic import BaseModel  # type: ignore
-    from starlite import Request, Starlite, State  # type: ignore
-    from starlite.handlers.base import BaseRouteHandler  # type: ignore
-    from starlite.middleware import DefineMiddleware  # type: ignore
-    from starlite.plugins.base import get_plugin_for_value  # type: ignore
-    from starlite.routes.http import HTTPRoute  # type: ignore
-    from starlite.utils import ConnectionDataExtractor, Ref, is_async_callable  # type: ignore
-except ImportError:
-    raise DidNotEnable("Starlite is not installed")
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from typing import Any, Optional, Union
-
-    from starlite import MiddlewareProtocol
-    from starlite.types import (
-        ASGIApp,
-        Hint,
-        HTTPReceiveMessage,
-        HTTPScope,
-        Message,
-        Middleware,
-        Receive,
-    )
-    from starlite.types import Scope as StarliteScope  # type: ignore
-    from starlite.types import Send, WebSocketReceiveMessage
-
-    from sentry_sdk_alpha._types import Event
-
-
-_DEFAULT_TRANSACTION_NAME = "generic Starlite request"
-
-
-class StarliteIntegration(Integration):
-    identifier = "starlite"
-    origin = f"auto.http.{identifier}"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        patch_app_init()
-        patch_middlewares()
-        patch_http_route_handle()
-
-
-class SentryStarliteASGIMiddleware(SentryAsgiMiddleware):
-    def __init__(self, app, span_origin=StarliteIntegration.origin):
-        # type: (ASGIApp, str) -> None
-        super().__init__(
-            app=app,
-            unsafe_context_data=False,
-            transaction_style="endpoint",
-            mechanism_type="asgi",
-            span_origin=span_origin,
-        )
-
-
-def patch_app_init():
-    # type: () -> None
-    """
-    Replaces the Starlite class's `__init__` function in order to inject `after_exception` handlers and set the
-    `SentryStarliteASGIMiddleware` as the outmost middleware in the stack.
-    See:
-    - https://starlite-api.github.io/starlite/usage/0-the-starlite-app/5-application-hooks/#after-exception
-    - https://starlite-api.github.io/starlite/usage/7-middleware/0-middleware-intro/
-    """
-    old__init__ = Starlite.__init__
-
-    @ensure_integration_enabled(StarliteIntegration, old__init__)
-    def injection_wrapper(self, *args, **kwargs):
-        # type: (Starlite, *Any, **Any) -> None
-        after_exception = kwargs.pop("after_exception", [])
-        kwargs.update(
-            after_exception=[
-                exception_handler,
-                *(after_exception if isinstance(after_exception, list) else [after_exception]),
-            ]
-        )
-
-        SentryStarliteASGIMiddleware.__call__ = SentryStarliteASGIMiddleware._run_asgi3  # type: ignore
-        middleware = kwargs.get("middleware") or []
-        kwargs["middleware"] = [SentryStarliteASGIMiddleware, *middleware]
-        old__init__(self, *args, **kwargs)
-
-    Starlite.__init__ = injection_wrapper
-
-
-def patch_middlewares():
-    # type: () -> None
-    old_resolve_middleware_stack = BaseRouteHandler.resolve_middleware
-
-    @ensure_integration_enabled(StarliteIntegration, old_resolve_middleware_stack)
-    def resolve_middleware_wrapper(self):
-        # type: (BaseRouteHandler) -> list[Middleware]
-        return [
-            enable_span_for_middleware(middleware)
-            for middleware in old_resolve_middleware_stack(self)
-        ]
-
-    BaseRouteHandler.resolve_middleware = resolve_middleware_wrapper
-
-
-def enable_span_for_middleware(middleware):
-    # type: (Middleware) -> Middleware
-    if (
-        not hasattr(middleware, "__call__")  # noqa: B004
-        or middleware is SentryStarliteASGIMiddleware
-    ):
-        return middleware
-
-    if isinstance(middleware, DefineMiddleware):
-        old_call = middleware.middleware.__call__  # type: ASGIApp
-    else:
-        old_call = middleware.__call__
-
-    async def _create_span_call(self, scope, receive, send):
-        # type: (MiddlewareProtocol, StarliteScope, Receive, Send) -> None
-        if sentry_sdk_alpha.get_client().get_integration(StarliteIntegration) is None:
-            return await old_call(self, scope, receive, send)
-
-        middleware_name = self.__class__.__name__
-        with sentry_sdk_alpha.start_span(
-            op=OP.MIDDLEWARE_STARLITE,
-            name=middleware_name,
-            origin=StarliteIntegration.origin,
-            only_if_parent=True,
-        ) as middleware_span:
-            middleware_span.set_tag("starlite.middleware_name", middleware_name)
-
-            # Creating spans for the "receive" callback
-            async def _sentry_receive(*args, **kwargs):
-                # type: (*Any, **Any) -> Union[HTTPReceiveMessage, WebSocketReceiveMessage]
-                if sentry_sdk_alpha.get_client().get_integration(StarliteIntegration) is None:
-                    return await receive(*args, **kwargs)
-                with sentry_sdk_alpha.start_span(
-                    op=OP.MIDDLEWARE_STARLITE_RECEIVE,
-                    name=getattr(receive, "__qualname__", str(receive)),
-                    origin=StarliteIntegration.origin,
-                    only_if_parent=True,
-                ) as span:
-                    span.set_tag("starlite.middleware_name", middleware_name)
-                    return await receive(*args, **kwargs)
-
-            receive_name = getattr(receive, "__name__", str(receive))
-            receive_patched = receive_name == "_sentry_receive"
-            new_receive = _sentry_receive if not receive_patched else receive
-
-            # Creating spans for the "send" callback
-            async def _sentry_send(message):
-                # type: (Message) -> None
-                if sentry_sdk_alpha.get_client().get_integration(StarliteIntegration) is None:
-                    return await send(message)
-                with sentry_sdk_alpha.start_span(
-                    op=OP.MIDDLEWARE_STARLITE_SEND,
-                    name=getattr(send, "__qualname__", str(send)),
-                    origin=StarliteIntegration.origin,
-                    only_if_parent=True,
-                ) as span:
-                    span.set_tag("starlite.middleware_name", middleware_name)
-                    return await send(message)
-
-            send_name = getattr(send, "__name__", str(send))
-            send_patched = send_name == "_sentry_send"
-            new_send = _sentry_send if not send_patched else send
-
-            return await old_call(self, scope, new_receive, new_send)
-
-    not_yet_patched = old_call.__name__ not in ["_create_span_call"]
-
-    if not_yet_patched:
-        if isinstance(middleware, DefineMiddleware):
-            middleware.middleware.__call__ = _create_span_call
-        else:
-            middleware.__call__ = _create_span_call
-
-    return middleware
-
-
-def patch_http_route_handle():
-    # type: () -> None
-    old_handle = HTTPRoute.handle
-
-    async def handle_wrapper(self, scope, receive, send):
-        # type: (HTTPRoute, HTTPScope, Receive, Send) -> None
-        if sentry_sdk_alpha.get_client().get_integration(StarliteIntegration) is None:
-            return await old_handle(self, scope, receive, send)
-
-        sentry_scope = sentry_sdk_alpha.get_isolation_scope()
-        request = scope["app"].request_class(
-            scope=scope, receive=receive, send=send
-        )  # type: Request[Any, Any]
-        extracted_request_data = ConnectionDataExtractor(parse_body=True, parse_query=True)(request)
-        body = extracted_request_data.pop("body")
-
-        request_data = await body
-
-        def event_processor(event, _):
-            # type: (Event, Hint) -> Event
-            route_handler = scope.get("route_handler")
-
-            request_info = event.get("request", {})
-            request_info["content_length"] = len(scope.get("_body", b""))
-            if should_send_default_pii():
-                request_info["cookies"] = extracted_request_data["cookies"]
-            if request_data is not None:
-                request_info["data"] = request_data
-
-            func = None
-            if route_handler.name is not None:
-                tx_name = route_handler.name
-            elif isinstance(route_handler.fn, Ref):
-                func = route_handler.fn.value
-            else:
-                func = route_handler.fn
-            if func is not None:
-                tx_name = transaction_from_function(func)
-
-            tx_info = {"source": SOURCE_FOR_STYLE["endpoint"]}
-
-            if not tx_name:
-                tx_name = _DEFAULT_TRANSACTION_NAME
-                tx_info = {"source": TransactionSource.ROUTE}
-
-            event.update(
-                {
-                    "request": request_info,
-                    "transaction": tx_name,
-                    "transaction_info": tx_info,
-                }
-            )
-            return event
-
-        sentry_scope._name = StarliteIntegration.identifier
-        sentry_scope.add_event_processor(event_processor)
-
-        return await old_handle(self, scope, receive, send)
-
-    HTTPRoute.handle = handle_wrapper
-
-
-def retrieve_user_from_scope(scope):
-    # type: (StarliteScope) -> Optional[dict[str, Any]]
-    scope_user = scope.get("user")
-    if not scope_user:
-        return None
-    if isinstance(scope_user, dict):
-        return scope_user
-    if isinstance(scope_user, BaseModel):
-        return scope_user.dict()
-    if hasattr(scope_user, "asdict"):  # dataclasses
-        return scope_user.asdict()
-
-    plugin = get_plugin_for_value(scope_user)
-    if plugin and not is_async_callable(plugin.to_dict):
-        return plugin.to_dict(scope_user)
-
-    return None
-
-
-@ensure_integration_enabled(StarliteIntegration)
-def exception_handler(exc, scope, _):
-    # type: (Exception, StarliteScope, State) -> None
-    user_info = None  # type: Optional[dict[str, Any]]
-    if should_send_default_pii():
-        user_info = retrieve_user_from_scope(scope)
-    if user_info and isinstance(user_info, dict):
-        sentry_scope = sentry_sdk_alpha.get_isolation_scope()
-        sentry_scope.set_user(user_info)
-
-    event, hint = event_from_exception(
-        exc,
-        client_options=sentry_sdk_alpha.get_client().options,
-        mechanism={"type": StarliteIntegration.identifier, "handled": False},
-    )
-
-    sentry_sdk_alpha.capture_event(event, hint=hint)
diff --git a/src/sentry_sdk_alpha/integrations/statsig.py b/src/sentry_sdk_alpha/integrations/statsig.py
deleted file mode 100644
index c89a69512acda4..00000000000000
--- a/src/sentry_sdk_alpha/integrations/statsig.py
+++ /dev/null
@@ -1,37 +0,0 @@
-from functools import wraps
-from typing import TYPE_CHECKING, Any
-
-from sentry_sdk_alpha.feature_flags import add_feature_flag
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
-from sentry_sdk_alpha.utils import parse_version
-
-try:
-    from statsig import statsig as statsig_module
-    from statsig.version import __version__ as STATSIG_VERSION
-except ImportError:
-    raise DidNotEnable("statsig is not installed")
-
-if TYPE_CHECKING:
-    from statsig.statsig_user import StatsigUser
-
-
-class StatsigIntegration(Integration):
-    identifier = "statsig"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        version = parse_version(STATSIG_VERSION)
-        _check_minimum_version(StatsigIntegration, version, "statsig")
-
-        # Wrap and patch evaluation method(s) in the statsig module
-        old_check_gate = statsig_module.check_gate
-
-        @wraps(old_check_gate)
-        def sentry_check_gate(user, gate, *args, **kwargs):
-            # type: (StatsigUser, str, *Any, **Any) -> Any
-            enabled = old_check_gate(user, gate, *args, **kwargs)
-            add_feature_flag(gate, enabled)
-            return enabled
-
-        statsig_module.check_gate = sentry_check_gate
diff --git a/src/sentry_sdk_alpha/integrations/stdlib.py b/src/sentry_sdk_alpha/integrations/stdlib.py
deleted file mode 100644
index 7e8bb9e825b8c1..00000000000000
--- a/src/sentry_sdk_alpha/integrations/stdlib.py
+++ /dev/null
@@ -1,307 +0,0 @@
-import os
-import platform
-import subprocess
-import sys
-from http.client import HTTPConnection
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP, SPANDATA
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.scope import add_global_event_processor
-from sentry_sdk_alpha.tracing_utils import EnvironHeaders, should_propagate_trace
-from sentry_sdk_alpha.utils import (
-    SENSITIVE_DATA_SUBSTITUTE,
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    get_current_thread_meta,
-    http_client_status_to_breadcrumb_level,
-    is_sentry_url,
-    logger,
-    parse_url,
-    safe_repr,
-    set_thread_info_from_span,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Dict, List, Optional
-
-    from sentry_sdk_alpha._types import Event, Hint
-
-
-_RUNTIME_CONTEXT = {
-    "name": platform.python_implementation(),
-    "version": "%s.%s.%s" % (sys.version_info[:3]),
-    "build": sys.version,
-}  # type: dict[str, object]
-
-
-class StdlibIntegration(Integration):
-    identifier = "stdlib"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        _install_httplib()
-        _install_subprocess()
-
-        @add_global_event_processor
-        def add_python_runtime_context(event, hint):
-            # type: (Event, Hint) -> Optional[Event]
-            if sentry_sdk_alpha.get_client().get_integration(StdlibIntegration) is not None:
-                contexts = event.setdefault("contexts", {})
-                if isinstance(contexts, dict) and "runtime" not in contexts:
-                    contexts["runtime"] = _RUNTIME_CONTEXT
-
-            return event
-
-
-def _install_httplib():
-    # type: () -> None
-    real_putrequest = HTTPConnection.putrequest
-    real_getresponse = HTTPConnection.getresponse
-
-    def putrequest(self, method, url, *args, **kwargs):
-        # type: (HTTPConnection, str, str, *Any, **Any) -> Any
-        host = self.host
-        port = self.port
-        default_port = self.default_port
-
-        client = sentry_sdk_alpha.get_client()
-        if client.get_integration(StdlibIntegration) is None or is_sentry_url(
-            client, f"{host}:{port}"  # noqa: E231
-        ):
-            return real_putrequest(self, method, url, *args, **kwargs)
-
-        real_url = url
-        if real_url is None or not real_url.startswith(("http://", "https://")):
-            real_url = "{}://{}{}{}".format(
-                default_port == 443 and "https" or "http",
-                host,
-                port != default_port and ":%s" % port or "",
-                url,
-            )
-
-        parsed_url = None
-        with capture_internal_exceptions():
-            parsed_url = parse_url(real_url, sanitize=False)
-
-        span = sentry_sdk_alpha.start_span(
-            op=OP.HTTP_CLIENT,
-            name="{} {}".format(
-                method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE
-            ),
-            origin="auto.http.stdlib.httplib",
-            only_if_parent=True,
-        )
-        span.__enter__()
-
-        data = {
-            SPANDATA.HTTP_METHOD: method,
-        }
-        set_thread_info_from_span(data, span)
-
-        if parsed_url is not None:
-            data["url"] = parsed_url.url
-            data[SPANDATA.HTTP_QUERY] = parsed_url.query
-            data[SPANDATA.HTTP_FRAGMENT] = parsed_url.fragment
-
-        for key, value in data.items():
-            span.set_attribute(key, value)
-
-        rv = real_putrequest(self, method, url, *args, **kwargs)
-
-        if should_propagate_trace(client, real_url):
-            for (
-                key,
-                value,
-            ) in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers(span=span):
-                logger.debug(
-                    "[Tracing] Adding `{key}` header {value} to outgoing request to {real_url}.".format(
-                        key=key, value=value, real_url=real_url
-                    )
-                )
-                self.putheader(key, value)
-
-        self._sentrysdk_span = span  # type: ignore[attr-defined]
-        self._sentrysdk_span_data = data  # type: ignore[attr-defined]
-
-        return rv
-
-    def getresponse(self, *args, **kwargs):
-        # type: (HTTPConnection, *Any, **Any) -> Any
-        span = getattr(self, "_sentrysdk_span", None)
-
-        if span is None:
-            return real_getresponse(self, *args, **kwargs)
-
-        try:
-            rv = real_getresponse(self, *args, **kwargs)
-
-            span_data = getattr(self, "_sentrysdk_span_data", {})
-            span_data[SPANDATA.HTTP_STATUS_CODE] = int(rv.status)
-            span_data["reason"] = rv.reason
-
-            status_code = int(rv.status)
-            span.set_http_status(status_code)
-            span.set_attribute("reason", rv.reason)
-
-            sentry_sdk_alpha.add_breadcrumb(
-                type="http",
-                category="httplib",
-                data=span_data,
-                level=http_client_status_to_breadcrumb_level(status_code),
-            )
-        finally:
-            span.__exit__(None, None, None)
-
-        return rv
-
-    HTTPConnection.putrequest = putrequest  # type: ignore[method-assign]
-    HTTPConnection.getresponse = getresponse  # type: ignore[method-assign]
-
-
-def _init_argument(args, kwargs, name, position, setdefault_callback=None):
-    # type: (List[Any], Dict[Any, Any], str, int, Optional[Callable[[Any], Any]]) -> Any
-    """
-    given (*args, **kwargs) of a function call, retrieve (and optionally set a
-    default for) an argument by either name or position.
-
-    This is useful for wrapping functions with complex type signatures and
-    extracting a few arguments without needing to redefine that function's
-    entire type signature.
-    """
-
-    if name in kwargs:
-        rv = kwargs[name]
-        if setdefault_callback is not None:
-            rv = setdefault_callback(rv)
-        if rv is not None:
-            kwargs[name] = rv
-    elif position < len(args):
-        rv = args[position]
-        if setdefault_callback is not None:
-            rv = setdefault_callback(rv)
-        if rv is not None:
-            args[position] = rv
-    else:
-        rv = setdefault_callback and setdefault_callback(None)
-        if rv is not None:
-            kwargs[name] = rv
-
-    return rv
-
-
-def _install_subprocess():
-    # type: () -> None
-    old_popen_init = subprocess.Popen.__init__
-
-    @ensure_integration_enabled(StdlibIntegration, old_popen_init)
-    def sentry_patched_popen_init(self, *a, **kw):
-        # type: (subprocess.Popen[Any], *Any, **Any) -> None
-        # Convert from tuple to list to be able to set values.
-        a = list(a)
-
-        args = _init_argument(a, kw, "args", 0) or []
-        cwd = _init_argument(a, kw, "cwd", 9)
-
-        # if args is not a list or tuple (and e.g. some iterator instead),
-        # let's not use it at all. There are too many things that can go wrong
-        # when trying to collect an iterator into a list and setting that list
-        # into `a` again.
-        #
-        # Also invocations where `args` is not a sequence are not actually
-        # legal. They just happen to work under CPython.
-        description = None
-
-        if isinstance(args, (list, tuple)) and len(args) < 100:
-            with capture_internal_exceptions():
-                description = " ".join(map(str, args))
-
-        if description is None:
-            description = safe_repr(args)
-
-        env = None
-
-        with sentry_sdk_alpha.start_span(
-            op=OP.SUBPROCESS,
-            name=description,
-            origin="auto.subprocess.stdlib.subprocess",
-            only_if_parent=True,
-        ) as span:
-            for k, v in sentry_sdk_alpha.get_current_scope().iter_trace_propagation_headers(
-                span=span
-            ):
-                if env is None:
-                    env = _init_argument(
-                        a,
-                        kw,
-                        "env",
-                        10,
-                        lambda x: dict(x if x is not None else os.environ),
-                    )
-                env["SUBPROCESS_" + k.upper().replace("-", "_")] = v
-
-            if cwd:
-                span.set_attribute("subprocess.cwd", cwd)
-
-            rv = old_popen_init(self, *a, **kw)
-
-            span.set_tag("subprocess.pid", self.pid)
-
-            with capture_internal_exceptions():
-                thread_id, thread_name = get_current_thread_meta()
-                breadcrumb_data = {
-                    "subprocess.pid": self.pid,
-                    SPANDATA.THREAD_ID: thread_id,
-                    SPANDATA.THREAD_NAME: thread_name,
-                }
-                if cwd:
-                    breadcrumb_data["subprocess.cwd"] = cwd
-
-                sentry_sdk_alpha.add_breadcrumb(
-                    type="subprocess",
-                    category="subprocess",
-                    message=description,
-                    data=breadcrumb_data,
-                )
-
-            return rv
-
-    subprocess.Popen.__init__ = sentry_patched_popen_init  # type: ignore
-
-    old_popen_wait = subprocess.Popen.wait
-
-    @ensure_integration_enabled(StdlibIntegration, old_popen_wait)
-    def sentry_patched_popen_wait(self, *a, **kw):
-        # type: (subprocess.Popen[Any], *Any, **Any) -> Any
-        with sentry_sdk_alpha.start_span(
-            op=OP.SUBPROCESS_WAIT,
-            origin="auto.subprocess.stdlib.subprocess",
-            only_if_parent=True,
-        ) as span:
-            span.set_tag("subprocess.pid", self.pid)
-            return old_popen_wait(self, *a, **kw)
-
-    subprocess.Popen.wait = sentry_patched_popen_wait  # type: ignore
-
-    old_popen_communicate = subprocess.Popen.communicate
-
-    @ensure_integration_enabled(StdlibIntegration, old_popen_communicate)
-    def sentry_patched_popen_communicate(self, *a, **kw):
-        # type: (subprocess.Popen[Any], *Any, **Any) -> Any
-        with sentry_sdk_alpha.start_span(
-            op=OP.SUBPROCESS_COMMUNICATE,
-            origin="auto.subprocess.stdlib.subprocess",
-            only_if_parent=True,
-        ) as span:
-            span.set_tag("subprocess.pid", self.pid)
-            return old_popen_communicate(self, *a, **kw)
-
-    subprocess.Popen.communicate = sentry_patched_popen_communicate  # type: ignore
-
-
-def get_subprocess_traceparent_headers():
-    # type: () -> EnvironHeaders
-    return EnvironHeaders(os.environ, prefix="SUBPROCESS_")
diff --git a/src/sentry_sdk_alpha/integrations/strawberry.py b/src/sentry_sdk_alpha/integrations/strawberry.py
deleted file mode 100644
index 8ec6f0d12d98bc..00000000000000
--- a/src/sentry_sdk_alpha/integrations/strawberry.py
+++ /dev/null
@@ -1,373 +0,0 @@
-import functools
-import hashlib
-from inspect import isawaitable
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
-from sentry_sdk_alpha.integrations.logging import ignore_logger
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.tracing import TransactionSource
-from sentry_sdk_alpha.utils import (
-    _get_installed_modules,
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    event_from_exception,
-    logger,
-    package_version,
-)
-
-try:
-    from functools import cached_property
-except ImportError:
-    # The strawberry integration requires Python 3.8+. functools.cached_property
-    # was added in 3.8, so this check is technically not needed, but since this
-    # is an auto-enabling integration, we might get to executing this import in
-    # lower Python versions, so we need to deal with it.
-    raise DidNotEnable("strawberry-graphql integration requires Python 3.8 or newer")
-
-try:
-    from strawberry import Schema
-    from strawberry.extensions import SchemaExtension
-    from strawberry.extensions.tracing.utils import (
-        should_skip_tracing as strawberry_should_skip_tracing,
-    )
-    from strawberry.http import async_base_view, sync_base_view
-except ImportError:
-    raise DidNotEnable("strawberry-graphql is not installed")
-
-try:
-    from strawberry.extensions.tracing import (
-        SentryTracingExtension as StrawberrySentryAsyncExtension,
-    )
-    from strawberry.extensions.tracing import (
-        SentryTracingExtensionSync as StrawberrySentrySyncExtension,
-    )
-except ImportError:
-    StrawberrySentryAsyncExtension = None
-    StrawberrySentrySyncExtension = None
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from collections.abc import Callable, Generator
-    from typing import Any, List, Optional
-
-    from graphql import GraphQLError, GraphQLResolveInfo
-    from strawberry.http import GraphQLHTTPResponse
-    from strawberry.types import ExecutionContext
-
-    from sentry_sdk_alpha._types import Event, EventProcessor
-
-
-ignore_logger("strawberry.execution")
-
-
-class StrawberryIntegration(Integration):
-    identifier = "strawberry"
-    origin = f"auto.graphql.{identifier}"
-
-    def __init__(self, async_execution=None):
-        # type: (Optional[bool]) -> None
-        if async_execution not in (None, False, True):
-            raise ValueError(
-                f'Invalid value for async_execution: "{async_execution}" (must be bool)'
-            )
-        self.async_execution = async_execution
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        version = package_version("strawberry-graphql")
-        _check_minimum_version(StrawberryIntegration, version, "strawberry-graphql")
-
-        _patch_schema_init()
-        _patch_views()
-
-
-def _patch_schema_init():
-    # type: () -> None
-    old_schema_init = Schema.__init__
-
-    @functools.wraps(old_schema_init)
-    def _sentry_patched_schema_init(self, *args, **kwargs):
-        # type: (Schema, Any, Any) -> None
-        integration = sentry_sdk_alpha.get_client().get_integration(StrawberryIntegration)
-        if integration is None:
-            return old_schema_init(self, *args, **kwargs)
-
-        extensions = kwargs.get("extensions") or []
-
-        if integration.async_execution is not None:
-            should_use_async_extension = integration.async_execution
-        else:
-            # try to figure it out ourselves
-            should_use_async_extension = _guess_if_using_async(extensions)
-
-            logger.info(
-                "Assuming strawberry is running %s. If not, initialize it as StrawberryIntegration(async_execution=%s).",
-                "async" if should_use_async_extension else "sync",
-                "False" if should_use_async_extension else "True",
-            )
-
-        # add our extension
-        extensions.append(
-            SentryAsyncExtension if should_use_async_extension else SentrySyncExtension
-        )
-
-        kwargs["extensions"] = extensions
-
-        return old_schema_init(self, *args, **kwargs)
-
-    Schema.__init__ = _sentry_patched_schema_init  # type: ignore[method-assign]
-
-
-class SentryAsyncExtension(SchemaExtension):
-    def __init__(
-        self,
-        *,
-        execution_context=None,
-    ):
-        # type: (Any, Optional[ExecutionContext]) -> None
-        if execution_context:
-            self.execution_context = execution_context
-
-    @cached_property
-    def _resource_name(self):
-        # type: () -> str
-        query_hash = self.hash_query(self.execution_context.query)  # type: ignore
-
-        if self.execution_context.operation_name:
-            return f"{self.execution_context.operation_name}:{query_hash}"
-
-        return query_hash
-
-    def hash_query(self, query):
-        # type: (str) -> str
-        return hashlib.md5(query.encode("utf-8")).hexdigest()
-
-    def on_operation(self):
-        # type: () -> Generator[None, None, None]
-        self._operation_name = self.execution_context.operation_name
-
-        operation_type = "query"
-        op = OP.GRAPHQL_QUERY
-
-        if self.execution_context.query is None:
-            self.execution_context.query = ""
-
-        if self.execution_context.query.strip().startswith("mutation"):
-            operation_type = "mutation"
-            op = OP.GRAPHQL_MUTATION
-        elif self.execution_context.query.strip().startswith("subscription"):
-            operation_type = "subscription"
-            op = OP.GRAPHQL_SUBSCRIPTION
-
-        description = operation_type
-        if self._operation_name:
-            description += f" {self._operation_name}"
-
-        sentry_sdk_alpha.add_breadcrumb(
-            category="graphql.operation",
-            data={
-                "operation_name": self._operation_name,
-                "operation_type": operation_type,
-            },
-        )
-
-        scope = sentry_sdk_alpha.get_isolation_scope()
-        event_processor = _make_request_event_processor(self.execution_context)
-        scope.add_event_processor(event_processor)
-
-        with sentry_sdk_alpha.start_span(
-            op=op,
-            name=description,
-            origin=StrawberryIntegration.origin,
-            only_if_parent=True,
-        ) as graphql_span:
-            graphql_span.set_attribute("graphql.operation.type", operation_type)
-            graphql_span.set_attribute("graphql.document", self.execution_context.query)
-            graphql_span.set_attribute("graphql.resource_name", self._resource_name)
-
-            yield
-
-            # we might have a more accurate operation_name after the parsing
-            self._operation_name = self.execution_context.operation_name
-
-            if self._operation_name is not None:
-                graphql_span.set_attribute("graphql.operation.name", self._operation_name)
-
-                sentry_sdk_alpha.get_current_scope().set_transaction_name(
-                    self._operation_name,
-                    source=TransactionSource.COMPONENT,
-                )
-
-            root_span = graphql_span.root_span
-            if root_span:
-                root_span.op = op
-
-    def on_validate(self):
-        # type: () -> Generator[None, None, None]
-        with sentry_sdk_alpha.start_span(
-            op=OP.GRAPHQL_VALIDATE,
-            name="validation",
-            origin=StrawberryIntegration.origin,
-        ):
-            yield
-
-    def on_parse(self):
-        # type: () -> Generator[None, None, None]
-        with sentry_sdk_alpha.start_span(
-            op=OP.GRAPHQL_PARSE,
-            name="parsing",
-            origin=StrawberryIntegration.origin,
-        ):
-            yield
-
-    def should_skip_tracing(self, _next, info):
-        # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], GraphQLResolveInfo) -> bool
-        return strawberry_should_skip_tracing(_next, info)
-
-    async def _resolve(self, _next, root, info, *args, **kwargs):
-        # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any
-        result = _next(root, info, *args, **kwargs)
-
-        if isawaitable(result):
-            result = await result
-
-        return result
-
-    async def resolve(self, _next, root, info, *args, **kwargs):
-        # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any
-        if self.should_skip_tracing(_next, info):
-            return await self._resolve(_next, root, info, *args, **kwargs)
-
-        field_path = f"{info.parent_type}.{info.field_name}"
-
-        with sentry_sdk_alpha.start_span(
-            op=OP.GRAPHQL_RESOLVE,
-            name=f"resolving {field_path}",
-            origin=StrawberryIntegration.origin,
-        ) as span:
-            span.set_attribute("graphql.field_name", info.field_name)
-            span.set_attribute("graphql.parent_type", info.parent_type.name)
-            span.set_attribute("graphql.field_path", field_path)
-            span.set_attribute("graphql.path", ".".join(map(str, info.path.as_list())))
-
-            return await self._resolve(_next, root, info, *args, **kwargs)
-
-
-class SentrySyncExtension(SentryAsyncExtension):
-    def resolve(self, _next, root, info, *args, **kwargs):
-        # type: (Callable[[Any, Any, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any
-        if self.should_skip_tracing(_next, info):
-            return _next(root, info, *args, **kwargs)
-
-        field_path = f"{info.parent_type}.{info.field_name}"
-
-        with sentry_sdk_alpha.start_span(
-            op=OP.GRAPHQL_RESOLVE,
-            name=f"resolving {field_path}",
-            origin=StrawberryIntegration.origin,
-        ) as span:
-            span.set_attribute("graphql.field_name", info.field_name)
-            span.set_attribute("graphql.parent_type", info.parent_type.name)
-            span.set_attribute("graphql.field_path", field_path)
-            span.set_attribute("graphql.path", ".".join(map(str, info.path.as_list())))
-
-            return _next(root, info, *args, **kwargs)
-
-
-def _patch_views():
-    # type: () -> None
-    old_async_view_handle_errors = async_base_view.AsyncBaseHTTPView._handle_errors
-    old_sync_view_handle_errors = sync_base_view.SyncBaseHTTPView._handle_errors
-
-    def _sentry_patched_async_view_handle_errors(self, errors, response_data):
-        # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None
-        old_async_view_handle_errors(self, errors, response_data)
-        _sentry_patched_handle_errors(self, errors, response_data)
-
-    def _sentry_patched_sync_view_handle_errors(self, errors, response_data):
-        # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None
-        old_sync_view_handle_errors(self, errors, response_data)
-        _sentry_patched_handle_errors(self, errors, response_data)
-
-    @ensure_integration_enabled(StrawberryIntegration)
-    def _sentry_patched_handle_errors(self, errors, response_data):
-        # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None
-        if not errors:
-            return
-
-        scope = sentry_sdk_alpha.get_isolation_scope()
-        event_processor = _make_response_event_processor(response_data)
-        scope.add_event_processor(event_processor)
-
-        with capture_internal_exceptions():
-            for error in errors:
-                event, hint = event_from_exception(
-                    error,
-                    client_options=sentry_sdk_alpha.get_client().options,
-                    mechanism={
-                        "type": StrawberryIntegration.identifier,
-                        "handled": False,
-                    },
-                )
-                sentry_sdk_alpha.capture_event(event, hint=hint)
-
-    async_base_view.AsyncBaseHTTPView._handle_errors = (  # type: ignore[method-assign]
-        _sentry_patched_async_view_handle_errors
-    )
-    sync_base_view.SyncBaseHTTPView._handle_errors = (  # type: ignore[method-assign]
-        _sentry_patched_sync_view_handle_errors
-    )
-
-
-def _make_request_event_processor(execution_context):
-    # type: (ExecutionContext) -> EventProcessor
-
-    def inner(event, hint):
-        # type: (Event, dict[str, Any]) -> Event
-        with capture_internal_exceptions():
-            if should_send_default_pii():
-                request_data = event.setdefault("request", {})
-                request_data["api_target"] = "graphql"
-
-                if not request_data.get("data"):
-                    data = {"query": execution_context.query}  # type: dict[str, Any]
-                    if execution_context.variables:
-                        data["variables"] = execution_context.variables
-                    if execution_context.operation_name:
-                        data["operationName"] = execution_context.operation_name
-
-                    request_data["data"] = data
-
-            else:
-                try:
-                    del event["request"]["data"]
-                except (KeyError, TypeError):
-                    pass
-
-        return event
-
-    return inner
-
-
-def _make_response_event_processor(response_data):
-    # type: (GraphQLHTTPResponse) -> EventProcessor
-
-    def inner(event, hint):
-        # type: (Event, dict[str, Any]) -> Event
-        with capture_internal_exceptions():
-            if should_send_default_pii():
-                contexts = event.setdefault("contexts", {})
-                contexts["response"] = {"data": response_data}
-
-        return event
-
-    return inner
-
-
-def _guess_if_using_async(extensions):
-    # type: (List[SchemaExtension]) -> bool
-    return bool({"starlette", "starlite", "litestar", "fastapi"} & set(_get_installed_modules()))
diff --git a/src/sentry_sdk_alpha/integrations/sys_exit.py b/src/sentry_sdk_alpha/integrations/sys_exit.py
deleted file mode 100644
index 9d98e20cf6b5d1..00000000000000
--- a/src/sentry_sdk_alpha/integrations/sys_exit.py
+++ /dev/null
@@ -1,70 +0,0 @@
-import functools
-import sys
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha._types import TYPE_CHECKING
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.utils import capture_internal_exceptions, event_from_exception
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import NoReturn, Union
-
-
-class SysExitIntegration(Integration):
-    """Captures sys.exit calls and sends them as events to Sentry.
-
-    By default, SystemExit exceptions are not captured by the SDK. Enabling this integration will capture SystemExit
-    exceptions generated by sys.exit calls and send them to Sentry.
-
-    This integration, in its default configuration, only captures the sys.exit call if the exit code is a non-zero and
-    non-None value (unsuccessful exits). Pass `capture_successful_exits=True` to capture successful exits as well.
-    Note that the integration does not capture SystemExit exceptions raised outside a call to sys.exit.
-    """
-
-    identifier = "sys_exit"
-
-    def __init__(self, *, capture_successful_exits=False):
-        # type: (bool) -> None
-        self._capture_successful_exits = capture_successful_exits
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        SysExitIntegration._patch_sys_exit()
-
-    @staticmethod
-    def _patch_sys_exit():
-        # type: () -> None
-        old_exit = sys.exit  # type: Callable[[Union[str, int, None]], NoReturn]
-
-        @functools.wraps(old_exit)
-        def sentry_patched_exit(__status=0):
-            # type: (Union[str, int, None]) -> NoReturn
-            # @ensure_integration_enabled ensures that this is non-None
-            integration = sentry_sdk_alpha.get_client().get_integration(SysExitIntegration)
-            if integration is None:
-                old_exit(__status)
-
-            try:
-                old_exit(__status)
-            except SystemExit as e:
-                with capture_internal_exceptions():
-                    if integration._capture_successful_exits or __status not in (
-                        0,
-                        None,
-                    ):
-                        _capture_exception(e)
-                raise e
-
-        sys.exit = sentry_patched_exit
-
-
-def _capture_exception(exc):
-    # type: (SystemExit) -> None
-    event, hint = event_from_exception(
-        exc,
-        client_options=sentry_sdk_alpha.get_client().options,
-        mechanism={"type": SysExitIntegration.identifier, "handled": False},
-    )
-    sentry_sdk_alpha.capture_event(event, hint=hint)
diff --git a/src/sentry_sdk_alpha/integrations/threading.py b/src/sentry_sdk_alpha/integrations/threading.py
deleted file mode 100644
index ff05d95863000c..00000000000000
--- a/src/sentry_sdk_alpha/integrations/threading.py
+++ /dev/null
@@ -1,128 +0,0 @@
-import sys
-import warnings
-from functools import wraps
-from threading import Thread, current_thread
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha import Scope
-from sentry_sdk_alpha.integrations import Integration
-from sentry_sdk_alpha.scope import ScopeType
-from sentry_sdk_alpha.utils import capture_internal_exceptions, event_from_exception, reraise
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, TypeVar
-
-    from sentry_sdk_alpha._types import ExcInfo
-
-    F = TypeVar("F", bound=Callable[..., Any])
-
-
-class ThreadingIntegration(Integration):
-    identifier = "threading"
-
-    def __init__(self, propagate_scope=True):
-        # type: (bool) -> None
-        self.propagate_scope = propagate_scope
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        old_start = Thread.start
-
-        try:
-            import channels  # type: ignore[import-not-found]
-            from django import VERSION as django_version  # noqa: N811
-
-            channels_version = channels.__version__
-        except ImportError:
-            django_version = None
-            channels_version = None
-
-        @wraps(old_start)
-        def sentry_start(self, *a, **kw):
-            # type: (Thread, *Any, **Any) -> Any
-            integration = sentry_sdk_alpha.get_client().get_integration(ThreadingIntegration)
-            if integration is None:
-                return old_start(self, *a, **kw)
-
-            if integration.propagate_scope:
-                if (
-                    sys.version_info < (3, 9)
-                    and channels_version is not None
-                    and channels_version < "4.0.0"
-                    and django_version is not None
-                    and django_version >= (3, 0)
-                    and django_version < (4, 0)
-                ):
-                    warnings.warn(
-                        "There is a known issue with Django channels 2.x and 3.x when using Python 3.8 or older. "
-                        "(Async support is emulated using threads and some Sentry data may be leaked between those threads.) "
-                        "Please either upgrade to Django channels 4.0+, use Django's async features "
-                        "available in Django 3.1+ instead of Django channels, or upgrade to Python 3.9+.",
-                        stacklevel=2,
-                    )
-                    isolation_scope = sentry_sdk_alpha.get_isolation_scope()
-                    current_scope = sentry_sdk_alpha.get_current_scope()
-
-                else:
-                    isolation_scope = sentry_sdk_alpha.get_isolation_scope().fork()
-                    current_scope = sentry_sdk_alpha.get_current_scope().fork()
-            else:
-                isolation_scope = Scope(ty=ScopeType.ISOLATION)
-                current_scope = Scope(ty=ScopeType.CURRENT)
-
-            # Patching instance methods in `start()` creates a reference cycle if
-            # done in a naive way. See
-            # https://github.com/getsentry/sentry-python/pull/434
-            #
-            # In threading module, using current_thread API will access current thread instance
-            # without holding it to avoid a reference cycle in an easier way.
-            with capture_internal_exceptions():
-                new_run = _wrap_run(
-                    isolation_scope,
-                    current_scope,
-                    getattr(self.run, "__func__", self.run),
-                )
-                self.run = new_run  # type: ignore
-
-            return old_start(self, *a, **kw)
-
-        Thread.start = sentry_start  # type: ignore
-
-
-def _wrap_run(isolation_scope_to_use, current_scope_to_use, old_run_func):
-    # type: (sentry_sdk.Scope, sentry_sdk.Scope, F) -> F
-    @wraps(old_run_func)
-    def run(*a, **kw):
-        # type: (*Any, **Any) -> Any
-        def _run_old_run_func():
-            # type: () -> Any
-            try:
-                self = current_thread()
-                return old_run_func(self, *a, **kw)
-            except Exception:
-                reraise(*_capture_exception())
-
-        with sentry_sdk_alpha.use_isolation_scope(isolation_scope_to_use):
-            with sentry_sdk_alpha.use_scope(current_scope_to_use):
-                return _run_old_run_func()
-
-    return run  # type: ignore
-
-
-def _capture_exception():
-    # type: () -> ExcInfo
-    exc_info = sys.exc_info()
-
-    client = sentry_sdk_alpha.get_client()
-    if client.get_integration(ThreadingIntegration) is not None:
-        event, hint = event_from_exception(
-            exc_info,
-            client_options=client.options,
-            mechanism={"type": "threading", "handled": False},
-        )
-        sentry_sdk_alpha.capture_event(event, hint=hint)
-
-    return exc_info
diff --git a/src/sentry_sdk_alpha/integrations/tornado.py b/src/sentry_sdk_alpha/integrations/tornado.py
deleted file mode 100644
index da6bde6b93628e..00000000000000
--- a/src/sentry_sdk_alpha/integrations/tornado.py
+++ /dev/null
@@ -1,256 +0,0 @@
-import contextlib
-import weakref
-from inspect import iscoroutinefunction
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration, _check_minimum_version
-from sentry_sdk_alpha.integrations._wsgi_common import (
-    RequestExtractor,
-    _filter_headers,
-    _is_json_content_type,
-    _request_headers_to_span_attributes,
-)
-from sentry_sdk_alpha.integrations.logging import ignore_logger
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.tracing import TransactionSource
-from sentry_sdk_alpha.utils import (
-    CONTEXTVARS_ERROR_MESSAGE,
-    HAS_REAL_CONTEXTVARS,
-    capture_internal_exceptions,
-    ensure_integration_enabled,
-    event_from_exception,
-    transaction_from_function,
-)
-
-try:
-    from tornado import version_info as TORNADO_VERSION
-    from tornado.gen import coroutine
-    from tornado.httputil import HTTPServerRequest
-    from tornado.web import HTTPError, RequestHandler
-except ImportError:
-    raise DidNotEnable("Tornado not installed")
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    from collections.abc import Callable, Generator
-    from typing import Any, Dict, Optional
-
-    from sentry_sdk_alpha._types import Event, EventProcessor
-
-
-REQUEST_PROPERTY_TO_ATTRIBUTE = {
-    "method": "http.request.method",
-    "path": "url.path",
-    "query": "url.query",
-    "protocol": "url.scheme",
-}
-
-
-class TornadoIntegration(Integration):
-    identifier = "tornado"
-    origin = f"auto.http.{identifier}"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        _check_minimum_version(TornadoIntegration, TORNADO_VERSION)
-
-        if not HAS_REAL_CONTEXTVARS:
-            # Tornado is async. We better have contextvars or we're going to leak
-            # state between requests.
-            raise DidNotEnable(
-                "The tornado integration for Sentry requires Python 3.7+ or the aiocontextvars package"
-                + CONTEXTVARS_ERROR_MESSAGE
-            )
-
-        ignore_logger("tornado.access")
-
-        old_execute = RequestHandler._execute
-
-        awaitable = iscoroutinefunction(old_execute)
-
-        if awaitable:
-            # Starting Tornado 6 RequestHandler._execute method is a standard Python coroutine (async/await)
-            # In that case our method should be a coroutine function too
-            async def sentry_execute_request_handler(self, *args, **kwargs):
-                # type: (RequestHandler, *Any, **Any) -> Any
-                with _handle_request_impl(self):
-                    return await old_execute(self, *args, **kwargs)
-
-        else:
-
-            @coroutine  # type: ignore
-            def sentry_execute_request_handler(self, *args, **kwargs):  # type: ignore
-                # type: (RequestHandler, *Any, **Any) -> Any
-                with _handle_request_impl(self):
-                    result = yield from old_execute(self, *args, **kwargs)
-                    return result
-
-        RequestHandler._execute = sentry_execute_request_handler
-
-        old_log_exception = RequestHandler.log_exception
-
-        def sentry_log_exception(self, ty, value, tb, *args, **kwargs):
-            # type: (Any, type, BaseException, Any, *Any, **Any) -> Optional[Any]
-            _capture_exception(ty, value, tb)
-            return old_log_exception(self, ty, value, tb, *args, **kwargs)
-
-        RequestHandler.log_exception = sentry_log_exception
-
-
-@contextlib.contextmanager
-def _handle_request_impl(self):
-    # type: (RequestHandler) -> Generator[None, None, None]
-    integration = sentry_sdk_alpha.get_client().get_integration(TornadoIntegration)
-
-    if integration is None:
-        yield
-
-    weak_handler = weakref.ref(self)
-
-    with sentry_sdk_alpha.isolation_scope() as scope:
-        headers = self.request.headers
-
-        scope.clear_breadcrumbs()
-        processor = _make_event_processor(weak_handler)
-        scope.add_event_processor(processor)
-
-        with sentry_sdk_alpha.continue_trace(headers):
-            with sentry_sdk_alpha.start_span(
-                op=OP.HTTP_SERVER,
-                # Like with all other integrations, this is our
-                # fallback transaction in case there is no route.
-                # sentry_urldispatcher_resolve is responsible for
-                # setting a transaction name later.
-                name="generic Tornado request",
-                source=TransactionSource.ROUTE,
-                origin=TornadoIntegration.origin,
-                attributes=_prepopulate_attributes(self.request),
-            ):
-                yield
-
-
-@ensure_integration_enabled(TornadoIntegration)
-def _capture_exception(ty, value, tb):
-    # type: (type, BaseException, Any) -> None
-    if isinstance(value, HTTPError):
-        return
-
-    event, hint = event_from_exception(
-        (ty, value, tb),
-        client_options=sentry_sdk_alpha.get_client().options,
-        mechanism={"type": "tornado", "handled": False},
-    )
-
-    sentry_sdk_alpha.capture_event(event, hint=hint)
-
-
-def _make_event_processor(weak_handler):
-    # type: (Callable[[], RequestHandler]) -> EventProcessor
-    def tornado_processor(event, hint):
-        # type: (Event, dict[str, Any]) -> Event
-        handler = weak_handler()
-        if handler is None:
-            return event
-
-        request = handler.request
-
-        with capture_internal_exceptions():
-            method = getattr(handler, handler.request.method.lower())
-            event["transaction"] = transaction_from_function(method) or ""
-            event["transaction_info"] = {"source": TransactionSource.COMPONENT}
-
-        with capture_internal_exceptions():
-            extractor = TornadoRequestExtractor(request)
-            extractor.extract_into_event(event)
-
-            request_info = event["request"]
-
-            request_info["url"] = "{}://{}{}".format(
-                request.protocol,
-                request.host,
-                request.path,
-            )
-
-            request_info["query_string"] = request.query
-            request_info["method"] = request.method
-            request_info["env"] = {"REMOTE_ADDR": request.remote_ip}
-            request_info["headers"] = _filter_headers(dict(request.headers))
-
-        with capture_internal_exceptions():
-            if handler.current_user and should_send_default_pii():
-                event.setdefault("user", {}).setdefault("is_authenticated", True)
-
-        return event
-
-    return tornado_processor
-
-
-class TornadoRequestExtractor(RequestExtractor):
-    def content_length(self):
-        # type: () -> int
-        if self.request.body is None:
-            return 0
-        return len(self.request.body)
-
-    def cookies(self):
-        # type: () -> Dict[str, str]
-        return {k: v.value for k, v in self.request.cookies.items()}
-
-    def raw_data(self):
-        # type: () -> bytes
-        return self.request.body
-
-    def form(self):
-        # type: () -> Dict[str, Any]
-        return {
-            k: [v.decode("latin1", "replace") for v in vs]
-            for k, vs in self.request.body_arguments.items()
-        }
-
-    def is_json(self):
-        # type: () -> bool
-        return _is_json_content_type(self.request.headers.get("content-type"))
-
-    def files(self):
-        # type: () -> Dict[str, Any]
-        return {k: v[0] for k, v in self.request.files.items() if v}
-
-    def size_of_file(self, file):
-        # type: (Any) -> int
-        return len(file.body or ())
-
-
-def _prepopulate_attributes(request):
-    # type: (HTTPServerRequest) -> dict[str, Any]
-    # https://www.tornadoweb.org/en/stable/httputil.html#tornado.httputil.HTTPServerRequest
-    attributes = {}
-
-    for prop, attr in REQUEST_PROPERTY_TO_ATTRIBUTE.items():
-        if getattr(request, prop, None) is not None:
-            attributes[attr] = getattr(request, prop)
-
-    if getattr(request, "version", None):
-        try:
-            proto, version = request.version.split("/")
-            attributes["network.protocol.name"] = proto
-            attributes["network.protocol.version"] = version
-        except ValueError:
-            attributes["network.protocol.name"] = request.version
-
-    if getattr(request, "host", None):
-        try:
-            address, port = request.host.split(":")
-            attributes["server.address"] = address
-            attributes["server.port"] = port
-        except ValueError:
-            attributes["server.address"] = request.host
-
-    with capture_internal_exceptions():
-        attributes["url.full"] = request.full_url()
-
-    attributes.update(_request_headers_to_span_attributes(request.headers))
-
-    return attributes
diff --git a/src/sentry_sdk_alpha/integrations/trytond.py b/src/sentry_sdk_alpha/integrations/trytond.py
deleted file mode 100644
index 05841e840e6633..00000000000000
--- a/src/sentry_sdk_alpha/integrations/trytond.py
+++ /dev/null
@@ -1,52 +0,0 @@
-from trytond import __version__ as trytond_version  # type: ignore
-from trytond.exceptions import TrytonException  # type: ignore
-from trytond.wsgi import app  # type: ignore
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import Integration, _check_minimum_version
-from sentry_sdk_alpha.integrations.wsgi import SentryWsgiMiddleware
-from sentry_sdk_alpha.utils import ensure_integration_enabled, event_from_exception
-
-# TODO: trytond-worker, trytond-cron and trytond-admin intergations
-
-
-class TrytondWSGIIntegration(Integration):
-    identifier = "trytond_wsgi"
-    origin = f"auto.http.{identifier}"
-
-    def __init__(self):  # type: () -> None
-        pass
-
-    @staticmethod
-    def setup_once():  # type: () -> None
-        _check_minimum_version(TrytondWSGIIntegration, trytond_version)
-
-        app.wsgi_app = SentryWsgiMiddleware(
-            app.wsgi_app,
-            span_origin=TrytondWSGIIntegration.origin,
-        )
-
-        @ensure_integration_enabled(TrytondWSGIIntegration)
-        def error_handler(e):  # type: (Exception) -> None
-            if isinstance(e, TrytonException):
-                return
-            else:
-                client = sentry_sdk_alpha.get_client()
-                event, hint = event_from_exception(
-                    e,
-                    client_options=client.options,
-                    mechanism={"type": "trytond", "handled": False},
-                )
-                sentry_sdk_alpha.capture_event(event, hint=hint)
-
-        # Expected error handlers signature was changed
-        # when the error_handler decorator was introduced
-        # in Tryton-5.4
-        if hasattr(app, "error_handler"):
-
-            @app.error_handler
-            def _(app, request, e):  # type: ignore
-                error_handler(e)
-
-        else:
-            app.error_handlers.append(error_handler)
diff --git a/src/sentry_sdk_alpha/integrations/typer.py b/src/sentry_sdk_alpha/integrations/typer.py
deleted file mode 100644
index b0b09decf3a8ef..00000000000000
--- a/src/sentry_sdk_alpha/integrations/typer.py
+++ /dev/null
@@ -1,54 +0,0 @@
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-from sentry_sdk_alpha.utils import capture_internal_exceptions, event_from_exception
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from types import TracebackType
-    from typing import Any, Optional, Type
-
-    Excepthook = Callable[
-        [type[BaseException], BaseException, Optional[TracebackType]],
-        Any,
-    ]
-
-try:
-    import typer
-except ImportError:
-    raise DidNotEnable("Typer not installed")
-
-
-class TyperIntegration(Integration):
-    identifier = "typer"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        typer.main.except_hook = _make_excepthook(typer.main.except_hook)  # type: ignore
-
-
-def _make_excepthook(old_excepthook):
-    # type: (Excepthook) -> Excepthook
-    def sentry_sdk_excepthook(type_, value, traceback):
-        # type: (Type[BaseException], BaseException, Optional[TracebackType]) -> None
-        integration = sentry_sdk_alpha.get_client().get_integration(TyperIntegration)
-
-        # Note: If we replace this with ensure_integration_enabled then
-        # we break the exceptiongroup backport;
-        # See: https://github.com/getsentry/sentry-python/issues/3097
-        if integration is None:
-            return old_excepthook(type_, value, traceback)
-
-        with capture_internal_exceptions():
-            event, hint = event_from_exception(
-                (type_, value, traceback),
-                client_options=sentry_sdk_alpha.get_client().options,
-                mechanism={"type": "typer", "handled": False},
-            )
-            sentry_sdk_alpha.capture_event(event, hint=hint)
-
-        return old_excepthook(type_, value, traceback)
-
-    return sentry_sdk_excepthook
diff --git a/src/sentry_sdk_alpha/integrations/unleash.py b/src/sentry_sdk_alpha/integrations/unleash.py
deleted file mode 100644
index 08abb8a6ccb3e9..00000000000000
--- a/src/sentry_sdk_alpha/integrations/unleash.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from functools import wraps
-from typing import Any
-
-from sentry_sdk_alpha.feature_flags import add_feature_flag
-from sentry_sdk_alpha.integrations import DidNotEnable, Integration
-
-try:
-    from UnleashClient import UnleashClient
-except ImportError:
-    raise DidNotEnable("UnleashClient is not installed")
-
-
-class UnleashIntegration(Integration):
-    identifier = "unleash"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        # Wrap and patch evaluation methods (class methods)
-        old_is_enabled = UnleashClient.is_enabled
-
-        @wraps(old_is_enabled)
-        def sentry_is_enabled(self, feature, *args, **kwargs):
-            # type: (UnleashClient, str, *Any, **Any) -> Any
-            enabled = old_is_enabled(self, feature, *args, **kwargs)
-
-            # We have no way of knowing what type of unleash feature this is, so we have to treat
-            # it as a boolean / toggle feature.
-            add_feature_flag(feature, enabled)
-
-            return enabled
-
-        UnleashClient.is_enabled = sentry_is_enabled  # type: ignore
diff --git a/src/sentry_sdk_alpha/integrations/wsgi.py b/src/sentry_sdk_alpha/integrations/wsgi.py
deleted file mode 100644
index 9e33c8c15b7d93..00000000000000
--- a/src/sentry_sdk_alpha/integrations/wsgi.py
+++ /dev/null
@@ -1,341 +0,0 @@
-import sys
-from functools import partial
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha._werkzeug import _get_headers, get_host
-from sentry_sdk_alpha.consts import OP
-from sentry_sdk_alpha.integrations._wsgi_common import (
-    DEFAULT_HTTP_METHODS_TO_CAPTURE,
-    _filter_headers,
-    _request_headers_to_span_attributes,
-)
-from sentry_sdk_alpha.scope import should_send_default_pii
-from sentry_sdk_alpha.sessions import track_session
-from sentry_sdk_alpha.tracing import Span, TransactionSource
-from sentry_sdk_alpha.utils import (
-    ContextVar,
-    capture_internal_exceptions,
-    event_from_exception,
-    reraise,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Callable, Iterator
-    from typing import Any, Dict, Optional, Protocol, Tuple, TypeVar
-
-    from sentry_sdk_alpha._types import Event, EventProcessor
-    from sentry_sdk_alpha.utils import ExcInfo
-
-    WsgiResponseIter = TypeVar("WsgiResponseIter")
-    WsgiResponseHeaders = TypeVar("WsgiResponseHeaders")
-    WsgiExcInfo = TypeVar("WsgiExcInfo")
-
-    class StartResponse(Protocol):
-        def __call__(self, status, response_headers, exc_info=None):  # type: ignore
-            # type: (str, WsgiResponseHeaders, Optional[WsgiExcInfo]) -> WsgiResponseIter
-            pass
-
-
-_wsgi_middleware_applied = ContextVar("sentry_wsgi_middleware_applied")
-
-DEFAULT_TRANSACTION_NAME = "generic WSGI request"
-
-ENVIRON_TO_ATTRIBUTE = {
-    "PATH_INFO": "url.path",
-    "QUERY_STRING": "url.query",
-    "REQUEST_METHOD": "http.request.method",
-    "SERVER_NAME": "server.address",
-    "SERVER_PORT": "server.port",
-    "wsgi.url_scheme": "url.scheme",
-}
-
-
-def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
-    # type: (str, str, str) -> str
-    return s.encode("latin1").decode(charset, errors)
-
-
-def get_request_url(environ, use_x_forwarded_for=False):
-    # type: (Dict[str, str], bool) -> str
-    """Return the absolute URL without query string for the given WSGI
-    environment."""
-    script_name = environ.get("SCRIPT_NAME", "").rstrip("/")
-    path_info = environ.get("PATH_INFO", "").lstrip("/")
-    path = f"{script_name}/{path_info}"
-
-    return "{}://{}/{}".format(
-        environ.get("wsgi.url_scheme"),
-        get_host(environ, use_x_forwarded_for),
-        wsgi_decoding_dance(path).lstrip("/"),
-    )
-
-
-class SentryWsgiMiddleware:
-    __slots__ = (
-        "app",
-        "use_x_forwarded_for",
-        "span_origin",
-        "http_methods_to_capture",
-    )
-
-    def __init__(
-        self,
-        app,  # type: Callable[[Dict[str, str], Callable[..., Any]], Any]
-        use_x_forwarded_for=False,  # type: bool
-        span_origin=None,  # type: Optional[str]
-        http_methods_to_capture=DEFAULT_HTTP_METHODS_TO_CAPTURE,  # type: Tuple[str, ...]
-    ):
-        # type: (...) -> None
-        self.app = app
-        self.use_x_forwarded_for = use_x_forwarded_for
-        self.span_origin = span_origin
-        self.http_methods_to_capture = http_methods_to_capture
-
-    def __call__(self, environ, start_response):
-        # type: (Dict[str, str], Callable[..., Any]) -> _ScopedResponse
-        if _wsgi_middleware_applied.get(False):
-            return self.app(environ, start_response)
-
-        _wsgi_middleware_applied.set(True)
-        try:
-            with sentry_sdk_alpha.isolation_scope() as scope:
-                scope.set_transaction_name(DEFAULT_TRANSACTION_NAME, source=TransactionSource.ROUTE)
-
-                with track_session(scope, session_mode="request"):
-                    with capture_internal_exceptions():
-                        scope.clear_breadcrumbs()
-                        scope._name = "wsgi"
-                        scope.add_event_processor(
-                            _make_wsgi_event_processor(environ, self.use_x_forwarded_for)
-                        )
-                    method = environ.get("REQUEST_METHOD", "").upper()
-                    should_trace = method in self.http_methods_to_capture
-                    if should_trace:
-                        with sentry_sdk_alpha.continue_trace(environ):
-                            with sentry_sdk_alpha.start_span(
-                                op=OP.HTTP_SERVER,
-                                name=DEFAULT_TRANSACTION_NAME,
-                                source=TransactionSource.ROUTE,
-                                origin=self.span_origin,
-                                attributes=_prepopulate_attributes(
-                                    environ, self.use_x_forwarded_for
-                                ),
-                            ) as span:
-                                response = self._run_original_app(environ, start_response, span)
-                    else:
-                        response = self._run_original_app(environ, start_response, None)
-
-        finally:
-            _wsgi_middleware_applied.set(False)
-
-        return _ScopedResponse(scope, response)
-
-    def _run_original_app(self, environ, start_response, span):
-        # type: (dict[str, str], StartResponse, Optional[Span]) -> Any
-        try:
-            return self.app(
-                environ,
-                partial(
-                    _sentry_start_response,
-                    start_response,
-                    span,
-                ),
-            )
-        except BaseException:
-            reraise(*_capture_exception())
-
-
-def _sentry_start_response(  # type: ignore
-    old_start_response,  # type: StartResponse
-    span,  # type: Optional[Span]
-    status,  # type: str
-    response_headers,  # type: WsgiResponseHeaders
-    exc_info=None,  # type: Optional[WsgiExcInfo]
-):
-    # type: (...) -> WsgiResponseIter
-    with capture_internal_exceptions():
-        status_int = int(status.split(" ", 1)[0])
-        if span is not None:
-            span.set_http_status(status_int)
-
-    if exc_info is None:
-        # The Django Rest Framework WSGI test client, and likely other
-        # (incorrect) implementations, cannot deal with the exc_info argument
-        # if one is present. Avoid providing a third argument if not necessary.
-        return old_start_response(status, response_headers)
-    else:
-        return old_start_response(status, response_headers, exc_info)
-
-
-def _get_environ(environ):
-    # type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
-    """
-    Returns our explicitly included environment variables we want to
-    capture (server name, port and remote addr if pii is enabled).
-    """
-    keys = ["SERVER_NAME", "SERVER_PORT"]
-    if should_send_default_pii():
-        # make debugging of proxy setup easier. Proxy headers are
-        # in headers.
-        keys += ["REMOTE_ADDR"]
-
-    for key in keys:
-        if key in environ:
-            yield key, environ[key]
-
-
-def get_client_ip(environ):
-    # type: (Dict[str, str]) -> Optional[Any]
-    """
-    Infer the user IP address from various headers. This cannot be used in
-    security sensitive situations since the value may be forged from a client,
-    but it's good enough for the event payload.
-    """
-    try:
-        return environ["HTTP_X_FORWARDED_FOR"].split(",")[0].strip()
-    except (KeyError, IndexError):
-        pass
-
-    try:
-        return environ["HTTP_X_REAL_IP"]
-    except KeyError:
-        pass
-
-    return environ.get("REMOTE_ADDR")
-
-
-def _capture_exception():
-    # type: () -> ExcInfo
-    """
-    Captures the current exception and sends it to Sentry.
-    Returns the ExcInfo tuple to it can be reraised afterwards.
-    """
-    exc_info = sys.exc_info()
-    e = exc_info[1]
-
-    # SystemExit(0) is the only uncaught exception that is expected behavior
-    should_skip_capture = isinstance(e, SystemExit) and e.code in (0, None)
-    if not should_skip_capture:
-        event, hint = event_from_exception(
-            exc_info,
-            client_options=sentry_sdk_alpha.get_client().options,
-            mechanism={"type": "wsgi", "handled": False},
-        )
-        sentry_sdk_alpha.capture_event(event, hint=hint)
-
-    return exc_info
-
-
-class _ScopedResponse:
-    """
-    Users a separate scope for each response chunk.
-
-    This will make WSGI apps more tolerant against:
-    - WSGI servers streaming responses from a different thread/from
-      different threads than the one that called start_response
-    - close() not being called
-    - WSGI servers streaming responses interleaved from the same thread
-    """
-
-    __slots__ = ("_response", "_scope")
-
-    def __init__(self, scope, response):
-        # type: (sentry_sdk.Scope, Iterator[bytes]) -> None
-        self._scope = scope
-        self._response = response
-
-    def __iter__(self):
-        # type: () -> Iterator[bytes]
-        iterator = iter(self._response)
-
-        while True:
-            with sentry_sdk_alpha.use_isolation_scope(self._scope):
-                try:
-                    chunk = next(iterator)
-                except StopIteration:
-                    break
-                except BaseException:
-                    reraise(*_capture_exception())
-
-            yield chunk
-
-    def close(self):
-        # type: () -> None
-        with sentry_sdk_alpha.use_isolation_scope(self._scope):
-            try:
-                self._response.close()  # type: ignore
-            except AttributeError:
-                pass
-            except BaseException:
-                reraise(*_capture_exception())
-
-
-def _make_wsgi_event_processor(environ, use_x_forwarded_for):
-    # type: (Dict[str, str], bool) -> EventProcessor
-    # It's a bit unfortunate that we have to extract and parse the request data
-    # from the environ so eagerly, but there are a few good reasons for this.
-    #
-    # We might be in a situation where the scope never gets torn down
-    # properly. In that case we will have an unnecessary strong reference to
-    # all objects in the environ (some of which may take a lot of memory) when
-    # we're really just interested in a few of them.
-    #
-    # Keeping the environment around for longer than the request lifecycle is
-    # also not necessarily something uWSGI can deal with:
-    # https://github.com/unbit/uwsgi/issues/1950
-
-    client_ip = get_client_ip(environ)
-    request_url = get_request_url(environ, use_x_forwarded_for)
-    query_string = environ.get("QUERY_STRING")
-    method = environ.get("REQUEST_METHOD")
-    env = dict(_get_environ(environ))
-    headers = _filter_headers(dict(_get_headers(environ)))
-
-    def event_processor(event, hint):
-        # type: (Event, Dict[str, Any]) -> Event
-        with capture_internal_exceptions():
-            # if the code below fails halfway through we at least have some data
-            request_info = event.setdefault("request", {})
-
-            if should_send_default_pii():
-                user_info = event.setdefault("user", {})
-                if client_ip:
-                    user_info.setdefault("ip_address", client_ip)
-
-            request_info["url"] = request_url
-            request_info["query_string"] = query_string
-            request_info["method"] = method
-            request_info["env"] = env
-            request_info["headers"] = headers
-
-        return event
-
-    return event_processor
-
-
-def _prepopulate_attributes(wsgi_environ, use_x_forwarded_for=False):
-    # type: (dict[str, str], bool) -> dict[str, str]
-    """Extract span attributes from the WSGI environment."""
-    attributes = {}
-
-    for property, attr in ENVIRON_TO_ATTRIBUTE.items():
-        if wsgi_environ.get(property) is not None:
-            attributes[attr] = wsgi_environ[property]
-
-    if wsgi_environ.get("SERVER_PROTOCOL") is not None:
-        try:
-            proto, version = wsgi_environ["SERVER_PROTOCOL"].split("/")
-            attributes["network.protocol.name"] = proto
-            attributes["network.protocol.version"] = version
-        except Exception:
-            attributes["network.protocol.name"] = wsgi_environ["SERVER_PROTOCOL"]
-
-    with capture_internal_exceptions():
-        url = get_request_url(wsgi_environ, use_x_forwarded_for)
-        query = wsgi_environ.get("QUERY_STRING")
-        attributes["url.full"] = f"{url}?{query}"
-
-    attributes.update(_request_headers_to_span_attributes(dict(_get_headers(wsgi_environ))))
-
-    return attributes
diff --git a/src/sentry_sdk_alpha/logger.py b/src/sentry_sdk_alpha/logger.py
deleted file mode 100644
index dcf09c322e3f38..00000000000000
--- a/src/sentry_sdk_alpha/logger.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# NOTE: this is the logger sentry exposes to users, not some generic logger.
-import functools
-import time
-from typing import Any
-
-from sentry_sdk_alpha import get_client, get_current_scope
-from sentry_sdk_alpha.utils import safe_repr
-
-
-def _capture_log(severity_text, severity_number, template, **kwargs):
-    # type: (str, int, str, **Any) -> None
-    client = get_client()
-    scope = get_current_scope()
-
-    attrs = {
-        "sentry.message.template": template,
-    }  # type: dict[str, str | bool | float | int]
-    if "attributes" in kwargs:
-        attrs.update(kwargs.pop("attributes"))
-    for k, v in kwargs.items():
-        attrs[f"sentry.message.parameters.{k}"] = v
-
-    attrs = {
-        k: (
-            v
-            if (
-                isinstance(v, str)
-                or isinstance(v, int)
-                or isinstance(v, bool)
-                or isinstance(v, float)
-            )
-            else safe_repr(v)
-        )
-        for (k, v) in attrs.items()
-    }
-
-    # noinspection PyProtectedMember
-    client._capture_experimental_log(
-        scope,
-        {
-            "severity_text": severity_text,
-            "severity_number": severity_number,
-            "attributes": attrs,
-            "body": template.format(**kwargs),
-            "time_unix_nano": time.time_ns(),
-            "trace_id": None,
-        },
-    )
-
-
-trace = functools.partial(_capture_log, "trace", 1)
-debug = functools.partial(_capture_log, "debug", 5)
-info = functools.partial(_capture_log, "info", 9)
-warning = functools.partial(_capture_log, "warning", 13)
-error = functools.partial(_capture_log, "error", 17)
-fatal = functools.partial(_capture_log, "fatal", 21)
diff --git a/src/sentry_sdk_alpha/monitor.py b/src/sentry_sdk_alpha/monitor.py
deleted file mode 100644
index 5ca0516b2f9a18..00000000000000
--- a/src/sentry_sdk_alpha/monitor.py
+++ /dev/null
@@ -1,121 +0,0 @@
-import os
-import time
-from threading import Lock, Thread
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.utils import logger
-
-if TYPE_CHECKING:
-    from typing import Optional
-
-
-MAX_DOWNSAMPLE_FACTOR = 10
-
-
-class Monitor:
-    """
-    Performs health checks in a separate thread once every interval seconds
-    and updates the internal state. Other parts of the SDK only read this state
-    and act accordingly.
-    """
-
-    name = "sentry.monitor"
-
-    def __init__(self, transport, interval=10):
-        # type: (sentry_sdk.transport.Transport, float) -> None
-        self.transport = transport  # type: sentry_sdk.transport.Transport
-        self.interval = interval  # type: float
-
-        self._healthy = True
-        self._downsample_factor = 0  # type: int
-
-        self._thread = None  # type: Optional[Thread]
-        self._thread_lock = Lock()
-        self._thread_for_pid = None  # type: Optional[int]
-        self._running = True
-
-    def _ensure_running(self):
-        # type: () -> None
-        """
-        Check that the monitor has an active thread to run in, or create one if not.
-
-        Note that this might fail (e.g. in Python 3.12 it's not possible to
-        spawn new threads at interpreter shutdown). In that case self._running
-        will be False after running this function.
-        """
-        if self._thread_for_pid == os.getpid() and self._thread is not None:
-            return None
-
-        with self._thread_lock:
-            if self._thread_for_pid == os.getpid() and self._thread is not None:
-                return None
-
-            def _thread():
-                # type: (...) -> None
-                while self._running:
-                    time.sleep(self.interval)
-                    if self._running:
-                        self.run()
-
-            thread = Thread(name=self.name, target=_thread)
-            thread.daemon = True
-            try:
-                thread.start()
-            except RuntimeError:
-                # Unfortunately at this point the interpreter is in a state that no
-                # longer allows us to spawn a thread and we have to bail.
-                self._running = False
-                return None
-
-            self._thread = thread
-            self._thread_for_pid = os.getpid()
-
-        return None
-
-    def run(self):
-        # type: () -> None
-        self.check_health()
-        self.set_downsample_factor()
-
-    def set_downsample_factor(self):
-        # type: () -> None
-        if self._healthy:
-            if self._downsample_factor > 0:
-                logger.debug("[Monitor] health check positive, reverting to normal sampling")
-            self._downsample_factor = 0
-        else:
-            if self.downsample_factor < MAX_DOWNSAMPLE_FACTOR:
-                self._downsample_factor += 1
-            logger.debug(
-                "[Monitor] health check negative, downsampling with a factor of %d",
-                self._downsample_factor,
-            )
-
-    def check_health(self):
-        # type: () -> None
-        """
-        Perform the actual health checks,
-        currently only checks if the transport is rate-limited.
-        TODO: augment in the future with more checks.
-        """
-        self._healthy = self.transport.is_healthy()
-
-    def is_healthy(self):
-        # type: () -> bool
-        self._ensure_running()
-        return self._healthy
-
-    @property
-    def downsample_factor(self):
-        # type: () -> int
-        self._ensure_running()
-        return self._downsample_factor
-
-    def kill(self):
-        # type: () -> None
-        self._running = False
-
-    def __del__(self):
-        # type: () -> None
-        self.kill()
diff --git a/src/sentry_sdk_alpha/opentelemetry/__init__.py b/src/sentry_sdk_alpha/opentelemetry/__init__.py
deleted file mode 100644
index 299496a7ca4f8d..00000000000000
--- a/src/sentry_sdk_alpha/opentelemetry/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from sentry_sdk_alpha.opentelemetry.propagator import SentryPropagator
-from sentry_sdk_alpha.opentelemetry.sampler import SentrySampler
-from sentry_sdk_alpha.opentelemetry.span_processor import SentrySpanProcessor
-
-__all__ = [
-    "SentryPropagator",
-    "SentrySampler",
-    "SentrySpanProcessor",
-]
diff --git a/src/sentry_sdk_alpha/opentelemetry/consts.py b/src/sentry_sdk_alpha/opentelemetry/consts.py
deleted file mode 100644
index 98999277209dc1..00000000000000
--- a/src/sentry_sdk_alpha/opentelemetry/consts.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from opentelemetry.context import create_key
-
-from sentry_sdk_alpha.tracing_utils import Baggage
-
-# propagation keys
-SENTRY_TRACE_KEY = create_key("sentry-trace")
-SENTRY_BAGGAGE_KEY = create_key("sentry-baggage")
-
-# scope management keys
-SENTRY_SCOPES_KEY = create_key("sentry_scopes")
-SENTRY_FORK_ISOLATION_SCOPE_KEY = create_key("sentry_fork_isolation_scope")
-SENTRY_USE_CURRENT_SCOPE_KEY = create_key("sentry_use_current_scope")
-SENTRY_USE_ISOLATION_SCOPE_KEY = create_key("sentry_use_isolation_scope")
-
-# trace state keys
-TRACESTATE_SAMPLED_KEY = Baggage.SENTRY_PREFIX + "sampled"
-TRACESTATE_SAMPLE_RATE_KEY = Baggage.SENTRY_PREFIX + "sample_rate"
-TRACESTATE_SAMPLE_RAND_KEY = Baggage.SENTRY_PREFIX + "sample_rand"
-
-# misc
-OTEL_SENTRY_CONTEXT = "otel"
-SPAN_ORIGIN = "auto.otel"
-
-
-class SentrySpanAttribute:
-    DESCRIPTION = "sentry.description"
-    OP = "sentry.op"
-    ORIGIN = "sentry.origin"
-    TAG = "sentry.tag"
-    NAME = "sentry.name"
-    SOURCE = "sentry.source"
-    CONTEXT = "sentry.context"
-    CUSTOM_SAMPLED = "sentry.custom_sampled"  # used for saving start_span(sampled=X)
diff --git a/src/sentry_sdk_alpha/opentelemetry/contextvars_context.py b/src/sentry_sdk_alpha/opentelemetry/contextvars_context.py
deleted file mode 100644
index 98485a9af8a20a..00000000000000
--- a/src/sentry_sdk_alpha/opentelemetry/contextvars_context.py
+++ /dev/null
@@ -1,68 +0,0 @@
-from typing import TYPE_CHECKING, cast
-
-from opentelemetry.context import Context, get_value, set_value
-from opentelemetry.context.contextvars_context import ContextVarsRuntimeContext
-from opentelemetry.trace import set_span_in_context
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.opentelemetry.consts import (
-    SENTRY_FORK_ISOLATION_SCOPE_KEY,
-    SENTRY_SCOPES_KEY,
-    SENTRY_USE_CURRENT_SCOPE_KEY,
-    SENTRY_USE_ISOLATION_SCOPE_KEY,
-)
-
-if TYPE_CHECKING:
-    from contextvars import Token
-    from typing import Optional
-
-    import sentry_sdk_alpha.opentelemetry.scope as scope
-
-
-class SentryContextVarsRuntimeContext(ContextVarsRuntimeContext):
-    def attach(self, context):
-        # type: (Context) -> Token[Context]
-        scopes = get_value(SENTRY_SCOPES_KEY, context)
-
-        should_fork_isolation_scope = context.pop(SENTRY_FORK_ISOLATION_SCOPE_KEY, False)
-        should_fork_isolation_scope = cast("bool", should_fork_isolation_scope)
-
-        should_use_isolation_scope = context.pop(SENTRY_USE_ISOLATION_SCOPE_KEY, None)
-        should_use_isolation_scope = cast("Optional[scope.PotelScope]", should_use_isolation_scope)
-
-        should_use_current_scope = context.pop(SENTRY_USE_CURRENT_SCOPE_KEY, None)
-        should_use_current_scope = cast("Optional[scope.PotelScope]", should_use_current_scope)
-
-        if scopes:
-            scopes = cast("tuple[scope.PotelScope, scope.PotelScope]", scopes)
-            (current_scope, isolation_scope) = scopes
-        else:
-            current_scope = sentry_sdk_alpha.get_current_scope()
-            isolation_scope = sentry_sdk_alpha.get_isolation_scope()
-
-        new_context = context
-
-        if should_use_current_scope:
-            new_scope = should_use_current_scope
-
-            # the main case where we use use_scope is for
-            # scope propagation in the ThreadingIntegration
-            # so we need to carry forward the span reference explicitly too
-            span = should_use_current_scope.span
-            if span:
-                new_context = set_span_in_context(span._otel_span, new_context)
-
-        else:
-            new_scope = current_scope.fork()
-
-        if should_use_isolation_scope:
-            new_isolation_scope = should_use_isolation_scope
-        elif should_fork_isolation_scope:
-            new_isolation_scope = isolation_scope.fork()
-        else:
-            new_isolation_scope = isolation_scope
-
-        new_scopes = (new_scope, new_isolation_scope)
-
-        new_context = set_value(SENTRY_SCOPES_KEY, new_scopes, new_context)
-        return super().attach(new_context)
diff --git a/src/sentry_sdk_alpha/opentelemetry/propagator.py b/src/sentry_sdk_alpha/opentelemetry/propagator.py
deleted file mode 100644
index a1d01b4cb9cfcb..00000000000000
--- a/src/sentry_sdk_alpha/opentelemetry/propagator.py
+++ /dev/null
@@ -1,95 +0,0 @@
-from typing import TYPE_CHECKING, cast
-
-from opentelemetry import trace
-from opentelemetry.context import Context, get_current, get_value, set_value
-from opentelemetry.propagators.textmap import (
-    CarrierT,
-    Getter,
-    Setter,
-    TextMapPropagator,
-    default_getter,
-    default_setter,
-)
-from opentelemetry.trace import NonRecordingSpan, SpanContext, TraceFlags
-
-from sentry_sdk_alpha.consts import BAGGAGE_HEADER_NAME, SENTRY_TRACE_HEADER_NAME
-from sentry_sdk_alpha.opentelemetry.consts import (
-    SENTRY_BAGGAGE_KEY,
-    SENTRY_SCOPES_KEY,
-    SENTRY_TRACE_KEY,
-)
-from sentry_sdk_alpha.tracing_utils import Baggage, extract_sentrytrace_data
-
-if TYPE_CHECKING:
-    from typing import Optional, Set
-
-    import sentry_sdk.opentelemetry.scope as scope
-
-
-class SentryPropagator(TextMapPropagator):
-    """
-    Propagates tracing headers for Sentry's tracing system in a way OTel understands.
-    """
-
-    def extract(self, carrier, context=None, getter=default_getter):
-        # type: (CarrierT, Optional[Context], Getter[CarrierT]) -> Context
-        if context is None:
-            context = get_current()
-
-        # TODO-neel-potel cleanup with continue_trace / isolation_scope
-        sentry_trace = getter.get(carrier, SENTRY_TRACE_HEADER_NAME)
-        if not sentry_trace:
-            return context
-
-        sentrytrace = extract_sentrytrace_data(sentry_trace[0])
-        if not sentrytrace:
-            return context
-
-        context = set_value(SENTRY_TRACE_KEY, sentrytrace, context)
-
-        trace_id, span_id = sentrytrace["trace_id"], sentrytrace["parent_span_id"]
-
-        span_context = SpanContext(
-            trace_id=int(trace_id, 16),  # type: ignore
-            span_id=int(span_id, 16),  # type: ignore
-            # we simulate a sampled trace on the otel side and leave the sampling to sentry
-            trace_flags=TraceFlags(TraceFlags.SAMPLED),
-            is_remote=True,
-        )
-
-        baggage_header = getter.get(carrier, BAGGAGE_HEADER_NAME)
-
-        if baggage_header:
-            baggage = Baggage.from_incoming_header(baggage_header[0])
-        else:
-            # If there's an incoming sentry-trace but no incoming baggage header,
-            # for instance in traces coming from older SDKs,
-            # baggage will be empty and frozen and won't be populated as head SDK.
-            baggage = Baggage(sentry_items={})
-
-        baggage.freeze()
-        context = set_value(SENTRY_BAGGAGE_KEY, baggage, context)
-
-        span = NonRecordingSpan(span_context)
-        modified_context = trace.set_span_in_context(span, context)
-        return modified_context
-
-    def inject(self, carrier, context=None, setter=default_setter):
-        # type: (CarrierT, Optional[Context], Setter[CarrierT]) -> None
-        if context is None:
-            context = get_current()
-
-        scopes = get_value(SENTRY_SCOPES_KEY, context)
-        if scopes:
-            scopes = cast("tuple[scope.PotelScope, scope.PotelScope]", scopes)
-            (current_scope, _) = scopes
-
-            # TODO-neel-potel check trace_propagation_targets
-            # TODO-neel-potel test propagator works with twp
-            for key, value in current_scope.iter_trace_propagation_headers():
-                setter.set(carrier, key, value)
-
-    @property
-    def fields(self):
-        # type: () -> Set[str]
-        return {SENTRY_TRACE_HEADER_NAME, BAGGAGE_HEADER_NAME}
diff --git a/src/sentry_sdk_alpha/opentelemetry/sampler.py b/src/sentry_sdk_alpha/opentelemetry/sampler.py
deleted file mode 100644
index 05256820bce5b1..00000000000000
--- a/src/sentry_sdk_alpha/opentelemetry/sampler.py
+++ /dev/null
@@ -1,313 +0,0 @@
-from decimal import Decimal
-from typing import TYPE_CHECKING, cast
-
-from opentelemetry import trace
-from opentelemetry.sdk.trace.sampling import Decision, Sampler, SamplingResult
-from opentelemetry.trace.span import TraceState
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.opentelemetry.consts import (
-    TRACESTATE_SAMPLE_RAND_KEY,
-    TRACESTATE_SAMPLE_RATE_KEY,
-    TRACESTATE_SAMPLED_KEY,
-    SentrySpanAttribute,
-)
-from sentry_sdk_alpha.tracing_utils import _generate_sample_rand, has_tracing_enabled
-from sentry_sdk_alpha.utils import is_valid_sample_rate, logger
-
-if TYPE_CHECKING:
-    from collections.abc import Sequence
-    from typing import Any, Optional, Union
-
-    from opentelemetry.context import Context
-    from opentelemetry.trace import Link, SpanKind
-    from opentelemetry.trace.span import SpanContext
-    from opentelemetry.util.types import Attributes
-
-
-def get_parent_sampled(parent_context, trace_id):
-    # type: (Optional[SpanContext], int) -> Optional[bool]
-    if parent_context is None:
-        return None
-
-    is_span_context_valid = parent_context is not None and parent_context.is_valid
-
-    # Only inherit sample rate if `traceId` is the same
-    if is_span_context_valid and parent_context.trace_id == trace_id:
-        # this is getSamplingDecision in JS
-        # if there was no sampling flag, defer the decision
-        dsc_sampled = parent_context.trace_state.get(TRACESTATE_SAMPLED_KEY)
-        if dsc_sampled == "deferred":
-            return None
-
-        if parent_context.trace_flags.sampled is not None:
-            return parent_context.trace_flags.sampled
-
-        if dsc_sampled == "true":
-            return True
-        elif dsc_sampled == "false":
-            return False
-
-    return None
-
-
-def get_parent_sample_rate(parent_context, trace_id):
-    # type: (Optional[SpanContext], int) -> Optional[float]
-    if parent_context is None:
-        return None
-
-    is_span_context_valid = parent_context is not None and parent_context.is_valid
-
-    if is_span_context_valid and parent_context.trace_id == trace_id:
-        parent_sample_rate = parent_context.trace_state.get(TRACESTATE_SAMPLE_RATE_KEY)
-        if parent_sample_rate is None:
-            return None
-
-        try:
-            return float(parent_sample_rate)
-        except Exception:
-            return None
-
-    return None
-
-
-def get_parent_sample_rand(parent_context, trace_id):
-    # type: (Optional[SpanContext], int) -> Optional[Decimal]
-    if parent_context is None:
-        return None
-
-    is_span_context_valid = parent_context is not None and parent_context.is_valid
-
-    if is_span_context_valid and parent_context.trace_id == trace_id:
-        parent_sample_rand = parent_context.trace_state.get(TRACESTATE_SAMPLE_RAND_KEY)
-        if parent_sample_rand is None:
-            return None
-
-        return Decimal(parent_sample_rand)
-
-    return None
-
-
-def dropped_result(span_context, attributes, sample_rate=None, sample_rand=None):
-    # type: (SpanContext, Attributes, Optional[float], Optional[Decimal]) -> SamplingResult
-    """
-    React to a span getting unsampled and return a DROP SamplingResult.
-
-    Update the trace_state with the effective sampled, sample_rate and sample_rand,
-    record that we dropped the event for client report purposes, and return
-    an OTel SamplingResult with Decision.DROP.
-
-    See for more info about OTel sampling:
-    https://opentelemetry-python.readthedocs.io/en/latest/sdk/trace.sampling.html
-    """
-    trace_state = _update_trace_state(
-        span_context, sampled=False, sample_rate=sample_rate, sample_rand=sample_rand
-    )
-
-    is_root_span = not (span_context.is_valid and not span_context.is_remote)
-    if is_root_span:
-        # Tell Sentry why we dropped the transaction/root-span
-        client = sentry_sdk_alpha.get_client()
-        if client.monitor and client.monitor.downsample_factor > 0:
-            reason = "backpressure"
-        else:
-            reason = "sample_rate"
-
-        if client.transport and has_tracing_enabled(client.options):
-            client.transport.record_lost_event(reason, data_category="transaction")
-
-            # Only one span (the transaction itself) is discarded, since we did not record any spans here.
-            client.transport.record_lost_event(reason, data_category="span")
-
-    return SamplingResult(
-        Decision.DROP,
-        attributes=attributes,
-        trace_state=trace_state,
-    )
-
-
-def sampled_result(span_context, attributes, sample_rate=None, sample_rand=None):
-    # type: (SpanContext, Attributes, Optional[float], Optional[Decimal]) -> SamplingResult
-    """
-    React to a span being sampled and return a sampled SamplingResult.
-
-    Update the trace_state with the effective sampled, sample_rate and sample_rand,
-    and return an OTel SamplingResult with Decision.RECORD_AND_SAMPLE.
-
-    See for more info about OTel sampling:
-    https://opentelemetry-python.readthedocs.io/en/latest/sdk/trace.sampling.html
-    """
-    trace_state = _update_trace_state(
-        span_context, sampled=True, sample_rate=sample_rate, sample_rand=sample_rand
-    )
-
-    return SamplingResult(
-        Decision.RECORD_AND_SAMPLE,
-        attributes=attributes,
-        trace_state=trace_state,
-    )
-
-
-def _update_trace_state(span_context, sampled, sample_rate=None, sample_rand=None):
-    # type: (SpanContext, bool, Optional[float], Optional[Decimal]) -> TraceState
-    trace_state = span_context.trace_state
-
-    sampled = "true" if sampled else "false"
-    if TRACESTATE_SAMPLED_KEY not in trace_state:
-        trace_state = trace_state.add(TRACESTATE_SAMPLED_KEY, sampled)
-    elif trace_state.get(TRACESTATE_SAMPLED_KEY) == "deferred":
-        trace_state = trace_state.update(TRACESTATE_SAMPLED_KEY, sampled)
-
-    if sample_rate is not None:
-        trace_state = trace_state.update(TRACESTATE_SAMPLE_RATE_KEY, str(sample_rate))
-
-    if sample_rand is not None:
-        trace_state = trace_state.update(
-            TRACESTATE_SAMPLE_RAND_KEY, f"{sample_rand:.6f}"  # noqa: E231
-        )
-
-    return trace_state
-
-
-class SentrySampler(Sampler):
-    def should_sample(
-        self,
-        parent_context,  # type: Optional[Context]
-        trace_id,  # type: int
-        name,  # type: str
-        kind=None,  # type: Optional[SpanKind]
-        attributes=None,  # type: Attributes
-        links=None,  # type: Optional[Sequence[Link]]
-        trace_state=None,  # type: Optional[TraceState]
-    ):
-        # type: (...) -> SamplingResult
-        client = sentry_sdk_alpha.get_client()
-
-        parent_span_context = trace.get_current_span(parent_context).get_span_context()
-
-        attributes = attributes or {}
-
-        # No tracing enabled, thus no sampling
-        if not has_tracing_enabled(client.options):
-            return dropped_result(parent_span_context, attributes)
-
-        # parent_span_context.is_valid means this span has a parent, remote or local
-        is_root_span = not parent_span_context.is_valid or parent_span_context.is_remote
-
-        sample_rate = None
-
-        parent_sampled = get_parent_sampled(parent_span_context, trace_id)
-        parent_sample_rate = get_parent_sample_rate(parent_span_context, trace_id)
-        parent_sample_rand = get_parent_sample_rand(parent_span_context, trace_id)
-
-        if parent_sample_rand is not None:
-            # We have a sample_rand on the incoming trace or we already backfilled
-            # it in PropagationContext
-            sample_rand = parent_sample_rand
-        else:
-            # We are the head SDK and we need to generate a new sample_rand
-            sample_rand = cast(Decimal, _generate_sample_rand(str(trace_id), (0, 1)))
-
-        # Explicit sampled value provided at start_span
-        custom_sampled = cast("Optional[bool]", attributes.get(SentrySpanAttribute.CUSTOM_SAMPLED))
-        if custom_sampled is not None:
-            if is_root_span:
-                sample_rate = float(custom_sampled)
-                if sample_rate > 0:
-                    return sampled_result(
-                        parent_span_context,
-                        attributes,
-                        sample_rate=sample_rate,
-                        sample_rand=sample_rand,
-                    )
-                else:
-                    return dropped_result(
-                        parent_span_context,
-                        attributes,
-                        sample_rate=sample_rate,
-                        sample_rand=sample_rand,
-                    )
-            else:
-                logger.debug(f"[Tracing.Sampler] Ignoring sampled param for non-root span {name}")
-
-        # Check if there is a traces_sampler
-        # Traces_sampler is responsible to check parent sampled to have full transactions.
-        has_traces_sampler = callable(client.options.get("traces_sampler"))
-
-        sample_rate_to_propagate = None
-
-        if is_root_span and has_traces_sampler:
-            sampling_context = create_sampling_context(
-                name, attributes, parent_span_context, trace_id
-            )
-            sample_rate = client.options["traces_sampler"](sampling_context)
-            sample_rate_to_propagate = sample_rate
-        else:
-            # Check if there is a parent with a sampling decision
-            if parent_sampled is not None:
-                sample_rate = bool(parent_sampled)
-                sample_rate_to_propagate = parent_sample_rate if parent_sample_rate else sample_rate
-            else:
-                # Check if there is a traces_sample_rate
-                sample_rate = client.options.get("traces_sample_rate")
-                sample_rate_to_propagate = sample_rate
-
-        # If the sample rate is invalid, drop the span
-        if not is_valid_sample_rate(sample_rate, source=self.__class__.__name__):
-            logger.warning(f"[Tracing.Sampler] Discarding {name} because of invalid sample rate.")
-            return dropped_result(parent_span_context, attributes)
-
-        # Down-sample in case of back pressure monitor says so
-        if is_root_span and client.monitor:
-            sample_rate /= 2**client.monitor.downsample_factor
-            if client.monitor.downsample_factor > 0:
-                sample_rate_to_propagate = sample_rate
-
-        # Compare sample_rand to sample_rate to make the final sampling decision
-        sample_rate = float(cast("Union[bool, float, int]", sample_rate))
-        sampled = sample_rand < Decimal.from_float(sample_rate)
-
-        if sampled:
-            if is_root_span:
-                logger.debug(
-                    f"[Tracing.Sampler] Sampled #{name} with sample_rate: {sample_rate} and sample_rand: {sample_rand}"
-                )
-
-            return sampled_result(
-                parent_span_context,
-                attributes,
-                sample_rate=sample_rate_to_propagate,
-                sample_rand=None if sample_rand == parent_sample_rand else sample_rand,
-            )
-        else:
-            if is_root_span:
-                logger.debug(
-                    f"[Tracing.Sampler] Dropped #{name} with sample_rate: {sample_rate} and sample_rand: {sample_rand}"
-                )
-
-            return dropped_result(
-                parent_span_context,
-                attributes,
-                sample_rate=sample_rate_to_propagate,
-                sample_rand=None if sample_rand == parent_sample_rand else sample_rand,
-            )
-
-    def get_description(self) -> str:
-        return self.__class__.__name__
-
-
-def create_sampling_context(name, attributes, parent_span_context, trace_id):
-    # type: (str, Attributes, Optional[SpanContext], int) -> dict[str, Any]
-    sampling_context = {
-        "transaction_context": {
-            "name": name,
-            "op": attributes.get(SentrySpanAttribute.OP) if attributes else None,
-            "source": (attributes.get(SentrySpanAttribute.SOURCE) if attributes else None),
-        },
-        "parent_sampled": get_parent_sampled(parent_span_context, trace_id),
-    }  # type: dict[str, Any]
-
-    if attributes is not None:
-        sampling_context.update(attributes)
-
-    return sampling_context
diff --git a/src/sentry_sdk_alpha/opentelemetry/scope.py b/src/sentry_sdk_alpha/opentelemetry/scope.py
deleted file mode 100644
index c7f3eb24609b9d..00000000000000
--- a/src/sentry_sdk_alpha/opentelemetry/scope.py
+++ /dev/null
@@ -1,201 +0,0 @@
-import warnings
-from contextlib import contextmanager
-from typing import cast
-
-from opentelemetry.context import attach, detach, get_current, get_value, set_value
-from opentelemetry.trace import NonRecordingSpan, SpanContext, TraceFlags, TraceState, use_span
-
-from sentry_sdk_alpha._types import TYPE_CHECKING
-from sentry_sdk_alpha.opentelemetry.consts import (
-    SENTRY_FORK_ISOLATION_SCOPE_KEY,
-    SENTRY_SCOPES_KEY,
-    SENTRY_USE_CURRENT_SCOPE_KEY,
-    SENTRY_USE_ISOLATION_SCOPE_KEY,
-    TRACESTATE_SAMPLED_KEY,
-)
-from sentry_sdk_alpha.opentelemetry.contextvars_context import SentryContextVarsRuntimeContext
-from sentry_sdk_alpha.opentelemetry.utils import trace_state_from_baggage
-from sentry_sdk_alpha.scope import Scope, ScopeType
-from sentry_sdk_alpha.tracing import Span
-
-if TYPE_CHECKING:
-    from collections.abc import Generator
-    from typing import Any, Dict, Optional, Tuple
-
-
-class PotelScope(Scope):
-    @classmethod
-    def _get_scopes(cls):
-        # type: () -> Optional[Tuple[PotelScope, PotelScope]]
-        """
-        Returns the current scopes tuple on the otel context. Internal use only.
-        """
-        return cast("Optional[Tuple[PotelScope, PotelScope]]", get_value(SENTRY_SCOPES_KEY))
-
-    @classmethod
-    def get_current_scope(cls):
-        # type: () -> PotelScope
-        """
-        Returns the current scope.
-        """
-        return cls._get_current_scope() or _INITIAL_CURRENT_SCOPE
-
-    @classmethod
-    def _get_current_scope(cls):
-        # type: () -> Optional[PotelScope]
-        """
-        Returns the current scope without creating a new one. Internal use only.
-        """
-        scopes = cls._get_scopes()
-        return scopes[0] if scopes else None
-
-    @classmethod
-    def get_isolation_scope(cls):
-        # type: () -> PotelScope
-        """
-        Returns the isolation scope.
-        """
-        return cls._get_isolation_scope() or _INITIAL_ISOLATION_SCOPE
-
-    @classmethod
-    def _get_isolation_scope(cls):
-        # type: () -> Optional[PotelScope]
-        """
-        Returns the isolation scope without creating a new one. Internal use only.
-        """
-        scopes = cls._get_scopes()
-        return scopes[1] if scopes else None
-
-    @contextmanager
-    def continue_trace(self, environ_or_headers):
-        # type: (Dict[str, Any]) -> Generator[None, None, None]
-        """
-        Sets the propagation context from environment or headers to continue an incoming trace.
-        Any span started within this context manager will use the same trace_id, parent_span_id
-        and inherit the sampling decision from the incoming trace.
-        """
-        self.generate_propagation_context(environ_or_headers)
-
-        span_context = self._incoming_otel_span_context()
-        if span_context is None:
-            yield
-        else:
-            with use_span(NonRecordingSpan(span_context)):
-                yield
-
-    def _incoming_otel_span_context(self):
-        # type: () -> Optional[SpanContext]
-        if self._propagation_context is None:
-            return None
-        # If sentry-trace extraction didn't have a parent_span_id, we don't have an upstream header
-        if self._propagation_context.parent_span_id is None:
-            return None
-
-        trace_flags = TraceFlags(
-            TraceFlags.SAMPLED if self._propagation_context.parent_sampled else TraceFlags.DEFAULT
-        )
-
-        if self._propagation_context.baggage:
-            trace_state = trace_state_from_baggage(self._propagation_context.baggage)
-        else:
-            trace_state = TraceState()
-
-        # for twp to work, we also need to consider deferred sampling when the sampling
-        # flag is not present, so the above TraceFlags are not sufficient
-        if self._propagation_context.parent_sampled is None:
-            trace_state = trace_state.update(TRACESTATE_SAMPLED_KEY, "deferred")
-
-        span_context = SpanContext(
-            trace_id=int(self._propagation_context.trace_id, 16),
-            span_id=int(self._propagation_context.parent_span_id, 16),
-            is_remote=True,
-            trace_flags=trace_flags,
-            trace_state=trace_state,
-        )
-
-        return span_context
-
-    def start_transaction(self, **kwargs):
-        # type: (Any) -> Span
-        """
-        .. deprecated:: 3.0.0
-            This function is deprecated and will be removed in a future release.
-            Use :py:meth:`sentry_sdk.start_span` instead.
-        """
-        warnings.warn(
-            "The `start_transaction` method is deprecated, please use `sentry_sdk.start_span instead.`",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-        return self.start_span(**kwargs)
-
-    def start_span(self, **kwargs):
-        # type: (Any) -> Span
-        return Span(**kwargs)
-
-
-_INITIAL_CURRENT_SCOPE = PotelScope(ty=ScopeType.CURRENT)
-_INITIAL_ISOLATION_SCOPE = PotelScope(ty=ScopeType.ISOLATION)
-
-
-def setup_initial_scopes():
-    # type: () -> None
-    global _INITIAL_CURRENT_SCOPE, _INITIAL_ISOLATION_SCOPE
-    _INITIAL_CURRENT_SCOPE = PotelScope(ty=ScopeType.CURRENT)
-    _INITIAL_ISOLATION_SCOPE = PotelScope(ty=ScopeType.ISOLATION)
-
-    scopes = (_INITIAL_CURRENT_SCOPE, _INITIAL_ISOLATION_SCOPE)
-    attach(set_value(SENTRY_SCOPES_KEY, scopes))
-
-
-def setup_scope_context_management():
-    # type: () -> None
-    import opentelemetry.context
-
-    opentelemetry.context._RUNTIME_CONTEXT = SentryContextVarsRuntimeContext()
-    setup_initial_scopes()
-
-
-@contextmanager
-def isolation_scope():
-    # type: () -> Generator[PotelScope, None, None]
-    context = set_value(SENTRY_FORK_ISOLATION_SCOPE_KEY, True)
-    token = attach(context)
-    try:
-        yield PotelScope.get_isolation_scope()
-    finally:
-        detach(token)
-
-
-@contextmanager
-def new_scope():
-    # type: () -> Generator[PotelScope, None, None]
-    token = attach(get_current())
-    try:
-        yield PotelScope.get_current_scope()
-    finally:
-        detach(token)
-
-
-@contextmanager
-def use_scope(scope):
-    # type: (PotelScope) -> Generator[PotelScope, None, None]
-    context = set_value(SENTRY_USE_CURRENT_SCOPE_KEY, scope)
-    token = attach(context)
-
-    try:
-        yield scope
-    finally:
-        detach(token)
-
-
-@contextmanager
-def use_isolation_scope(isolation_scope):
-    # type: (PotelScope) -> Generator[PotelScope, None, None]
-    context = set_value(SENTRY_USE_ISOLATION_SCOPE_KEY, isolation_scope)
-    token = attach(context)
-
-    try:
-        yield isolation_scope
-    finally:
-        detach(token)
diff --git a/src/sentry_sdk_alpha/opentelemetry/span_processor.py b/src/sentry_sdk_alpha/opentelemetry/span_processor.py
deleted file mode 100644
index 802306720e4e6d..00000000000000
--- a/src/sentry_sdk_alpha/opentelemetry/span_processor.py
+++ /dev/null
@@ -1,316 +0,0 @@
-from collections import defaultdict, deque
-from typing import cast
-
-from opentelemetry.context import Context
-from opentelemetry.sdk.trace import ReadableSpan, Span, SpanProcessor
-from opentelemetry.trace import INVALID_SPAN
-from opentelemetry.trace import Span as AbstractSpan
-from opentelemetry.trace import format_span_id, format_trace_id, get_current_span
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha._types import TYPE_CHECKING
-from sentry_sdk_alpha.consts import DEFAULT_SPAN_ORIGIN, SPANDATA
-from sentry_sdk_alpha.opentelemetry.consts import OTEL_SENTRY_CONTEXT, SentrySpanAttribute
-from sentry_sdk_alpha.opentelemetry.sampler import create_sampling_context
-from sentry_sdk_alpha.opentelemetry.utils import (
-    convert_from_otel_timestamp,
-    extract_span_attributes,
-    extract_span_data,
-    extract_transaction_name_source,
-    get_profile_context,
-    get_sentry_meta,
-    get_trace_context,
-    is_sentry_span,
-    set_sentry_meta,
-)
-from sentry_sdk_alpha.profiler.continuous_profiler import (
-    get_profiler_id,
-    try_autostart_continuous_profiler,
-    try_profile_lifecycle_trace_start,
-)
-from sentry_sdk_alpha.profiler.transaction_profiler import Profile
-from sentry_sdk_alpha.utils import get_current_thread_meta
-
-if TYPE_CHECKING:
-    from typing import Any, DefaultDict, Deque, List, Optional
-
-    from sentry_sdk_alpha._types import Event
-
-
-DEFAULT_MAX_SPANS = 1000
-
-
-class SentrySpanProcessor(SpanProcessor):
-    """
-    Converts OTel spans into Sentry spans so they can be sent to the Sentry backend.
-    """
-
-    def __new__(cls):
-        # type: () -> SentrySpanProcessor
-        if not hasattr(cls, "instance"):
-            cls.instance = super().__new__(cls)
-
-        return cls.instance
-
-    def __init__(self):
-        # type: () -> None
-        self._children_spans = defaultdict(list)  # type: DefaultDict[int, List[ReadableSpan]]
-        self._dropped_spans = defaultdict(int)  # type: DefaultDict[int, int]
-
-    def on_start(self, span, parent_context=None):
-        # type: (Span, Optional[Context]) -> None
-        if is_sentry_span(span):
-            return
-
-        self._add_root_span(span, get_current_span(parent_context))
-        self._start_profile(span)
-
-    def on_end(self, span):
-        # type: (ReadableSpan) -> None
-        if is_sentry_span(span):
-            return
-
-        is_root_span = not span.parent or span.parent.is_remote
-        if is_root_span:
-            # if have a root span ending, stop the profiler, build a transaction and send it
-            self._stop_profile(span)
-            self._flush_root_span(span)
-        else:
-            self._append_child_span(span)
-
-    # TODO-neel-potel not sure we need a clear like JS
-    def shutdown(self):
-        # type: () -> None
-        pass
-
-    # TODO-neel-potel change default? this is 30 sec
-    # TODO-neel-potel call this in client.flush
-    def force_flush(self, timeout_millis=30000):
-        # type: (int) -> bool
-        return True
-
-    def _add_root_span(self, span, parent_span):
-        # type: (Span, AbstractSpan) -> None
-        """
-        This is required to make Span.root_span work
-        since we can't traverse back to the root purely with otel efficiently.
-        """
-        if parent_span != INVALID_SPAN and not parent_span.get_span_context().is_remote:
-            # child span points to parent's root or parent
-            parent_root_span = get_sentry_meta(parent_span, "root_span")
-            set_sentry_meta(span, "root_span", parent_root_span or parent_span)
-        else:
-            # root span points to itself
-            set_sentry_meta(span, "root_span", span)
-
-    def _start_profile(self, span):
-        # type: (Span) -> None
-        try_autostart_continuous_profiler()
-
-        profiler_id = get_profiler_id()
-        thread_id, thread_name = get_current_thread_meta()
-
-        if profiler_id:
-            span.set_attribute(SPANDATA.PROFILER_ID, profiler_id)
-        if thread_id:
-            span.set_attribute(SPANDATA.THREAD_ID, str(thread_id))
-        if thread_name:
-            span.set_attribute(SPANDATA.THREAD_NAME, thread_name)
-
-        is_root_span = not span.parent or span.parent.is_remote
-        sampled = span.context and span.context.trace_flags.sampled
-
-        if is_root_span and sampled:
-            # profiler uses time.perf_counter_ns() so we cannot use the
-            # unix timestamp that is on span.start_time
-            # setting it to 0 means the profiler will internally measure time on start
-            profile = Profile(sampled, 0)
-
-            sampling_context = create_sampling_context(
-                span.name, span.attributes, span.parent, span.context.trace_id
-            )
-            profile._set_initial_sampling_decision(sampling_context)
-            profile.__enter__()
-            set_sentry_meta(span, "profile", profile)
-
-            continuous_profile = try_profile_lifecycle_trace_start()
-            profiler_id = get_profiler_id()
-            if profiler_id:
-                span.set_attribute(SPANDATA.PROFILER_ID, profiler_id)
-            set_sentry_meta(span, "continuous_profile", continuous_profile)
-
-    def _stop_profile(self, span):
-        # type: (ReadableSpan) -> None
-        continuous_profiler = get_sentry_meta(span, "continuous_profile")
-        if continuous_profiler:
-            continuous_profiler.stop()
-
-    def _flush_root_span(self, span):
-        # type: (ReadableSpan) -> None
-        transaction_event = self._root_span_to_transaction_event(span)
-        if not transaction_event:
-            return
-
-        collected_spans, dropped_spans = self._collect_children(span)
-        spans = []
-        for child in collected_spans:
-            span_json = self._span_to_json(child)
-            if span_json:
-                spans.append(span_json)
-
-        transaction_event["spans"] = spans
-        if dropped_spans > 0:
-            transaction_event["_dropped_spans"] = dropped_spans
-
-        # TODO-neel-potel sort and cutoff max spans
-
-        sentry_sdk_alpha.capture_event(transaction_event)
-
-    def _append_child_span(self, span):
-        # type: (ReadableSpan) -> None
-        if not span.parent:
-            return
-
-        max_spans = (
-            sentry_sdk_alpha.get_client().options["_experiments"].get("max_spans")
-            or DEFAULT_MAX_SPANS
-        )
-
-        children_spans = self._children_spans[span.parent.span_id]
-        if len(children_spans) < max_spans:
-            children_spans.append(span)
-        else:
-            self._dropped_spans[span.parent.span_id] += 1
-
-    def _collect_children(self, span):
-        # type: (ReadableSpan) -> tuple[List[ReadableSpan], int]
-        if not span.context:
-            return [], 0
-
-        children = []
-        dropped_spans = 0
-        bfs_queue = deque()  # type: Deque[int]
-        bfs_queue.append(span.context.span_id)
-
-        while bfs_queue:
-            parent_span_id = bfs_queue.popleft()
-            node_children = self._children_spans.pop(parent_span_id, [])
-            dropped_spans += self._dropped_spans.pop(parent_span_id, 0)
-            children.extend(node_children)
-            bfs_queue.extend([child.context.span_id for child in node_children if child.context])
-
-        return children, dropped_spans
-
-    # we construct the event from scratch here
-    # and not use the current Transaction class for easier refactoring
-    def _root_span_to_transaction_event(self, span):
-        # type: (ReadableSpan) -> Optional[Event]
-        if not span.context:
-            return None
-
-        event = self._common_span_transaction_attributes_as_json(span)
-        if event is None:
-            return None
-
-        transaction_name, transaction_source = extract_transaction_name_source(span)
-        span_data = extract_span_data(span)
-        trace_context = get_trace_context(span, span_data=span_data)
-        contexts = {"trace": trace_context}
-
-        profile_context = get_profile_context(span)
-        if profile_context:
-            contexts["profile"] = profile_context
-
-        (_, description, _, http_status, _) = span_data
-
-        if http_status:
-            contexts["response"] = {"status_code": http_status}
-
-        if span.resource.attributes:
-            contexts[OTEL_SENTRY_CONTEXT] = {"resource": dict(span.resource.attributes)}
-
-        event.update(
-            {
-                "type": "transaction",
-                "transaction": transaction_name or description,
-                "transaction_info": {"source": transaction_source or "custom"},
-                "contexts": contexts,
-            }
-        )
-
-        profile = cast("Optional[Profile]", get_sentry_meta(span, "profile"))
-        if profile:
-            profile.__exit__(None, None, None)
-            if profile.valid():
-                event["profile"] = profile
-                set_sentry_meta(span, "profile", None)
-
-        return event
-
-    def _span_to_json(self, span):
-        # type: (ReadableSpan) -> Optional[dict[str, Any]]
-        if not span.context:
-            return None
-
-        # This is a safe cast because dict[str, Any] is a superset of Event
-        span_json = cast("dict[str, Any]", self._common_span_transaction_attributes_as_json(span))
-        if span_json is None:
-            return None
-
-        trace_id = format_trace_id(span.context.trace_id)
-        span_id = format_span_id(span.context.span_id)
-        parent_span_id = format_span_id(span.parent.span_id) if span.parent else None
-
-        (op, description, status, _, origin) = extract_span_data(span)
-
-        span_json.update(
-            {
-                "trace_id": trace_id,
-                "span_id": span_id,
-                "op": op,
-                "description": description,
-                "status": status,
-                "origin": origin or DEFAULT_SPAN_ORIGIN,
-            }
-        )
-
-        if parent_span_id:
-            span_json["parent_span_id"] = parent_span_id
-
-        attributes = getattr(span, "attributes", {}) or {}
-        if attributes:
-            span_json["data"] = {}
-            for key, value in attributes.items():
-                if not key.startswith("_"):
-                    span_json["data"][key] = value
-
-        return span_json
-
-    def _common_span_transaction_attributes_as_json(self, span):
-        # type: (ReadableSpan) -> Optional[Event]
-        if not span.start_time or not span.end_time:
-            return None
-
-        common_json = {
-            "start_timestamp": convert_from_otel_timestamp(span.start_time),
-            "timestamp": convert_from_otel_timestamp(span.end_time),
-        }  # type: Event
-
-        tags = extract_span_attributes(span, SentrySpanAttribute.TAG)
-        if tags:
-            common_json["tags"] = tags
-
-        return common_json
-
-    def _log_debug_info(self):
-        # type: () -> None
-        import pprint
-
-        pprint.pprint(
-            {
-                format_span_id(span_id): [
-                    (format_span_id(child.context.span_id), child.name) for child in children
-                ]
-                for span_id, children in self._children_spans.items()
-            }
-        )
diff --git a/src/sentry_sdk_alpha/opentelemetry/tracing.py b/src/sentry_sdk_alpha/opentelemetry/tracing.py
deleted file mode 100644
index 51a101821e3da1..00000000000000
--- a/src/sentry_sdk_alpha/opentelemetry/tracing.py
+++ /dev/null
@@ -1,31 +0,0 @@
-from opentelemetry import trace
-from opentelemetry.propagate import set_global_textmap
-from opentelemetry.sdk.trace import ReadableSpan, Span, TracerProvider
-
-from sentry_sdk_alpha.opentelemetry import SentryPropagator, SentrySampler, SentrySpanProcessor
-
-
-def patch_readable_span():
-    # type: () -> None
-    """
-    We need to pass through sentry specific metadata/objects from Span to ReadableSpan
-    to work with them consistently in the SpanProcessor.
-    """
-    old_readable_span = Span._readable_span
-
-    def sentry_patched_readable_span(self):
-        # type: (Span) -> ReadableSpan
-        readable_span = old_readable_span(self)
-        readable_span._sentry_meta = getattr(self, "_sentry_meta", {})  # type: ignore[attr-defined]
-        return readable_span
-
-    Span._readable_span = sentry_patched_readable_span  # type: ignore[method-assign]
-
-
-def setup_sentry_tracing():
-    # type: () -> None
-    provider = TracerProvider(sampler=SentrySampler())
-    provider.add_span_processor(SentrySpanProcessor())
-    trace.set_tracer_provider(provider)
-
-    set_global_textmap(SentryPropagator())
diff --git a/src/sentry_sdk_alpha/opentelemetry/utils.py b/src/sentry_sdk_alpha/opentelemetry/utils.py
deleted file mode 100644
index d32dcf63695a85..00000000000000
--- a/src/sentry_sdk_alpha/opentelemetry/utils.py
+++ /dev/null
@@ -1,459 +0,0 @@
-import re
-from datetime import datetime, timezone
-from typing import cast
-from urllib.parse import quote, unquote
-
-from opentelemetry.sdk.trace import ReadableSpan
-from opentelemetry.semconv.trace import SpanAttributes
-from opentelemetry.trace import Span as AbstractSpan
-from opentelemetry.trace import SpanKind, StatusCode, TraceState, format_span_id, format_trace_id
-from urllib3.util import parse_url as urlparse
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha._types import TYPE_CHECKING
-from sentry_sdk_alpha.consts import (
-    DEFAULT_SPAN_ORIGIN,
-    LOW_QUALITY_TRANSACTION_SOURCES,
-    OP,
-    SPANDATA,
-    SPANSTATUS,
-)
-from sentry_sdk_alpha.opentelemetry.consts import SentrySpanAttribute
-from sentry_sdk_alpha.tracing_utils import Baggage, get_span_status_from_http_code
-from sentry_sdk_alpha.utils import Dsn
-
-if TYPE_CHECKING:
-    from collections.abc import Mapping, Sequence
-    from typing import Any, Optional, Union
-
-    from sentry_sdk_alpha._types import OtelExtractedSpanData
-
-
-GRPC_ERROR_MAP = {
-    "1": SPANSTATUS.CANCELLED,
-    "2": SPANSTATUS.UNKNOWN_ERROR,
-    "3": SPANSTATUS.INVALID_ARGUMENT,
-    "4": SPANSTATUS.DEADLINE_EXCEEDED,
-    "5": SPANSTATUS.NOT_FOUND,
-    "6": SPANSTATUS.ALREADY_EXISTS,
-    "7": SPANSTATUS.PERMISSION_DENIED,
-    "8": SPANSTATUS.RESOURCE_EXHAUSTED,
-    "9": SPANSTATUS.FAILED_PRECONDITION,
-    "10": SPANSTATUS.ABORTED,
-    "11": SPANSTATUS.OUT_OF_RANGE,
-    "12": SPANSTATUS.UNIMPLEMENTED,
-    "13": SPANSTATUS.INTERNAL_ERROR,
-    "14": SPANSTATUS.UNAVAILABLE,
-    "15": SPANSTATUS.DATA_LOSS,
-    "16": SPANSTATUS.UNAUTHENTICATED,
-}
-
-
-def is_sentry_span(span):
-    # type: (ReadableSpan) -> bool
-    """
-    Break infinite loop:
-    HTTP requests to Sentry are caught by OTel and send again to Sentry.
-    """
-    from sentry_sdk_alpha import get_client
-
-    if not span.attributes:
-        return False
-
-    span_url = span.attributes.get(SpanAttributes.HTTP_URL, None)
-    span_url = cast("Optional[str]", span_url)
-
-    if not span_url:
-        return False
-
-    dsn_url = None
-    client = get_client()
-
-    if client.dsn:
-        try:
-            dsn_url = Dsn(client.dsn).netloc
-        except Exception:
-            pass
-
-    if not dsn_url:
-        return False
-
-    if dsn_url in span_url:
-        return True
-
-    return False
-
-
-def convert_from_otel_timestamp(time):
-    # type: (int) -> datetime
-    """Convert an OTel nanosecond-level timestamp to a datetime."""
-    return datetime.fromtimestamp(time / 1e9, timezone.utc)
-
-
-def convert_to_otel_timestamp(time):
-    # type: (Union[datetime, float]) -> int
-    """Convert a datetime to an OTel timestamp (with nanosecond precision)."""
-    if isinstance(time, datetime):
-        return int(time.timestamp() * 1e9)
-    return int(time * 1e9)
-
-
-def extract_transaction_name_source(span):
-    # type: (ReadableSpan) -> tuple[Optional[str], Optional[str]]
-    if not span.attributes:
-        return (None, None)
-    return (
-        cast("Optional[str]", span.attributes.get(SentrySpanAttribute.NAME)),
-        cast("Optional[str]", span.attributes.get(SentrySpanAttribute.SOURCE)),
-    )
-
-
-def extract_span_data(span):
-    # type: (ReadableSpan) -> OtelExtractedSpanData
-    op = span.name
-    description = span.name
-    status, http_status = extract_span_status(span)
-    origin = None
-    if span.attributes is None:
-        return (op, description, status, http_status, origin)
-
-    attribute_op = cast("Optional[str]", span.attributes.get(SentrySpanAttribute.OP))
-    op = attribute_op or op
-    description = cast("str", span.attributes.get(SentrySpanAttribute.DESCRIPTION) or description)
-    origin = cast("Optional[str]", span.attributes.get(SentrySpanAttribute.ORIGIN))
-
-    http_method = span.attributes.get(SpanAttributes.HTTP_METHOD)
-    http_method = cast("Optional[str]", http_method)
-    if http_method:
-        return span_data_for_http_method(span)
-
-    db_query = span.attributes.get(SpanAttributes.DB_SYSTEM)
-    if db_query:
-        return span_data_for_db_query(span)
-
-    rpc_service = span.attributes.get(SpanAttributes.RPC_SERVICE)
-    if rpc_service:
-        return (
-            attribute_op or "rpc",
-            description,
-            status,
-            http_status,
-            origin,
-        )
-
-    messaging_system = span.attributes.get(SpanAttributes.MESSAGING_SYSTEM)
-    if messaging_system:
-        return (
-            attribute_op or "message",
-            description,
-            status,
-            http_status,
-            origin,
-        )
-
-    faas_trigger = span.attributes.get(SpanAttributes.FAAS_TRIGGER)
-    if faas_trigger:
-        return (str(faas_trigger), description, status, http_status, origin)
-
-    return (op, description, status, http_status, origin)
-
-
-def span_data_for_http_method(span):
-    # type: (ReadableSpan) -> OtelExtractedSpanData
-    span_attributes = span.attributes or {}
-
-    op = cast("Optional[str]", span_attributes.get(SentrySpanAttribute.OP))
-    if op is None:
-        op = "http"
-
-        if span.kind == SpanKind.SERVER:
-            op += ".server"
-        elif span.kind == SpanKind.CLIENT:
-            op += ".client"
-
-    http_method = span_attributes.get(SpanAttributes.HTTP_METHOD)
-    route = span_attributes.get(SpanAttributes.HTTP_ROUTE)
-    target = span_attributes.get(SpanAttributes.HTTP_TARGET)
-    peer_name = span_attributes.get(SpanAttributes.NET_PEER_NAME)
-
-    # TODO-neel-potel remove description completely
-    description = span_attributes.get(SentrySpanAttribute.DESCRIPTION) or span_attributes.get(
-        SentrySpanAttribute.NAME
-    )
-    description = cast("Optional[str]", description)
-    if description is None:
-        description = f"{http_method}"
-
-        if route:
-            description = f"{http_method} {route}"
-        elif target:
-            description = f"{http_method} {target}"
-        elif peer_name:
-            description = f"{http_method} {peer_name}"
-        else:
-            url = span_attributes.get(SpanAttributes.HTTP_URL)
-            url = cast("Optional[str]", url)
-
-            if url:
-                parsed_url = urlparse(url)
-                url = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
-                description = f"{http_method} {url}"
-
-    status, http_status = extract_span_status(span)
-
-    origin = cast("Optional[str]", span_attributes.get(SentrySpanAttribute.ORIGIN))
-
-    return (op, description, status, http_status, origin)
-
-
-def span_data_for_db_query(span):
-    # type: (ReadableSpan) -> OtelExtractedSpanData
-    span_attributes = span.attributes or {}
-
-    op = cast("str", span_attributes.get(SentrySpanAttribute.OP, OP.DB))
-
-    statement = span_attributes.get(SpanAttributes.DB_STATEMENT, None)
-    statement = cast("Optional[str]", statement)
-
-    description = statement or span.name
-    origin = cast("Optional[str]", span_attributes.get(SentrySpanAttribute.ORIGIN))
-
-    return (op, description, None, None, origin)
-
-
-def extract_span_status(span):
-    # type: (ReadableSpan) -> tuple[Optional[str], Optional[int]]
-    span_attributes = span.attributes or {}
-    status = span.status or None
-
-    if status:
-        inferred_status, http_status = infer_status_from_attributes(span_attributes)
-
-        if status.status_code == StatusCode.OK:
-            return (SPANSTATUS.OK, http_status)
-        elif status.status_code == StatusCode.ERROR:
-            if status.description is None:
-                if inferred_status:
-                    return (inferred_status, http_status)
-
-            if http_status is not None:
-                return (inferred_status, http_status)
-
-            if status.description is not None and status.description in GRPC_ERROR_MAP.values():
-                return (status.description, None)
-            else:
-                return (SPANSTATUS.UNKNOWN_ERROR, None)
-
-    inferred_status, http_status = infer_status_from_attributes(span_attributes)
-    if inferred_status:
-        return (inferred_status, http_status)
-
-    if status and status.status_code == StatusCode.UNSET:
-        return (None, None)
-    else:
-        return (SPANSTATUS.UNKNOWN_ERROR, None)
-
-
-def infer_status_from_attributes(span_attributes):
-    # type: (Mapping[str, str | bool | int | float | Sequence[str] | Sequence[bool] | Sequence[int] | Sequence[float]]) -> tuple[Optional[str], Optional[int]]
-    http_status = get_http_status_code(span_attributes)
-
-    if http_status:
-        return (get_span_status_from_http_code(http_status), http_status)
-
-    grpc_status = span_attributes.get(SpanAttributes.RPC_GRPC_STATUS_CODE)
-    if grpc_status:
-        return (GRPC_ERROR_MAP.get(str(grpc_status), SPANSTATUS.UNKNOWN_ERROR), None)
-
-    return (None, None)
-
-
-def get_http_status_code(span_attributes):
-    # type: (Mapping[str, str | bool | int | float | Sequence[str] | Sequence[bool] | Sequence[int] | Sequence[float]]) -> Optional[int]
-    try:
-        http_status = span_attributes.get(SpanAttributes.HTTP_RESPONSE_STATUS_CODE)
-    except AttributeError:
-        # HTTP_RESPONSE_STATUS_CODE was added in 1.21, so if we're on an older
-        # OTel version SpanAttributes.HTTP_RESPONSE_STATUS_CODE will throw an
-        # AttributeError
-        http_status = None
-
-    if http_status is None:
-        # Fall back to the deprecated attribute
-        http_status = span_attributes.get(SpanAttributes.HTTP_STATUS_CODE)
-
-    http_status = cast("Optional[int]", http_status)
-
-    return http_status
-
-
-def extract_span_attributes(span, namespace):
-    # type: (ReadableSpan, str) -> dict[str, Any]
-    """
-    Extract Sentry-specific span attributes and make them look the way Sentry expects.
-    """
-    extracted_attrs = {}  # type: dict[str, Any]
-
-    for attr, value in (span.attributes or {}).items():
-        if attr.startswith(namespace):
-            key = attr[len(namespace) + 1 :]
-            extracted_attrs[key] = value
-
-    return extracted_attrs
-
-
-def get_trace_context(span, span_data=None):
-    # type: (ReadableSpan, Optional[OtelExtractedSpanData]) -> dict[str, Any]
-    if not span.context:
-        return {}
-
-    trace_id = format_trace_id(span.context.trace_id)
-    span_id = format_span_id(span.context.span_id)
-    parent_span_id = format_span_id(span.parent.span_id) if span.parent else None
-
-    if span_data is None:
-        span_data = extract_span_data(span)
-
-    (op, _, status, _, origin) = span_data
-
-    trace_context = {
-        "trace_id": trace_id,
-        "span_id": span_id,
-        "parent_span_id": parent_span_id,
-        "op": op,
-        "origin": origin or DEFAULT_SPAN_ORIGIN,
-    }  # type: dict[str, Any]
-
-    if status:
-        trace_context["status"] = status
-
-    if span.attributes:
-        trace_context["data"] = dict(span.attributes)
-
-    trace_state = get_trace_state(span)
-    trace_context["dynamic_sampling_context"] = dsc_from_trace_state(trace_state)
-
-    # TODO-neel-potel profiler thread_id, thread_name
-
-    return trace_context
-
-
-def trace_state_from_baggage(baggage):
-    # type: (Baggage) -> TraceState
-    items = []
-    for k, v in baggage.sentry_items.items():
-        key = Baggage.SENTRY_PREFIX + quote(k)
-        val = quote(str(v))
-        items.append((key, val))
-    return TraceState(items)
-
-
-def baggage_from_trace_state(trace_state):
-    # type: (TraceState) -> Baggage
-    return Baggage(dsc_from_trace_state(trace_state))
-
-
-def serialize_trace_state(trace_state):
-    # type: (TraceState) -> str
-    sentry_items = []
-    for k, v in trace_state.items():
-        if Baggage.SENTRY_PREFIX_REGEX.match(k):
-            sentry_items.append((k, v))
-    return ",".join(key + "=" + value for key, value in sentry_items)
-
-
-def dsc_from_trace_state(trace_state):
-    # type: (TraceState) -> dict[str, str]
-    dsc = {}
-    for k, v in trace_state.items():
-        if Baggage.SENTRY_PREFIX_REGEX.match(k):
-            key = re.sub(Baggage.SENTRY_PREFIX_REGEX, "", k)
-            dsc[unquote(key)] = unquote(v)
-    return dsc
-
-
-def has_incoming_trace(trace_state):
-    # type: (TraceState) -> bool
-    """
-    The existence of a sentry-trace_id in the baggage implies we continued an upstream trace.
-    """
-    return (Baggage.SENTRY_PREFIX + "trace_id") in trace_state
-
-
-def get_trace_state(span):
-    # type: (Union[AbstractSpan, ReadableSpan]) -> TraceState
-    """
-    Get the existing trace_state with sentry items
-    or populate it if we are the head SDK.
-    """
-    span_context = span.get_span_context()
-    if not span_context:
-        return TraceState()
-
-    trace_state = span_context.trace_state
-
-    if has_incoming_trace(trace_state):
-        return trace_state
-    else:
-        client = sentry_sdk_alpha.get_client()
-        if not client.is_active():
-            return trace_state
-
-        options = client.options or {}
-
-        trace_state = trace_state.update(
-            Baggage.SENTRY_PREFIX + "trace_id",
-            quote(format_trace_id(span_context.trace_id)),
-        )
-
-        if options.get("environment"):
-            trace_state = trace_state.update(
-                Baggage.SENTRY_PREFIX + "environment", quote(options["environment"])
-            )
-
-        if options.get("release"):
-            trace_state = trace_state.update(
-                Baggage.SENTRY_PREFIX + "release", quote(options["release"])
-            )
-
-        if options.get("dsn"):
-            trace_state = trace_state.update(
-                Baggage.SENTRY_PREFIX + "public_key",
-                quote(Dsn(options["dsn"]).public_key),
-            )
-
-        root_span = get_sentry_meta(span, "root_span")
-        if root_span and isinstance(root_span, ReadableSpan):
-            transaction_name, transaction_source = extract_transaction_name_source(root_span)
-
-            if transaction_name and transaction_source not in LOW_QUALITY_TRANSACTION_SOURCES:
-                trace_state = trace_state.update(
-                    Baggage.SENTRY_PREFIX + "transaction", quote(transaction_name)
-                )
-
-        return trace_state
-
-
-def get_sentry_meta(span, key):
-    # type: (Union[AbstractSpan, ReadableSpan], str) -> Any
-    sentry_meta = getattr(span, "_sentry_meta", None)
-    return sentry_meta.get(key) if sentry_meta else None
-
-
-def set_sentry_meta(span, key, value):
-    # type: (Union[AbstractSpan, ReadableSpan], str, Any) -> None
-    sentry_meta = getattr(span, "_sentry_meta", {})
-    sentry_meta[key] = value
-    span._sentry_meta = sentry_meta  # type: ignore[union-attr]
-
-
-def get_profile_context(span):
-    # type: (ReadableSpan) -> Optional[dict[str, str]]
-    if not span.attributes:
-        return None
-
-    profiler_id = cast("Optional[str]", span.attributes.get(SPANDATA.PROFILER_ID))
-    if profiler_id is None:
-        return None
-
-    return {"profiler_id": profiler_id}
diff --git a/src/sentry_sdk_alpha/profiler/__init__.py b/src/sentry_sdk_alpha/profiler/__init__.py
deleted file mode 100644
index 01d6ba03071e93..00000000000000
--- a/src/sentry_sdk_alpha/profiler/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-from sentry_sdk_alpha.profiler.continuous_profiler import start_profiler, stop_profiler
-
-__all__ = [
-    "start_profiler",
-    "stop_profiler",
-]
diff --git a/src/sentry_sdk_alpha/profiler/continuous_profiler.py b/src/sentry_sdk_alpha/profiler/continuous_profiler.py
deleted file mode 100644
index a8ca6d91ece119..00000000000000
--- a/src/sentry_sdk_alpha/profiler/continuous_profiler.py
+++ /dev/null
@@ -1,646 +0,0 @@
-import atexit
-import os
-import random
-import sys
-import threading
-import time
-import uuid
-from collections import deque
-from datetime import datetime, timezone
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha._lru_cache import LRUCache
-from sentry_sdk_alpha.consts import VERSION
-from sentry_sdk_alpha.envelope import Envelope
-from sentry_sdk_alpha.profiler.utils import DEFAULT_SAMPLING_FREQUENCY, extract_stack
-from sentry_sdk_alpha.utils import (
-    capture_internal_exception,
-    is_gevent,
-    logger,
-    now,
-    set_in_app_in_frames,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Deque, Dict, List, Optional, Set, Type, TypedDict, Union
-
-    from sentry_sdk_alpha._types import ContinuousProfilerMode, SDKInfo
-    from sentry_sdk_alpha.profiler.utils import (
-        ExtractedSample,
-        FrameId,
-        ProcessedFrame,
-        ProcessedStack,
-        StackId,
-        ThreadId,
-    )
-
-    class ProcessedSample(TypedDict):
-        timestamp: float
-        thread_id: ThreadId
-        stack_id: int
-
-
-try:
-    from gevent.monkey import get_original
-    from gevent.threadpool import ThreadPool as _ThreadPool
-
-    ThreadPool = _ThreadPool  # type: Optional[Type[_ThreadPool]]
-    thread_sleep = get_original("time", "sleep")
-except ImportError:
-    thread_sleep = time.sleep
-    ThreadPool = None
-
-
-_scheduler = None  # type: Optional[ContinuousScheduler]
-
-
-def setup_continuous_profiler(options, sdk_info, capture_func):
-    # type: (Dict[str, Any], SDKInfo, Callable[[Envelope], None]) -> bool
-    global _scheduler
-
-    if _scheduler is not None:
-        logger.debug("[Profiling] Continuous Profiler is already setup")
-        return False
-
-    if is_gevent():
-        # If gevent has patched the threading modules then we cannot rely on
-        # them to spawn a native thread for sampling.
-        # Instead we default to the GeventContinuousScheduler which is capable of
-        # spawning native threads within gevent.
-        default_profiler_mode = GeventContinuousScheduler.mode
-    else:
-        default_profiler_mode = ThreadContinuousScheduler.mode
-
-    profiler_mode = default_profiler_mode
-    if options.get("profiler_mode") is not None:
-        profiler_mode = options["profiler_mode"]
-
-    frequency = DEFAULT_SAMPLING_FREQUENCY
-
-    if profiler_mode == ThreadContinuousScheduler.mode:
-        _scheduler = ThreadContinuousScheduler(frequency, options, sdk_info, capture_func)
-    elif profiler_mode == GeventContinuousScheduler.mode:
-        _scheduler = GeventContinuousScheduler(frequency, options, sdk_info, capture_func)
-    else:
-        raise ValueError(f"Unknown continuous profiler mode: {profiler_mode}")
-
-    logger.debug(f"[Profiling] Setting up continuous profiler in {_scheduler.mode} mode")
-
-    atexit.register(teardown_continuous_profiler)
-
-    return True
-
-
-def try_autostart_continuous_profiler():
-    # type: () -> None
-
-    # TODO: deprecate this as it'll be replaced by the auto lifecycle option
-
-    if _scheduler is None:
-        return
-
-    if not _scheduler.is_auto_start_enabled():
-        return
-
-    _scheduler.manual_start()
-
-
-def try_profile_lifecycle_trace_start():
-    # type: () -> Union[ContinuousProfile, None]
-    if _scheduler is None:
-        return None
-
-    return _scheduler.auto_start()
-
-
-def start_profiler():
-    # type: () -> None
-    if _scheduler is None:
-        return
-
-    _scheduler.manual_start()
-
-
-def stop_profiler():
-    # type: () -> None
-    if _scheduler is None:
-        return
-
-    _scheduler.manual_stop()
-
-
-def teardown_continuous_profiler():
-    # type: () -> None
-    stop_profiler()
-
-    global _scheduler
-    _scheduler = None
-
-
-def get_profiler_id():
-    # type: () -> Union[str, None]
-    if _scheduler is None:
-        return None
-    return _scheduler.profiler_id
-
-
-def determine_profile_session_sampling_decision(sample_rate):
-    # type: (Union[float, None]) -> bool
-
-    # `None` is treated as `0.0`
-    if not sample_rate:
-        return False
-
-    return random.random() < float(sample_rate)
-
-
-class ContinuousProfile:
-    active: bool = True
-
-    def stop(self):
-        # type: () -> None
-        self.active = False
-
-
-class ContinuousScheduler:
-    mode = "unknown"  # type: ContinuousProfilerMode
-
-    def __init__(self, frequency, options, sdk_info, capture_func):
-        # type: (int, Dict[str, Any], SDKInfo, Callable[[Envelope], None]) -> None
-        self.interval = 1.0 / frequency
-        self.options = options
-        self.sdk_info = sdk_info
-        self.capture_func = capture_func
-
-        self.lifecycle = self.options.get("profile_lifecycle")
-        profile_session_sample_rate = self.options.get("profile_session_sample_rate")
-        self.sampled = determine_profile_session_sampling_decision(profile_session_sample_rate)
-
-        self.sampler = self.make_sampler()
-        self.buffer = None  # type: Optional[ProfileBuffer]
-        self.pid = None  # type: Optional[int]
-
-        self.running = False
-
-        self.new_profiles = deque(maxlen=128)  # type: Deque[ContinuousProfile]
-        self.active_profiles = set()  # type: Set[ContinuousProfile]
-
-    def is_auto_start_enabled(self):
-        # type: () -> bool
-
-        # Ensure that the scheduler only autostarts once per process.
-        # This is necessary because many web servers use forks to spawn
-        # additional processes. And the profiler is only spawned on the
-        # master process, then it often only profiles the main process
-        # and not the ones where the requests are being handled.
-        if self.pid == os.getpid():
-            return False
-
-        experiments = self.options.get("_experiments")
-        if not experiments:
-            return False
-
-        return experiments.get("continuous_profiling_auto_start")
-
-    def auto_start(self):
-        # type: () -> Union[ContinuousProfile, None]
-        if not self.sampled:
-            return None
-
-        if self.lifecycle != "trace":
-            return None
-
-        logger.debug("[Profiling] Auto starting profiler")
-
-        profile = ContinuousProfile()
-
-        self.new_profiles.append(profile)
-        self.ensure_running()
-
-        return profile
-
-    def manual_start(self):
-        # type: () -> None
-        if not self.sampled:
-            return
-
-        if self.lifecycle != "manual":
-            return
-
-        self.ensure_running()
-
-    def manual_stop(self):
-        # type: () -> None
-        if self.lifecycle != "manual":
-            return
-
-        self.teardown()
-
-    def ensure_running(self):
-        # type: () -> None
-        raise NotImplementedError
-
-    def teardown(self):
-        # type: () -> None
-        raise NotImplementedError
-
-    def pause(self):
-        # type: () -> None
-        raise NotImplementedError
-
-    def reset_buffer(self):
-        # type: () -> None
-        self.buffer = ProfileBuffer(
-            self.options, self.sdk_info, PROFILE_BUFFER_SECONDS, self.capture_func
-        )
-
-    @property
-    def profiler_id(self):
-        # type: () -> Union[str, None]
-        if self.buffer is None:
-            return None
-        return self.buffer.profiler_id
-
-    def make_sampler(self):
-        # type: () -> Callable[..., None]
-        cwd = os.getcwd()
-
-        cache = LRUCache(max_size=256)
-
-        if self.lifecycle == "trace":
-
-            def _sample_stack(*args, **kwargs):
-                # type: (*Any, **Any) -> None
-                """
-                Take a sample of the stack on all the threads in the process.
-                This should be called at a regular interval to collect samples.
-                """
-
-                # no profiles taking place, so we can stop early
-                if not self.new_profiles and not self.active_profiles:
-                    self.running = False
-                    return
-
-                # This is the number of profiles we want to pop off.
-                # It's possible another thread adds a new profile to
-                # the list and we spend longer than we want inside
-                # the loop below.
-                #
-                # Also make sure to set this value before extracting
-                # frames so we do not write to any new profiles that
-                # were started after this point.
-                new_profiles = len(self.new_profiles)
-
-                ts = now()
-
-                try:
-                    sample = [
-                        (str(tid), extract_stack(frame, cache, cwd))
-                        for tid, frame in sys._current_frames().items()
-                    ]
-                except AttributeError:
-                    # For some reason, the frame we get doesn't have certain attributes.
-                    # When this happens, we abandon the current sample as it's bad.
-                    capture_internal_exception(sys.exc_info())
-                    return
-
-                # Move the new profiles into the active_profiles set.
-                #
-                # We cannot directly add the to active_profiles set
-                # in `start_profiling` because it is called from other
-                # threads which can cause a RuntimeError when it the
-                # set sizes changes during iteration without a lock.
-                #
-                # We also want to avoid using a lock here so threads
-                # that are starting profiles are not blocked until it
-                # can acquire the lock.
-                for _ in range(new_profiles):
-                    self.active_profiles.add(self.new_profiles.popleft())
-                inactive_profiles = []
-
-                for profile in self.active_profiles:
-                    if profile.active:
-                        pass
-                    else:
-                        # If a profile is marked inactive, we buffer it
-                        # to `inactive_profiles` so it can be removed.
-                        # We cannot remove it here as it would result
-                        # in a RuntimeError.
-                        inactive_profiles.append(profile)
-
-                for profile in inactive_profiles:
-                    self.active_profiles.remove(profile)
-
-                if self.buffer is not None:
-                    self.buffer.write(ts, sample)
-
-        else:
-
-            def _sample_stack(*args, **kwargs):
-                # type: (*Any, **Any) -> None
-                """
-                Take a sample of the stack on all the threads in the process.
-                This should be called at a regular interval to collect samples.
-                """
-
-                ts = now()
-
-                try:
-                    sample = [
-                        (str(tid), extract_stack(frame, cache, cwd))
-                        for tid, frame in sys._current_frames().items()
-                    ]
-                except AttributeError:
-                    # For some reason, the frame we get doesn't have certain attributes.
-                    # When this happens, we abandon the current sample as it's bad.
-                    capture_internal_exception(sys.exc_info())
-                    return
-
-                if self.buffer is not None:
-                    self.buffer.write(ts, sample)
-
-        return _sample_stack
-
-    def run(self):
-        # type: () -> None
-        last = time.perf_counter()
-
-        while self.running:
-            self.sampler()
-
-            # some time may have elapsed since the last time
-            # we sampled, so we need to account for that and
-            # not sleep for too long
-            elapsed = time.perf_counter() - last
-            if elapsed < self.interval:
-                thread_sleep(self.interval - elapsed)
-
-            # after sleeping, make sure to take the current
-            # timestamp so we can use it next iteration
-            last = time.perf_counter()
-
-        if self.buffer is not None:
-            self.buffer.flush()
-            self.buffer = None
-
-
-class ThreadContinuousScheduler(ContinuousScheduler):
-    """
-    This scheduler is based on running a daemon thread that will call
-    the sampler at a regular interval.
-    """
-
-    mode = "thread"  # type: ContinuousProfilerMode
-    name = "sentry.profiler.ThreadContinuousScheduler"
-
-    def __init__(self, frequency, options, sdk_info, capture_func):
-        # type: (int, Dict[str, Any], SDKInfo, Callable[[Envelope], None]) -> None
-        super().__init__(frequency, options, sdk_info, capture_func)
-
-        self.thread = None  # type: Optional[threading.Thread]
-        self.lock = threading.Lock()
-
-    def ensure_running(self):
-        # type: () -> None
-
-        pid = os.getpid()
-
-        # is running on the right process
-        if self.running and self.pid == pid:
-            return
-
-        with self.lock:
-            # another thread may have tried to acquire the lock
-            # at the same time so it may start another thread
-            # make sure to check again before proceeding
-            if self.running and self.pid == pid:
-                return
-
-            self.pid = pid
-            self.running = True
-
-            # if the profiler thread is changing,
-            # we should create a new buffer along with it
-            self.reset_buffer()
-
-            # make sure the thread is a daemon here otherwise this
-            # can keep the application running after other threads
-            # have exited
-            self.thread = threading.Thread(name=self.name, target=self.run, daemon=True)
-
-            try:
-                self.thread.start()
-            except RuntimeError:
-                # Unfortunately at this point the interpreter is in a state that no
-                # longer allows us to spawn a thread and we have to bail.
-                self.running = False
-                self.thread = None
-
-    def teardown(self):
-        # type: () -> None
-        if self.running:
-            self.running = False
-
-        if self.thread is not None:
-            self.thread.join()
-            self.thread = None
-
-        self.buffer = None
-
-
-class GeventContinuousScheduler(ContinuousScheduler):
-    """
-    This scheduler is based on the thread scheduler but adapted to work with
-    gevent. When using gevent, it may monkey patch the threading modules
-    (`threading` and `_thread`). This results in the use of greenlets instead
-    of native threads.
-
-    This is an issue because the sampler CANNOT run in a greenlet because
-    1. Other greenlets doing sync work will prevent the sampler from running
-    2. The greenlet runs in the same thread as other greenlets so when taking
-       a sample, other greenlets will have been evicted from the thread. This
-       results in a sample containing only the sampler's code.
-    """
-
-    mode = "gevent"  # type: ContinuousProfilerMode
-
-    def __init__(self, frequency, options, sdk_info, capture_func):
-        # type: (int, Dict[str, Any], SDKInfo, Callable[[Envelope], None]) -> None
-
-        if ThreadPool is None:
-            raise ValueError(f"Profiler mode: {self.mode} is not available")
-
-        super().__init__(frequency, options, sdk_info, capture_func)
-
-        self.thread = None  # type: Optional[_ThreadPool]
-        self.lock = threading.Lock()
-
-    def ensure_running(self):
-        # type: () -> None
-        pid = os.getpid()
-
-        # is running on the right process
-        if self.running and self.pid == pid:
-            return
-
-        with self.lock:
-            # another thread may have tried to acquire the lock
-            # at the same time so it may start another thread
-            # make sure to check again before proceeding
-            if self.running and self.pid == pid:
-                return
-
-            self.pid = pid
-            self.running = True
-
-            # if the profiler thread is changing,
-            # we should create a new buffer along with it
-            self.reset_buffer()
-
-            self.thread = ThreadPool(1)  # type: ignore[misc]
-            try:
-                self.thread.spawn(self.run)
-            except RuntimeError:
-                # Unfortunately at this point the interpreter is in a state that no
-                # longer allows us to spawn a thread and we have to bail.
-                self.running = False
-                self.thread = None
-
-    def teardown(self):
-        # type: () -> None
-        if self.running:
-            self.running = False
-
-        if self.thread is not None:
-            self.thread.join()
-            self.thread = None
-
-        self.buffer = None
-
-
-PROFILE_BUFFER_SECONDS = 60
-
-
-class ProfileBuffer:
-    def __init__(self, options, sdk_info, buffer_size, capture_func):
-        # type: (Dict[str, Any], SDKInfo, int, Callable[[Envelope], None]) -> None
-        self.options = options
-        self.sdk_info = sdk_info
-        self.buffer_size = buffer_size
-        self.capture_func = capture_func
-
-        self.profiler_id = uuid.uuid4().hex
-        self.chunk = ProfileChunk()
-
-        # Make sure to use the same clock to compute a sample's monotonic timestamp
-        # to ensure the timestamps are correctly aligned.
-        self.start_monotonic_time = now()
-
-        # Make sure the start timestamp is defined only once per profiler id.
-        # This prevents issues with clock drift within a single profiler session.
-        #
-        # Subtracting the start_monotonic_time here to find a fixed starting position
-        # for relative monotonic timestamps for each sample.
-        self.start_timestamp = datetime.now(timezone.utc).timestamp() - self.start_monotonic_time
-
-    def write(self, monotonic_time, sample):
-        # type: (float, ExtractedSample) -> None
-        if self.should_flush(monotonic_time):
-            self.flush()
-            self.chunk = ProfileChunk()
-            self.start_monotonic_time = now()
-
-        self.chunk.write(self.start_timestamp + monotonic_time, sample)
-
-    def should_flush(self, monotonic_time):
-        # type: (float) -> bool
-
-        # If the delta between the new monotonic time and the start monotonic time
-        # exceeds the buffer size, it means we should flush the chunk
-        return monotonic_time - self.start_monotonic_time >= self.buffer_size
-
-    def flush(self):
-        # type: () -> None
-        chunk = self.chunk.to_json(self.profiler_id, self.options, self.sdk_info)
-        envelope = Envelope()
-        envelope.add_profile_chunk(chunk)
-        self.capture_func(envelope)
-
-
-class ProfileChunk:
-    def __init__(self):
-        # type: () -> None
-        self.chunk_id = uuid.uuid4().hex
-
-        self.indexed_frames = {}  # type: Dict[FrameId, int]
-        self.indexed_stacks = {}  # type: Dict[StackId, int]
-        self.frames = []  # type: List[ProcessedFrame]
-        self.stacks = []  # type: List[ProcessedStack]
-        self.samples = []  # type: List[ProcessedSample]
-
-    def write(self, ts, sample):
-        # type: (float, ExtractedSample) -> None
-        for tid, (stack_id, frame_ids, frames) in sample:
-            try:
-                # Check if the stack is indexed first, this lets us skip
-                # indexing frames if it's not necessary
-                if stack_id not in self.indexed_stacks:
-                    for i, frame_id in enumerate(frame_ids):
-                        if frame_id not in self.indexed_frames:
-                            self.indexed_frames[frame_id] = len(self.indexed_frames)
-                            self.frames.append(frames[i])
-
-                    self.indexed_stacks[stack_id] = len(self.indexed_stacks)
-                    self.stacks.append([self.indexed_frames[frame_id] for frame_id in frame_ids])
-
-                self.samples.append(
-                    {
-                        "timestamp": ts,
-                        "thread_id": tid,
-                        "stack_id": self.indexed_stacks[stack_id],
-                    }
-                )
-            except AttributeError:
-                # For some reason, the frame we get doesn't have certain attributes.
-                # When this happens, we abandon the current sample as it's bad.
-                capture_internal_exception(sys.exc_info())
-
-    def to_json(self, profiler_id, options, sdk_info):
-        # type: (str, Dict[str, Any], SDKInfo) -> Dict[str, Any]
-        profile = {
-            "frames": self.frames,
-            "stacks": self.stacks,
-            "samples": self.samples,
-            "thread_metadata": {
-                str(thread.ident): {
-                    "name": str(thread.name),
-                }
-                for thread in threading.enumerate()
-            },
-        }
-
-        set_in_app_in_frames(
-            profile["frames"],
-            options["in_app_exclude"],
-            options["in_app_include"],
-            options["project_root"],
-        )
-
-        payload = {
-            "chunk_id": self.chunk_id,
-            "client_sdk": {
-                "name": sdk_info["name"],
-                "version": VERSION,
-            },
-            "platform": "python",
-            "profile": profile,
-            "profiler_id": profiler_id,
-            "version": "2",
-        }
-
-        for key in "release", "environment", "dist":
-            if options[key] is not None:
-                payload[key] = str(options[key]).strip()
-
-        return payload
diff --git a/src/sentry_sdk_alpha/profiler/transaction_profiler.py b/src/sentry_sdk_alpha/profiler/transaction_profiler.py
deleted file mode 100644
index b5e9887dc70593..00000000000000
--- a/src/sentry_sdk_alpha/profiler/transaction_profiler.py
+++ /dev/null
@@ -1,747 +0,0 @@
-"""
-This file is originally based on code from https://github.com/nylas/nylas-perftools,
-which is published under the following license:
-
-The MIT License (MIT)
-
-Copyright (c) 2014 Nylas
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-"""
-
-import atexit
-import os
-import platform
-import random
-import sys
-import threading
-import time
-import uuid
-from abc import ABC, abstractmethod
-from collections import deque
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha._lru_cache import LRUCache
-from sentry_sdk_alpha.profiler.utils import DEFAULT_SAMPLING_FREQUENCY, extract_stack
-from sentry_sdk_alpha.utils import (
-    capture_internal_exception,
-    get_current_thread_meta,
-    is_gevent,
-    is_valid_sample_rate,
-    logger,
-    set_in_app_in_frames,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Deque, Dict, List, Optional, Set, Type, TypedDict
-
-    from sentry_sdk_alpha._types import Event, ProfilerMode, SamplingContext
-    from sentry_sdk_alpha.profiler.utils import (
-        ExtractedSample,
-        FrameId,
-        ProcessedFrame,
-        ProcessedStack,
-        ProcessedThreadMetadata,
-        StackId,
-        ThreadId,
-    )
-
-    class ProcessedSample(TypedDict):
-        elapsed_since_start_ns: str
-        thread_id: ThreadId
-        stack_id: int
-
-    class ProcessedProfile(TypedDict):
-        frames: list[ProcessedFrame]
-        stacks: list[ProcessedStack]
-        samples: list[ProcessedSample]
-        thread_metadata: dict[ThreadId, ProcessedThreadMetadata]
-
-
-try:
-    from gevent.monkey import get_original
-    from gevent.threadpool import ThreadPool as _ThreadPool
-
-    ThreadPool = _ThreadPool  # type: Optional[Type[_ThreadPool]]
-    thread_sleep = get_original("time", "sleep")
-except ImportError:
-    thread_sleep = time.sleep
-
-    ThreadPool = None
-
-
-_scheduler = None  # type: Optional[Scheduler]
-
-
-# The minimum number of unique samples that must exist in a profile to be
-# considered valid.
-PROFILE_MINIMUM_SAMPLES = 2
-
-
-def has_profiling_enabled(options):
-    # type: (Dict[str, Any]) -> bool
-    profiles_sampler = options["profiles_sampler"]
-    if profiles_sampler is not None:
-        return True
-
-    profiles_sample_rate = options["profiles_sample_rate"]
-    if profiles_sample_rate is not None and profiles_sample_rate > 0:
-        return True
-
-    return False
-
-
-def setup_profiler(options):
-    # type: (Dict[str, Any]) -> bool
-    global _scheduler
-
-    if _scheduler is not None:
-        logger.debug("[Profiling] Profiler is already setup")
-        return False
-
-    frequency = DEFAULT_SAMPLING_FREQUENCY
-
-    if is_gevent():
-        # If gevent has patched the threading modules then we cannot rely on
-        # them to spawn a native thread for sampling.
-        # Instead we default to the GeventScheduler which is capable of
-        # spawning native threads within gevent.
-        default_profiler_mode = GeventScheduler.mode
-    else:
-        default_profiler_mode = ThreadScheduler.mode
-
-    profiler_mode = default_profiler_mode
-    if options.get("profiler_mode") is not None:
-        profiler_mode = options["profiler_mode"]
-
-    if (
-        profiler_mode == ThreadScheduler.mode
-        # for legacy reasons, we'll keep supporting sleep mode for this scheduler
-        or profiler_mode == "sleep"
-    ):
-        _scheduler = ThreadScheduler(frequency=frequency)
-    elif profiler_mode == GeventScheduler.mode:
-        _scheduler = GeventScheduler(frequency=frequency)
-    else:
-        raise ValueError(f"Unknown profiler mode: {profiler_mode}")
-
-    logger.debug(f"[Profiling] Setting up profiler in {_scheduler.mode} mode")
-    _scheduler.setup()
-
-    atexit.register(teardown_profiler)
-
-    return True
-
-
-def teardown_profiler():
-    # type: () -> None
-
-    global _scheduler
-
-    if _scheduler is not None:
-        _scheduler.teardown()
-
-    _scheduler = None
-
-
-MAX_PROFILE_DURATION_NS = int(3e10)  # 30 seconds
-
-
-class Profile:
-    def __init__(
-        self,
-        sampled,  # type: Optional[bool]
-        start_ns,  # type: int
-        scheduler=None,  # type: Optional[Scheduler]
-    ):
-        # type: (...) -> None
-        self.scheduler = _scheduler if scheduler is None else scheduler
-
-        self.event_id = uuid.uuid4().hex  # type: str
-
-        self.sampled = sampled  # type: Optional[bool]
-
-        # Various framework integrations are capable of overwriting the active thread id.
-        # If it is set to `None` at the end of the profile, we fall back to the default.
-        self._default_active_thread_id = get_current_thread_meta()[0] or 0  # type: int
-        self.active_thread_id = None  # type: Optional[int]
-
-        try:
-            self.start_ns = start_ns  # type: int
-        except AttributeError:
-            self.start_ns = 0
-
-        self.stop_ns = 0  # type: int
-        self.active = False  # type: bool
-
-        self.indexed_frames = {}  # type: Dict[FrameId, int]
-        self.indexed_stacks = {}  # type: Dict[StackId, int]
-        self.frames = []  # type: List[ProcessedFrame]
-        self.stacks = []  # type: List[ProcessedStack]
-        self.samples = []  # type: List[ProcessedSample]
-
-        self.unique_samples = 0
-
-    def update_active_thread_id(self):
-        # type: () -> None
-        self.active_thread_id = get_current_thread_meta()[0]
-        logger.debug(f"[Profiling] updating active thread id to {self.active_thread_id}")
-
-    def _set_initial_sampling_decision(self, sampling_context):
-        # type: (SamplingContext) -> None
-        """
-        Sets the profile's sampling decision according to the following
-        precedence rules:
-
-        1. If the transaction to be profiled is not sampled, that decision
-        will be used, regardless of anything else.
-
-        2. Use `profiles_sample_rate` to decide.
-        """
-
-        # The corresponding transaction was not sampled,
-        # so don't generate a profile for it.
-        if not self.sampled:
-            logger.debug("[Profiling] Discarding profile because transaction is discarded.")
-            self.sampled = False
-            return
-
-        # The profiler hasn't been properly initialized.
-        if self.scheduler is None:
-            logger.debug("[Profiling] Discarding profile because profiler was not started.")
-            self.sampled = False
-            return
-
-        client = sentry_sdk_alpha.get_client()
-        if not client.is_active():
-            self.sampled = False
-            return
-
-        options = client.options
-
-        sample_rate = None
-        if callable(options.get("profiles_sampler")):
-            sample_rate = options["profiles_sampler"](sampling_context)
-        elif options["profiles_sample_rate"] is not None:
-            sample_rate = options["profiles_sample_rate"]
-
-        # The profiles_sample_rate option was not set, so profiling
-        # was never enabled.
-        if sample_rate is None:
-            logger.debug("[Profiling] Discarding profile because profiling was not enabled.")
-            self.sampled = False
-            return
-
-        if not is_valid_sample_rate(sample_rate, source="Profiling"):
-            logger.warning("[Profiling] Discarding profile because of invalid sample rate.")
-            self.sampled = False
-            return
-
-        # Now we roll the dice. random.random is inclusive of 0, but not of 1,
-        # so strict < is safe here. In case sample_rate is a boolean, cast it
-        # to a float (True becomes 1.0 and False becomes 0.0)
-        self.sampled = random.random() < float(sample_rate)
-
-        if self.sampled:
-            logger.debug("[Profiling] Initializing profile")
-        else:
-            logger.debug(
-                "[Profiling] Discarding profile because it's not included in the random sample (sample rate = {sample_rate})".format(
-                    sample_rate=float(sample_rate)
-                )
-            )
-
-    def start(self):
-        # type: () -> None
-        if not self.sampled or self.active:
-            return
-
-        assert self.scheduler, "No scheduler specified"
-        logger.debug("[Profiling] Starting profile")
-        self.active = True
-        if not self.start_ns:
-            self.start_ns = time.perf_counter_ns()
-        self.scheduler.start_profiling(self)
-
-    def stop(self):
-        # type: () -> None
-        if not self.sampled or not self.active:
-            return
-
-        assert self.scheduler, "No scheduler specified"
-        logger.debug("[Profiling] Stopping profile")
-        self.active = False
-        self.stop_ns = time.perf_counter_ns()
-
-    def __enter__(self):
-        # type: () -> Profile
-        scope = sentry_sdk_alpha.get_isolation_scope()
-        old_profile = scope.profile
-        scope.profile = self
-
-        self._context_manager_state = (scope, old_profile)
-
-        self.start()
-
-        return self
-
-    def __exit__(self, ty, value, tb):
-        # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
-        self.stop()
-
-        scope, old_profile = self._context_manager_state
-        del self._context_manager_state
-
-        scope.profile = old_profile
-
-    def write(self, ts, sample):
-        # type: (int, ExtractedSample) -> None
-        if not self.active:
-            return
-
-        if ts < self.start_ns:
-            return
-
-        offset = ts - self.start_ns
-        if offset > MAX_PROFILE_DURATION_NS:
-            self.stop()
-            return
-
-        self.unique_samples += 1
-
-        elapsed_since_start_ns = str(offset)
-
-        for tid, (stack_id, frame_ids, frames) in sample:
-            try:
-                # Check if the stack is indexed first, this lets us skip
-                # indexing frames if it's not necessary
-                if stack_id not in self.indexed_stacks:
-                    for i, frame_id in enumerate(frame_ids):
-                        if frame_id not in self.indexed_frames:
-                            self.indexed_frames[frame_id] = len(self.indexed_frames)
-                            self.frames.append(frames[i])
-
-                    self.indexed_stacks[stack_id] = len(self.indexed_stacks)
-                    self.stacks.append([self.indexed_frames[frame_id] for frame_id in frame_ids])
-
-                self.samples.append(
-                    {
-                        "elapsed_since_start_ns": elapsed_since_start_ns,
-                        "thread_id": tid,
-                        "stack_id": self.indexed_stacks[stack_id],
-                    }
-                )
-            except AttributeError:
-                # For some reason, the frame we get doesn't have certain attributes.
-                # When this happens, we abandon the current sample as it's bad.
-                capture_internal_exception(sys.exc_info())
-
-    def process(self):
-        # type: () -> ProcessedProfile
-
-        # This collects the thread metadata at the end of a profile. Doing it
-        # this way means that any threads that terminate before the profile ends
-        # will not have any metadata associated with it.
-        thread_metadata = {
-            str(thread.ident): {
-                "name": str(thread.name),
-            }
-            for thread in threading.enumerate()
-        }  # type: Dict[str, ProcessedThreadMetadata]
-
-        return {
-            "frames": self.frames,
-            "stacks": self.stacks,
-            "samples": self.samples,
-            "thread_metadata": thread_metadata,
-        }
-
-    def to_json(self, event_opt, options):
-        # type: (Event, Dict[str, Any]) -> Dict[str, Any]
-        profile = self.process()
-
-        set_in_app_in_frames(
-            profile["frames"],
-            options["in_app_exclude"],
-            options["in_app_include"],
-            options["project_root"],
-        )
-
-        return {
-            "environment": event_opt.get("environment"),
-            "event_id": self.event_id,
-            "platform": "python",
-            "profile": profile,
-            "release": event_opt.get("release", ""),
-            "timestamp": event_opt["start_timestamp"],
-            "version": "1",
-            "device": {
-                "architecture": platform.machine(),
-            },
-            "os": {
-                "name": platform.system(),
-                "version": platform.release(),
-            },
-            "runtime": {
-                "name": platform.python_implementation(),
-                "version": platform.python_version(),
-            },
-            "transactions": [
-                {
-                    "id": event_opt["event_id"],
-                    "name": event_opt["transaction"],
-                    # we start the transaction before the profile and this is
-                    # the transaction start time relative to the profile, so we
-                    # hardcode it to 0 until we can start the profile before
-                    "relative_start_ns": "0",
-                    # use the duration of the profile instead of the transaction
-                    # because we end the transaction after the profile
-                    "relative_end_ns": str(self.stop_ns - self.start_ns),
-                    "trace_id": event_opt["contexts"]["trace"]["trace_id"],
-                    "active_thread_id": str(
-                        self._default_active_thread_id
-                        if self.active_thread_id is None
-                        else self.active_thread_id
-                    ),
-                }
-            ],
-        }
-
-    def valid(self):
-        # type: () -> bool
-        client = sentry_sdk_alpha.get_client()
-        if not client.is_active():
-            return False
-
-        if not has_profiling_enabled(client.options):
-            return False
-
-        if self.sampled is None or not self.sampled:
-            if client.transport:
-                client.transport.record_lost_event("sample_rate", data_category="profile")
-            return False
-
-        if self.unique_samples < PROFILE_MINIMUM_SAMPLES:
-            if client.transport:
-                client.transport.record_lost_event("insufficient_data", data_category="profile")
-            logger.debug("[Profiling] Discarding profile because insufficient samples.")
-            return False
-
-        return True
-
-
-class Scheduler(ABC):
-    mode = "unknown"  # type: ProfilerMode
-
-    def __init__(self, frequency):
-        # type: (int) -> None
-        self.interval = 1.0 / frequency
-
-        self.sampler = self.make_sampler()
-
-        # cap the number of new profiles at any time so it does not grow infinitely
-        self.new_profiles = deque(maxlen=128)  # type: Deque[Profile]
-        self.active_profiles = set()  # type: Set[Profile]
-
-    def __enter__(self):
-        # type: () -> Scheduler
-        self.setup()
-        return self
-
-    def __exit__(self, ty, value, tb):
-        # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
-        self.teardown()
-
-    @abstractmethod
-    def setup(self):
-        # type: () -> None
-        pass
-
-    @abstractmethod
-    def teardown(self):
-        # type: () -> None
-        pass
-
-    def ensure_running(self):
-        # type: () -> None
-        """
-        Ensure the scheduler is running. By default, this method is a no-op.
-        The method should be overridden by any implementation for which it is
-        relevant.
-        """
-        return None
-
-    def start_profiling(self, profile):
-        # type: (Profile) -> None
-        self.ensure_running()
-        self.new_profiles.append(profile)
-
-    def make_sampler(self):
-        # type: () -> Callable[..., None]
-        cwd = os.getcwd()
-
-        cache = LRUCache(max_size=256)
-
-        def _sample_stack(*args, **kwargs):
-            # type: (*Any, **Any) -> None
-            """
-            Take a sample of the stack on all the threads in the process.
-            This should be called at a regular interval to collect samples.
-            """
-            # no profiles taking place, so we can stop early
-            if not self.new_profiles and not self.active_profiles:
-                # make sure to clear the cache if we're not profiling so we dont
-                # keep a reference to the last stack of frames around
-                return
-
-            # This is the number of profiles we want to pop off.
-            # It's possible another thread adds a new profile to
-            # the list and we spend longer than we want inside
-            # the loop below.
-            #
-            # Also make sure to set this value before extracting
-            # frames so we do not write to any new profiles that
-            # were started after this point.
-            new_profiles = len(self.new_profiles)
-
-            now = time.perf_counter_ns()
-
-            try:
-                sample = [
-                    (str(tid), extract_stack(frame, cache, cwd))
-                    for tid, frame in sys._current_frames().items()
-                ]
-            except AttributeError:
-                # For some reason, the frame we get doesn't have certain attributes.
-                # When this happens, we abandon the current sample as it's bad.
-                capture_internal_exception(sys.exc_info())
-                return
-
-            # Move the new profiles into the active_profiles set.
-            #
-            # We cannot directly add the to active_profiles set
-            # in `start_profiling` because it is called from other
-            # threads which can cause a RuntimeError when it the
-            # set sizes changes during iteration without a lock.
-            #
-            # We also want to avoid using a lock here so threads
-            # that are starting profiles are not blocked until it
-            # can acquire the lock.
-            for _ in range(new_profiles):
-                self.active_profiles.add(self.new_profiles.popleft())
-
-            inactive_profiles = []
-
-            for profile in self.active_profiles:
-                if profile.active:
-                    profile.write(now, sample)
-                else:
-                    # If a profile is marked inactive, we buffer it
-                    # to `inactive_profiles` so it can be removed.
-                    # We cannot remove it here as it would result
-                    # in a RuntimeError.
-                    inactive_profiles.append(profile)
-
-            for profile in inactive_profiles:
-                self.active_profiles.remove(profile)
-
-        return _sample_stack
-
-
-class ThreadScheduler(Scheduler):
-    """
-    This scheduler is based on running a daemon thread that will call
-    the sampler at a regular interval.
-    """
-
-    mode = "thread"  # type: ProfilerMode
-    name = "sentry.profiler.ThreadScheduler"
-
-    def __init__(self, frequency):
-        # type: (int) -> None
-        super().__init__(frequency=frequency)
-
-        # used to signal to the thread that it should stop
-        self.running = False
-        self.thread = None  # type: Optional[threading.Thread]
-        self.pid = None  # type: Optional[int]
-        self.lock = threading.Lock()
-
-    def setup(self):
-        # type: () -> None
-        pass
-
-    def teardown(self):
-        # type: () -> None
-        if self.running:
-            self.running = False
-            if self.thread is not None:
-                self.thread.join()
-
-    def ensure_running(self):
-        # type: () -> None
-        """
-        Check that the profiler has an active thread to run in, and start one if
-        that's not the case.
-
-        Note that this might fail (e.g. in Python 3.12 it's not possible to
-        spawn new threads at interpreter shutdown). In that case self.running
-        will be False after running this function.
-        """
-        pid = os.getpid()
-
-        # is running on the right process
-        if self.running and self.pid == pid:
-            return
-
-        with self.lock:
-            # another thread may have tried to acquire the lock
-            # at the same time so it may start another thread
-            # make sure to check again before proceeding
-            if self.running and self.pid == pid:
-                return
-
-            self.pid = pid
-            self.running = True
-
-            # make sure the thread is a daemon here otherwise this
-            # can keep the application running after other threads
-            # have exited
-            self.thread = threading.Thread(name=self.name, target=self.run, daemon=True)
-            try:
-                self.thread.start()
-            except RuntimeError:
-                # Unfortunately at this point the interpreter is in a state that no
-                # longer allows us to spawn a thread and we have to bail.
-                self.running = False
-                self.thread = None
-                return
-
-    def run(self):
-        # type: () -> None
-        last = time.perf_counter()
-
-        while self.running:
-            self.sampler()
-
-            # some time may have elapsed since the last time
-            # we sampled, so we need to account for that and
-            # not sleep for too long
-            elapsed = time.perf_counter() - last
-            if elapsed < self.interval:
-                thread_sleep(self.interval - elapsed)
-
-            # after sleeping, make sure to take the current
-            # timestamp so we can use it next iteration
-            last = time.perf_counter()
-
-
-class GeventScheduler(Scheduler):
-    """
-    This scheduler is based on the thread scheduler but adapted to work with
-    gevent. When using gevent, it may monkey patch the threading modules
-    (`threading` and `_thread`). This results in the use of greenlets instead
-    of native threads.
-
-    This is an issue because the sampler CANNOT run in a greenlet because
-    1. Other greenlets doing sync work will prevent the sampler from running
-    2. The greenlet runs in the same thread as other greenlets so when taking
-       a sample, other greenlets will have been evicted from the thread. This
-       results in a sample containing only the sampler's code.
-    """
-
-    mode = "gevent"  # type: ProfilerMode
-    name = "sentry.profiler.GeventScheduler"
-
-    def __init__(self, frequency):
-        # type: (int) -> None
-
-        if ThreadPool is None:
-            raise ValueError(f"Profiler mode: {self.mode} is not available")
-
-        super().__init__(frequency=frequency)
-
-        # used to signal to the thread that it should stop
-        self.running = False
-        self.thread = None  # type: Optional[_ThreadPool]
-        self.pid = None  # type: Optional[int]
-
-        # This intentionally uses the gevent patched threading.Lock.
-        # The lock will be required when first trying to start profiles
-        # as we need to spawn the profiler thread from the greenlets.
-        self.lock = threading.Lock()
-
-    def setup(self):
-        # type: () -> None
-        pass
-
-    def teardown(self):
-        # type: () -> None
-        if self.running:
-            self.running = False
-            if self.thread is not None:
-                self.thread.join()
-
-    def ensure_running(self):
-        # type: () -> None
-        pid = os.getpid()
-
-        # is running on the right process
-        if self.running and self.pid == pid:
-            return
-
-        with self.lock:
-            # another thread may have tried to acquire the lock
-            # at the same time so it may start another thread
-            # make sure to check again before proceeding
-            if self.running and self.pid == pid:
-                return
-
-            self.pid = pid
-            self.running = True
-
-            self.thread = ThreadPool(1)  # type: ignore[misc]
-            try:
-                self.thread.spawn(self.run)
-            except RuntimeError:
-                # Unfortunately at this point the interpreter is in a state that no
-                # longer allows us to spawn a thread and we have to bail.
-                self.running = False
-                self.thread = None
-                return
-
-    def run(self):
-        # type: () -> None
-        last = time.perf_counter()
-
-        while self.running:
-            self.sampler()
-
-            # some time may have elapsed since the last time
-            # we sampled, so we need to account for that and
-            # not sleep for too long
-            elapsed = time.perf_counter() - last
-            if elapsed < self.interval:
-                thread_sleep(self.interval - elapsed)
-
-            # after sleeping, make sure to take the current
-            # timestamp so we can use it next iteration
-            last = time.perf_counter()
diff --git a/src/sentry_sdk_alpha/profiler/utils.py b/src/sentry_sdk_alpha/profiler/utils.py
deleted file mode 100644
index 0905189397aaa5..00000000000000
--- a/src/sentry_sdk_alpha/profiler/utils.py
+++ /dev/null
@@ -1,189 +0,0 @@
-import os
-from collections import deque
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha._compat import PY311
-from sentry_sdk_alpha.utils import filename_for_module
-
-if TYPE_CHECKING:
-    from collections.abc import Sequence
-    from types import FrameType
-    from typing import Deque, List, Optional, Tuple, TypedDict
-
-    from sentry_sdk_alpha._lru_cache import LRUCache
-
-    ThreadId = str
-
-    ProcessedStack = list[int]
-
-    class ProcessedFrame(TypedDict):
-        abs_path: str
-        filename: str | None
-        function: str
-        lineno: int
-        module: str | None
-
-    class ProcessedThreadMetadata(TypedDict):
-        name: str
-
-    FrameId = tuple[
-        str,  # abs_path
-        int,  # lineno
-        str,  # function
-    ]
-    FrameIds = tuple[FrameId, ...]
-
-    # The exact value of this id is not very meaningful. The purpose
-    # of this id is to give us a compact and unique identifier for a
-    # raw stack that can be used as a key to a dictionary so that it
-    # can be used during the sampled format generation.
-    StackId = tuple[int, int]
-
-    ExtractedStack = tuple[StackId, FrameIds, list[ProcessedFrame]]
-    ExtractedSample = Sequence[tuple[ThreadId, ExtractedStack]]
-
-# The default sampling frequency to use. This is set at 101 in order to
-# mitigate the effects of lockstep sampling.
-DEFAULT_SAMPLING_FREQUENCY = 101
-
-
-# We want to impose a stack depth limit so that samples aren't too large.
-MAX_STACK_DEPTH = 128
-
-
-if PY311:
-
-    def get_frame_name(frame):
-        # type: (FrameType) -> str
-        return frame.f_code.co_qualname
-
-else:
-
-    def get_frame_name(frame):
-        # type: (FrameType) -> str
-
-        f_code = frame.f_code
-        co_varnames = f_code.co_varnames
-
-        # co_name only contains the frame name.  If the frame was a method,
-        # the class name will NOT be included.
-        name = f_code.co_name
-
-        # if it was a method, we can get the class name by inspecting
-        # the f_locals for the `self` argument
-        try:
-            if (
-                # the co_varnames start with the frame's positional arguments
-                # and we expect the first to be `self` if its an instance method
-                co_varnames
-                and co_varnames[0] == "self"
-                and "self" in frame.f_locals
-            ):
-                for cls in type(frame.f_locals["self"]).__mro__:
-                    if name in cls.__dict__:
-                        return f"{cls.__name__}.{name}"
-        except (AttributeError, ValueError):
-            pass
-
-        # if it was a class method, (decorated with `@classmethod`)
-        # we can get the class name by inspecting the f_locals for the `cls` argument
-        try:
-            if (
-                # the co_varnames start with the frame's positional arguments
-                # and we expect the first to be `cls` if its a class method
-                co_varnames
-                and co_varnames[0] == "cls"
-                and "cls" in frame.f_locals
-            ):
-                for cls in frame.f_locals["cls"].__mro__:
-                    if name in cls.__dict__:
-                        return f"{cls.__name__}.{name}"
-        except (AttributeError, ValueError):
-            pass
-
-        # nothing we can do if it is a staticmethod (decorated with @staticmethod)
-
-        # we've done all we can, time to give up and return what we have
-        return name
-
-
-def frame_id(raw_frame):
-    # type: (FrameType) -> FrameId
-    return (raw_frame.f_code.co_filename, raw_frame.f_lineno, get_frame_name(raw_frame))
-
-
-def extract_frame(fid, raw_frame, cwd):
-    # type: (FrameId, FrameType, str) -> ProcessedFrame
-    abs_path = raw_frame.f_code.co_filename
-
-    try:
-        module = raw_frame.f_globals["__name__"]
-    except Exception:
-        module = None
-
-    # namedtuples can be many times slower when initialing
-    # and accessing attribute so we opt to use a tuple here instead
-    return {
-        # This originally was `os.path.abspath(abs_path)` but that had
-        # a large performance overhead.
-        #
-        # According to docs, this is equivalent to
-        # `os.path.normpath(os.path.join(os.getcwd(), path))`.
-        # The `os.getcwd()` call is slow here, so we precompute it.
-        #
-        # Additionally, since we are using normalized path already,
-        # we skip calling `os.path.normpath` entirely.
-        "abs_path": os.path.join(cwd, abs_path),
-        "module": module,
-        "filename": filename_for_module(module, abs_path) or None,
-        "function": fid[2],
-        "lineno": raw_frame.f_lineno,
-    }
-
-
-def extract_stack(
-    raw_frame,  # type: Optional[FrameType]
-    cache,  # type: LRUCache
-    cwd,  # type: str
-    max_stack_depth=MAX_STACK_DEPTH,  # type: int
-):
-    # type: (...) -> ExtractedStack
-    """
-    Extracts the stack starting the specified frame. The extracted stack
-    assumes the specified frame is the top of the stack, and works back
-    to the bottom of the stack.
-
-    In the event that the stack is more than `MAX_STACK_DEPTH` frames deep,
-    only the first `MAX_STACK_DEPTH` frames will be returned.
-    """
-
-    raw_frames = deque(maxlen=max_stack_depth)  # type: Deque[FrameType]
-
-    while raw_frame is not None:
-        f_back = raw_frame.f_back
-        raw_frames.append(raw_frame)
-        raw_frame = f_back
-
-    frame_ids = tuple(frame_id(raw_frame) for raw_frame in raw_frames)
-    frames = []
-    for i, fid in enumerate(frame_ids):
-        frame = cache.get(fid)
-        if frame is None:
-            frame = extract_frame(fid, raw_frames[i], cwd)
-            cache.set(fid, frame)
-        frames.append(frame)
-
-    # Instead of mapping the stack into frame ids and hashing
-    # that as a tuple, we can directly hash the stack.
-    # This saves us from having to generate yet another list.
-    # Additionally, using the stack as the key directly is
-    # costly because the stack can be large, so we pre-hash
-    # the stack, and use the hash as the key as this will be
-    # needed a few times to improve performance.
-    #
-    # To Reduce the likelihood of hash collisions, we include
-    # the stack depth. This means that only stacks of the same
-    # depth can suffer from hash collisions.
-    stack_id = len(raw_frames), hash(frame_ids)
-
-    return stack_id, frame_ids, frames
diff --git a/src/sentry_sdk_alpha/py.typed b/src/sentry_sdk_alpha/py.typed
deleted file mode 100644
index e69de29bb2d1d6..00000000000000
diff --git a/src/sentry_sdk_alpha/scope.py b/src/sentry_sdk_alpha/scope.py
deleted file mode 100644
index 3697eb5d633c27..00000000000000
--- a/src/sentry_sdk_alpha/scope.py
+++ /dev/null
@@ -1,1514 +0,0 @@
-import os
-import sys
-import warnings
-from collections import deque
-from contextlib import contextmanager
-from copy import copy, deepcopy
-from datetime import datetime, timezone
-from enum import Enum
-from functools import wraps
-from itertools import chain
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha._types import AnnotatedValue
-from sentry_sdk_alpha.attachments import Attachment
-from sentry_sdk_alpha.consts import (
-    BAGGAGE_HEADER_NAME,
-    DEFAULT_MAX_BREADCRUMBS,
-    FALSE_VALUES,
-    SENTRY_TRACE_HEADER_NAME,
-)
-from sentry_sdk_alpha.feature_flags import DEFAULT_FLAG_CAPACITY, FlagBuffer
-from sentry_sdk_alpha.profiler.transaction_profiler import Profile
-from sentry_sdk_alpha.session import Session
-from sentry_sdk_alpha.tracing import NoOpSpan, Span
-from sentry_sdk_alpha.tracing_utils import Baggage, PropagationContext, has_tracing_enabled
-from sentry_sdk_alpha.utils import (
-    ContextVar,
-    capture_internal_exception,
-    capture_internal_exceptions,
-    datetime_from_isoformat,
-    disable_capture_event,
-    event_from_exception,
-    exc_info_from_error,
-    logger,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Callable, Generator, Iterator, Mapping, MutableMapping
-    from typing import Any, Deque, Dict, List, Optional, ParamSpec, Self, Tuple, TypeVar, Union
-
-    import sentry_sdk_alpha
-    from sentry_sdk_alpha._types import (
-        Breadcrumb,
-        BreadcrumbHint,
-        ErrorProcessor,
-        Event,
-        EventProcessor,
-        ExcInfo,
-        Hint,
-        LogLevelStr,
-        Type,
-    )
-
-    P = ParamSpec("P")
-    R = TypeVar("R")
-
-    F = TypeVar("F", bound=Callable[..., Any])
-    T = TypeVar("T")
-
-
-# Holds data that will be added to **all** events sent by this process.
-# In case this is a http server (think web framework) with multiple users
-# the data will be added to events of all users.
-# Typically this is used for process wide data such as the release.
-_global_scope = None  # type: Optional[Scope]
-
-# Holds data for the active request.
-# This is used to isolate data for different requests or users.
-# The isolation scope is usually created by integrations, but may also
-# be created manually
-_isolation_scope = ContextVar("isolation_scope", default=None)
-
-# Holds data for the active span.
-# This can be used to manually add additional data to a span.
-_current_scope = ContextVar("current_scope", default=None)
-
-global_event_processors = []  # type: List[EventProcessor]
-
-
-class ScopeType(Enum):
-    CURRENT = "current"
-    ISOLATION = "isolation"
-    GLOBAL = "global"
-    MERGED = "merged"
-
-
-def add_global_event_processor(processor):
-    # type: (EventProcessor) -> None
-    global_event_processors.append(processor)
-
-
-def _attr_setter(fn):
-    # type: (Any) -> Any
-    return property(fset=fn, doc=fn.__doc__)
-
-
-def _disable_capture(fn):
-    # type: (F) -> F
-    @wraps(fn)
-    def wrapper(self, *args, **kwargs):
-        # type: (Any, *Dict[str, Any], **Any) -> Any
-        if not self._should_capture:
-            return
-        try:
-            self._should_capture = False
-            return fn(self, *args, **kwargs)
-        finally:
-            self._should_capture = True
-
-    return wrapper  # type: ignore
-
-
-class Scope:
-    """The scope holds extra information that should be sent with all
-    events that belong to it.
-    """
-
-    # NOTE: Even though it should not happen, the scope needs to not crash when
-    # accessed by multiple threads. It's fine if it's full of races, but those
-    # races should never make the user application crash.
-    #
-    # The same needs to hold for any accesses of the scope the SDK makes.
-
-    __slots__ = (
-        "_level",
-        "_name",
-        "_fingerprint",
-        # note that for legacy reasons, _transaction is the transaction *name*,
-        # not a Transaction object (the object is stored in _span)
-        "_transaction",
-        "_transaction_info",
-        "_user",
-        "_tags",
-        "_contexts",
-        "_extras",
-        "_breadcrumbs",
-        "_n_breadcrumbs_truncated",
-        "_event_processors",
-        "_error_processors",
-        "_should_capture",
-        "_span",
-        "_session",
-        "_attachments",
-        "_force_auto_session_tracking",
-        "_profile",
-        "_propagation_context",
-        "client",
-        "_type",
-        "_last_event_id",
-        "_flags",
-    )
-
-    def __init__(self, ty=None, client=None):
-        # type: (Optional[ScopeType], Optional[sentry_sdk.Client]) -> None
-        self._type = ty
-
-        self._event_processors = []  # type: List[EventProcessor]
-        self._error_processors = []  # type: List[ErrorProcessor]
-
-        self._name = None  # type: Optional[str]
-        self._propagation_context = None  # type: Optional[PropagationContext]
-        self._n_breadcrumbs_truncated = 0  # type: int
-
-        self.client = NonRecordingClient()  # type: sentry_sdk.client.BaseClient
-
-        if client is not None:
-            self.set_client(client)
-
-        self.clear()
-
-        incoming_trace_information = self._load_trace_data_from_env()
-        self.generate_propagation_context(incoming_data=incoming_trace_information)
-
-    def __copy__(self):
-        # type: () -> Self
-        """
-        Returns a copy of this scope.
-        This also creates a copy of all referenced data structures.
-        """
-        rv = object.__new__(self.__class__)  # type: Self
-
-        rv._type = self._type
-        rv.client = self.client
-        rv._level = self._level
-        rv._name = self._name
-        rv._fingerprint = self._fingerprint
-        rv._transaction = self._transaction
-        rv._transaction_info = dict(self._transaction_info)
-        rv._user = self._user
-
-        rv._tags = dict(self._tags)
-        rv._contexts = dict(self._contexts)
-        rv._extras = dict(self._extras)
-
-        rv._breadcrumbs = copy(self._breadcrumbs)
-        rv._n_breadcrumbs_truncated = copy(self._n_breadcrumbs_truncated)
-        rv._event_processors = list(self._event_processors)
-        rv._error_processors = list(self._error_processors)
-        rv._propagation_context = self._propagation_context
-
-        rv._should_capture = self._should_capture
-        rv._span = self._span
-        rv._session = self._session
-        rv._force_auto_session_tracking = self._force_auto_session_tracking
-        rv._attachments = list(self._attachments)
-
-        rv._profile = self._profile
-
-        rv._last_event_id = self._last_event_id
-
-        rv._flags = deepcopy(self._flags)
-
-        return rv
-
-    @classmethod
-    def get_current_scope(cls):
-        # type: () -> Scope
-        """
-        .. versionadded:: 2.0.0
-
-        Returns the current scope.
-        """
-        current_scope = cls._get_current_scope()
-        if current_scope is None:
-            current_scope = Scope(ty=ScopeType.CURRENT)
-            _current_scope.set(current_scope)
-
-        return current_scope
-
-    @classmethod
-    def _get_current_scope(cls):
-        # type: () -> Optional[Scope]
-        """
-        Returns the current scope without creating a new one. Internal use only.
-        """
-        return _current_scope.get()
-
-    @classmethod
-    def set_current_scope(cls, new_current_scope):
-        # type: (Scope) -> None
-        """
-        .. versionadded:: 2.0.0
-
-        Sets the given scope as the new current scope overwriting the existing current scope.
-        :param new_current_scope: The scope to set as the new current scope.
-        """
-        _current_scope.set(new_current_scope)
-
-    @classmethod
-    def get_isolation_scope(cls):
-        # type: () -> Scope
-        """
-        .. versionadded:: 2.0.0
-
-        Returns the isolation scope.
-        """
-        isolation_scope = cls._get_isolation_scope()
-        if isolation_scope is None:
-            isolation_scope = Scope(ty=ScopeType.ISOLATION)
-            _isolation_scope.set(isolation_scope)
-
-        return isolation_scope
-
-    @classmethod
-    def _get_isolation_scope(cls):
-        # type: () -> Optional[Scope]
-        """
-        Returns the isolation scope without creating a new one. Internal use only.
-        """
-        return _isolation_scope.get()
-
-    @classmethod
-    def set_isolation_scope(cls, new_isolation_scope):
-        # type: (Scope) -> None
-        """
-        .. versionadded:: 2.0.0
-
-        Sets the given scope as the new isolation scope overwriting the existing isolation scope.
-        :param new_isolation_scope: The scope to set as the new isolation scope.
-        """
-        _isolation_scope.set(new_isolation_scope)
-
-    @classmethod
-    def get_global_scope(cls):
-        # type: () -> Scope
-        """
-        .. versionadded:: 2.0.0
-
-        Returns the global scope.
-        """
-        global _global_scope
-        if _global_scope is None:
-            _global_scope = Scope(ty=ScopeType.GLOBAL)
-
-        return _global_scope
-
-    @classmethod
-    def last_event_id(cls):
-        # type: () -> Optional[str]
-        """
-        .. versionadded:: 2.2.0
-
-        Returns event ID of the event most recently captured by the isolation scope, or None if no event
-        has been captured. We do not consider events that are dropped, e.g. by a before_send hook.
-        Transactions also are not considered events in this context.
-
-        The event corresponding to the returned event ID is NOT guaranteed to actually be sent to Sentry;
-        whether the event is sent depends on the transport. The event could be sent later or not at all.
-        Even a sent event could fail to arrive in Sentry due to network issues, exhausted quotas, or
-        various other reasons.
-        """
-        return cls.get_isolation_scope()._last_event_id
-
-    def _merge_scopes(self, additional_scope=None, additional_scope_kwargs=None):
-        # type: (Optional[Scope], Optional[Dict[str, Any]]) -> Self
-        """
-        Merges global, isolation and current scope into a new scope and
-        adds the given additional scope or additional scope kwargs to it.
-        """
-        if additional_scope and additional_scope_kwargs:
-            raise TypeError("cannot provide scope and kwargs")
-
-        final_scope = self.__class__()
-        final_scope._type = ScopeType.MERGED
-
-        global_scope = self.get_global_scope()
-        final_scope.update_from_scope(global_scope)
-
-        isolation_scope = self.get_isolation_scope()
-        final_scope.update_from_scope(self.get_isolation_scope())
-
-        current_scope = self.get_current_scope()
-        final_scope.update_from_scope(current_scope)
-
-        if self != current_scope and self != isolation_scope:
-            final_scope.update_from_scope(self)
-
-        if additional_scope is not None:
-            if callable(additional_scope):
-                additional_scope(final_scope)
-            else:
-                final_scope.update_from_scope(additional_scope)
-
-        elif additional_scope_kwargs:
-            final_scope.update_from_kwargs(**additional_scope_kwargs)
-
-        return final_scope
-
-    @classmethod
-    def get_client(cls):
-        # type: () -> sentry_sdk.client.BaseClient
-        """
-        .. versionadded:: 2.0.0
-
-        Returns the currently used :py:class:`sentry_sdk.Client`.
-        This checks the current scope, the isolation scope and the global scope for a client.
-        If no client is available a :py:class:`sentry_sdk.client.NonRecordingClient` is returned.
-        """
-        current_scope = cls.get_current_scope()
-        try:
-            client = current_scope.client
-        except AttributeError:
-            client = None
-
-        if client is not None and client.is_active():
-            return client
-
-        isolation_scope = cls.get_isolation_scope()
-        try:
-            client = isolation_scope.client
-        except AttributeError:
-            client = None
-
-        if client is not None and client.is_active():
-            return client
-
-        try:
-            client = _global_scope.client  # type: ignore
-        except AttributeError:
-            client = None
-
-        if client is not None and client.is_active():
-            return client
-
-        return NonRecordingClient()
-
-    def set_client(self, client=None):
-        # type: (Optional[sentry_sdk.client.BaseClient]) -> None
-        """
-        .. versionadded:: 2.0.0
-
-        Sets the client for this scope.
-
-        :param client: The client to use in this scope.
-            If `None` the client of the scope will be replaced by a :py:class:`sentry_sdk.NonRecordingClient`.
-
-        """
-        self.client = client if client is not None else NonRecordingClient()
-
-    def fork(self):
-        # type: () -> Self
-        """
-        .. versionadded:: 2.0.0
-
-        Returns a fork of this scope.
-        """
-        forked_scope = copy(self)
-        return forked_scope
-
-    def _load_trace_data_from_env(self):
-        # type: () -> Optional[Dict[str, str]]
-        """
-        Load Sentry trace id and baggage from environment variables.
-        Can be disabled by setting SENTRY_USE_ENVIRONMENT to "false".
-        """
-        incoming_trace_information = None
-
-        sentry_use_environment = (os.environ.get("SENTRY_USE_ENVIRONMENT") or "").lower()
-        use_environment = sentry_use_environment not in FALSE_VALUES
-        if use_environment:
-            incoming_trace_information = {}
-
-            if os.environ.get("SENTRY_TRACE"):
-                incoming_trace_information[SENTRY_TRACE_HEADER_NAME] = (
-                    os.environ.get("SENTRY_TRACE") or ""
-                )
-
-            if os.environ.get("SENTRY_BAGGAGE"):
-                incoming_trace_information[BAGGAGE_HEADER_NAME] = (
-                    os.environ.get("SENTRY_BAGGAGE") or ""
-                )
-
-        return incoming_trace_information or None
-
-    def set_new_propagation_context(self):
-        # type: () -> None
-        """
-        Creates a new propagation context and sets it as `_propagation_context`. Overwriting existing one.
-        """
-        self._propagation_context = PropagationContext()
-
-    def generate_propagation_context(self, incoming_data=None):
-        # type: (Optional[Dict[str, str]]) -> None
-        """
-        Makes sure the propagation context is set on the scope.
-        If there is `incoming_data` overwrite existing propagation context.
-        If there is no `incoming_data` create new propagation context, but do NOT overwrite if already existing.
-        """
-        if incoming_data:
-            propagation_context = PropagationContext.from_incoming_data(incoming_data)
-            if propagation_context is not None:
-                self._propagation_context = propagation_context
-
-        if self._type != ScopeType.CURRENT:
-            if self._propagation_context is None:
-                self.set_new_propagation_context()
-
-    def get_dynamic_sampling_context(self):
-        # type: () -> Optional[Dict[str, str]]
-        """
-        Returns the Dynamic Sampling Context from the baggage or populates one.
-        """
-        baggage = self.get_baggage()
-        return baggage.dynamic_sampling_context() if baggage else None
-
-    def get_traceparent(self, *args, **kwargs):
-        # type: (Any, Any) -> Optional[str]
-        """
-        Returns the Sentry "sentry-trace" header (aka the traceparent) from the
-        currently active span or the scopes Propagation Context.
-        """
-        client = self.get_client()
-
-        # If we have an active span, return traceparent from there
-        if has_tracing_enabled(client.options) and self.span is not None and self.span.is_valid:
-            return self.span.to_traceparent()
-
-        # If this scope has a propagation context, return traceparent from there
-        if self._propagation_context is not None:
-            return self._propagation_context.to_traceparent()
-
-        # Fall back to isolation scope's traceparent. It always has one
-        return self.get_isolation_scope().get_traceparent()
-
-    def get_baggage(self, *args, **kwargs):
-        # type: (Any, Any) -> Optional[Baggage]
-        """
-        Returns the Sentry "baggage" header containing trace information from the
-        currently active span or the scopes Propagation Context.
-        If not existing, creates a new one.
-        """
-        client = self.get_client()
-
-        # If we have an active span, return baggage from there
-        if has_tracing_enabled(client.options) and self.span is not None and self.span.is_valid:
-            return self.span.to_baggage()
-
-        # If this scope has a propagation context, return baggage from there
-        # populate a fresh one if it doesn't exist
-        if self._propagation_context is not None:
-            if self._propagation_context.baggage is None:
-                self._propagation_context.baggage = Baggage.from_options(self)
-            return self._propagation_context.baggage
-
-        # Fall back to isolation scope's baggage. It always has one
-        return self.get_isolation_scope().get_baggage()
-
-    def get_trace_context(self):
-        # type: () -> Any
-        """
-        Returns the Sentry "trace" context from the Propagation Context.
-        """
-        if self._propagation_context is None:
-            return None
-
-        trace_context = {
-            "trace_id": self._propagation_context.trace_id,
-            "span_id": self._propagation_context.span_id,
-            "parent_span_id": self._propagation_context.parent_span_id,
-            "dynamic_sampling_context": self.get_dynamic_sampling_context(),
-        }  # type: Dict[str, Any]
-
-        return trace_context
-
-    def trace_propagation_meta(self, *args, **kwargs):
-        # type: (*Any, **Any) -> str
-        """
-        Return meta tags which should be injected into HTML templates
-        to allow propagation of trace information.
-        """
-        meta = ""
-
-        sentry_trace = self.get_traceparent()
-        if sentry_trace is not None:
-            meta += ''.format(
-                SENTRY_TRACE_HEADER_NAME,
-                sentry_trace,
-            )
-
-        baggage = self.get_baggage()
-        if baggage is not None:
-            meta += ''.format(
-                BAGGAGE_HEADER_NAME,
-                baggage.serialize(),
-            )
-
-        return meta
-
-    def iter_headers(self):
-        # type: () -> Iterator[Tuple[str, str]]
-        """
-        Creates a generator which returns the `sentry-trace` and `baggage` headers from the Propagation Context.
-        """
-        if self._propagation_context is not None:
-            traceparent = self.get_traceparent()
-            if traceparent is not None:
-                yield SENTRY_TRACE_HEADER_NAME, traceparent
-
-            baggage = self.get_baggage()
-            if baggage is not None:
-                yield BAGGAGE_HEADER_NAME, baggage.serialize()
-
-    def iter_trace_propagation_headers(self, *args, **kwargs):
-        # type: (Any, Any) -> Generator[Tuple[str, str], None, None]
-        """
-        Return HTTP headers which allow propagation of trace data.
-
-        If a span is given, the trace data will taken from the span.
-        If no span is given, the trace data is taken from the scope.
-        """
-        client = self.get_client()
-
-        span = kwargs.pop("span", None)
-        span = span or self.span
-
-        if has_tracing_enabled(client.options) and span is not None and span.is_valid:
-            yield from span.iter_headers()
-        else:
-            # If this scope has a propagation context, return headers from there
-            # (it could be that self is not the current scope nor the isolation scope)
-            if self._propagation_context is not None:
-                yield from self.iter_headers()
-            else:
-                # otherwise try headers from current scope
-                current_scope = self.get_current_scope()
-                if current_scope._propagation_context is not None:
-                    yield from current_scope.iter_headers()
-                else:
-                    # otherwise fall back to headers from isolation scope
-                    isolation_scope = self.get_isolation_scope()
-                    if isolation_scope._propagation_context is not None:
-                        yield from isolation_scope.iter_headers()
-
-    def get_active_propagation_context(self):
-        # type: () -> Optional[PropagationContext]
-        if self._propagation_context is not None:
-            return self._propagation_context
-
-        current_scope = self.get_current_scope()
-        if current_scope._propagation_context is not None:
-            return current_scope._propagation_context
-
-        isolation_scope = self.get_isolation_scope()
-        if isolation_scope._propagation_context is not None:
-            return isolation_scope._propagation_context
-
-        return None
-
-    def clear(self):
-        # type: () -> None
-        """Clears the entire scope."""
-        self._level = None  # type: Optional[LogLevelStr]
-        self._fingerprint = None  # type: Optional[List[str]]
-        self._transaction = None  # type: Optional[str]
-        self._transaction_info = {}  # type: MutableMapping[str, str]
-        self._user = None  # type: Optional[Dict[str, Any]]
-
-        self._tags = {}  # type: Dict[str, Any]
-        self._contexts = {}  # type: Dict[str, Dict[str, Any]]
-        self._extras = {}  # type: MutableMapping[str, Any]
-        self._attachments = []  # type: List[Attachment]
-
-        self.clear_breadcrumbs()
-        self._should_capture = True  # type: bool
-
-        self._span = None  # type: Optional[Span]
-        self._session = None  # type: Optional[Session]
-        self._force_auto_session_tracking = None  # type: Optional[bool]
-
-        self._profile = None  # type: Optional[Profile]
-
-        self._propagation_context = None
-
-        # self._last_event_id is only applicable to isolation scopes
-        self._last_event_id = None  # type: Optional[str]
-        self._flags = None  # type: Optional[FlagBuffer]
-
-    def set_level(self, value):
-        # type: (LogLevelStr) -> None
-        """
-        Sets the level for the scope.
-
-        :param value: The level to set.
-        """
-        self._level = value
-
-    @_attr_setter
-    def fingerprint(self, value):
-        # type: (Optional[List[str]]) -> None
-        """When set this overrides the default fingerprint."""
-        self._fingerprint = value
-
-    @property
-    def root_span(self):
-        # type: () -> Optional[Span]
-        """Return the root span in the scope, if any."""
-        if self._span is None:
-            return None
-
-        return self._span.root_span
-
-    def set_transaction_name(self, name, source=None):
-        # type: (str, Optional[str]) -> None
-        """Set the transaction name and optionally the transaction source."""
-        self._transaction = name
-
-        if self._span and self._span.root_span:
-            self._span.root_span.name = name
-            if source:
-                self._span.root_span.source = source
-
-        if source:
-            self._transaction_info["source"] = source
-
-    @property
-    def transaction_name(self):
-        # type: () -> Optional[str]
-        return self._transaction
-
-    @property
-    def transaction_source(self):
-        # type: () -> Optional[str]
-        return self._transaction_info.get("source")
-
-    def set_user(self, value):
-        # type: (Optional[Dict[str, Any]]) -> None
-        """Sets a user for the scope."""
-        self._user = value
-        session = self.get_isolation_scope()._session
-        if session is not None:
-            session.update(user=value)
-
-    @property
-    def span(self):
-        # type: () -> Optional[Span]
-        """Get current tracing span."""
-        return self._span
-
-    @span.setter
-    def span(self, span):
-        # type: (Optional[Span]) -> None
-        """Set current tracing span."""
-        self._span = span
-
-    @property
-    def profile(self):
-        # type: () -> Optional[Profile]
-        return self._profile
-
-    @profile.setter
-    def profile(self, profile):
-        # type: (Optional[Profile]) -> None
-
-        self._profile = profile
-
-    def set_tag(self, key, value):
-        # type: (str, Any) -> None
-        """
-        Sets a tag for a key to a specific value.
-
-        :param key: Key of the tag to set.
-
-        :param value: Value of the tag to set.
-        """
-        self._tags[key] = value
-
-    def set_tags(self, tags):
-        # type: (Mapping[str, object]) -> None
-        """Sets multiple tags at once.
-
-        This method updates multiple tags at once. The tags are passed as a dictionary
-        or other mapping type.
-
-        Calling this method is equivalent to calling `set_tag` on each key-value pair
-        in the mapping. If a tag key already exists in the scope, its value will be
-        updated. If the tag key does not exist in the scope, the key-value pair will
-        be added to the scope.
-
-        This method only modifies tag keys in the `tags` mapping passed to the method.
-        `scope.set_tags({})` is, therefore, a no-op.
-
-        :param tags: A mapping of tag keys to tag values to set.
-        """
-        self._tags.update(tags)
-
-    def remove_tag(self, key):
-        # type: (str) -> None
-        """
-        Removes a specific tag.
-
-        :param key: Key of the tag to remove.
-        """
-        self._tags.pop(key, None)
-
-    def set_context(
-        self,
-        key,  # type: str
-        value,  # type: Dict[str, Any]
-    ):
-        # type: (...) -> None
-        """
-        Binds a context at a certain key to a specific value.
-        """
-        self._contexts[key] = value
-
-    def remove_context(
-        self, key  # type: str
-    ):
-        # type: (...) -> None
-        """Removes a context."""
-        self._contexts.pop(key, None)
-
-    def set_extra(
-        self,
-        key,  # type: str
-        value,  # type: Any
-    ):
-        # type: (...) -> None
-        """Sets an extra key to a specific value."""
-        self._extras[key] = value
-
-    def remove_extra(
-        self, key  # type: str
-    ):
-        # type: (...) -> None
-        """Removes a specific extra key."""
-        self._extras.pop(key, None)
-
-    def clear_breadcrumbs(self):
-        # type: () -> None
-        """Clears breadcrumb buffer."""
-        self._breadcrumbs = deque()  # type: Deque[Breadcrumb]
-        self._n_breadcrumbs_truncated = 0
-
-    def add_attachment(
-        self,
-        bytes=None,  # type: Union[None, bytes, Callable[[], bytes]]
-        filename=None,  # type: Optional[str]
-        path=None,  # type: Optional[str]
-        content_type=None,  # type: Optional[str]
-        add_to_transactions=False,  # type: bool
-    ):
-        # type: (...) -> None
-        """Adds an attachment to future events sent from this scope.
-
-        The parameters are the same as for the :py:class:`sentry_sdk.attachments.Attachment` constructor.
-        """
-        self._attachments.append(
-            Attachment(
-                bytes=bytes,
-                path=path,
-                filename=filename,
-                content_type=content_type,
-                add_to_transactions=add_to_transactions,
-            )
-        )
-
-    def add_breadcrumb(self, crumb=None, hint=None, **kwargs):
-        # type: (Optional[Breadcrumb], Optional[BreadcrumbHint], Any) -> None
-        """
-        Adds a breadcrumb.
-
-        :param crumb: Dictionary with the data as the sentry v7/v8 protocol expects.
-
-        :param hint: An optional value that can be used by `before_breadcrumb`
-            to customize the breadcrumbs that are emitted.
-        """
-        client = self.get_client()
-
-        if not client.is_active():
-            logger.info("Dropped breadcrumb because no client bound")
-            return
-
-        before_breadcrumb = client.options.get("before_breadcrumb")
-        max_breadcrumbs = client.options.get("max_breadcrumbs", DEFAULT_MAX_BREADCRUMBS)
-
-        crumb = dict(crumb or ())  # type: Breadcrumb
-        crumb.update(kwargs)
-        if not crumb:
-            return
-
-        hint = dict(hint or ())  # type: Hint
-
-        if crumb.get("timestamp") is None:
-            crumb["timestamp"] = datetime.now(timezone.utc)
-        if crumb.get("type") is None:
-            crumb["type"] = "default"
-
-        if before_breadcrumb is not None:
-            new_crumb = before_breadcrumb(crumb, hint)
-        else:
-            new_crumb = crumb
-
-        if new_crumb is not None:
-            self._breadcrumbs.append(new_crumb)
-        else:
-            logger.info("before breadcrumb dropped breadcrumb (%s)", crumb)
-
-        while len(self._breadcrumbs) > max_breadcrumbs:
-            self._breadcrumbs.popleft()
-            self._n_breadcrumbs_truncated += 1
-
-    def start_transaction(self, **kwargs):
-        # type: (Any) -> Union[NoOpSpan, Span]
-        """
-        .. deprecated:: 3.0.0
-            This function is deprecated and will be removed in a future release.
-            Use :py:meth:`sentry_sdk.start_span` instead.
-        """
-        warnings.warn(
-            "The `start_transaction` method is deprecated, please use `sentry_sdk.start_span instead.`",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-        return NoOpSpan(**kwargs)
-
-    def start_span(self, **kwargs):
-        # type: (Any) -> Union[NoOpSpan, Span]
-        """
-        Start a span whose parent is the currently active span, if any.
-
-        The return value is a :py:class:`sentry_sdk.tracing.Span` instance,
-        typically used as a context manager to start and stop timing in a `with`
-        block.
-
-        For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`.
-        """
-        return NoOpSpan(**kwargs)
-
-    @contextmanager
-    def continue_trace(self, environ_or_headers):
-        # type: (Dict[str, Any]) -> Generator[None, None, None]
-        """
-        Sets the propagation context from environment or headers to continue an incoming trace.
-        """
-        self.generate_propagation_context(environ_or_headers)
-        yield
-
-    def capture_event(self, event, hint=None, scope=None, **scope_kwargs):
-        # type: (Event, Optional[Hint], Optional[Scope], Any) -> Optional[str]
-        """
-        Captures an event.
-
-        Merges given scope data and calls :py:meth:`sentry_sdk.client._Client.capture_event`.
-
-        :param event: A ready-made event that can be directly sent to Sentry.
-
-        :param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or a HTTP request object.
-
-        :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
-            The `scope` and `scope_kwargs` parameters are mutually exclusive.
-
-        :param scope_kwargs: Optional data to apply to event.
-            For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
-            The `scope` and `scope_kwargs` parameters are mutually exclusive.
-
-        :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`).
-        """
-        if disable_capture_event.get(False):
-            return None
-
-        scope = self._merge_scopes(scope, scope_kwargs)
-
-        event_id = self.get_client().capture_event(event=event, hint=hint, scope=scope)
-
-        if event_id is not None and event.get("type") != "transaction":
-            self.get_isolation_scope()._last_event_id = event_id
-
-        return event_id
-
-    def capture_message(self, message, level=None, scope=None, **scope_kwargs):
-        # type: (str, Optional[LogLevelStr], Optional[Scope], Any) -> Optional[str]
-        """
-        Captures a message.
-
-        :param message: The string to send as the message.
-
-        :param level: If no level is provided, the default level is `info`.
-
-        :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
-            The `scope` and `scope_kwargs` parameters are mutually exclusive.
-
-        :param scope_kwargs: Optional data to apply to event.
-            For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
-            The `scope` and `scope_kwargs` parameters are mutually exclusive.
-
-        :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`).
-        """
-        if disable_capture_event.get(False):
-            return None
-
-        if level is None:
-            level = "info"
-
-        event = {
-            "message": message,
-            "level": level,
-        }  # type: Event
-
-        return self.capture_event(event, scope=scope, **scope_kwargs)
-
-    def capture_exception(self, error=None, scope=None, **scope_kwargs):
-        # type: (Optional[Union[BaseException, ExcInfo]], Optional[Scope], Any) -> Optional[str]
-        """Captures an exception.
-
-        :param error: An exception to capture. If `None`, `sys.exc_info()` will be used.
-
-        :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
-            The `scope` and `scope_kwargs` parameters are mutually exclusive.
-
-        :param scope_kwargs: Optional data to apply to event.
-            For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
-            The `scope` and `scope_kwargs` parameters are mutually exclusive.
-
-        :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`).
-        """
-        if disable_capture_event.get(False):
-            return None
-
-        if error is not None:
-            exc_info = exc_info_from_error(error)
-        else:
-            exc_info = sys.exc_info()
-
-        event, hint = event_from_exception(exc_info, client_options=self.get_client().options)
-
-        try:
-            return self.capture_event(event, hint=hint, scope=scope, **scope_kwargs)
-        except Exception:
-            capture_internal_exception(sys.exc_info())
-
-        return None
-
-    def start_session(self, *args, **kwargs):
-        # type: (*Any, **Any) -> None
-        """Starts a new session."""
-        session_mode = kwargs.pop("session_mode", "application")
-
-        self.end_session()
-
-        client = self.get_client()
-        self._session = Session(
-            release=client.options.get("release"),
-            environment=client.options.get("environment"),
-            user=self._user,
-            session_mode=session_mode,
-        )
-
-    def end_session(self, *args, **kwargs):
-        # type: (*Any, **Any) -> None
-        """Ends the current session if there is one."""
-        session = self._session
-        self._session = None
-
-        if session is not None:
-            session.close()
-            self.get_client().capture_session(session)
-
-    def stop_auto_session_tracking(self, *args, **kwargs):
-        # type: (*Any, **Any) -> None
-        """Stops automatic session tracking.
-
-        This temporarily session tracking for the current scope when called.
-        To resume session tracking call `resume_auto_session_tracking`.
-        """
-        self.end_session()
-        self._force_auto_session_tracking = False
-
-    def resume_auto_session_tracking(self):
-        # type: (...) -> None
-        """Resumes automatic session tracking for the current scope if
-        disabled earlier.  This requires that generally automatic session
-        tracking is enabled.
-        """
-        self._force_auto_session_tracking = None
-
-    def add_event_processor(
-        self, func  # type: EventProcessor
-    ):
-        # type: (...) -> None
-        """Register a scope local event processor on the scope.
-
-        :param func: This function behaves like `before_send.`
-        """
-        if len(self._event_processors) > 20:
-            logger.warning(
-                "Too many event processors on scope! Clearing list to free up some memory: %r",
-                self._event_processors,
-            )
-            del self._event_processors[:]
-
-        self._event_processors.append(func)
-
-    def add_error_processor(
-        self,
-        func,  # type: ErrorProcessor
-        cls=None,  # type: Optional[Type[BaseException]]
-    ):
-        # type: (...) -> None
-        """Register a scope local error processor on the scope.
-
-        :param func: A callback that works similar to an event processor but is invoked with the original exception info triple as second argument.
-
-        :param cls: Optionally, only process exceptions of this type.
-        """
-        if cls is not None:
-            cls_ = cls  # For mypy.
-            real_func = func
-
-            def func(event, exc_info):
-                # type: (Event, ExcInfo) -> Optional[Event]
-                try:
-                    is_inst = isinstance(exc_info[1], cls_)
-                except Exception:
-                    is_inst = False
-                if is_inst:
-                    return real_func(event, exc_info)
-                return event
-
-        self._error_processors.append(func)
-
-    def _apply_level_to_event(self, event, hint, options):
-        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
-        if self._level is not None:
-            event["level"] = self._level
-
-    def _apply_breadcrumbs_to_event(self, event, hint, options):
-        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
-        event.setdefault("breadcrumbs", {})
-
-        # This check is just for mypy -
-        if not isinstance(event["breadcrumbs"], AnnotatedValue):
-            event["breadcrumbs"].setdefault("values", [])
-            event["breadcrumbs"]["values"].extend(self._breadcrumbs)
-
-        # Attempt to sort timestamps
-        try:
-            if not isinstance(event["breadcrumbs"], AnnotatedValue):
-                for crumb in event["breadcrumbs"]["values"]:
-                    if isinstance(crumb["timestamp"], str):
-                        crumb["timestamp"] = datetime_from_isoformat(crumb["timestamp"])
-
-                event["breadcrumbs"]["values"].sort(key=lambda crumb: crumb["timestamp"])
-        except Exception as err:
-            logger.debug("Error when sorting breadcrumbs", exc_info=err)
-            pass
-
-    def _apply_user_to_event(self, event, hint, options):
-        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
-        if event.get("user") is None and self._user is not None:
-            event["user"] = self._user
-
-    def _apply_transaction_name_to_event(self, event, hint, options):
-        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
-        if event.get("transaction") is None and self._transaction is not None:
-            event["transaction"] = self._transaction
-
-    def _apply_transaction_info_to_event(self, event, hint, options):
-        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
-        if event.get("transaction_info") is None and self._transaction_info is not None:
-            event["transaction_info"] = self._transaction_info
-
-    def _apply_fingerprint_to_event(self, event, hint, options):
-        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
-        if event.get("fingerprint") is None and self._fingerprint is not None:
-            event["fingerprint"] = self._fingerprint
-
-    def _apply_extra_to_event(self, event, hint, options):
-        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
-        if self._extras:
-            event.setdefault("extra", {}).update(self._extras)
-
-    def _apply_tags_to_event(self, event, hint, options):
-        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
-        if self._tags:
-            event.setdefault("tags", {}).update(self._tags)
-
-    def _apply_contexts_to_event(self, event, hint, options):
-        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
-        if self._contexts:
-            event.setdefault("contexts", {}).update(self._contexts)
-
-        contexts = event.setdefault("contexts", {})
-
-        # Add "trace" context
-        if contexts.get("trace") is None:
-            if has_tracing_enabled(options) and self._span is not None and self._span.is_valid:
-                contexts["trace"] = self._span.get_trace_context()
-            else:
-                contexts["trace"] = self.get_trace_context()
-
-    def _apply_flags_to_event(self, event, hint, options):
-        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
-        flags = self.flags.get()
-        if len(flags) > 0:
-            event.setdefault("contexts", {}).setdefault("flags", {}).update({"values": flags})
-
-    def _drop(self, cause, ty):
-        # type: (Any, str) -> Optional[Any]
-        logger.info("%s (%s) dropped event", ty, cause)
-        return None
-
-    def run_error_processors(self, event, hint):
-        # type: (Event, Hint) -> Optional[Event]
-        """
-        Runs the error processors on the event and returns the modified event.
-        """
-        exc_info = hint.get("exc_info")
-        if exc_info is not None:
-            error_processors = chain(
-                self.get_global_scope()._error_processors,
-                self.get_isolation_scope()._error_processors,
-                self.get_current_scope()._error_processors,
-            )
-
-            for error_processor in error_processors:
-                new_event = error_processor(event, exc_info)
-                if new_event is None:
-                    return self._drop(error_processor, "error processor")
-
-                event = new_event
-
-        return event
-
-    def run_event_processors(self, event, hint):
-        # type: (Event, Hint) -> Optional[Event]
-        """
-        Runs the event processors on the event and returns the modified event.
-        """
-        ty = event.get("type")
-        is_check_in = ty == "check_in"
-
-        if not is_check_in:
-            # Get scopes without creating them to prevent infinite recursion
-            isolation_scope = self._get_isolation_scope()
-            current_scope = self._get_current_scope()
-
-            event_processors = chain(
-                global_event_processors,
-                _global_scope and _global_scope._event_processors or [],
-                isolation_scope and isolation_scope._event_processors or [],
-                current_scope and current_scope._event_processors or [],
-            )
-
-            for event_processor in event_processors:
-                new_event = event  # type: Optional[Event]
-                with capture_internal_exceptions():
-                    new_event = event_processor(event, hint)
-                if new_event is None:
-                    return self._drop(event_processor, "event processor")
-                event = new_event
-
-        return event
-
-    @_disable_capture
-    def apply_to_event(
-        self,
-        event,  # type: Event
-        hint,  # type: Hint
-        options=None,  # type: Optional[Dict[str, Any]]
-    ):
-        # type: (...) -> Optional[Event]
-        """Applies the information contained on the scope to the given event."""
-        ty = event.get("type")
-        is_transaction = ty == "transaction"
-        is_check_in = ty == "check_in"
-
-        # put all attachments into the hint. This lets callbacks play around
-        # with attachments. We also later pull this out of the hint when we
-        # create the envelope.
-        attachments_to_send = hint.get("attachments") or []
-        for attachment in self._attachments:
-            if not is_transaction or attachment.add_to_transactions:
-                attachments_to_send.append(attachment)
-        hint["attachments"] = attachments_to_send
-
-        self._apply_contexts_to_event(event, hint, options)
-
-        if is_check_in:
-            # Check-ins only support the trace context, strip all others
-            event["contexts"] = {"trace": event.setdefault("contexts", {}).get("trace", {})}
-
-        if not is_check_in:
-            self._apply_level_to_event(event, hint, options)
-            self._apply_fingerprint_to_event(event, hint, options)
-            self._apply_user_to_event(event, hint, options)
-            self._apply_transaction_name_to_event(event, hint, options)
-            self._apply_transaction_info_to_event(event, hint, options)
-            self._apply_tags_to_event(event, hint, options)
-            self._apply_extra_to_event(event, hint, options)
-
-        if not is_transaction and not is_check_in:
-            self._apply_breadcrumbs_to_event(event, hint, options)
-            self._apply_flags_to_event(event, hint, options)
-
-        event = self.run_error_processors(event, hint)
-        if event is None:
-            return None
-
-        event = self.run_event_processors(event, hint)
-        if event is None:
-            return None
-
-        return event
-
-    def update_from_scope(self, scope):
-        # type: (Scope) -> None
-        """Update the scope with another scope's data."""
-        if scope._level is not None:
-            self._level = scope._level
-        if scope._fingerprint is not None:
-            self._fingerprint = scope._fingerprint
-        if scope._transaction is not None:
-            self._transaction = scope._transaction
-        if scope._transaction_info is not None:
-            self._transaction_info.update(scope._transaction_info)
-        if scope._user is not None:
-            self._user = scope._user
-        if scope._tags:
-            self._tags.update(scope._tags)
-        if scope._contexts:
-            self._contexts.update(scope._contexts)
-        if scope._extras:
-            self._extras.update(scope._extras)
-        if scope._breadcrumbs:
-            self._breadcrumbs.extend(scope._breadcrumbs)
-        if scope._n_breadcrumbs_truncated:
-            self._n_breadcrumbs_truncated = (
-                self._n_breadcrumbs_truncated + scope._n_breadcrumbs_truncated
-            )
-        if scope._span:
-            self._span = scope._span
-        if scope._attachments:
-            self._attachments.extend(scope._attachments)
-        if scope._profile:
-            self._profile = scope._profile
-        if scope._propagation_context:
-            self._propagation_context = scope._propagation_context
-        if scope._session:
-            self._session = scope._session
-        if scope._flags:
-            if not self._flags:
-                self._flags = deepcopy(scope._flags)
-            else:
-                for flag in scope._flags.get():
-                    self._flags.set(flag["flag"], flag["result"])
-
-    def update_from_kwargs(
-        self,
-        user=None,  # type: Optional[Any]
-        level=None,  # type: Optional[LogLevelStr]
-        extras=None,  # type: Optional[Dict[str, Any]]
-        contexts=None,  # type: Optional[Dict[str, Dict[str, Any]]]
-        tags=None,  # type: Optional[Dict[str, str]]
-        fingerprint=None,  # type: Optional[List[str]]
-    ):
-        # type: (...) -> None
-        """Update the scope's attributes."""
-        if level is not None:
-            self._level = level
-        if user is not None:
-            self._user = user
-        if extras is not None:
-            self._extras.update(extras)
-        if contexts is not None:
-            self._contexts.update(contexts)
-        if tags is not None:
-            self._tags.update(tags)
-        if fingerprint is not None:
-            self._fingerprint = fingerprint
-
-    def __repr__(self):
-        # type: () -> str
-        return "<{} id={} name={} type={}>".format(
-            self.__class__.__name__,
-            hex(id(self)),
-            self._name,
-            self._type,
-        )
-
-    @property
-    def flags(self):
-        # type: () -> FlagBuffer
-        if self._flags is None:
-            max_flags = (
-                self.get_client().options["_experiments"].get("max_flags") or DEFAULT_FLAG_CAPACITY
-            )
-            self._flags = FlagBuffer(capacity=max_flags)
-        return self._flags
-
-
-@contextmanager
-def new_scope():
-    # type: () -> Generator[Scope, None, None]
-    """
-    .. versionadded:: 2.0.0
-
-    Context manager that forks the current scope and runs the wrapped code in it.
-    After the wrapped code is executed, the original scope is restored.
-
-    Example Usage:
-
-    .. code-block:: python
-
-        import sentry_sdk
-
-        with sentry_sdk.new_scope() as scope:
-            scope.set_tag("color", "green")
-            sentry_sdk.capture_message("hello") # will include `color` tag.
-
-        sentry_sdk.capture_message("hello, again") # will NOT include `color` tag.
-
-    """
-    # fork current scope
-    current_scope = Scope.get_current_scope()
-    new_scope = current_scope.fork()
-    token = _current_scope.set(new_scope)
-
-    try:
-        yield new_scope
-
-    finally:
-        # restore original scope
-        _current_scope.reset(token)
-
-
-@contextmanager
-def use_scope(scope):
-    # type: (Scope) -> Generator[Scope, None, None]
-    """
-    .. versionadded:: 2.0.0
-
-    Context manager that uses the given `scope` and runs the wrapped code in it.
-    After the wrapped code is executed, the original scope is restored.
-
-    Example Usage:
-    Suppose the variable `scope` contains a `Scope` object, which is not currently
-    the active scope.
-
-    .. code-block:: python
-
-        import sentry_sdk
-
-        with sentry_sdk.use_scope(scope):
-            scope.set_tag("color", "green")
-            sentry_sdk.capture_message("hello") # will include `color` tag.
-
-        sentry_sdk.capture_message("hello, again") # will NOT include `color` tag.
-
-    """
-    # set given scope as current scope
-    token = _current_scope.set(scope)
-
-    try:
-        yield scope
-
-    finally:
-        # restore original scope
-        _current_scope.reset(token)
-
-
-@contextmanager
-def isolation_scope():
-    # type: () -> Generator[Scope, None, None]
-    """
-    .. versionadded:: 2.0.0
-
-    Context manager that forks the current isolation scope and runs the wrapped code in it.
-    The current scope is also forked to not bleed data into the existing current scope.
-    After the wrapped code is executed, the original scopes are restored.
-
-    Example Usage:
-
-    .. code-block:: python
-
-        import sentry_sdk
-
-        with sentry_sdk.isolation_scope() as scope:
-            scope.set_tag("color", "green")
-            sentry_sdk.capture_message("hello") # will include `color` tag.
-
-        sentry_sdk.capture_message("hello, again") # will NOT include `color` tag.
-
-    """
-    # fork current scope
-    current_scope = Scope.get_current_scope()
-    forked_current_scope = current_scope.fork()
-    current_token = _current_scope.set(forked_current_scope)
-
-    # fork isolation scope
-    isolation_scope = Scope.get_isolation_scope()
-    new_isolation_scope = isolation_scope.fork()
-    isolation_token = _isolation_scope.set(new_isolation_scope)
-
-    try:
-        yield new_isolation_scope
-
-    finally:
-        # restore original scopes
-        _current_scope.reset(current_token)
-        _isolation_scope.reset(isolation_token)
-
-
-@contextmanager
-def use_isolation_scope(isolation_scope):
-    # type: (Scope) -> Generator[Scope, None, None]
-    """
-    .. versionadded:: 2.0.0
-
-    Context manager that uses the given `isolation_scope` and runs the wrapped code in it.
-    The current scope is also forked to not bleed data into the existing current scope.
-    After the wrapped code is executed, the original scopes are restored.
-
-    Example Usage:
-
-    .. code-block:: python
-
-        import sentry_sdk
-
-        with sentry_sdk.isolation_scope() as scope:
-            scope.set_tag("color", "green")
-            sentry_sdk.capture_message("hello") # will include `color` tag.
-
-        sentry_sdk.capture_message("hello, again") # will NOT include `color` tag.
-
-    """
-    # fork current scope
-    current_scope = Scope.get_current_scope()
-    forked_current_scope = current_scope.fork()
-    current_token = _current_scope.set(forked_current_scope)
-
-    # set given scope as isolation scope
-    isolation_token = _isolation_scope.set(isolation_scope)
-
-    try:
-        yield isolation_scope
-
-    finally:
-        # restore original scopes
-        _current_scope.reset(current_token)
-        _isolation_scope.reset(isolation_token)
-
-
-def should_send_default_pii():
-    # type: () -> bool
-    """Shortcut for `Scope.get_client().should_send_default_pii()`."""
-    return Scope.get_client().should_send_default_pii()
-
-
-# Circular imports
-from sentry_sdk_alpha.client import NonRecordingClient
-
-if TYPE_CHECKING:
-    import sentry_sdk_alpha.client
diff --git a/src/sentry_sdk_alpha/scrubber.py b/src/sentry_sdk_alpha/scrubber.py
deleted file mode 100644
index 6bbbff36912f95..00000000000000
--- a/src/sentry_sdk_alpha/scrubber.py
+++ /dev/null
@@ -1,170 +0,0 @@
-from typing import TYPE_CHECKING, Dict, List, cast
-
-from sentry_sdk_alpha.utils import AnnotatedValue, capture_internal_exceptions, iter_event_frames
-
-if TYPE_CHECKING:
-    from typing import Optional
-
-    from sentry_sdk_alpha._types import Event
-
-
-DEFAULT_DENYLIST = [
-    # stolen from relay
-    "password",
-    "passwd",
-    "secret",
-    "api_key",
-    "apikey",
-    "auth",
-    "credentials",
-    "mysql_pwd",
-    "privatekey",
-    "private_key",
-    "token",
-    "session",
-    # django
-    "csrftoken",
-    "sessionid",
-    # wsgi
-    "x_csrftoken",
-    "x_forwarded_for",
-    "set_cookie",
-    "cookie",
-    "authorization",
-    "x_api_key",
-    # other common names used in the wild
-    "aiohttp_session",  # aiohttp
-    "connect.sid",  # Express
-    "csrf_token",  # Pyramid
-    "csrf",  # (this is a cookie name used in accepted answers on stack overflow)
-    "_csrf",  # Express
-    "_csrf_token",  # Bottle
-    "PHPSESSID",  # PHP
-    "_session",  # Sanic
-    "symfony",  # Symfony
-    "user_session",  # Vue
-    "_xsrf",  # Tornado
-    "XSRF-TOKEN",  # Angular, Laravel
-]
-
-DEFAULT_PII_DENYLIST = [
-    "x_forwarded_for",
-    "x_real_ip",
-    "ip_address",
-    "remote_addr",
-]
-
-
-class EventScrubber:
-    def __init__(self, denylist=None, recursive=False, send_default_pii=False, pii_denylist=None):
-        # type: (Optional[List[str]], bool, bool, Optional[List[str]]) -> None
-        """
-        A scrubber that goes through the event payload and removes sensitive data configured through denylists.
-
-        :param denylist: A security denylist that is always scrubbed, defaults to DEFAULT_DENYLIST.
-        :param recursive: Whether to scrub the event payload recursively, default False.
-        :param send_default_pii: Whether pii is sending is on, pii fields are not scrubbed.
-        :param pii_denylist: The denylist to use for scrubbing when pii is not sent, defaults to DEFAULT_PII_DENYLIST.
-        """
-        self.denylist = DEFAULT_DENYLIST.copy() if denylist is None else denylist
-
-        if not send_default_pii:
-            pii_denylist = DEFAULT_PII_DENYLIST.copy() if pii_denylist is None else pii_denylist
-            self.denylist += pii_denylist
-
-        self.denylist = [x.lower() for x in self.denylist]
-        self.recursive = recursive
-
-    def scrub_list(self, lst):
-        # type: (object) -> None
-        """
-        If a list is passed to this method, the method recursively searches the list and any
-        nested lists for any dictionaries. The method calls scrub_dict on all dictionaries
-        it finds.
-        If the parameter passed to this method is not a list, the method does nothing.
-        """
-        if not isinstance(lst, list):
-            return
-
-        for v in lst:
-            self.scrub_dict(v)  # no-op unless v is a dict
-            self.scrub_list(v)  # no-op unless v is a list
-
-    def scrub_dict(self, d):
-        # type: (object) -> None
-        """
-        If a dictionary is passed to this method, the method scrubs the dictionary of any
-        sensitive data. The method calls itself recursively on any nested dictionaries (
-        including dictionaries nested in lists) if self.recursive is True.
-        This method does nothing if the parameter passed to it is not a dictionary.
-        """
-        if not isinstance(d, dict):
-            return
-
-        for k, v in d.items():
-            # The cast is needed because mypy is not smart enough to figure out that k must be a
-            # string after the isinstance check.
-            if isinstance(k, str) and k.lower() in self.denylist:
-                d[k] = AnnotatedValue.substituted_because_contains_sensitive_data()
-            elif self.recursive:
-                self.scrub_dict(v)  # no-op unless v is a dict
-                self.scrub_list(v)  # no-op unless v is a list
-
-    def scrub_request(self, event):
-        # type: (Event) -> None
-        with capture_internal_exceptions():
-            if "request" in event:
-                if "headers" in event["request"]:
-                    self.scrub_dict(event["request"]["headers"])
-                if "cookies" in event["request"]:
-                    self.scrub_dict(event["request"]["cookies"])
-                if "data" in event["request"]:
-                    self.scrub_dict(event["request"]["data"])
-
-    def scrub_extra(self, event):
-        # type: (Event) -> None
-        with capture_internal_exceptions():
-            if "extra" in event:
-                self.scrub_dict(event["extra"])
-
-    def scrub_user(self, event):
-        # type: (Event) -> None
-        with capture_internal_exceptions():
-            if "user" in event:
-                self.scrub_dict(event["user"])
-
-    def scrub_breadcrumbs(self, event):
-        # type: (Event) -> None
-        with capture_internal_exceptions():
-            if "breadcrumbs" in event:
-                if (
-                    not isinstance(event["breadcrumbs"], AnnotatedValue)
-                    and "values" in event["breadcrumbs"]
-                ):
-                    for value in event["breadcrumbs"]["values"]:
-                        if "data" in value:
-                            self.scrub_dict(value["data"])
-
-    def scrub_frames(self, event):
-        # type: (Event) -> None
-        with capture_internal_exceptions():
-            for frame in iter_event_frames(event):
-                if "vars" in frame:
-                    self.scrub_dict(frame["vars"])
-
-    def scrub_spans(self, event):
-        # type: (Event) -> None
-        with capture_internal_exceptions():
-            if "spans" in event:
-                for span in cast(list[dict[str, object]], event["spans"]):
-                    if "data" in span:
-                        self.scrub_dict(span["data"])
-
-    def scrub_event(self, event):
-        # type: (Event) -> None
-        self.scrub_request(event)
-        self.scrub_extra(event)
-        self.scrub_user(event)
-        self.scrub_breadcrumbs(event)
-        self.scrub_frames(event)
-        self.scrub_spans(event)
diff --git a/src/sentry_sdk_alpha/serializer.py b/src/sentry_sdk_alpha/serializer.py
deleted file mode 100644
index 8ecb9abf692ca8..00000000000000
--- a/src/sentry_sdk_alpha/serializer.py
+++ /dev/null
@@ -1,370 +0,0 @@
-import math
-import sys
-from collections.abc import Mapping, Sequence, Set
-from datetime import datetime
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha.utils import (
-    AnnotatedValue,
-    capture_internal_exception,
-    disable_capture_event,
-    format_timestamp,
-    safe_repr,
-    strip_string,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from types import TracebackType
-    from typing import Any, ContextManager, Dict, List, Optional, Type, Union
-
-    from sentry_sdk_alpha._types import NotImplementedType
-
-    Span = dict[str, Any]
-
-    ReprProcessor = Callable[[Any, dict[str, Any]], Union[NotImplementedType, str]]
-    Segment = Union[str, int]
-
-
-# Bytes are technically not strings in Python 3, but we can serialize them
-serializable_str_types = (str, bytes, bytearray, memoryview)
-
-
-# Maximum length of JSON-serialized event payloads that can be safely sent
-# before the server may reject the event due to its size. This is not intended
-# to reflect actual values defined server-side, but rather only be an upper
-# bound for events sent by the SDK.
-#
-# Can be overwritten if wanting to send more bytes, e.g. with a custom server.
-# When changing this, keep in mind that events may be a little bit larger than
-# this value due to attached metadata, so keep the number conservative.
-MAX_EVENT_BYTES = 10**6
-
-# Maximum depth and breadth of databags. Excess data will be trimmed. If
-# max_request_body_size is "always", request bodies won't be trimmed.
-MAX_DATABAG_DEPTH = 5
-MAX_DATABAG_BREADTH = 10
-CYCLE_MARKER = ""
-
-
-global_repr_processors = []  # type: List[ReprProcessor]
-
-
-def add_global_repr_processor(processor):
-    # type: (ReprProcessor) -> None
-    global_repr_processors.append(processor)
-
-
-class Memo:
-    __slots__ = ("_ids", "_objs")
-
-    def __init__(self):
-        # type: () -> None
-        self._ids = {}  # type: Dict[int, Any]
-        self._objs = []  # type: List[Any]
-
-    def memoize(self, obj):
-        # type: (Any) -> ContextManager[bool]
-        self._objs.append(obj)
-        return self
-
-    def __enter__(self):
-        # type: () -> bool
-        obj = self._objs[-1]
-        if id(obj) in self._ids:
-            return True
-        else:
-            self._ids[id(obj)] = obj
-            return False
-
-    def __exit__(
-        self,
-        ty,  # type: Optional[Type[BaseException]]
-        value,  # type: Optional[BaseException]
-        tb,  # type: Optional[TracebackType]
-    ):
-        # type: (...) -> None
-        self._ids.pop(id(self._objs.pop()), None)
-
-
-def serialize(event, **kwargs):
-    # type: (Dict[str, Any], **Any) -> Dict[str, Any]
-    """
-    A very smart serializer that takes a dict and emits a json-friendly dict.
-    Currently used for serializing the final Event and also prematurely while fetching the stack
-    local variables for each frame in a stacktrace.
-
-    It works internally with 'databags' which are arbitrary data structures like Mapping, Sequence and Set.
-    The algorithm itself is a recursive graph walk down the data structures it encounters.
-
-    It has the following responsibilities:
-    * Trimming databags and keeping them within MAX_DATABAG_BREADTH and MAX_DATABAG_DEPTH.
-    * Calling safe_repr() on objects appropriately to keep them informative and readable in the final payload.
-    * Annotating the payload with the _meta field whenever trimming happens.
-
-    :param max_request_body_size: If set to "always", will never trim request bodies.
-    :param max_value_length: The max length to strip strings to, defaults to sentry_sdk.consts.DEFAULT_MAX_VALUE_LENGTH
-    :param is_vars: If we're serializing vars early, we want to repr() things that are JSON-serializable to make their type more apparent. For example, it's useful to see the difference between a unicode-string and a bytestring when viewing a stacktrace.
-    :param custom_repr: A custom repr function that runs before safe_repr on the object to be serialized. If it returns None or throws internally, we will fallback to safe_repr.
-
-    """
-    memo = Memo()
-    path = []  # type: List[Segment]
-    meta_stack = []  # type: List[Dict[str, Any]]
-
-    keep_request_bodies = kwargs.pop("max_request_body_size", None) == "always"  # type: bool
-    max_value_length = kwargs.pop("max_value_length", None)  # type: Optional[int]
-    is_vars = kwargs.pop("is_vars", False)
-    custom_repr = kwargs.pop("custom_repr", None)  # type: Callable[..., Optional[str]]
-
-    def _safe_repr_wrapper(value):
-        # type: (Any) -> str
-        try:
-            repr_value = None
-            if custom_repr is not None:
-                repr_value = custom_repr(value)
-            return repr_value or safe_repr(value)
-        except Exception:
-            return safe_repr(value)
-
-    def _annotate(**meta):
-        # type: (**Any) -> None
-        while len(meta_stack) <= len(path):
-            try:
-                segment = path[len(meta_stack) - 1]
-                node = meta_stack[-1].setdefault(str(segment), {})
-            except IndexError:
-                node = {}
-
-            meta_stack.append(node)
-
-        meta_stack[-1].setdefault("", {}).update(meta)
-
-    def _is_databag():
-        # type: () -> Optional[bool]
-        """
-        A databag is any value that we need to trim.
-        True for stuff like vars, request bodies, breadcrumbs and extra.
-
-        :returns: `True` for "yes", `False` for :"no", `None` for "maybe soon".
-        """
-        try:
-            if is_vars:
-                return True
-
-            is_request_body = _is_request_body()
-            if is_request_body in (True, None):
-                return is_request_body
-
-            p0 = path[0]
-            if p0 == "breadcrumbs" and path[1] == "values":
-                path[2]
-                return True
-
-            if p0 == "extra":
-                return True
-
-        except IndexError:
-            return None
-
-        return False
-
-    def _is_request_body():
-        # type: () -> Optional[bool]
-        try:
-            if path[0] == "request" and path[1] == "data":
-                return True
-        except IndexError:
-            return None
-
-        return False
-
-    def _serialize_node(
-        obj,  # type: Any
-        is_databag=None,  # type: Optional[bool]
-        is_request_body=None,  # type: Optional[bool]
-        should_repr_strings=None,  # type: Optional[bool]
-        segment=None,  # type: Optional[Segment]
-        remaining_breadth=None,  # type: Optional[Union[int, float]]
-        remaining_depth=None,  # type: Optional[Union[int, float]]
-    ):
-        # type: (...) -> Any
-        if segment is not None:
-            path.append(segment)
-
-        try:
-            with memo.memoize(obj) as result:
-                if result:
-                    return CYCLE_MARKER
-
-                return _serialize_node_impl(
-                    obj,
-                    is_databag=is_databag,
-                    is_request_body=is_request_body,
-                    should_repr_strings=should_repr_strings,
-                    remaining_depth=remaining_depth,
-                    remaining_breadth=remaining_breadth,
-                )
-        except BaseException:
-            capture_internal_exception(sys.exc_info())
-
-            if is_databag:
-                return ""
-
-            return None
-        finally:
-            if segment is not None:
-                path.pop()
-                del meta_stack[len(path) + 1 :]
-
-    def _flatten_annotated(obj):
-        # type: (Any) -> Any
-        if isinstance(obj, AnnotatedValue):
-            _annotate(**obj.metadata)
-            obj = obj.value
-        return obj
-
-    def _serialize_node_impl(
-        obj,
-        is_databag,
-        is_request_body,
-        should_repr_strings,
-        remaining_depth,
-        remaining_breadth,
-    ):
-        # type: (Any, Optional[bool], Optional[bool], Optional[bool], Optional[Union[float, int]], Optional[Union[float, int]]) -> Any
-        if isinstance(obj, AnnotatedValue):
-            should_repr_strings = False
-        if should_repr_strings is None:
-            should_repr_strings = is_vars
-
-        if is_databag is None:
-            is_databag = _is_databag()
-
-        if is_request_body is None:
-            is_request_body = _is_request_body()
-
-        if is_databag:
-            if is_request_body and keep_request_bodies:
-                remaining_depth = float("inf")
-                remaining_breadth = float("inf")
-            else:
-                if remaining_depth is None:
-                    remaining_depth = MAX_DATABAG_DEPTH
-                if remaining_breadth is None:
-                    remaining_breadth = MAX_DATABAG_BREADTH
-
-        obj = _flatten_annotated(obj)
-
-        if remaining_depth is not None and remaining_depth <= 0:
-            _annotate(rem=[["!limit", "x"]])
-            if is_databag:
-                return _flatten_annotated(
-                    strip_string(_safe_repr_wrapper(obj), max_length=max_value_length)
-                )
-            return None
-
-        if is_databag and global_repr_processors:
-            hints = {"memo": memo, "remaining_depth": remaining_depth}
-            for processor in global_repr_processors:
-                result = processor(obj, hints)
-                if result is not NotImplemented:
-                    return _flatten_annotated(result)
-
-        sentry_repr = getattr(type(obj), "__sentry_repr__", None)
-
-        if obj is None or isinstance(obj, (bool, int, float)):
-            if should_repr_strings or (
-                isinstance(obj, float) and (math.isinf(obj) or math.isnan(obj))
-            ):
-                return _safe_repr_wrapper(obj)
-            else:
-                return obj
-
-        elif callable(sentry_repr):
-            return sentry_repr(obj)
-
-        elif isinstance(obj, datetime):
-            return (
-                str(format_timestamp(obj)) if not should_repr_strings else _safe_repr_wrapper(obj)
-            )
-
-        elif isinstance(obj, Mapping):
-            # Create temporary copy here to avoid calling too much code that
-            # might mutate our dictionary while we're still iterating over it.
-            obj = dict(obj.items())
-
-            rv_dict = {}  # type: Dict[str, Any]
-            i = 0
-
-            for k, v in obj.items():
-                if remaining_breadth is not None and i >= remaining_breadth:
-                    _annotate(len=len(obj))
-                    break
-
-                str_k = str(k)
-                v = _serialize_node(
-                    v,
-                    segment=str_k,
-                    should_repr_strings=should_repr_strings,
-                    is_databag=is_databag,
-                    is_request_body=is_request_body,
-                    remaining_depth=(remaining_depth - 1 if remaining_depth is not None else None),
-                    remaining_breadth=remaining_breadth,
-                )
-                rv_dict[str_k] = v
-                i += 1
-
-            return rv_dict
-
-        elif not isinstance(obj, serializable_str_types) and isinstance(obj, (Set, Sequence)):
-            rv_list = []
-
-            for i, v in enumerate(obj):
-                if remaining_breadth is not None and i >= remaining_breadth:
-                    _annotate(len=len(obj))
-                    break
-
-                rv_list.append(
-                    _serialize_node(
-                        v,
-                        segment=i,
-                        should_repr_strings=should_repr_strings,
-                        is_databag=is_databag,
-                        is_request_body=is_request_body,
-                        remaining_depth=(
-                            remaining_depth - 1 if remaining_depth is not None else None
-                        ),
-                        remaining_breadth=remaining_breadth,
-                    )
-                )
-
-            return rv_list
-
-        if should_repr_strings:
-            obj = _safe_repr_wrapper(obj)
-        else:
-            if isinstance(obj, bytes) or isinstance(obj, bytearray):
-                obj = obj.decode("utf-8", "replace")
-
-            if not isinstance(obj, str):
-                obj = _safe_repr_wrapper(obj)
-
-        is_span_description = len(path) == 3 and path[0] == "spans" and path[-1] == "description"
-        if is_span_description:
-            return obj
-
-        return _flatten_annotated(strip_string(obj, max_length=max_value_length))
-
-    #
-    # Start of serialize() function
-    #
-    disable_capture_event.set(True)
-    try:
-        serialized_event = _serialize_node(event, **kwargs)
-        if not is_vars and meta_stack and isinstance(serialized_event, dict):
-            serialized_event["_meta"] = meta_stack[0]
-
-        return serialized_event
-    finally:
-        disable_capture_event.set(False)
diff --git a/src/sentry_sdk_alpha/session.py b/src/sentry_sdk_alpha/session.py
deleted file mode 100644
index 718a2acb15c9ff..00000000000000
--- a/src/sentry_sdk_alpha/session.py
+++ /dev/null
@@ -1,171 +0,0 @@
-import uuid
-from datetime import datetime, timezone
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha.utils import format_timestamp
-
-if TYPE_CHECKING:
-    from typing import Any, Dict, Optional, Union
-
-    from sentry_sdk_alpha._types import SessionStatus
-
-
-def _minute_trunc(ts):
-    # type: (datetime) -> datetime
-    return ts.replace(second=0, microsecond=0)
-
-
-def _make_uuid(
-    val,  # type: Union[str, uuid.UUID]
-):
-    # type: (...) -> uuid.UUID
-    if isinstance(val, uuid.UUID):
-        return val
-    return uuid.UUID(val)
-
-
-class Session:
-    def __init__(
-        self,
-        sid=None,  # type: Optional[Union[str, uuid.UUID]]
-        did=None,  # type: Optional[str]
-        timestamp=None,  # type: Optional[datetime]
-        started=None,  # type: Optional[datetime]
-        duration=None,  # type: Optional[float]
-        status=None,  # type: Optional[SessionStatus]
-        release=None,  # type: Optional[str]
-        environment=None,  # type: Optional[str]
-        user_agent=None,  # type: Optional[str]
-        ip_address=None,  # type: Optional[str]
-        errors=None,  # type: Optional[int]
-        user=None,  # type: Optional[Any]
-        session_mode="application",  # type: str
-    ):
-        # type: (...) -> None
-        if sid is None:
-            sid = uuid.uuid4()
-        if started is None:
-            started = datetime.now(timezone.utc)
-        if status is None:
-            status = "ok"
-        self.status = status
-        self.did = None  # type: Optional[str]
-        self.started = started
-        self.release = None  # type: Optional[str]
-        self.environment = None  # type: Optional[str]
-        self.duration = None  # type: Optional[float]
-        self.user_agent = None  # type: Optional[str]
-        self.ip_address = None  # type: Optional[str]
-        self.session_mode = session_mode  # type: str
-        self.errors = 0
-
-        self.update(
-            sid=sid,
-            did=did,
-            timestamp=timestamp,
-            duration=duration,
-            release=release,
-            environment=environment,
-            user_agent=user_agent,
-            ip_address=ip_address,
-            errors=errors,
-            user=user,
-        )
-
-    @property
-    def truncated_started(self):
-        # type: (...) -> datetime
-        return _minute_trunc(self.started)
-
-    def update(
-        self,
-        sid=None,  # type: Optional[Union[str, uuid.UUID]]
-        did=None,  # type: Optional[str]
-        timestamp=None,  # type: Optional[datetime]
-        started=None,  # type: Optional[datetime]
-        duration=None,  # type: Optional[float]
-        status=None,  # type: Optional[SessionStatus]
-        release=None,  # type: Optional[str]
-        environment=None,  # type: Optional[str]
-        user_agent=None,  # type: Optional[str]
-        ip_address=None,  # type: Optional[str]
-        errors=None,  # type: Optional[int]
-        user=None,  # type: Optional[Any]
-    ):
-        # type: (...) -> None
-        # If a user is supplied we pull some data form it
-        if user:
-            if ip_address is None:
-                ip_address = user.get("ip_address")
-            if did is None:
-                did = user.get("id") or user.get("email") or user.get("username")
-
-        if sid is not None:
-            self.sid = _make_uuid(sid)
-        if did is not None:
-            self.did = str(did)
-        if timestamp is None:
-            timestamp = datetime.now(timezone.utc)
-        self.timestamp = timestamp
-        if started is not None:
-            self.started = started
-        if duration is not None:
-            self.duration = duration
-        if release is not None:
-            self.release = release
-        if environment is not None:
-            self.environment = environment
-        if ip_address is not None:
-            self.ip_address = ip_address
-        if user_agent is not None:
-            self.user_agent = user_agent
-        if errors is not None:
-            self.errors = errors
-
-        if status is not None:
-            self.status = status
-
-    def close(
-        self, status=None  # type: Optional[SessionStatus]
-    ):
-        # type: (...) -> Any
-        if status is None and self.status == "ok":
-            status = "exited"
-        if status is not None:
-            self.update(status=status)
-
-    def get_json_attrs(
-        self, with_user_info=True  # type: Optional[bool]
-    ):
-        # type: (...) -> Any
-        attrs = {}
-        if self.release is not None:
-            attrs["release"] = self.release
-        if self.environment is not None:
-            attrs["environment"] = self.environment
-        if with_user_info:
-            if self.ip_address is not None:
-                attrs["ip_address"] = self.ip_address
-            if self.user_agent is not None:
-                attrs["user_agent"] = self.user_agent
-        return attrs
-
-    def to_json(self):
-        # type: (...) -> Any
-        rv = {
-            "sid": str(self.sid),
-            "init": True,
-            "started": format_timestamp(self.started),
-            "timestamp": format_timestamp(self.timestamp),
-            "status": self.status,
-        }  # type: Dict[str, Any]
-        if self.errors:
-            rv["errors"] = self.errors
-        if self.did is not None:
-            rv["did"] = self.did
-        if self.duration is not None:
-            rv["duration"] = self.duration
-        attrs = self.get_json_attrs()
-        if attrs:
-            rv["attrs"] = attrs
-        return rv
diff --git a/src/sentry_sdk_alpha/sessions.py b/src/sentry_sdk_alpha/sessions.py
deleted file mode 100644
index 61d3f061ff1989..00000000000000
--- a/src/sentry_sdk_alpha/sessions.py
+++ /dev/null
@@ -1,186 +0,0 @@
-import os
-import time
-from contextlib import contextmanager
-from threading import Lock, Thread
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.envelope import Envelope
-from sentry_sdk_alpha.session import Session
-from sentry_sdk_alpha.utils import format_timestamp
-
-if TYPE_CHECKING:
-    from collections.abc import Callable, Generator
-    from typing import Any, Dict, List, Optional
-
-
-def _is_auto_session_tracking_enabled(scope):
-    # type: (sentry_sdk.Scope) -> bool
-    """
-    Utility function to find out if session tracking is enabled.
-    """
-
-    should_track = scope._force_auto_session_tracking
-    if should_track is None:
-        client_options = sentry_sdk_alpha.get_client().options
-        should_track = client_options.get("auto_session_tracking", False)
-
-    return should_track
-
-
-@contextmanager
-def track_session(scope, session_mode="application"):
-    # type: (sentry_sdk.Scope, str) -> Generator[None, None, None]
-    """
-    Start a new session in the provided scope, assuming session tracking is enabled.
-    This is a no-op context manager if session tracking is not enabled.
-    """
-
-    should_track = _is_auto_session_tracking_enabled(scope)
-    if should_track:
-        scope.start_session(session_mode=session_mode)
-    try:
-        yield
-    finally:
-        if should_track:
-            scope.end_session()
-
-
-TERMINAL_SESSION_STATES = ("exited", "abnormal", "crashed")
-MAX_ENVELOPE_ITEMS = 100
-
-
-def make_aggregate_envelope(aggregate_states, attrs):
-    # type: (Any, Any) -> Any
-    return {"attrs": dict(attrs), "aggregates": list(aggregate_states.values())}
-
-
-class SessionFlusher:
-    def __init__(
-        self,
-        capture_func,  # type: Callable[[Envelope], None]
-        flush_interval=60,  # type: int
-    ):
-        # type: (...) -> None
-        self.capture_func = capture_func
-        self.flush_interval = flush_interval
-        self.pending_sessions = []  # type: List[Any]
-        self.pending_aggregates = {}  # type: Dict[Any, Any]
-        self._thread = None  # type: Optional[Thread]
-        self._thread_lock = Lock()
-        self._aggregate_lock = Lock()
-        self._thread_for_pid = None  # type: Optional[int]
-        self._running = True
-
-    def flush(self):
-        # type: (...) -> None
-        pending_sessions = self.pending_sessions
-        self.pending_sessions = []
-
-        with self._aggregate_lock:
-            pending_aggregates = self.pending_aggregates
-            self.pending_aggregates = {}
-
-        envelope = Envelope()
-        for session in pending_sessions:
-            if len(envelope.items) == MAX_ENVELOPE_ITEMS:
-                self.capture_func(envelope)
-                envelope = Envelope()
-
-            envelope.add_session(session)
-
-        for attrs, states in pending_aggregates.items():
-            if len(envelope.items) == MAX_ENVELOPE_ITEMS:
-                self.capture_func(envelope)
-                envelope = Envelope()
-
-            envelope.add_sessions(make_aggregate_envelope(states, attrs))
-
-        if len(envelope.items) > 0:
-            self.capture_func(envelope)
-
-    def _ensure_running(self):
-        # type: (...) -> None
-        """
-        Check that we have an active thread to run in, or create one if not.
-
-        Note that this might fail (e.g. in Python 3.12 it's not possible to
-        spawn new threads at interpreter shutdown). In that case self._running
-        will be False after running this function.
-        """
-        if self._thread_for_pid == os.getpid() and self._thread is not None:
-            return None
-        with self._thread_lock:
-            if self._thread_for_pid == os.getpid() and self._thread is not None:
-                return None
-
-            def _thread():
-                # type: (...) -> None
-                while self._running:
-                    time.sleep(self.flush_interval)
-                    if self._running:
-                        self.flush()
-
-            thread = Thread(target=_thread)
-            thread.daemon = True
-            try:
-                thread.start()
-            except RuntimeError:
-                # Unfortunately at this point the interpreter is in a state that no
-                # longer allows us to spawn a thread and we have to bail.
-                self._running = False
-                return None
-
-            self._thread = thread
-            self._thread_for_pid = os.getpid()
-
-        return None
-
-    def add_aggregate_session(
-        self, session  # type: Session
-    ):
-        # type: (...) -> None
-        # NOTE on `session.did`:
-        # the protocol can deal with buckets that have a distinct-id, however
-        # in practice we expect the python SDK to have an extremely high cardinality
-        # here, effectively making aggregation useless, therefore we do not
-        # aggregate per-did.
-
-        # For this part we can get away with using the global interpreter lock
-        with self._aggregate_lock:
-            attrs = session.get_json_attrs(with_user_info=False)
-            primary_key = tuple(sorted(attrs.items()))
-            secondary_key = session.truncated_started  # (, session.did)
-            states = self.pending_aggregates.setdefault(primary_key, {})
-            state = states.setdefault(secondary_key, {})
-
-            if "started" not in state:
-                state["started"] = format_timestamp(session.truncated_started)
-            # if session.did is not None:
-            #     state["did"] = session.did
-            if session.status == "crashed":
-                state["crashed"] = state.get("crashed", 0) + 1
-            elif session.status == "abnormal":
-                state["abnormal"] = state.get("abnormal", 0) + 1
-            elif session.errors > 0:
-                state["errored"] = state.get("errored", 0) + 1
-            else:
-                state["exited"] = state.get("exited", 0) + 1
-
-    def add_session(
-        self, session  # type: Session
-    ):
-        # type: (...) -> None
-        if session.session_mode == "request":
-            self.add_aggregate_session(session)
-        else:
-            self.pending_sessions.append(session.to_json())
-        self._ensure_running()
-
-    def kill(self):
-        # type: (...) -> None
-        self._running = False
-
-    def __del__(self):
-        # type: (...) -> None
-        self.kill()
diff --git a/src/sentry_sdk_alpha/spotlight.py b/src/sentry_sdk_alpha/spotlight.py
deleted file mode 100644
index 6f830acf400be6..00000000000000
--- a/src/sentry_sdk_alpha/spotlight.py
+++ /dev/null
@@ -1,232 +0,0 @@
-import io
-import logging
-import os
-import sys
-import urllib.error
-import urllib.parse
-import urllib.request
-from itertools import chain, product
-from typing import TYPE_CHECKING
-
-import urllib3
-
-if TYPE_CHECKING:
-    from typing import Any
-    from collections.abc import Callable
-    from typing import Dict
-    from typing import Optional
-    from typing import Self
-
-from sentry_sdk_alpha.envelope import Envelope
-from sentry_sdk_alpha.utils import capture_internal_exceptions, env_to_bool
-from sentry_sdk_alpha.utils import logger as sentry_logger
-
-logger = logging.getLogger("spotlight")
-
-
-DEFAULT_SPOTLIGHT_URL = "http://localhost:8969/stream"
-DJANGO_SPOTLIGHT_MIDDLEWARE_PATH = "sentry_sdk.spotlight.SpotlightMiddleware"
-
-
-class SpotlightClient:
-    def __init__(self, url):
-        # type: (str) -> None
-        self.url = url
-        self.http = urllib3.PoolManager()
-        self.fails = 0
-
-    def capture_envelope(self, envelope):
-        # type: (Envelope) -> None
-        body = io.BytesIO()
-        envelope.serialize_into(body)
-        try:
-            req = self.http.request(
-                url=self.url,
-                body=body.getvalue(),
-                method="POST",
-                headers={
-                    "Content-Type": "application/x-sentry-envelope",
-                },
-            )
-            req.close()
-            self.fails = 0
-        except Exception as e:
-            if self.fails < 2:
-                sentry_logger.warning(str(e))
-                self.fails += 1
-            elif self.fails == 2:
-                self.fails += 1
-                sentry_logger.warning(
-                    "Looks like Spotlight is not running, will keep trying to send events but will not log errors."
-                )
-            # omitting self.fails += 1 in the `else:` case intentionally
-            # to avoid overflowing the variable if Spotlight never becomes reachable
-
-
-try:
-    from django.conf import settings
-    from django.http import HttpRequest, HttpResponse, HttpResponseServerError
-    from django.utils.deprecation import MiddlewareMixin
-
-    SPOTLIGHT_JS_ENTRY_PATH = "/assets/main.js"
-    SPOTLIGHT_JS_SNIPPET_PATTERN = (
-        "\n"
-        '\n'
-    )
-    SPOTLIGHT_ERROR_PAGE_SNIPPET = (
-        '\n'
-        '\n'
-    )
-    CHARSET_PREFIX = "charset="
-    BODY_TAG_NAME = "body"
-    BODY_CLOSE_TAG_POSSIBILITIES = tuple(
-        "".format("".join(chars))
-        for chars in product(*zip(BODY_TAG_NAME.upper(), BODY_TAG_NAME.lower()))
-    )
-
-    class SpotlightMiddleware(MiddlewareMixin):  # type: ignore[misc]
-        _spotlight_script = None  # type: Optional[str]
-        _spotlight_url = None  # type: Optional[str]
-
-        def __init__(self, get_response):
-            # type: (Self, Callable[..., HttpResponse]) -> None
-            super().__init__(get_response)
-
-            import sentry_sdk_alpha.api
-
-            self.sentry_sdk = sentry_sdk_alpha.api
-
-            spotlight_client = self.sentry_sdk.get_client().spotlight
-            if spotlight_client is None:
-                sentry_logger.warning(
-                    "Cannot find Spotlight client from SpotlightMiddleware, disabling the middleware."
-                )
-                return None
-            # Spotlight URL has a trailing `/stream` part at the end so split it off
-            self._spotlight_url = urllib.parse.urljoin(spotlight_client.url, "../")
-
-        @property
-        def spotlight_script(self):
-            # type: (Self) -> Optional[str]
-            if self._spotlight_url is not None and self._spotlight_script is None:
-                try:
-                    spotlight_js_url = urllib.parse.urljoin(
-                        self._spotlight_url, SPOTLIGHT_JS_ENTRY_PATH
-                    )
-                    req = urllib.request.Request(
-                        spotlight_js_url,
-                        method="HEAD",
-                    )
-                    urllib.request.urlopen(req)
-                    self._spotlight_script = SPOTLIGHT_JS_SNIPPET_PATTERN.format(
-                        spotlight_url=self._spotlight_url,
-                        spotlight_js_url=spotlight_js_url,
-                    )
-                except urllib.error.URLError as err:
-                    sentry_logger.debug(
-                        "Cannot get Spotlight JS to inject at %s. SpotlightMiddleware will not be very useful.",
-                        spotlight_js_url,
-                        exc_info=err,
-                    )
-
-            return self._spotlight_script
-
-        def process_response(self, _request, response):
-            # type: (Self, HttpRequest, HttpResponse) -> Optional[HttpResponse]
-            content_type_header = tuple(
-                p.strip() for p in response.headers.get("Content-Type", "").lower().split(";")
-            )
-            content_type = content_type_header[0]
-            if len(content_type_header) > 1 and content_type_header[1].startswith(CHARSET_PREFIX):
-                encoding = content_type_header[1][len(CHARSET_PREFIX) :]
-            else:
-                encoding = "utf-8"
-
-            if (
-                self.spotlight_script is not None
-                and not response.streaming
-                and content_type == "text/html"
-            ):
-                content_length = len(response.content)
-                injection = self.spotlight_script.encode(encoding)
-                injection_site = next(
-                    (
-                        idx
-                        for idx in (
-                            response.content.rfind(body_variant.encode(encoding))
-                            for body_variant in BODY_CLOSE_TAG_POSSIBILITIES
-                        )
-                        if idx > -1
-                    ),
-                    content_length,
-                )
-
-                # This approach works even when we don't have a `` tag
-                response.content = (
-                    response.content[:injection_site]
-                    + injection
-                    + response.content[injection_site:]
-                )
-
-                if response.has_header("Content-Length"):
-                    response.headers["Content-Length"] = content_length + len(injection)
-
-            return response
-
-        def process_exception(self, _request, exception):
-            # type: (Self, HttpRequest, Exception) -> Optional[HttpResponseServerError]
-            if not settings.DEBUG or not self._spotlight_url:
-                return None
-
-            try:
-                spotlight = urllib.request.urlopen(self._spotlight_url).read().decode("utf-8")
-            except urllib.error.URLError:
-                return None
-            else:
-                event_id = self.sentry_sdk.capture_exception(exception)
-                return HttpResponseServerError(
-                    spotlight.replace(
-                        "",
-                        SPOTLIGHT_ERROR_PAGE_SNIPPET.format(
-                            spotlight_url=self._spotlight_url, event_id=event_id
-                        ),
-                    )
-                )
-
-except ImportError:
-    settings = None
-
-
-def setup_spotlight(options):
-    # type: (Dict[str, Any]) -> Optional[SpotlightClient]
-    _handler = logging.StreamHandler(sys.stderr)
-    _handler.setFormatter(logging.Formatter(" [spotlight] %(levelname)s: %(message)s"))
-    logger.addHandler(_handler)
-    logger.setLevel(logging.INFO)
-
-    url = options.get("spotlight")
-
-    if url is True:
-        url = DEFAULT_SPOTLIGHT_URL
-
-    if not isinstance(url, str):
-        return None
-
-    with capture_internal_exceptions():
-        if (
-            settings is not None
-            and settings.DEBUG
-            and env_to_bool(os.environ.get("SENTRY_SPOTLIGHT_ON_ERROR", "1"))
-            and env_to_bool(os.environ.get("SENTRY_SPOTLIGHT_MIDDLEWARE", "1"))
-        ):
-            middleware = settings.MIDDLEWARE
-            if DJANGO_SPOTLIGHT_MIDDLEWARE_PATH not in middleware:
-                settings.MIDDLEWARE = type(middleware)(
-                    chain(middleware, (DJANGO_SPOTLIGHT_MIDDLEWARE_PATH,))
-                )
-                logger.info("Enabled Spotlight integration for Django")
-
-    client = SpotlightClient(url)
-    logger.info("Enabled Spotlight using sidecar at %s", url)
-
-    return client
diff --git a/src/sentry_sdk_alpha/tracing.py b/src/sentry_sdk_alpha/tracing.py
deleted file mode 100644
index 6fd621fc26c737..00000000000000
--- a/src/sentry_sdk_alpha/tracing.py
+++ /dev/null
@@ -1,606 +0,0 @@
-import json
-import warnings
-from datetime import datetime
-from typing import TYPE_CHECKING, cast
-
-from opentelemetry import context
-from opentelemetry import trace as otel_trace
-from opentelemetry.sdk.trace import ReadableSpan
-from opentelemetry.trace import INVALID_SPAN
-from opentelemetry.trace import Span as OtelSpan
-from opentelemetry.trace import TraceState, format_span_id, format_trace_id, get_current_span
-from opentelemetry.trace.status import Status, StatusCode
-from opentelemetry.version import __version__ as otel_version
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import (
-    BAGGAGE_HEADER_NAME,
-    DEFAULT_SPAN_NAME,
-    DEFAULT_SPAN_ORIGIN,
-    SENTRY_TRACE_HEADER_NAME,
-    SPANDATA,
-    SPANSTATUS,
-    TransactionSource,
-)
-from sentry_sdk_alpha.opentelemetry.consts import TRACESTATE_SAMPLE_RATE_KEY, SentrySpanAttribute
-from sentry_sdk_alpha.opentelemetry.utils import (
-    baggage_from_trace_state,
-    convert_from_otel_timestamp,
-    convert_to_otel_timestamp,
-    get_sentry_meta,
-    get_trace_context,
-    get_trace_state,
-    serialize_trace_state,
-)
-from sentry_sdk_alpha.tracing_utils import get_span_status_from_http_code
-from sentry_sdk_alpha.utils import (
-    _serialize_span_attribute,
-    get_current_thread_meta,
-    parse_version,
-    should_be_treated_as_error,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Callable, Iterator
-    from typing import Any, Dict, Optional, ParamSpec, Tuple, TypeVar, Union, overload
-
-    P = ParamSpec("P")
-    R = TypeVar("R")
-
-    from sentry_sdk_alpha._types import SamplingContext
-    from sentry_sdk_alpha.tracing_utils import Baggage
-
-_FLAGS_CAPACITY = 10
-_OTEL_VERSION = parse_version(otel_version)
-
-tracer = otel_trace.get_tracer(__name__)
-
-
-class NoOpSpan:
-    def __init__(self, **kwargs):
-        # type: (Any) -> None
-        pass
-
-    def __repr__(self):
-        # type: () -> str
-        return "<%s>" % self.__class__.__name__
-
-    @property
-    def root_span(self):
-        # type: () -> Optional[Span]
-        return None
-
-    def start_child(self, **kwargs):
-        # type: (**Any) -> NoOpSpan
-        return NoOpSpan()
-
-    def to_traceparent(self):
-        # type: () -> str
-        return ""
-
-    def to_baggage(self):
-        # type: () -> Optional[Baggage]
-        return None
-
-    def get_baggage(self):
-        # type: () -> Optional[Baggage]
-        return None
-
-    def iter_headers(self):
-        # type: () -> Iterator[Tuple[str, str]]
-        return iter(())
-
-    def set_tag(self, key, value):
-        # type: (str, Any) -> None
-        pass
-
-    def set_data(self, key, value):
-        # type: (str, Any) -> None
-        pass
-
-    def set_status(self, value):
-        # type: (str) -> None
-        pass
-
-    def set_http_status(self, http_status):
-        # type: (int) -> None
-        pass
-
-    def is_success(self):
-        # type: () -> bool
-        return True
-
-    def to_json(self):
-        # type: () -> Dict[str, Any]
-        return {}
-
-    def get_trace_context(self):
-        # type: () -> Any
-        return {}
-
-    def get_profile_context(self):
-        # type: () -> Any
-        return {}
-
-    def finish(
-        self,
-        end_timestamp=None,  # type: Optional[Union[float, datetime]]
-    ):
-        # type: (...) -> None
-        pass
-
-    def set_context(self, key, value):
-        # type: (str, dict[str, Any]) -> None
-        pass
-
-    def init_span_recorder(self, maxlen):
-        # type: (int) -> None
-        pass
-
-    def _set_initial_sampling_decision(self, sampling_context):
-        # type: (SamplingContext) -> None
-        pass
-
-
-class Span:
-    """
-    OTel span wrapper providing compatibility with the old span interface.
-    """
-
-    def __init__(
-        self,
-        *,
-        op=None,  # type: Optional[str]
-        description=None,  # type: Optional[str]
-        status=None,  # type: Optional[str]
-        sampled=None,  # type: Optional[bool]
-        start_timestamp=None,  # type: Optional[Union[datetime, float]]
-        origin=None,  # type: Optional[str]
-        name=None,  # type: Optional[str]
-        source=TransactionSource.CUSTOM,  # type: str
-        attributes=None,  # type: Optional[dict[str, Any]]
-        only_if_parent=False,  # type: bool
-        parent_span=None,  # type: Optional[Span]
-        otel_span=None,  # type: Optional[OtelSpan]
-        span=None,  # type: Optional[Span]
-    ):
-        # type: (...) -> None
-        """
-        If otel_span is passed explicitly, just acts as a proxy.
-
-        If span is passed explicitly, use it. The only purpose of this param
-        if backwards compatibility with start_transaction(transaction=...).
-
-        If only_if_parent is True, just return an INVALID_SPAN
-        and avoid instrumentation if there's no active parent span.
-        """
-        if otel_span is not None:
-            self._otel_span = otel_span
-        elif span is not None:
-            self._otel_span = span._otel_span
-        else:
-            skip_span = False
-            if only_if_parent and parent_span is None:
-                parent_span_context = get_current_span().get_span_context()
-                skip_span = not parent_span_context.is_valid or parent_span_context.is_remote
-
-            if skip_span:
-                self._otel_span = INVALID_SPAN
-            else:
-
-                if start_timestamp is not None:
-                    # OTel timestamps have nanosecond precision
-                    start_timestamp = convert_to_otel_timestamp(start_timestamp)
-
-                span_name = name or description or op or DEFAULT_SPAN_NAME
-
-                # Prepopulate some attrs so that they're accessible in traces_sampler
-                attributes = attributes or {}
-                if op is not None:
-                    attributes[SentrySpanAttribute.OP] = op
-                if source is not None:
-                    attributes[SentrySpanAttribute.SOURCE] = source
-                if description is not None:
-                    attributes[SentrySpanAttribute.DESCRIPTION] = description
-                if sampled is not None:
-                    attributes[SentrySpanAttribute.CUSTOM_SAMPLED] = sampled
-
-                parent_context = None
-                if parent_span is not None:
-                    parent_context = otel_trace.set_span_in_context(parent_span._otel_span)
-
-                self._otel_span = tracer.start_span(
-                    span_name,
-                    context=parent_context,
-                    start_time=start_timestamp,
-                    attributes=attributes,
-                )
-
-                self.origin = origin or DEFAULT_SPAN_ORIGIN
-                self.description = description
-                self.name = span_name
-
-                if status is not None:
-                    self.set_status(status)
-
-                self.update_active_thread()
-
-    def __eq__(self, other):
-        # type: (object) -> bool
-        if not isinstance(other, Span):
-            return False
-        return self._otel_span == other._otel_span
-
-    def __repr__(self):
-        # type: () -> str
-        return (
-            "<%s(op=%r, name:%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, origin=%r)>"
-            % (
-                self.__class__.__name__,
-                self.op,
-                self.name,
-                self.trace_id,
-                self.span_id,
-                self.parent_span_id,
-                self.sampled,
-                self.origin,
-            )
-        )
-
-    def __enter__(self):
-        # type: () -> Span
-        # XXX use_span? https://github.com/open-telemetry/opentelemetry-python/blob/3836da8543ce9751051e38a110c0468724042e62/opentelemetry-api/src/opentelemetry/trace/__init__.py#L547
-        #
-        # create a Context object with parent set as current span
-        ctx = otel_trace.set_span_in_context(self._otel_span)
-        # set as the implicit current context
-        self._ctx_token = context.attach(ctx)
-
-        # get the new scope that was forked on context.attach
-        self.scope = sentry_sdk_alpha.get_current_scope()
-        self.scope.span = self
-
-        return self
-
-    def __exit__(self, ty, value, tb):
-        # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
-        if value is not None and should_be_treated_as_error(ty, value):
-            self.set_status(SPANSTATUS.INTERNAL_ERROR)
-        else:
-            status_unset = (
-                hasattr(self._otel_span, "status")
-                and self._otel_span.status.status_code == StatusCode.UNSET
-            )
-            if status_unset:
-                self.set_status(SPANSTATUS.OK)
-
-        self.finish()
-        context.detach(self._ctx_token)
-        del self._ctx_token
-
-    @property
-    def description(self):
-        # type: () -> Optional[str]
-        return self.get_attribute(SentrySpanAttribute.DESCRIPTION)
-
-    @description.setter
-    def description(self, value):
-        # type: (Optional[str]) -> None
-        self.set_attribute(SentrySpanAttribute.DESCRIPTION, value)
-
-    @property
-    def origin(self):
-        # type: () -> Optional[str]
-        return self.get_attribute(SentrySpanAttribute.ORIGIN)
-
-    @origin.setter
-    def origin(self, value):
-        # type: (Optional[str]) -> None
-        self.set_attribute(SentrySpanAttribute.ORIGIN, value)
-
-    @property
-    def root_span(self):
-        # type: () -> Optional[Span]
-        root_otel_span = cast("Optional[OtelSpan]", get_sentry_meta(self._otel_span, "root_span"))
-        return Span(otel_span=root_otel_span) if root_otel_span else None
-
-    @property
-    def is_root_span(self):
-        # type: () -> bool
-        return self.root_span == self
-
-    @property
-    def parent_span_id(self):
-        # type: () -> Optional[str]
-        if not isinstance(self._otel_span, ReadableSpan) or self._otel_span.parent is None:
-            return None
-        return format_span_id(self._otel_span.parent.span_id)
-
-    @property
-    def trace_id(self):
-        # type: () -> str
-        return format_trace_id(self._otel_span.get_span_context().trace_id)
-
-    @property
-    def span_id(self):
-        # type: () -> str
-        return format_span_id(self._otel_span.get_span_context().span_id)
-
-    @property
-    def is_valid(self):
-        # type: () -> bool
-        return self._otel_span.get_span_context().is_valid and isinstance(
-            self._otel_span, ReadableSpan
-        )
-
-    @property
-    def sampled(self):
-        # type: () -> Optional[bool]
-        return self._otel_span.get_span_context().trace_flags.sampled
-
-    @property
-    def sample_rate(self):
-        # type: () -> Optional[float]
-        sample_rate = self._otel_span.get_span_context().trace_state.get(TRACESTATE_SAMPLE_RATE_KEY)
-        return float(sample_rate) if sample_rate is not None else None
-
-    @property
-    def op(self):
-        # type: () -> Optional[str]
-        return self.get_attribute(SentrySpanAttribute.OP)
-
-    @op.setter
-    def op(self, value):
-        # type: (Optional[str]) -> None
-        self.set_attribute(SentrySpanAttribute.OP, value)
-
-    @property
-    def name(self):
-        # type: () -> Optional[str]
-        return self.get_attribute(SentrySpanAttribute.NAME)
-
-    @name.setter
-    def name(self, value):
-        # type: (Optional[str]) -> None
-        self.set_attribute(SentrySpanAttribute.NAME, value)
-
-    @property
-    def source(self):
-        # type: () -> str
-        return self.get_attribute(SentrySpanAttribute.SOURCE) or TransactionSource.CUSTOM
-
-    @source.setter
-    def source(self, value):
-        # type: (str) -> None
-        self.set_attribute(SentrySpanAttribute.SOURCE, value)
-
-    @property
-    def start_timestamp(self):
-        # type: () -> Optional[datetime]
-        if not isinstance(self._otel_span, ReadableSpan):
-            return None
-
-        start_time = self._otel_span.start_time
-        if start_time is None:
-            return None
-
-        return convert_from_otel_timestamp(start_time)
-
-    @property
-    def timestamp(self):
-        # type: () -> Optional[datetime]
-        if not isinstance(self._otel_span, ReadableSpan):
-            return None
-
-        end_time = self._otel_span.end_time
-        if end_time is None:
-            return None
-
-        return convert_from_otel_timestamp(end_time)
-
-    def start_child(self, **kwargs):
-        # type: (**Any) -> Span
-        return Span(parent_span=self, **kwargs)
-
-    def iter_headers(self):
-        # type: () -> Iterator[Tuple[str, str]]
-        yield SENTRY_TRACE_HEADER_NAME, self.to_traceparent()
-        yield BAGGAGE_HEADER_NAME, serialize_trace_state(self.trace_state)
-
-    def to_traceparent(self):
-        # type: () -> str
-        if self.sampled is True:
-            sampled = "1"
-        elif self.sampled is False:
-            sampled = "0"
-        else:
-            sampled = None
-
-        traceparent = f"{self.trace_id}-{self.span_id}"
-        if sampled is not None:
-            traceparent += f"-{sampled}"
-
-        return traceparent
-
-    @property
-    def trace_state(self):
-        # type: () -> TraceState
-        return get_trace_state(self._otel_span)
-
-    def to_baggage(self):
-        # type: () -> Baggage
-        return self.get_baggage()
-
-    def get_baggage(self):
-        # type: () -> Baggage
-        return baggage_from_trace_state(self.trace_state)
-
-    def set_tag(self, key, value):
-        # type: (str, Any) -> None
-        self.set_attribute(f"{SentrySpanAttribute.TAG}.{key}", value)
-
-    def set_data(self, key, value):
-        # type: (str, Any) -> None
-        warnings.warn(
-            "`Span.set_data` is deprecated. Please use `Span.set_attribute` instead.",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-
-        # TODO-neel-potel we cannot add dicts here
-        self.set_attribute(key, value)
-
-    def get_attribute(self, name):
-        # type: (str) -> Optional[Any]
-        if not isinstance(self._otel_span, ReadableSpan) or not self._otel_span.attributes:
-            return None
-        return self._otel_span.attributes.get(name)
-
-    def set_attribute(self, key, value):
-        # type: (str, Any) -> None
-        # otel doesn't support None as values, preferring to not set the key
-        # at all instead
-        if value is None:
-            return
-        serialized_value = _serialize_span_attribute(value)
-        if serialized_value is None:
-            return
-
-        self._otel_span.set_attribute(key, serialized_value)
-
-    @property
-    def status(self):
-        # type: () -> Optional[str]
-        """
-        Return the Sentry `SPANSTATUS` corresponding to the underlying OTel status.
-        Because differences in possible values in OTel `StatusCode` and
-        Sentry `SPANSTATUS` it can not be guaranteed that the status
-        set in `set_status()` will be the same as the one returned here.
-        """
-        if not isinstance(self._otel_span, ReadableSpan):
-            return None
-
-        if self._otel_span.status.status_code == StatusCode.UNSET:
-            return None
-        elif self._otel_span.status.status_code == StatusCode.OK:
-            return SPANSTATUS.OK
-        else:
-            return SPANSTATUS.UNKNOWN_ERROR
-
-    def set_status(self, status):
-        # type: (str) -> None
-        if status == SPANSTATUS.OK:
-            otel_status = StatusCode.OK
-            otel_description = None
-        else:
-            otel_status = StatusCode.ERROR
-            otel_description = status
-
-        if _OTEL_VERSION is None or _OTEL_VERSION >= (1, 12, 0):
-            self._otel_span.set_status(otel_status, otel_description)
-        else:
-            self._otel_span.set_status(Status(otel_status, otel_description))
-
-    def set_thread(self, thread_id, thread_name):
-        # type: (Optional[int], Optional[str]) -> None
-        if thread_id is not None:
-            self.set_attribute(SPANDATA.THREAD_ID, str(thread_id))
-
-            if thread_name is not None:
-                self.set_attribute(SPANDATA.THREAD_NAME, thread_name)
-
-    def update_active_thread(self):
-        # type: () -> None
-        thread_id, thread_name = get_current_thread_meta()
-        self.set_thread(thread_id, thread_name)
-
-    def set_http_status(self, http_status):
-        # type: (int) -> None
-        self.set_attribute(SPANDATA.HTTP_STATUS_CODE, http_status)
-        self.set_status(get_span_status_from_http_code(http_status))
-
-    def is_success(self):
-        # type: () -> bool
-        return self.status == SPANSTATUS.OK
-
-    def finish(self, end_timestamp=None):
-        # type: (Optional[Union[float, datetime]]) -> None
-        if end_timestamp is not None:
-            self._otel_span.end(convert_to_otel_timestamp(end_timestamp))
-        else:
-            self._otel_span.end()
-
-    def to_json(self):
-        # type: () -> dict[str, Any]
-        """
-        Only meant for testing. Not used internally anymore.
-        """
-        if not isinstance(self._otel_span, ReadableSpan):
-            return {}
-        return json.loads(self._otel_span.to_json())
-
-    def get_trace_context(self):
-        # type: () -> dict[str, Any]
-        if not isinstance(self._otel_span, ReadableSpan):
-            return {}
-
-        return get_trace_context(self._otel_span)
-
-    def set_context(self, key, value):
-        # type: (str, Any) -> None
-        # TODO-neel-potel we cannot add dicts here
-
-        self.set_attribute(f"{SentrySpanAttribute.CONTEXT}.{key}", value)
-
-    def set_flag(self, flag, value):
-        # type: (str, bool) -> None
-        flag_count = self.get_attribute("_flag.count") or 0
-        if flag_count < _FLAGS_CAPACITY:
-            self.set_attribute(f"flag.evaluation.{flag}", value)
-            self.set_attribute("_flag.count", flag_count + 1)
-
-
-# TODO-neel-potel add deprecation
-Transaction = Span
-
-
-if TYPE_CHECKING:
-
-    @overload
-    def trace(func=None):
-        # type: (None) -> Callable[[Callable[P, R]], Callable[P, R]]
-        pass
-
-    @overload
-    def trace(func):
-        # type: (Callable[P, R]) -> Callable[P, R]
-        pass
-
-
-def trace(func=None):
-    # type: (Optional[Callable[P, R]]) -> Union[Callable[P, R], Callable[[Callable[P, R]], Callable[P, R]]]
-    """
-    Decorator to start a child span under the existing current transaction.
-    If there is no current transaction, then nothing will be traced.
-
-    .. code-block::
-        :caption: Usage
-
-        import sentry_sdk
-
-        @sentry_sdk.trace
-        def my_function():
-            ...
-
-        @sentry_sdk.trace
-        async def my_async_function():
-            ...
-    """
-    from sentry_sdk_alpha.tracing_utils import start_child_span_decorator
-
-    # This patterns allows usage of both @sentry_traced and @sentry_traced(...)
-    # See https://stackoverflow.com/questions/52126071/decorator-with-arguments-avoid-parenthesis-when-no-arguments/52126278
-    if func:
-        return start_child_span_decorator(func)
-    else:
-        return start_child_span_decorator
diff --git a/src/sentry_sdk_alpha/tracing_utils.py b/src/sentry_sdk_alpha/tracing_utils.py
deleted file mode 100644
index ec317d7675be18..00000000000000
--- a/src/sentry_sdk_alpha/tracing_utils.py
+++ /dev/null
@@ -1,854 +0,0 @@
-import contextlib
-import decimal
-import inspect
-import os
-import re
-import sys
-import uuid
-from collections.abc import Mapping
-from datetime import datetime, timedelta, timezone
-from decimal import ROUND_DOWN, Decimal, DefaultContext, localcontext
-from functools import wraps
-from random import Random
-from typing import TYPE_CHECKING
-from urllib.parse import quote, unquote
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha.consts import (
-    BAGGAGE_HEADER_NAME,
-    OP,
-    SENTRY_TRACE_HEADER_NAME,
-    SPANDATA,
-    SPANSTATUS,
-)
-from sentry_sdk_alpha.utils import (
-    Dsn,
-    _is_external_source,
-    _is_in_project_root,
-    _module_in_list,
-    capture_internal_exceptions,
-    filename_for_module,
-    is_sentry_url,
-    logger,
-    match_regex_list,
-    qualname_from_function,
-    to_string,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Generator
-    from types import FrameType
-    from typing import Any, Dict, Optional, Union
-
-
-SENTRY_TRACE_REGEX = re.compile(
-    "^[ \t]*"  # whitespace
-    "([0-9a-f]{32})?"  # trace_id
-    "-?([0-9a-f]{16})?"  # span_id
-    "-?([01])?"  # sampled
-    "[ \t]*$"  # whitespace
-)
-
-
-# This is a normal base64 regex, modified to reflect that fact that we strip the
-# trailing = or == off
-base64_stripped = (
-    # any of the characters in the base64 "alphabet", in multiples of 4
-    "([a-zA-Z0-9+/]{4})*"
-    # either nothing or 2 or 3 base64-alphabet characters (see
-    # https://en.wikipedia.org/wiki/Base64#Decoding_Base64_without_padding for
-    # why there's never only 1 extra character)
-    "([a-zA-Z0-9+/]{2,3})?"
-)
-
-
-class EnvironHeaders(Mapping):  # type: ignore
-    def __init__(
-        self,
-        environ,  # type: Mapping[str, str]
-        prefix="HTTP_",  # type: str
-    ):
-        # type: (...) -> None
-        self.environ = environ
-        self.prefix = prefix
-
-    def __getitem__(self, key):
-        # type: (str) -> Optional[Any]
-        return self.environ[self.prefix + key.replace("-", "_").upper()]
-
-    def __len__(self):
-        # type: () -> int
-        return sum(1 for _ in iter(self))
-
-    def __iter__(self):
-        # type: () -> Generator[str, None, None]
-        for k in self.environ:
-            if not isinstance(k, str):
-                continue
-
-            k = k.replace("-", "_").upper()
-            if not k.startswith(self.prefix):
-                continue
-
-            yield k[len(self.prefix) :]
-
-
-def has_tracing_enabled(options):
-    # type: (Optional[Dict[str, Any]]) -> bool
-    """
-    Returns True if either traces_sample_rate or traces_sampler is
-    defined.
-    """
-    if options is None:
-        return False
-
-    return bool(
-        options.get("traces_sample_rate") is not None or options.get("traces_sampler") is not None
-    )
-
-
-@contextlib.contextmanager
-def record_sql_queries(
-    cursor,  # type: Any
-    query,  # type: Any
-    params_list,  # type:  Any
-    paramstyle,  # type: Optional[str]
-    executemany,  # type: bool
-    record_cursor_repr=False,  # type: bool
-    span_origin=None,  # type: Optional[str]
-):
-    # type: (...) -> Generator[sentry_sdk.tracing.Span, None, None]
-
-    # TODO: Bring back capturing of params by default
-    if sentry_sdk_alpha.get_client().options["_experiments"].get("record_sql_params", False):
-        if not params_list or params_list == [None]:
-            params_list = None
-
-        if paramstyle == "pyformat":
-            paramstyle = "format"
-    else:
-        params_list = None
-        paramstyle = None
-
-    query = _format_sql(cursor, query)
-
-    data = {}
-    if params_list is not None:
-        data["db.params"] = params_list
-    if paramstyle is not None:
-        data["db.paramstyle"] = paramstyle
-    if executemany:
-        data["db.executemany"] = True
-    if record_cursor_repr and cursor is not None:
-        data["db.cursor"] = cursor
-
-    with capture_internal_exceptions():
-        sentry_sdk_alpha.add_breadcrumb(message=query, category="query", data=data)
-
-    with sentry_sdk_alpha.start_span(
-        op=OP.DB,
-        name=query,
-        origin=span_origin,
-        only_if_parent=True,
-    ) as span:
-        for k, v in data.items():
-            span.set_attribute(k, v)
-        yield span
-
-
-def _get_frame_module_abs_path(frame):
-    # type: (FrameType) -> Optional[str]
-    try:
-        return frame.f_code.co_filename
-    except Exception:
-        return None
-
-
-def _should_be_included(
-    is_sentry_sdk_frame,  # type: bool
-    namespace,  # type: Optional[str]
-    in_app_include,  # type: Optional[list[str]]
-    in_app_exclude,  # type: Optional[list[str]]
-    abs_path,  # type: Optional[str]
-    project_root,  # type: Optional[str]
-):
-    # type: (...) -> bool
-    # in_app_include takes precedence over in_app_exclude
-    should_be_included = _module_in_list(namespace, in_app_include)
-    should_be_excluded = _is_external_source(abs_path) or _module_in_list(namespace, in_app_exclude)
-    return not is_sentry_sdk_frame and (
-        should_be_included
-        or (_is_in_project_root(abs_path, project_root) and not should_be_excluded)
-    )
-
-
-def add_query_source(span):
-    # type: (sentry_sdk.tracing.Span) -> None
-    """
-    Adds OTel compatible source code information to the span
-    """
-    client = sentry_sdk_alpha.get_client()
-    if not client.is_active():
-        return
-
-    if span.start_timestamp is None:
-        return
-
-    should_add_query_source = client.options.get("enable_db_query_source", True)
-    if not should_add_query_source:
-        return
-
-    # We assume here that the query is just ending now. We can't use
-    # the actual end timestamp of the span because in OTel the span
-    # can't be finished in order to set any attributes on it.
-    duration = datetime.now(tz=timezone.utc) - span.start_timestamp
-    threshold = client.options.get("db_query_source_threshold_ms", 0)
-    slow_query = duration / timedelta(milliseconds=1) > threshold
-
-    if not slow_query:
-        return
-
-    project_root = client.options["project_root"]
-    in_app_include = client.options.get("in_app_include")
-    in_app_exclude = client.options.get("in_app_exclude")
-
-    # Find the correct frame
-    frame = sys._getframe()  # type: Union[FrameType, None]
-    while frame is not None:
-        abs_path = _get_frame_module_abs_path(frame)
-
-        try:
-            namespace = frame.f_globals.get("__name__")  # type: Optional[str]
-        except Exception:
-            namespace = None
-
-        is_sentry_sdk_frame = namespace is not None and namespace.startswith("sentry_sdk.")
-
-        should_be_included = _should_be_included(
-            is_sentry_sdk_frame=is_sentry_sdk_frame,
-            namespace=namespace,
-            in_app_include=in_app_include,
-            in_app_exclude=in_app_exclude,
-            abs_path=abs_path,
-            project_root=project_root,
-        )
-        if should_be_included:
-            break
-
-        frame = frame.f_back
-    else:
-        frame = None
-
-    # Set the data
-    if frame is not None:
-        try:
-            lineno = frame.f_lineno
-        except Exception:
-            lineno = None
-        if lineno is not None:
-            span.set_attribute(SPANDATA.CODE_LINENO, frame.f_lineno)
-
-        try:
-            namespace = frame.f_globals.get("__name__")
-        except Exception:
-            namespace = None
-        if namespace is not None:
-            span.set_attribute(SPANDATA.CODE_NAMESPACE, namespace)
-
-        filepath = _get_frame_module_abs_path(frame)
-        if filepath is not None:
-            if namespace is not None:
-                in_app_path = filename_for_module(namespace, filepath)
-            elif project_root is not None and filepath.startswith(project_root):
-                in_app_path = filepath.replace(project_root, "").lstrip(os.sep)
-            else:
-                in_app_path = filepath
-            span.set_attribute(SPANDATA.CODE_FILEPATH, in_app_path)
-
-        try:
-            code_function = frame.f_code.co_name
-        except Exception:
-            code_function = None
-
-        if code_function is not None:
-            span.set_attribute(SPANDATA.CODE_FUNCTION, frame.f_code.co_name)
-
-
-def extract_sentrytrace_data(header):
-    # type: (Optional[str]) -> Optional[Dict[str, Union[str, bool, None]]]
-    """
-    Given a `sentry-trace` header string, return a dictionary of data.
-    """
-    if not header:
-        return None
-
-    if header.startswith("00-") and header.endswith("-00"):
-        header = header[3:-3]
-
-    match = SENTRY_TRACE_REGEX.match(header)
-    if not match:
-        return None
-
-    trace_id, parent_span_id, sampled_str = match.groups()
-    parent_sampled = None
-
-    if trace_id:
-        trace_id = f"{int(trace_id, 16):032x}"
-    if parent_span_id:
-        parent_span_id = f"{int(parent_span_id, 16):016x}"
-    if sampled_str:
-        parent_sampled = sampled_str != "0"
-
-    return {
-        "trace_id": trace_id,
-        "parent_span_id": parent_span_id,
-        "parent_sampled": parent_sampled,
-    }
-
-
-def _format_sql(cursor, sql):
-    # type: (Any, str) -> Optional[str]
-
-    real_sql = None
-
-    # If we're using psycopg2, it could be that we're
-    # looking at a query that uses Composed objects. Use psycopg2's mogrify
-    # function to format the query. We lose per-parameter trimming but gain
-    # accuracy in formatting.
-    try:
-        if hasattr(cursor, "mogrify"):
-            real_sql = cursor.mogrify(sql)
-            if isinstance(real_sql, bytes):
-                real_sql = real_sql.decode(cursor.connection.encoding)
-    except Exception:
-        real_sql = None
-
-    return real_sql or to_string(sql)
-
-
-class PropagationContext:
-    """
-    The PropagationContext represents the data of a trace in Sentry.
-    """
-
-    __slots__ = (
-        "_trace_id",
-        "_span_id",
-        "parent_span_id",
-        "parent_sampled",
-        "baggage",
-    )
-
-    def __init__(
-        self,
-        trace_id=None,  # type: Optional[str]
-        span_id=None,  # type: Optional[str]
-        parent_span_id=None,  # type: Optional[str]
-        parent_sampled=None,  # type: Optional[bool]
-        baggage=None,  # type: Optional[Baggage]
-    ):
-        # type: (...) -> None
-        self._trace_id = trace_id
-        """The trace id of the Sentry trace."""
-
-        self._span_id = span_id
-        """The span id of the currently executing span."""
-
-        self.parent_span_id = parent_span_id
-        """The id of the parent span that started this span.
-        The parent span could also be a span in an upstream service."""
-
-        self.parent_sampled = parent_sampled
-        """Boolean indicator if the parent span was sampled.
-        Important when the parent span originated in an upstream service,
-        because we want to sample the whole trace, or nothing from the trace."""
-
-        self.baggage = baggage
-        """Baggage object used for dynamic sampling decisions."""
-
-    @property
-    def dynamic_sampling_context(self):
-        # type: () -> Optional[Dict[str, str]]
-        return self.baggage.dynamic_sampling_context() if self.baggage else None
-
-    @classmethod
-    def from_incoming_data(cls, incoming_data):
-        # type: (Dict[str, Any]) -> Optional[PropagationContext]
-        propagation_context = None
-
-        normalized_data = normalize_incoming_data(incoming_data)
-        baggage_header = normalized_data.get(BAGGAGE_HEADER_NAME)
-        if baggage_header:
-            propagation_context = PropagationContext()
-            propagation_context.baggage = Baggage.from_incoming_header(baggage_header)
-
-        sentry_trace_header = normalized_data.get(SENTRY_TRACE_HEADER_NAME)
-        if sentry_trace_header:
-            sentrytrace_data = extract_sentrytrace_data(sentry_trace_header)
-            if sentrytrace_data is not None:
-                if propagation_context is None:
-                    propagation_context = PropagationContext()
-                propagation_context.update(sentrytrace_data)
-
-        if propagation_context is not None:
-            propagation_context._fill_sample_rand()
-
-        return propagation_context
-
-    @property
-    def trace_id(self):
-        # type: () -> str
-        """The trace id of the Sentry trace."""
-        if not self._trace_id:
-            self._trace_id = uuid.uuid4().hex
-
-        return self._trace_id
-
-    @trace_id.setter
-    def trace_id(self, value):
-        # type: (str) -> None
-        self._trace_id = value
-
-    @property
-    def span_id(self):
-        # type: () -> str
-        """The span id of the currently executed span."""
-        if not self._span_id:
-            self._span_id = uuid.uuid4().hex[16:]
-
-        return self._span_id
-
-    @span_id.setter
-    def span_id(self, value):
-        # type: (str) -> None
-        self._span_id = value
-
-    def to_traceparent(self):
-        # type: () -> str
-        if self.parent_sampled is True:
-            sampled = "1"
-        elif self.parent_sampled is False:
-            sampled = "0"
-        else:
-            sampled = None
-
-        traceparent = f"{self.trace_id}-{self.span_id}"
-        if sampled is not None:
-            traceparent += f"-{sampled}"
-
-        return traceparent
-
-    def update(self, other_dict):
-        # type: (Dict[str, Any]) -> None
-        """
-        Updates the PropagationContext with data from the given dictionary.
-        """
-        for key, value in other_dict.items():
-            try:
-                setattr(self, key, value)
-            except AttributeError:
-                pass
-
-    def _fill_sample_rand(self):
-        # type: () -> None
-        """
-        Ensure that there is a valid sample_rand value in the baggage.
-
-        If there is a valid sample_rand value in the baggage, we keep it.
-        Otherwise, we generate a sample_rand value according to the following:
-
-          - If we have a parent_sampled value and a sample_rate in the DSC, we compute
-            a sample_rand value randomly in the range:
-                - [0, sample_rate) if parent_sampled is True,
-                - or, in the range [sample_rate, 1) if parent_sampled is False.
-
-          - If either parent_sampled or sample_rate is missing, we generate a random
-            value in the range [0, 1).
-
-        The sample_rand is deterministically generated from the trace_id, if present.
-
-        This function does nothing if there is no dynamic_sampling_context.
-        """
-        if self.dynamic_sampling_context is None or self.baggage is None:
-            return
-
-        sentry_baggage = self.baggage.sentry_items
-
-        sample_rand = None
-        if sentry_baggage.get("sample_rand"):
-            try:
-                sample_rand = Decimal(sentry_baggage["sample_rand"])
-            except Exception:
-                logger.debug(f"Failed to convert incoming sample_rand to Decimal: {sample_rand}")
-
-        if sample_rand is not None and 0 <= sample_rand < 1:
-            # sample_rand is present and valid, so don't overwrite it
-            return
-
-        sample_rate = None
-        if sentry_baggage.get("sample_rate"):
-            try:
-                sample_rate = float(sentry_baggage["sample_rate"])
-            except Exception:
-                logger.debug(f"Failed to convert incoming sample_rate to float: {sample_rate}")
-
-        lower, upper = _sample_rand_range(self.parent_sampled, sample_rate)
-
-        try:
-            sample_rand = _generate_sample_rand(self.trace_id, interval=(lower, upper))
-        except ValueError:
-            # ValueError is raised if the interval is invalid, i.e. lower >= upper.
-            # lower >= upper might happen if the incoming trace's sampled flag
-            # and sample_rate are inconsistent, e.g. sample_rate=0.0 but sampled=True.
-            # We cannot generate a sensible sample_rand value in this case.
-            logger.debug(
-                f"Could not backfill sample_rand, since parent_sampled={self.parent_sampled} "
-                f"and sample_rate={sample_rate}."
-            )
-            return
-
-        self.baggage.sentry_items["sample_rand"] = f"{sample_rand:.6f}"  # noqa: E231
-
-    def _sample_rand(self):
-        # type: () -> Optional[str]
-        """Convenience method to get the sample_rand value from the baggage."""
-        if self.baggage is None:
-            return None
-
-        return self.baggage.sentry_items.get("sample_rand")
-
-    def __repr__(self):
-        # type: (...) -> str
-        return "<PropagationContext _trace_id={} _span_id={} parent_span_id={} parent_sampled={} baggage={} dynamic_sampling_context={}>".format(
-            self._trace_id,
-            self._span_id,
-            self.parent_span_id,
-            self.parent_sampled,
-            self.baggage,
-            self.dynamic_sampling_context,
-        )
-
-
-class Baggage:
-    """
-    The W3C Baggage header information (see https://www.w3.org/TR/baggage/).
-
-    Before mutating a `Baggage` object, calling code must check that `mutable` is `True`.
-    Mutating a `Baggage` object that has `mutable` set to `False` is not allowed, but
-    it is the caller's responsibility to enforce this restriction.
-    """
-
-    __slots__ = ("sentry_items", "third_party_items", "mutable")
-
-    SENTRY_PREFIX = "sentry-"
-    SENTRY_PREFIX_REGEX = re.compile("^sentry-")
-
-    def __init__(
-        self,
-        sentry_items,  # type: Dict[str, str]
-        third_party_items="",  # type: str
-        mutable=True,  # type: bool
-    ):
-        self.sentry_items = sentry_items
-        self.third_party_items = third_party_items
-        self.mutable = mutable
-
-    @classmethod
-    def from_incoming_header(
-        cls,
-        header,  # type: Optional[str]
-    ):
-        # type: (...) -> Baggage
-        """
-        freeze if incoming header already has sentry baggage
-        """
-        sentry_items = {}
-        third_party_items = ""
-        mutable = True
-
-        if header:
-            for item in header.split(","):
-                if "=" not in item:
-                    continue
-
-                with capture_internal_exceptions():
-                    item = item.strip()
-                    key, val = item.split("=")
-                    if Baggage.SENTRY_PREFIX_REGEX.match(key):
-                        baggage_key = unquote(key.split("-")[1])
-                        sentry_items[baggage_key] = unquote(val)
-                        mutable = False
-                    else:
-                        third_party_items += ("," if third_party_items else "") + item
-
-        return Baggage(sentry_items, third_party_items, mutable)
-
-    @classmethod
-    def from_options(cls, scope):
-        # type: (sentry_sdk.scope.Scope) -> Optional[Baggage]
-
-        sentry_items = {}  # type: Dict[str, str]
-        third_party_items = ""
-        mutable = False
-
-        client = sentry_sdk_alpha.get_client()
-
-        if not client.is_active() or scope._propagation_context is None:
-            return Baggage(sentry_items)
-
-        options = client.options
-        propagation_context = scope._propagation_context
-
-        if propagation_context is not None:
-            sentry_items["trace_id"] = propagation_context.trace_id
-
-        if options.get("environment"):
-            sentry_items["environment"] = options["environment"]
-
-        if options.get("release"):
-            sentry_items["release"] = options["release"]
-
-        if options.get("dsn"):
-            sentry_items["public_key"] = Dsn(options["dsn"]).public_key
-
-        if options.get("traces_sample_rate"):
-            sentry_items["sample_rate"] = str(options["traces_sample_rate"])
-
-        return Baggage(sentry_items, third_party_items, mutable)
-
-    def freeze(self):
-        # type: () -> None
-        self.mutable = False
-
-    def dynamic_sampling_context(self):
-        # type: () -> Dict[str, str]
-        header = {}
-
-        for key, item in self.sentry_items.items():
-            header[key] = item
-
-        return header
-
-    def serialize(self, include_third_party=False):
-        # type: (bool) -> str
-        items = []
-
-        for key, val in self.sentry_items.items():
-            with capture_internal_exceptions():
-                item = Baggage.SENTRY_PREFIX + quote(key) + "=" + quote(str(val))
-                items.append(item)
-
-        if include_third_party:
-            items.append(self.third_party_items)
-
-        return ",".join(items)
-
-    @staticmethod
-    def strip_sentry_baggage(header):
-        # type: (str) -> str
-        """Remove Sentry baggage from the given header.
-
-        Given a Baggage header, return a new Baggage header with all Sentry baggage items removed.
-        """
-        return ",".join(
-            item
-            for item in header.split(",")
-            if not Baggage.SENTRY_PREFIX_REGEX.match(item.strip())
-        )
-
-    def __repr__(self):
-        # type: () -> str
-        return f'<Baggage "{self.serialize(include_third_party=True)}", mutable={self.mutable}>'
-
-
-def should_propagate_trace(client, url):
-    # type: (sentry_sdk.client.BaseClient, str) -> bool
-    """
-    Returns True if url matches trace_propagation_targets configured in the given client. Otherwise, returns False.
-    """
-    trace_propagation_targets = client.options["trace_propagation_targets"]
-
-    if is_sentry_url(client, url):
-        return False
-
-    return match_regex_list(url, trace_propagation_targets, substring_matching=True)
-
-
-def normalize_incoming_data(incoming_data):
-    # type: (Dict[str, Any]) -> Dict[str, Any]
-    """
-    Normalizes incoming data so the keys are all lowercase with dashes instead of underscores and stripped from known prefixes.
-    """
-    data = {}
-    for key, value in incoming_data.items():
-        if key.startswith("HTTP_"):
-            key = key[5:]
-
-        key = key.replace("_", "-").lower()
-        data[key] = value
-
-    return data
-
-
-def start_child_span_decorator(func):
-    # type: (Any) -> Any
-    """
-    Decorator to add child spans for functions.
-
-    See also ``sentry_sdk.tracing.trace()``.
-    """
-    # Asynchronous case
-    if inspect.iscoroutinefunction(func):
-
-        @wraps(func)
-        async def func_with_tracing(*args, **kwargs):
-            # type: (*Any, **Any) -> Any
-
-            span = get_current_span()
-
-            if span is None:
-                logger.debug(
-                    "Cannot create a child span for %s. "
-                    "Please start a Sentry transaction before calling this function.",
-                    qualname_from_function(func),
-                )
-                return await func(*args, **kwargs)
-
-            with span.start_child(
-                op=OP.FUNCTION,
-                name=qualname_from_function(func),
-            ):
-                return await func(*args, **kwargs)
-
-        try:
-            func_with_tracing.__signature__ = inspect.signature(func)  # type: ignore[attr-defined]
-        except Exception:
-            pass
-
-    # Synchronous case
-    else:
-
-        @wraps(func)
-        def func_with_tracing(*args, **kwargs):
-            # type: (*Any, **Any) -> Any
-
-            span = get_current_span()
-
-            if span is None:
-                logger.debug(
-                    "Cannot create a child span for %s. "
-                    "Please start a Sentry transaction before calling this function.",
-                    qualname_from_function(func),
-                )
-                return func(*args, **kwargs)
-
-            with span.start_child(
-                op=OP.FUNCTION,
-                name=qualname_from_function(func),
-            ):
-                return func(*args, **kwargs)
-
-        try:
-            func_with_tracing.__signature__ = inspect.signature(func)  # type: ignore[attr-defined]
-        except Exception:
-            pass
-
-    return func_with_tracing
-
-
-def get_current_span(scope=None):
-    # type: (Optional[sentry_sdk.Scope]) -> Optional[sentry_sdk.tracing.Span]
-    """
-    Returns the currently active span if there is one running, otherwise `None`
-    """
-    scope = scope or sentry_sdk_alpha.get_current_scope()
-    current_span = scope.span
-    return current_span
-
-
-def _generate_sample_rand(
-    trace_id,  # type: Optional[str]
-    interval=(0.0, 1.0),  # type: tuple[float, float]
-):
-    # type: (...) -> Optional[decimal.Decimal]
-    """Generate a sample_rand value from a trace ID.
-
-    The generated value will be pseudorandomly chosen from the provided
-    interval. Specifically, given (lower, upper) = interval, the generated
-    value will be in the range [lower, upper). The value has 6-digit precision,
-    so when printing with .6f, the value will never be rounded up.
-
-    The pseudorandom number generator is seeded with the trace ID.
-    """
-    lower, upper = interval
-    if not lower < upper:  # using `if lower >= upper` would handle NaNs incorrectly
-        raise ValueError("Invalid interval: lower must be less than upper")
-
-    rng = Random(trace_id)
-    sample_rand = upper
-    while sample_rand >= upper:
-        sample_rand = rng.uniform(lower, upper)
-
-    # Round down to exactly six decimal-digit precision.
-    # Setting the context is needed to avoid an InvalidOperation exception
-    # in case the user has changed the default precision or set traps.
-    with localcontext(DefaultContext) as ctx:
-        ctx.prec = 6
-        return Decimal(sample_rand).quantize(
-            Decimal("0.000001"),
-            rounding=ROUND_DOWN,
-        )
-
-
-def _sample_rand_range(parent_sampled, sample_rate):
-    # type: (Optional[bool], Optional[float]) -> tuple[float, float]
-    """
-    Compute the lower (inclusive) and upper (exclusive) bounds of the range of values
-    that a generated sample_rand value must fall into, given the parent_sampled and
-    sample_rate values.
-    """
-    if parent_sampled is None or sample_rate is None:
-        return 0.0, 1.0
-    elif parent_sampled is True:
-        return 0.0, sample_rate
-    else:  # parent_sampled is False
-        return sample_rate, 1.0
-
-
-def get_span_status_from_http_code(http_status_code):
-    # type: (int) -> str
-    """
-    Returns the Sentry status corresponding to the given HTTP status code.
-
-    See: https://develop.sentry.dev/sdk/event-payloads/contexts/#trace-context
-    """
-    if http_status_code < 400:
-        return SPANSTATUS.OK
-
-    elif 400 <= http_status_code < 500:
-        if http_status_code == 403:
-            return SPANSTATUS.PERMISSION_DENIED
-        elif http_status_code == 404:
-            return SPANSTATUS.NOT_FOUND
-        elif http_status_code == 429:
-            return SPANSTATUS.RESOURCE_EXHAUSTED
-        elif http_status_code == 413:
-            return SPANSTATUS.FAILED_PRECONDITION
-        elif http_status_code == 401:
-            return SPANSTATUS.UNAUTHENTICATED
-        elif http_status_code == 409:
-            return SPANSTATUS.ALREADY_EXISTS
-        else:
-            return SPANSTATUS.INVALID_ARGUMENT
-
-    elif 500 <= http_status_code < 600:
-        if http_status_code == 504:
-            return SPANSTATUS.DEADLINE_EXCEEDED
-        elif http_status_code == 501:
-            return SPANSTATUS.UNIMPLEMENTED
-        elif http_status_code == 503:
-            return SPANSTATUS.UNAVAILABLE
-        else:
-            return SPANSTATUS.INTERNAL_ERROR
-
-    return SPANSTATUS.UNKNOWN_ERROR
diff --git a/src/sentry_sdk_alpha/transport.py b/src/sentry_sdk_alpha/transport.py
deleted file mode 100644
index 49764738d9e069..00000000000000
--- a/src/sentry_sdk_alpha/transport.py
+++ /dev/null
@@ -1,793 +0,0 @@
-import gzip
-import io
-import os
-import socket
-import ssl
-import time
-from abc import ABC, abstractmethod
-from collections import defaultdict
-from datetime import datetime, timedelta, timezone
-from urllib.request import getproxies
-
-try:
-    import brotli  # type: ignore
-except ImportError:
-    brotli = None
-
-from typing import TYPE_CHECKING, Dict, List, cast
-
-import certifi
-import urllib3
-
-from sentry_sdk_alpha.consts import EndpointType
-from sentry_sdk_alpha.envelope import Envelope, Item, PayloadRef
-from sentry_sdk_alpha.utils import Dsn, capture_internal_exceptions, logger
-from sentry_sdk_alpha.worker import BackgroundWorker
-
-if TYPE_CHECKING:
-    from collections.abc import Callable, Iterable, Mapping
-    from typing import Any, DefaultDict, Optional, Self, Tuple, Type, Union
-
-    from urllib3.poolmanager import PoolManager, ProxyManager
-
-    from sentry_sdk_alpha._types import EventDataCategory
-
-KEEP_ALIVE_SOCKET_OPTIONS = []
-for option in [
-    (socket.SOL_SOCKET, lambda: getattr(socket, "SO_KEEPALIVE"), 1),  # noqa: B009
-    (socket.SOL_TCP, lambda: getattr(socket, "TCP_KEEPIDLE"), 45),  # noqa: B009
-    (socket.SOL_TCP, lambda: getattr(socket, "TCP_KEEPINTVL"), 10),  # noqa: B009
-    (socket.SOL_TCP, lambda: getattr(socket, "TCP_KEEPCNT"), 6),  # noqa: B009
-]:
-    try:
-        KEEP_ALIVE_SOCKET_OPTIONS.append((option[0], option[1](), option[2]))
-    except AttributeError:
-        # a specific option might not be available on specific systems,
-        # e.g. TCP_KEEPIDLE doesn't exist on macOS
-        pass
-
-
-class Transport(ABC):
-    """Baseclass for all transports.
-
-    A transport is used to send an event to sentry.
-    """
-
-    parsed_dsn = None  # type: Optional[Dsn]
-
-    def __init__(self, options=None):
-        # type: (Self, Optional[Dict[str, Any]]) -> None
-        self.options = options
-        if options and options["dsn"] is not None and options["dsn"]:
-            self.parsed_dsn = Dsn(options["dsn"])
-        else:
-            self.parsed_dsn = None
-
-    @abstractmethod
-    def capture_envelope(self, envelope):
-        # type: (Self, Envelope) -> None
-        """
-        Send an envelope to Sentry.
-
-        Envelopes are a data container format that can hold any type of data
-        submitted to Sentry. We use it to send all event data (including errors,
-        transactions, crons check-ins, etc.) to Sentry.
-        """
-        pass
-
-    def flush(
-        self,
-        timeout,
-        callback=None,
-    ):
-        # type: (Self, float, Optional[Any]) -> None
-        """
-        Wait `timeout` seconds for the current events to be sent out.
-
-        The default implementation is a no-op, since this method may only be relevant to some transports.
-        Subclasses should override this method if necessary.
-        """
-        return None
-
-    def kill(self):
-        # type: (Self) -> None
-        """
-        Forcefully kills the transport.
-
-        The default implementation is a no-op, since this method may only be relevant to some transports.
-        Subclasses should override this method if necessary.
-        """
-        return None
-
-    def record_lost_event(
-        self,
-        reason,  # type: str
-        data_category=None,  # type: Optional[EventDataCategory]
-        item=None,  # type: Optional[Item]
-        *,
-        quantity=1,  # type: int
-    ):
-        # type: (...) -> None
-        """This increments a counter for event loss by reason and
-        data category by the given positive-int quantity (default 1).
-
-        If an item is provided, the data category and quantity are
-        extracted from the item, and the values passed for
-        data_category and quantity are ignored.
-
-        When recording a lost transaction via data_category="transaction",
-        the calling code should also record the lost spans via this method.
-        When recording lost spans, `quantity` should be set to the number
-        of contained spans, plus one for the transaction itself. When
-        passing an Item containing a transaction via the `item` parameter,
-        this method automatically records the lost spans.
-        """
-        return None
-
-    def is_healthy(self):
-        # type: (Self) -> bool
-        return True
-
-    def __del__(self):
-        # type: (Self) -> None
-        try:
-            self.kill()
-        except Exception:
-            pass
-
-
-def _parse_rate_limits(header, now=None):
-    # type: (str, Optional[datetime]) -> Iterable[Tuple[Optional[EventDataCategory], datetime]]
-    if now is None:
-        now = datetime.now(timezone.utc)
-
-    for limit in header.split(","):
-        try:
-            parameters = limit.strip().split(":")
-            retry_after_val, categories = parameters[:2]
-
-            retry_after = now + timedelta(seconds=int(retry_after_val))
-            for category in categories and categories.split(";") or (None,):
-                category = cast("Optional[EventDataCategory]", category)
-                yield category, retry_after
-        except (LookupError, ValueError):
-            continue
-
-
-class BaseHttpTransport(Transport):
-    """The base HTTP transport."""
-
-    TIMEOUT = 30  # seconds
-
-    def __init__(self, options):
-        # type: (Self, Dict[str, Any]) -> None
-        from sentry_sdk_alpha.consts import VERSION
-
-        Transport.__init__(self, options)
-        assert self.parsed_dsn is not None
-        self.options = options  # type: Dict[str, Any]
-        self._worker = BackgroundWorker(queue_size=options["transport_queue_size"])
-        self._auth = self.parsed_dsn.to_auth("sentry.python/%s" % VERSION)
-        self._disabled_until = {}  # type: Dict[Optional[EventDataCategory], datetime]
-        # We only use this Retry() class for the `get_retry_after` method it exposes
-        self._retry = urllib3.util.Retry()
-        self._discarded_events = defaultdict(
-            int
-        )  # type: DefaultDict[Tuple[EventDataCategory, str], int]
-        self._last_client_report_sent = time.time()
-
-        self._pool = self._make_pool()
-
-        experiments = options.get("_experiments", {})
-        compression_level = experiments.get(
-            "transport_compression_level",
-            experiments.get("transport_zlib_compression_level"),
-        )
-        compression_algo = experiments.get(
-            "transport_compression_algo",
-            (
-                "gzip"
-                # if only compression level is set, assume gzip for backwards compatibility
-                # if we don't have brotli available, fallback to gzip
-                if compression_level is not None or brotli is None
-                else "br"
-            ),
-        )
-
-        if compression_algo == "br" and brotli is None:
-            logger.warning(
-                "You asked for brotli compression without the Brotli module, falling back to gzip -9"
-            )
-            compression_algo = "gzip"
-            compression_level = None
-
-        if compression_algo not in ("br", "gzip"):
-            logger.warning("Unknown compression algo %s, disabling compression", compression_algo)
-            self._compression_level = 0
-            self._compression_algo = None
-        else:
-            self._compression_algo = compression_algo
-
-        if compression_level is not None:
-            self._compression_level = compression_level
-        elif self._compression_algo == "gzip":
-            self._compression_level = 9
-        elif self._compression_algo == "br":
-            self._compression_level = 4
-
-    def record_lost_event(
-        self,
-        reason,  # type: str
-        data_category=None,  # type: Optional[EventDataCategory]
-        item=None,  # type: Optional[Item]
-        *,
-        quantity=1,  # type: int
-    ):
-        # type: (...) -> None
-        if not self.options["send_client_reports"]:
-            return
-
-        if item is not None:
-            data_category = item.data_category
-            quantity = 1  # If an item is provided, we always count it as 1 (except for attachments, handled below).
-
-            if data_category == "transaction":
-                # Also record the lost spans
-                event = item.get_transaction_event() or {}
-
-                # +1 for the transaction itself
-                span_count = len(cast(list[dict[str, object]], event.get("spans") or [])) + 1
-                self.record_lost_event(reason, "span", quantity=span_count)
-
-            elif data_category == "attachment":
-                # quantity of 0 is actually 1 as we do not want to count
-                # empty attachments as actually empty.
-                quantity = len(item.get_bytes()) or 1
-
-        elif data_category is None:
-            raise TypeError("data category not provided")
-
-        self._discarded_events[data_category, reason] += quantity
-
-    def _get_header_value(self, response, header):
-        # type: (Self, Any, str) -> Optional[str]
-        return response.headers.get(header)
-
-    def _update_rate_limits(self, response):
-        # type: (Self, Union[urllib3.BaseHTTPResponse, httpcore.Response]) -> None
-
-        # new sentries with more rate limit insights.  We honor this header
-        # no matter of the status code to update our internal rate limits.
-        header = self._get_header_value(response, "x-sentry-rate-limits")
-        if header:
-            logger.warning("Rate-limited via x-sentry-rate-limits")
-            self._disabled_until.update(_parse_rate_limits(header))
-
-        # old sentries only communicate global rate limit hits via the
-        # retry-after header on 429.  This header can also be emitted on new
-        # sentries if a proxy in front wants to globally slow things down.
-        elif response.status == 429:
-            logger.warning("Rate-limited via 429")
-            retry_after_value = self._get_header_value(response, "Retry-After")
-            retry_after = (
-                self._retry.parse_retry_after(retry_after_value)
-                if retry_after_value is not None
-                else None
-            ) or 60
-            self._disabled_until[None] = datetime.now(timezone.utc) + timedelta(seconds=retry_after)
-
-    def _send_request(
-        self,
-        body,
-        headers,
-        endpoint_type=EndpointType.ENVELOPE,
-        envelope=None,
-    ):
-        # type: (Self, bytes, Dict[str, str], EndpointType, Optional[Envelope]) -> None
-
-        def record_loss(reason):
-            # type: (str) -> None
-            if envelope is None:
-                self.record_lost_event(reason, data_category="error")
-            else:
-                for item in envelope.items:
-                    self.record_lost_event(reason, item=item)
-
-        headers.update(
-            {
-                "User-Agent": str(self._auth.client),
-                "X-Sentry-Auth": str(self._auth.to_header()),
-            }
-        )
-        try:
-            response = self._request(
-                "POST",
-                endpoint_type,
-                body,
-                headers,
-            )
-        except Exception:
-            self.on_dropped_event("network")
-            record_loss("network_error")
-            raise
-
-        try:
-            self._update_rate_limits(response)
-
-            if response.status == 429:
-                # if we hit a 429.  Something was rate limited but we already
-                # acted on this in `self._update_rate_limits`.  Note that we
-                # do not want to record event loss here as we will have recorded
-                # an outcome in relay already.
-                self.on_dropped_event("status_429")
-                pass
-
-            elif response.status >= 300 or response.status < 200:
-                logger.error(
-                    "Unexpected status code: %s (body: %s)",
-                    response.status,
-                    getattr(response, "data", getattr(response, "content", None)),
-                )
-                self.on_dropped_event(f"status_{response.status}")
-                record_loss("network_error")
-        finally:
-            response.close()
-
-    def on_dropped_event(self, _reason):
-        # type: (Self, str) -> None
-        return None
-
-    def _fetch_pending_client_report(self, force=False, interval=60):
-        # type: (Self, bool, int) -> Optional[Item]
-        if not self.options["send_client_reports"]:
-            return None
-
-        if not (force or self._last_client_report_sent < time.time() - interval):
-            return None
-
-        discarded_events = self._discarded_events
-        self._discarded_events = defaultdict(int)
-        self._last_client_report_sent = time.time()
-
-        if not discarded_events:
-            return None
-
-        return Item(
-            PayloadRef(
-                json={
-                    "timestamp": time.time(),
-                    "discarded_events": [
-                        {"reason": reason, "category": category, "quantity": quantity}
-                        for (
-                            (category, reason),
-                            quantity,
-                        ) in discarded_events.items()
-                    ],
-                }
-            ),
-            type="client_report",
-        )
-
-    def _flush_client_reports(self, force=False):
-        # type: (Self, bool) -> None
-        client_report = self._fetch_pending_client_report(force=force, interval=60)
-        if client_report is not None:
-            self.capture_envelope(Envelope(items=[client_report]))
-
-    def _check_disabled(self, category):
-        # type: (str) -> bool
-        def _disabled(bucket):
-            # type: (Any) -> bool
-            ts = self._disabled_until.get(bucket)
-            return ts is not None and ts > datetime.now(timezone.utc)
-
-        return _disabled(category) or _disabled(None)
-
-    def _is_rate_limited(self):
-        # type: (Self) -> bool
-        return any(ts > datetime.now(timezone.utc) for ts in self._disabled_until.values())
-
-    def _is_worker_full(self):
-        # type: (Self) -> bool
-        return self._worker.full()
-
-    def is_healthy(self):
-        # type: (Self) -> bool
-        return not (self._is_worker_full() or self._is_rate_limited())
-
-    def _send_envelope(self, envelope):
-        # type: (Self, Envelope) -> None
-
-        # remove all items from the envelope which are over quota
-        new_items = []
-        for item in envelope.items:
-            if self._check_disabled(item.data_category):
-                if item.data_category in ("transaction", "error", "default"):
-                    self.on_dropped_event("self_rate_limits")
-                self.record_lost_event("ratelimit_backoff", item=item)
-            else:
-                new_items.append(item)
-
-        # Since we're modifying the envelope here make a copy so that others
-        # that hold references do not see their envelope modified.
-        envelope = Envelope(headers=envelope.headers, items=new_items)
-
-        if not envelope.items:
-            return None
-
-        # since we're already in the business of sending out an envelope here
-        # check if we have one pending for the stats session envelopes so we
-        # can attach it to this enveloped scheduled for sending.  This will
-        # currently typically attach the client report to the most recent
-        # session update.
-        client_report_item = self._fetch_pending_client_report(interval=30)
-        if client_report_item is not None:
-            envelope.items.append(client_report_item)
-
-        content_encoding, body = self._serialize_envelope(envelope)
-
-        assert self.parsed_dsn is not None
-        logger.debug(
-            "Sending envelope [%s] project:%s host:%s",
-            envelope.description,
-            self.parsed_dsn.project_id,
-            self.parsed_dsn.host,
-        )
-
-        headers = {
-            "Content-Type": "application/x-sentry-envelope",
-        }
-        if content_encoding:
-            headers["Content-Encoding"] = content_encoding
-
-        self._send_request(
-            body.getvalue(),
-            headers=headers,
-            endpoint_type=EndpointType.ENVELOPE,
-            envelope=envelope,
-        )
-        return None
-
-    def _serialize_envelope(self, envelope):
-        # type: (Self, Envelope) -> tuple[Optional[str], io.BytesIO]
-        content_encoding = None
-        body = io.BytesIO()
-        if self._compression_level == 0 or self._compression_algo is None:
-            envelope.serialize_into(body)
-        else:
-            content_encoding = self._compression_algo
-            if self._compression_algo == "br" and brotli is not None:
-                body.write(brotli.compress(envelope.serialize(), quality=self._compression_level))
-            else:  # assume gzip as we sanitize the algo value in init
-                with gzip.GzipFile(
-                    fileobj=body, mode="w", compresslevel=self._compression_level
-                ) as f:
-                    envelope.serialize_into(f)
-
-        return content_encoding, body
-
-    def _get_pool_options(self):
-        # type: (Self) -> Dict[str, Any]
-        raise NotImplementedError()
-
-    def _in_no_proxy(self, parsed_dsn):
-        # type: (Self, Dsn) -> bool
-        no_proxy = getproxies().get("no")
-        if not no_proxy:
-            return False
-        for host in no_proxy.split(","):
-            host = host.strip()
-            if parsed_dsn.host.endswith(host) or parsed_dsn.netloc.endswith(host):
-                return True
-        return False
-
-    def _make_pool(self):
-        # type: (Self) -> Union[PoolManager, ProxyManager, httpcore.SOCKSProxy, httpcore.HTTPProxy, httpcore.ConnectionPool]
-        raise NotImplementedError()
-
-    def _request(
-        self,
-        method,
-        endpoint_type,
-        body,
-        headers,
-    ):
-        # type: (Self, str, EndpointType, Any, Mapping[str, str]) -> Union[urllib3.BaseHTTPResponse, httpcore.Response]
-        raise NotImplementedError()
-
-    def capture_envelope(
-        self, envelope  # type: Envelope
-    ):
-        # type: (...) -> None
-        def send_envelope_wrapper():
-            # type: () -> None
-            with capture_internal_exceptions():
-                self._send_envelope(envelope)
-                self._flush_client_reports()
-
-        if not self._worker.submit(send_envelope_wrapper):
-            self.on_dropped_event("full_queue")
-            for item in envelope.items:
-                self.record_lost_event("queue_overflow", item=item)
-
-    def flush(
-        self,
-        timeout,
-        callback=None,
-    ):
-        # type: (Self, float, Optional[Callable[[int, float], None]]) -> None
-        logger.debug("Flushing HTTP transport")
-
-        if timeout > 0:
-            self._worker.submit(lambda: self._flush_client_reports(force=True))
-            self._worker.flush(timeout, callback)
-
-    def kill(self):
-        # type: (Self) -> None
-        logger.debug("Killing HTTP transport")
-        self._worker.kill()
-
-
-class HttpTransport(BaseHttpTransport):
-    if TYPE_CHECKING:
-        _pool: PoolManager | ProxyManager
-
-    def _get_pool_options(self):
-        # type: (Self) -> Dict[str, Any]
-
-        num_pools = self.options.get("_experiments", {}).get("transport_num_pools")
-        options = {
-            "num_pools": 2 if num_pools is None else int(num_pools),
-            "cert_reqs": "CERT_REQUIRED",
-            "timeout": urllib3.Timeout(total=self.TIMEOUT),
-        }
-
-        socket_options = None  # type: Optional[List[Tuple[int, int, int | bytes]]]
-
-        if self.options["socket_options"] is not None:
-            socket_options = self.options["socket_options"]
-
-        if self.options["keep_alive"]:
-            if socket_options is None:
-                socket_options = []
-
-            used_options = {(o[0], o[1]) for o in socket_options}
-            for default_option in KEEP_ALIVE_SOCKET_OPTIONS:
-                if (default_option[0], default_option[1]) not in used_options:
-                    socket_options.append(default_option)
-
-        if socket_options is not None:
-            options["socket_options"] = socket_options
-
-        options["ca_certs"] = (
-            self.options["ca_certs"]  # User-provided bundle from the SDK init
-            or os.environ.get("SSL_CERT_FILE")
-            or os.environ.get("REQUESTS_CA_BUNDLE")
-            or certifi.where()
-        )
-
-        options["cert_file"] = self.options["cert_file"] or os.environ.get("CLIENT_CERT_FILE")
-        options["key_file"] = self.options["key_file"] or os.environ.get("CLIENT_KEY_FILE")
-
-        return options
-
-    def _make_pool(self):
-        # type: (Self) -> Union[PoolManager, ProxyManager]
-        if self.parsed_dsn is None:
-            raise ValueError("Cannot create HTTP-based transport without valid DSN")
-
-        proxy = None
-        no_proxy = self._in_no_proxy(self.parsed_dsn)
-
-        # try HTTPS first
-        https_proxy = self.options["https_proxy"]
-        if self.parsed_dsn.scheme == "https" and (https_proxy != ""):
-            proxy = https_proxy or (not no_proxy and getproxies().get("https"))
-
-        # maybe fallback to HTTP proxy
-        http_proxy = self.options["http_proxy"]
-        if not proxy and (http_proxy != ""):
-            proxy = http_proxy or (not no_proxy and getproxies().get("http"))
-
-        opts = self._get_pool_options()
-
-        if proxy:
-            proxy_headers = self.options["proxy_headers"]
-            if proxy_headers:
-                opts["proxy_headers"] = proxy_headers
-
-            if proxy.startswith("socks"):
-                use_socks_proxy = True
-                try:
-                    # Check if PySocks dependency is available
-                    from urllib3.contrib.socks import SOCKSProxyManager
-                except ImportError:
-                    use_socks_proxy = False
-                    logger.warning(
-                        "You have configured a SOCKS proxy (%s) but support for SOCKS proxies is not installed. Disabling proxy support. Please add `PySocks` (or `urllib3` with the `[socks]` extra) to your dependencies.",
-                        proxy,
-                    )
-
-                if use_socks_proxy:
-                    return SOCKSProxyManager(proxy, **opts)
-                else:
-                    return urllib3.PoolManager(**opts)
-            else:
-                return urllib3.ProxyManager(proxy, **opts)
-        else:
-            return urllib3.PoolManager(**opts)
-
-    def _request(
-        self,
-        method,
-        endpoint_type,
-        body,
-        headers,
-    ):
-        # type: (Self, str, EndpointType, Any, Mapping[str, str]) -> urllib3.BaseHTTPResponse
-        return self._pool.request(
-            method,
-            self._auth.get_api_url(endpoint_type),
-            body=body,
-            headers=headers,
-        )
-
-
-try:
-    import h2  # noqa: F401
-    import httpcore
-except ImportError:
-    # Sorry, no Http2Transport for you
-    class Http2Transport(HttpTransport):
-        def __init__(self, options):
-            # type: (Self, Dict[str, Any]) -> None
-            super().__init__(options)
-            logger.warning(
-                "You tried to use HTTP2Transport but don't have httpcore[http2] installed. Falling back to HTTPTransport."
-            )
-
-else:
-
-    class Http2Transport(BaseHttpTransport):  # type: ignore
-        """The HTTP2 transport based on httpcore."""
-
-        TIMEOUT = 15
-
-        if TYPE_CHECKING:
-            _pool: httpcore.SOCKSProxy | httpcore.HTTPProxy | httpcore.ConnectionPool
-
-        def _get_header_value(self, response, header):
-            # type: (Self, httpcore.Response, str) -> Optional[str]
-            return next(
-                (
-                    val.decode("ascii")
-                    for key, val in response.headers
-                    if key.decode("ascii").lower() == header
-                ),
-                None,
-            )
-
-        def _request(
-            self,
-            method,
-            endpoint_type,
-            body,
-            headers,
-        ):
-            # type: (Self, str, EndpointType, Any, Mapping[str, str]) -> httpcore.Response
-            response = self._pool.request(
-                method,
-                self._auth.get_api_url(endpoint_type),
-                content=body,
-                headers=headers,  # type: ignore
-                extensions={
-                    "timeout": {
-                        "pool": self.TIMEOUT,
-                        "connect": self.TIMEOUT,
-                        "write": self.TIMEOUT,
-                        "read": self.TIMEOUT,
-                    }
-                },
-            )
-            return response
-
-        def _get_pool_options(self):
-            # type: (Self) -> Dict[str, Any]
-            options = {
-                "http2": self.parsed_dsn is not None and self.parsed_dsn.scheme == "https",
-                "retries": 3,
-            }  # type: Dict[str, Any]
-
-            socket_options = (
-                self.options["socket_options"] if self.options["socket_options"] is not None else []
-            )
-
-            used_options = {(o[0], o[1]) for o in socket_options}
-            for default_option in KEEP_ALIVE_SOCKET_OPTIONS:
-                if (default_option[0], default_option[1]) not in used_options:
-                    socket_options.append(default_option)
-
-            options["socket_options"] = socket_options
-
-            ssl_context = ssl.create_default_context()
-            ssl_context.load_verify_locations(
-                self.options["ca_certs"]  # User-provided bundle from the SDK init
-                or os.environ.get("SSL_CERT_FILE")
-                or os.environ.get("REQUESTS_CA_BUNDLE")
-                or certifi.where()
-            )
-            cert_file = self.options["cert_file"] or os.environ.get("CLIENT_CERT_FILE")
-            key_file = self.options["key_file"] or os.environ.get("CLIENT_KEY_FILE")
-            if cert_file is not None:
-                ssl_context.load_cert_chain(cert_file, key_file)
-
-            options["ssl_context"] = ssl_context
-
-            return options
-
-        def _make_pool(self):
-            # type: (Self) -> Union[httpcore.SOCKSProxy, httpcore.HTTPProxy, httpcore.ConnectionPool]
-            if self.parsed_dsn is None:
-                raise ValueError("Cannot create HTTP-based transport without valid DSN")
-            proxy = None
-            no_proxy = self._in_no_proxy(self.parsed_dsn)
-
-            # try HTTPS first
-            https_proxy = self.options["https_proxy"]
-            if self.parsed_dsn.scheme == "https" and (https_proxy != ""):
-                proxy = https_proxy or (not no_proxy and getproxies().get("https"))
-
-            # maybe fallback to HTTP proxy
-            http_proxy = self.options["http_proxy"]
-            if not proxy and (http_proxy != ""):
-                proxy = http_proxy or (not no_proxy and getproxies().get("http"))
-
-            opts = self._get_pool_options()
-
-            if proxy:
-                proxy_headers = self.options["proxy_headers"]
-                if proxy_headers:
-                    opts["proxy_headers"] = proxy_headers
-
-                if proxy.startswith("socks"):
-                    try:
-                        if "socket_options" in opts:
-                            socket_options = opts.pop("socket_options")
-                            if socket_options:
-                                logger.warning(
-                                    "You have defined socket_options but using a SOCKS proxy which doesn't support these. We'll ignore socket_options."
-                                )
-                        return httpcore.SOCKSProxy(proxy_url=proxy, **opts)
-                    except RuntimeError:
-                        logger.warning(
-                            "You have configured a SOCKS proxy (%s) but support for SOCKS proxies is not installed. Disabling proxy support.",
-                            proxy,
-                        )
-                else:
-                    return httpcore.HTTPProxy(proxy_url=proxy, **opts)
-
-            return httpcore.ConnectionPool(**opts)
-
-
-def make_transport(options):
-    # type: (Dict[str, Any]) -> Optional[Transport]
-    ref_transport = options["transport"]
-
-    use_http2_transport = options.get("_experiments", {}).get("transport_http2", False)
-
-    # By default, we use the http transport class
-    transport_cls = (
-        Http2Transport if use_http2_transport else HttpTransport
-    )  # type: Type[Transport]
-
-    if isinstance(ref_transport, Transport):
-        return ref_transport
-    elif isinstance(ref_transport, type) and issubclass(ref_transport, Transport):
-        transport_cls = ref_transport
-
-    # if a transport class is given only instantiate it if the dsn is not
-    # empty or None
-    if options["dsn"]:
-        return transport_cls(options)
-
-    return None
diff --git a/src/sentry_sdk_alpha/types.py b/src/sentry_sdk_alpha/types.py
deleted file mode 100644
index 7f3eb912c885ea..00000000000000
--- a/src/sentry_sdk_alpha/types.py
+++ /dev/null
@@ -1,49 +0,0 @@
-"""
-This module contains type definitions for the Sentry SDK's public API.
-The types are re-exported from the internal module `sentry_sdk._types`.
-
-Disclaimer: Since types are a form of documentation, type definitions
-may change in minor releases. Removing a type would be considered a
-breaking change, and so we will only remove type definitions in major
-releases.
-"""
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
-    # Re-export types to make them available in the public API
-    from sentry_sdk_alpha._types import (
-        Breadcrumb,
-        BreadcrumbHint,
-        Event,
-        EventDataCategory,
-        Hint,
-        Log,
-        MonitorConfig,
-        SamplingContext,
-    )
-else:
-    from typing import Any
-
-    # The lines below allow the types to be imported from outside `if TYPE_CHECKING`
-    # guards. The types in this module are only intended to be used for type hints.
-    Breadcrumb = Any
-    BreadcrumbHint = Any
-    Event = Any
-    EventDataCategory = Any
-    Hint = Any
-    Log = Any
-    MonitorConfig = Any
-    SamplingContext = Any
-
-
-__all__ = (
-    "Breadcrumb",
-    "BreadcrumbHint",
-    "Event",
-    "EventDataCategory",
-    "Hint",
-    "Log",
-    "MonitorConfig",
-    "SamplingContext",
-)
diff --git a/src/sentry_sdk_alpha/utils.py b/src/sentry_sdk_alpha/utils.py
deleted file mode 100644
index b7890ea40edd9a..00000000000000
--- a/src/sentry_sdk_alpha/utils.py
+++ /dev/null
@@ -1,1924 +0,0 @@
-import base64
-import json
-import linecache
-import logging
-import math
-import os
-import random
-import re
-import subprocess
-import sys
-import threading
-import time
-from collections import namedtuple
-from datetime import datetime, timezone
-from decimal import Decimal
-from functools import partial, partialmethod, wraps
-from numbers import Real
-from urllib.parse import parse_qs, unquote, urlencode, urlsplit, urlunsplit
-
-try:
-    # Python 3.11
-    from builtins import BaseExceptionGroup
-except ImportError:
-    # Python 3.10 and below
-    BaseExceptionGroup = None  # type: ignore
-
-from typing import TYPE_CHECKING
-
-import sentry_sdk_alpha
-from sentry_sdk_alpha._types import SENSITIVE_DATA_SUBSTITUTE, Annotated, AnnotatedValue
-from sentry_sdk_alpha.consts import (
-    DEFAULT_ADD_FULL_STACK,
-    DEFAULT_MAX_STACK_FRAMES,
-    DEFAULT_MAX_VALUE_LENGTH,
-    SPANDATA,
-    EndpointType,
-)
-
-if TYPE_CHECKING:
-    from collections.abc import Callable, Iterator
-    from types import FrameType, TracebackType
-    from typing import (
-        Any,
-        ContextManager,
-        Dict,
-        List,
-        NoReturn,
-        Optional,
-        ParamSpec,
-        Set,
-        Tuple,
-        Type,
-        TypeVar,
-        Union,
-        cast,
-        overload,
-    )
-
-    from gevent.hub import Hub as GeventHub
-    from opentelemetry.util.types import AttributeValue
-
-    from sentry_sdk_alpha._types import Event, ExcInfo
-
-    P = ParamSpec("P")
-    R = TypeVar("R")
-
-
-epoch = datetime(1970, 1, 1)
-
-# The logger is created here but initialized in the debug support module
-logger = logging.getLogger("sentry_sdk.errors")
-
-_installed_modules = None
-
-BASE64_ALPHABET = re.compile(r"^[a-zA-Z0-9/+=]*$")
-
-FALSY_ENV_VALUES = frozenset(("false", "f", "n", "no", "off", "0"))
-TRUTHY_ENV_VALUES = frozenset(("true", "t", "y", "yes", "on", "1"))
-
-MAX_STACK_FRAMES = 2000
-"""Maximum number of stack frames to send to Sentry.
-
-If we have more than this number of stack frames, we will stop processing
-the stacktrace to avoid getting stuck in a long-lasting loop. This value
-exceeds the default sys.getrecursionlimit() of 1000, so users will only
-be affected by this limit if they have a custom recursion limit.
-"""
-
-MAX_EXCEPTIONS = 25
-"""Maximum number of exceptions in a chain or group to send to Sentry.
-
-This is a sanity limit to avoid ending in an infinite loop of exceptions when the same exception is in the root and a leave
-of the exception tree.
-"""
-
-
-def env_to_bool(value, *, strict=False):
-    # type: (Any, Optional[bool]) -> bool | None
-    """Casts an ENV variable value to boolean using the constants defined above.
-    In strict mode, it may return None if the value doesn't match any of the predefined values.
-    """
-    normalized = str(value).lower() if value is not None else None
-
-    if normalized in FALSY_ENV_VALUES:
-        return False
-
-    if normalized in TRUTHY_ENV_VALUES:
-        return True
-
-    return None if strict else bool(value)
-
-
-def json_dumps(data):
-    # type: (Any) -> bytes
-    """Serialize data into a compact JSON representation encoded as UTF-8."""
-    return json.dumps(data, allow_nan=False, separators=(",", ":")).encode("utf-8")
-
-
-def get_git_revision():
-    # type: () -> Optional[str]
-    try:
-        with open(os.path.devnull, "w+") as null:
-            # prevent command prompt windows from popping up on windows
-            startupinfo = None
-            if sys.platform == "win32" or sys.platform == "cygwin":
-                startupinfo = subprocess.STARTUPINFO()
-                startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
-
-            revision = (
-                subprocess.Popen(
-                    ["git", "rev-parse", "HEAD"],
-                    startupinfo=startupinfo,
-                    stdout=subprocess.PIPE,
-                    stderr=null,
-                    stdin=null,
-                )
-                .communicate()[0]
-                .strip()
-                .decode("utf-8")
-            )
-    except (OSError, FileNotFoundError):
-        return None
-
-    return revision
-
-
-def get_default_release():
-    # type: () -> Optional[str]
-    """Try to guess a default release."""
-    release = os.environ.get("SENTRY_RELEASE")
-    if release:
-        return release
-
-    release = get_git_revision()
-    if release:
-        return release
-
-    for var in (
-        "HEROKU_SLUG_COMMIT",
-        "SOURCE_VERSION",
-        "CODEBUILD_RESOLVED_SOURCE_VERSION",
-        "CIRCLE_SHA1",
-        "GAE_DEPLOYMENT_ID",
-    ):
-        release = os.environ.get(var)
-        if release:
-            return release
-    return None
-
-
-def get_sdk_name(installed_integrations):
-    # type: (List[str]) -> str
-    """Return the SDK name including the name of the used web framework."""
-
-    # Note: I can not use for example sentry_sdk.integrations.django.DjangoIntegration.identifier
-    # here because if django is not installed the integration is not accessible.
-    framework_integrations = [
-        "django",
-        "flask",
-        "fastapi",
-        "bottle",
-        "falcon",
-        "quart",
-        "sanic",
-        "starlette",
-        "litestar",
-        "starlite",
-        "chalice",
-        "serverless",
-        "pyramid",
-        "tornado",
-        "aiohttp",
-        "aws_lambda",
-        "gcp",
-        "beam",
-        "asgi",
-        "wsgi",
-    ]
-
-    for integration in framework_integrations:
-        if integration in installed_integrations:
-            return f"sentry.python.{integration}"
-
-    return "sentry.python"
-
-
-class CaptureInternalException:
-    __slots__ = ()
-
-    def __enter__(self):
-        # type: () -> ContextManager[Any]
-        return self
-
-    def __exit__(self, ty, value, tb):
-        # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]) -> bool
-        if ty is not None and value is not None:
-            capture_internal_exception((ty, value, tb))
-
-        return True
-
-
-_CAPTURE_INTERNAL_EXCEPTION = CaptureInternalException()
-
-
-def capture_internal_exceptions():
-    # type: () -> ContextManager[Any]
-    return _CAPTURE_INTERNAL_EXCEPTION
-
-
-def capture_internal_exception(exc_info):
-    # type: (ExcInfo) -> None
-    """
-    Capture an exception that is likely caused by a bug in the SDK
-    itself.
-
-    These exceptions do not end up in Sentry and are just logged instead.
-    """
-    if sentry_sdk_alpha.get_client().is_active():
-        logger.error("Internal error in sentry_sdk", exc_info=exc_info)
-
-
-def to_timestamp(value):
-    # type: (datetime) -> float
-    return (value - epoch).total_seconds()
-
-
-def format_timestamp(value):
-    # type: (datetime) -> str
-    """Formats a timestamp in RFC 3339 format.
-
-    Any datetime objects with a non-UTC timezone are converted to UTC, so that all timestamps are formatted in UTC.
-    """
-    utctime = value.astimezone(timezone.utc)
-
-    # We use this custom formatting rather than isoformat for backwards compatibility (we have used this format for
-    # several years now), and isoformat is slightly different.
-    return utctime.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
-
-
-def event_hint_with_exc_info(exc_info=None):
-    # type: (Optional[ExcInfo]) -> Dict[str, Optional[ExcInfo]]
-    """Creates a hint with the exc info filled in."""
-    if exc_info is None:
-        exc_info = sys.exc_info()
-    else:
-        exc_info = exc_info_from_error(exc_info)
-    if exc_info[0] is None:
-        exc_info = None
-    return {"exc_info": exc_info}
-
-
-class BadDsn(ValueError):
-    """Raised on invalid DSNs."""
-
-
-class Dsn:
-    """Represents a DSN."""
-
-    def __init__(self, value):
-        # type: (Union[Dsn, str]) -> None
-        if isinstance(value, Dsn):
-            self.__dict__ = dict(value.__dict__)
-            return
-        parts = urlsplit(str(value))
-
-        if parts.scheme not in ("http", "https"):
-            raise BadDsn("Unsupported scheme %r" % parts.scheme)
-        self.scheme = parts.scheme
-
-        if parts.hostname is None:
-            raise BadDsn("Missing hostname")
-
-        self.host = parts.hostname
-
-        if parts.port is None:
-            self.port = self.scheme == "https" and 443 or 80  # type: int
-        else:
-            self.port = parts.port
-
-        if not parts.username:
-            raise BadDsn("Missing public key")
-
-        self.public_key = parts.username
-        self.secret_key = parts.password
-
-        path = parts.path.rsplit("/", 1)
-
-        try:
-            self.project_id = str(int(path.pop()))
-        except (ValueError, TypeError):
-            raise BadDsn("Invalid project in DSN (%r)" % (parts.path or "")[1:])
-
-        self.path = "/".join(path) + "/"
-
-    @property
-    def netloc(self):
-        # type: () -> str
-        """The netloc part of a DSN."""
-        rv = self.host
-        if (self.scheme, self.port) not in (("http", 80), ("https", 443)):
-            rv = f"{rv}:{self.port}"
-        return rv
-
-    def to_auth(self, client=None):
-        # type: (Optional[Any]) -> Auth
-        """Returns the auth info object for this dsn."""
-        return Auth(
-            scheme=self.scheme,
-            host=self.netloc,
-            path=self.path,
-            project_id=self.project_id,
-            public_key=self.public_key,
-            secret_key=self.secret_key,
-            client=client,
-        )
-
-    def __str__(self):
-        # type: () -> str
-        return "{}://{}{}@{}{}{}".format(
-            self.scheme,
-            self.public_key,
-            self.secret_key and "@" + self.secret_key or "",
-            self.netloc,
-            self.path,
-            self.project_id,
-        )
-
-
-class Auth:
-    """Helper object that represents the auth info."""
-
-    def __init__(
-        self,
-        scheme,
-        host,
-        project_id,
-        public_key,
-        secret_key=None,
-        version=7,
-        client=None,
-        path="/",
-    ):
-        # type: (str, str, str, str, Optional[str], int, Optional[Any], str) -> None
-        self.scheme = scheme
-        self.host = host
-        self.path = path
-        self.project_id = project_id
-        self.public_key = public_key
-        self.secret_key = secret_key
-        self.version = version
-        self.client = client
-
-    def get_api_url(
-        self, type=EndpointType.ENVELOPE  # type: EndpointType
-    ):
-        # type: (...) -> str
-        """Returns the API url for storing events."""
-        return "{}://{}{}api/{}/{}/".format(
-            self.scheme,
-            self.host,
-            self.path,
-            self.project_id,
-            type.value,
-        )
-
-    def to_header(self):
-        # type: () -> str
-        """Returns the auth header a string."""
-        rv = [("sentry_key", self.public_key), ("sentry_version", self.version)]
-        if self.client is not None:
-            rv.append(("sentry_client", self.client))
-        if self.secret_key is not None:
-            rv.append(("sentry_secret", self.secret_key))
-        return "Sentry " + ", ".join(f"{key}={value}" for key, value in rv)
-
-
-def get_type_name(cls):
-    # type: (Optional[type]) -> Optional[str]
-    return getattr(cls, "__qualname__", None) or getattr(cls, "__name__", None)
-
-
-def get_type_module(cls):
-    # type: (Optional[type]) -> Optional[str]
-    mod = getattr(cls, "__module__", None)
-    if mod not in (None, "builtins", "__builtins__"):
-        return mod
-    return None
-
-
-def should_hide_frame(frame):
-    # type: (FrameType) -> bool
-    try:
-        mod = frame.f_globals["__name__"]
-        if mod.startswith("sentry_sdk."):
-            return True
-    except (AttributeError, KeyError):
-        pass
-
-    for flag_name in "__traceback_hide__", "__tracebackhide__":
-        try:
-            if frame.f_locals[flag_name]:
-                return True
-        except Exception:
-            pass
-
-    return False
-
-
-def iter_stacks(tb):
-    # type: (Optional[TracebackType]) -> Iterator[TracebackType]
-    tb_ = tb  # type: Optional[TracebackType]
-    while tb_ is not None:
-        if not should_hide_frame(tb_.tb_frame):
-            yield tb_
-        tb_ = tb_.tb_next
-
-
-def get_lines_from_file(
-    filename,  # type: str
-    lineno,  # type: int
-    max_length=None,  # type: Optional[int]
-    loader=None,  # type: Optional[Any]
-    module=None,  # type: Optional[str]
-):
-    # type: (...) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]
-    context_lines = 5
-    source = None
-    if loader is not None and hasattr(loader, "get_source"):
-        try:
-            source_str = loader.get_source(module)  # type: Optional[str]
-        except (ImportError, OSError):
-            source_str = None
-        if source_str is not None:
-            source = source_str.splitlines()
-
-    if source is None:
-        try:
-            source = linecache.getlines(filename)
-        except OSError:
-            return [], None, []
-
-    if not source:
-        return [], None, []
-
-    lower_bound = max(0, lineno - context_lines)
-    upper_bound = min(lineno + 1 + context_lines, len(source))
-
-    try:
-        pre_context = [
-            strip_string(line.strip("\r\n"), max_length=max_length)
-            for line in source[lower_bound:lineno]
-        ]
-        context_line = strip_string(source[lineno].strip("\r\n"), max_length=max_length)
-        post_context = [
-            strip_string(line.strip("\r\n"), max_length=max_length)
-            for line in source[(lineno + 1) : upper_bound]
-        ]
-        return pre_context, context_line, post_context
-    except IndexError:
-        # the file may have changed since it was loaded into memory
-        return [], None, []
-
-
-def get_source_context(
-    frame,  # type: FrameType
-    tb_lineno,  # type: Optional[int]
-    max_value_length=None,  # type: Optional[int]
-):
-    # type: (...) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]
-    try:
-        abs_path = frame.f_code.co_filename  # type: Optional[str]
-    except Exception:
-        abs_path = None
-    try:
-        module = frame.f_globals["__name__"]
-    except Exception:
-        return [], None, []
-    try:
-        loader = frame.f_globals["__loader__"]
-    except Exception:
-        loader = None
-
-    if tb_lineno is not None and abs_path:
-        lineno = tb_lineno - 1
-        return get_lines_from_file(abs_path, lineno, max_value_length, loader=loader, module=module)
-
-    return [], None, []
-
-
-def safe_str(value):
-    # type: (Any) -> str
-    try:
-        return str(value)
-    except Exception:
-        return safe_repr(value)
-
-
-def safe_repr(value):
-    # type: (Any) -> str
-    try:
-        return repr(value)
-    except Exception:
-        return ""
-
-
-def filename_for_module(module, abs_path):
-    # type: (Optional[str], Optional[str]) -> Optional[str]
-    if not abs_path or not module:
-        return abs_path
-
-    try:
-        if abs_path.endswith(".pyc"):
-            abs_path = abs_path[:-1]
-
-        base_module = module.split(".", 1)[0]
-        if base_module == module:
-            return os.path.basename(abs_path)
-
-        base_module_path = sys.modules[base_module].__file__
-        if not base_module_path:
-            return abs_path
-
-        return abs_path.split(base_module_path.rsplit(os.sep, 2)[0], 1)[-1].lstrip(os.sep)
-    except Exception:
-        return abs_path
-
-
-def serialize_frame(
-    frame,
-    tb_lineno=None,
-    include_local_variables=True,
-    include_source_context=True,
-    max_value_length=None,
-    custom_repr=None,
-):
-    # type: (FrameType, Optional[int], bool, bool, Optional[int], Optional[Callable[..., Optional[str]]]) -> Dict[str, Any]
-    f_code = getattr(frame, "f_code", None)
-    if not f_code:
-        abs_path = None
-        function = None
-    else:
-        abs_path = frame.f_code.co_filename
-        function = frame.f_code.co_name
-    try:
-        module = frame.f_globals["__name__"]
-    except Exception:
-        module = None
-
-    if tb_lineno is None:
-        tb_lineno = frame.f_lineno
-
-    rv = {
-        "filename": filename_for_module(module, abs_path) or None,
-        "abs_path": os.path.abspath(abs_path) if abs_path else None,
-        "function": function or "",
-        "module": module,
-        "lineno": tb_lineno,
-    }  # type: Dict[str, Any]
-
-    if include_source_context:
-        rv["pre_context"], rv["context_line"], rv["post_context"] = get_source_context(
-            frame, tb_lineno, max_value_length
-        )
-
-    if include_local_variables:
-        from sentry_sdk_alpha.serializer import serialize
-
-        rv["vars"] = serialize(dict(frame.f_locals), is_vars=True, custom_repr=custom_repr)
-
-    return rv
-
-
-def current_stacktrace(
-    include_local_variables=True,  # type: bool
-    include_source_context=True,  # type: bool
-    max_value_length=None,  # type: Optional[int]
-):
-    # type: (...) -> Dict[str, Any]
-    __tracebackhide__ = True
-    frames = []
-
-    f = sys._getframe()  # type: Optional[FrameType]
-    while f is not None:
-        if not should_hide_frame(f):
-            frames.append(
-                serialize_frame(
-                    f,
-                    include_local_variables=include_local_variables,
-                    include_source_context=include_source_context,
-                    max_value_length=max_value_length,
-                )
-            )
-        f = f.f_back
-
-    frames.reverse()
-
-    return {"frames": frames}
-
-
-def get_errno(exc_value):
-    # type: (BaseException) -> Optional[Any]
-    return getattr(exc_value, "errno", None)
-
-
-def get_error_message(exc_value):
-    # type: (Optional[BaseException]) -> str
-    message = (
-        getattr(exc_value, "message", "") or getattr(exc_value, "detail", "") or safe_str(exc_value)
-    )  # type: str
-
-    # __notes__ should be a list of strings when notes are added
-    # via add_note, but can be anything else if __notes__ is set
-    # directly. We only support strings in __notes__, since that
-    # is the correct use.
-    notes = getattr(exc_value, "__notes__", None)  # type: object
-    if isinstance(notes, list) and len(notes) > 0:
-        message += "\n" + "\n".join(note for note in notes if isinstance(note, str))
-
-    return message
-
-
-def single_exception_from_error_tuple(
-    exc_type,  # type: Optional[type]
-    exc_value,  # type: Optional[BaseException]
-    tb,  # type: Optional[TracebackType]
-    client_options=None,  # type: Optional[Dict[str, Any]]
-    mechanism=None,  # type: Optional[Dict[str, Any]]
-    exception_id=None,  # type: Optional[int]
-    parent_id=None,  # type: Optional[int]
-    source=None,  # type: Optional[str]
-    full_stack=None,  # type: Optional[list[dict[str, Any]]]
-):
-    # type: (...) -> Dict[str, Any]
-    """
-    Creates a dict that goes into the events `exception.values` list and is ingestible by Sentry.
-
-    See the Exception Interface documentation for more details:
-    https://develop.sentry.dev/sdk/event-payloads/exception/
-    """
-    exception_value = {}  # type: Dict[str, Any]
-    exception_value["mechanism"] = (
-        mechanism.copy() if mechanism else {"type": "generic", "handled": True}
-    )
-    if exception_id is not None:
-        exception_value["mechanism"]["exception_id"] = exception_id
-
-    if exc_value is not None:
-        errno = get_errno(exc_value)
-    else:
-        errno = None
-
-    if errno is not None:
-        exception_value["mechanism"].setdefault("meta", {}).setdefault("errno", {}).setdefault(
-            "number", errno
-        )
-
-    if source is not None:
-        exception_value["mechanism"]["source"] = source
-
-    is_root_exception = exception_id == 0
-    if not is_root_exception and parent_id is not None:
-        exception_value["mechanism"]["parent_id"] = parent_id
-        exception_value["mechanism"]["type"] = "chained"
-
-    if is_root_exception and "type" not in exception_value["mechanism"]:
-        exception_value["mechanism"]["type"] = "generic"
-
-    is_exception_group = BaseExceptionGroup is not None and isinstance(
-        exc_value, BaseExceptionGroup
-    )
-    if is_exception_group:
-        exception_value["mechanism"]["is_exception_group"] = True
-
-    exception_value["module"] = get_type_module(exc_type)
-    exception_value["type"] = get_type_name(exc_type)
-    exception_value["value"] = get_error_message(exc_value)
-
-    if client_options is None:
-        include_local_variables = True
-        include_source_context = True
-        max_value_length = DEFAULT_MAX_VALUE_LENGTH  # fallback
-        custom_repr = None
-    else:
-        include_local_variables = client_options["include_local_variables"]
-        include_source_context = client_options["include_source_context"]
-        max_value_length = client_options["max_value_length"]
-        custom_repr = client_options.get("custom_repr")
-
-    frames = [
-        serialize_frame(
-            tb.tb_frame,
-            tb_lineno=tb.tb_lineno,
-            include_local_variables=include_local_variables,
-            include_source_context=include_source_context,
-            max_value_length=max_value_length,
-            custom_repr=custom_repr,
-        )
-        # Process at most MAX_STACK_FRAMES + 1 frames, to avoid hanging on
-        # processing a super-long stacktrace.
-        for tb, _ in zip(iter_stacks(tb), range(MAX_STACK_FRAMES + 1))
-    ]  # type: List[Dict[str, Any]]
-
-    if len(frames) > MAX_STACK_FRAMES:
-        # If we have more frames than the limit, we remove the stacktrace completely.
-        # We don't trim the stacktrace here because we have not processed the whole
-        # thing (see above, we stop at MAX_STACK_FRAMES + 1). Normally, Relay would
-        # intelligently trim by removing frames in the middle of the stacktrace, but
-        # since we don't have the whole stacktrace, we can't do that. Instead, we
-        # drop the entire stacktrace.
-        exception_value["stacktrace"] = AnnotatedValue.removed_because_over_size_limit(value=None)
-
-    elif frames:
-        if not full_stack:
-            new_frames = frames
-        else:
-            new_frames = merge_stack_frames(frames, full_stack, client_options)
-
-        exception_value["stacktrace"] = {"frames": new_frames}
-
-    return exception_value
-
-
-HAS_CHAINED_EXCEPTIONS = hasattr(Exception, "__suppress_context__")
-
-if HAS_CHAINED_EXCEPTIONS:
-
-    def walk_exception_chain(exc_info):
-        # type: (ExcInfo) -> Iterator[ExcInfo]
-        exc_type, exc_value, tb = exc_info
-
-        seen_exceptions = []
-        seen_exception_ids = set()  # type: Set[int]
-
-        while (
-            exc_type is not None
-            and exc_value is not None
-            and id(exc_value) not in seen_exception_ids
-        ):
-            yield exc_type, exc_value, tb
-
-            # Avoid hashing random types we don't know anything
-            # about. Use the list to keep a ref so that the `id` is
-            # not used for another object.
-            seen_exceptions.append(exc_value)
-            seen_exception_ids.add(id(exc_value))
-
-            if exc_value.__suppress_context__:
-                cause = exc_value.__cause__
-            else:
-                cause = exc_value.__context__
-            if cause is None:
-                break
-            exc_type = type(cause)
-            exc_value = cause
-            tb = getattr(cause, "__traceback__", None)
-
-else:
-
-    def walk_exception_chain(exc_info):
-        # type: (ExcInfo) -> Iterator[ExcInfo]
-        yield exc_info
-
-
-def exceptions_from_error(
-    exc_type,  # type: Optional[type]
-    exc_value,  # type: Optional[BaseException]
-    tb,  # type: Optional[TracebackType]
-    client_options=None,  # type: Optional[Dict[str, Any]]
-    mechanism=None,  # type: Optional[Dict[str, Any]]
-    exception_id=0,  # type: int
-    parent_id=0,  # type: int
-    source=None,  # type: Optional[str]
-    full_stack=None,  # type: Optional[list[dict[str, Any]]]
-):
-    # type: (...) -> Tuple[int, List[Dict[str, Any]]]
-    """
-    Converts the given exception information into the Sentry structured "exception" format.
-    This will return a list of exceptions (a flattened tree of exceptions) in the
-    format of the Exception Interface documentation:
-    https://develop.sentry.dev/sdk/data-model/event-payloads/exception/
-
-    This function can handle:
-    - simple exceptions
-    - chained exceptions (raise .. from ..)
-    - exception groups
-    """
-    base_exception = single_exception_from_error_tuple(
-        exc_type=exc_type,
-        exc_value=exc_value,
-        tb=tb,
-        client_options=client_options,
-        mechanism=mechanism,
-        exception_id=exception_id,
-        parent_id=parent_id,
-        source=source,
-        full_stack=full_stack,
-    )
-    exceptions = [base_exception]
-
-    parent_id = exception_id
-    exception_id += 1
-
-    if exception_id > MAX_EXCEPTIONS - 1:
-        return (exception_id, exceptions)
-
-    causing_exception = None
-    exception_source = None
-
-    # Add any causing exceptions, if present.
-    should_suppress_context = hasattr(exc_value, "__suppress_context__") and exc_value.__suppress_context__  # type: ignore
-    # Note: __suppress_context__ is True if the exception is raised with the `from` keyword.
-    if should_suppress_context:
-        # Explicitly chained exceptions (Like: raise NewException() from OriginalException())
-        # The field `__cause__` is set to OriginalException
-        has_explicit_causing_exception = (
-            exc_value and hasattr(exc_value, "__cause__") and exc_value.__cause__ is not None
-        )
-        if has_explicit_causing_exception:
-            exception_source = "__cause__"
-            causing_exception = exc_value.__cause__  # type: ignore
-    else:
-        # Implicitly chained exceptions (when an exception occurs while handling another exception)
-        # The field `__context__` is set in the exception that occurs while handling another exception,
-        # to the other exception.
-        has_implicit_causing_exception = (
-            exc_value and hasattr(exc_value, "__context__") and exc_value.__context__ is not None
-        )
-        if has_implicit_causing_exception:
-            exception_source = "__context__"
-            causing_exception = exc_value.__context__  # type: ignore
-
-    if causing_exception:
-        # Some frameworks (e.g. FastAPI) wrap the causing exception in an
-        # ExceptionGroup that only contain one exception: the causing exception.
-        # This would lead to an infinite loop, so we skip the causing exception
-        # in this case. (because it is the same as the base_exception above)
-        if (
-            BaseExceptionGroup is not None
-            and isinstance(causing_exception, BaseExceptionGroup)
-            and len(causing_exception.exceptions) == 1
-            and causing_exception.exceptions[0] == exc_value
-        ):
-            causing_exception = None
-
-    if causing_exception:
-        (exception_id, child_exceptions) = exceptions_from_error(
-            exc_type=type(causing_exception),
-            exc_value=causing_exception,
-            tb=getattr(causing_exception, "__traceback__", None),
-            client_options=client_options,
-            mechanism=mechanism,
-            exception_id=exception_id,
-            parent_id=parent_id,
-            source=exception_source,
-            full_stack=full_stack,
-        )
-        exceptions.extend(child_exceptions)
-
-    # Add child exceptions from an ExceptionGroup.
-    is_exception_group = exc_value and hasattr(exc_value, "exceptions")
-    if is_exception_group:
-        for idx, causing_exception in enumerate(exc_value.exceptions):  # type: ignore
-            (exception_id, child_exceptions) = exceptions_from_error(
-                exc_type=type(causing_exception),
-                exc_value=causing_exception,
-                tb=getattr(causing_exception, "__traceback__", None),
-                client_options=client_options,
-                mechanism=mechanism,
-                exception_id=exception_id,
-                parent_id=parent_id,
-                source="exceptions[%s]" % idx,
-                full_stack=full_stack,
-            )
-            exceptions.extend(child_exceptions)
-
-    return (exception_id, exceptions)
-
-
-def exceptions_from_error_tuple(
-    exc_info,  # type: ExcInfo
-    client_options=None,  # type: Optional[Dict[str, Any]]
-    mechanism=None,  # type: Optional[Dict[str, Any]]
-    full_stack=None,  # type: Optional[list[dict[str, Any]]]
-):
-    # type: (...) -> List[Dict[str, Any]]
-    """
-    Convert Python's exception information into Sentry's structured "exception" format in the event.
-    See https://develop.sentry.dev/sdk/data-model/event-payloads/exception/
-    This is the entry point for the exception handling.
-    """
-    # unpack the exception info tuple
-    exc_type, exc_value, tb = exc_info
-
-    # let exceptions_from_error do the actual work
-    _, exceptions = exceptions_from_error(
-        exc_type=exc_type,
-        exc_value=exc_value,
-        tb=tb,
-        client_options=client_options,
-        mechanism=mechanism,
-        exception_id=0,
-        parent_id=0,
-        full_stack=full_stack,
-    )
-
-    # make sure the exceptions are sorted
-    # from the innermost (oldest)
-    # to the outermost (newest) exception
-    exceptions.reverse()
-
-    return exceptions
-
-
-def to_string(value):
-    # type: (str) -> str
-    try:
-        return str(value)
-    except UnicodeDecodeError:
-        return repr(value)[1:-1]
-
-
-def iter_event_stacktraces(event):
-    # type: (Event) -> Iterator[Annotated[Dict[str, Any]]]
-    if "stacktrace" in event:
-        yield event["stacktrace"]
-    if "threads" in event:
-        for thread in event["threads"].get("values") or ():
-            if "stacktrace" in thread:
-                yield thread["stacktrace"]
-    if "exception" in event:
-        for exception in event["exception"].get("values") or ():
-            if isinstance(exception, dict) and "stacktrace" in exception:
-                yield exception["stacktrace"]
-
-
-def iter_event_frames(event):
-    # type: (Event) -> Iterator[Dict[str, Any]]
-    for stacktrace in iter_event_stacktraces(event):
-        if isinstance(stacktrace, AnnotatedValue):
-            stacktrace = stacktrace.value or {}
-
-        yield from stacktrace.get("frames") or ()
-
-
-def handle_in_app(event, in_app_exclude=None, in_app_include=None, project_root=None):
-    # type: (Event, Optional[List[str]], Optional[List[str]], Optional[str]) -> Event
-    for stacktrace in iter_event_stacktraces(event):
-        if isinstance(stacktrace, AnnotatedValue):
-            stacktrace = stacktrace.value or {}
-
-        set_in_app_in_frames(
-            stacktrace.get("frames"),
-            in_app_exclude=in_app_exclude,
-            in_app_include=in_app_include,
-            project_root=project_root,
-        )
-
-    return event
-
-
-def set_in_app_in_frames(frames, in_app_exclude, in_app_include, project_root=None):
-    # type: (Any, Optional[List[str]], Optional[List[str]], Optional[str]) -> Optional[Any]
-    if not frames:
-        return None
-
-    for frame in frames:
-        # if frame has already been marked as in_app, skip it
-        current_in_app = frame.get("in_app")
-        if current_in_app is not None:
-            continue
-
-        module = frame.get("module")
-
-        # check if module in frame is in the list of modules to include
-        if _module_in_list(module, in_app_include):
-            frame["in_app"] = True
-            continue
-
-        # check if module in frame is in the list of modules to exclude
-        if _module_in_list(module, in_app_exclude):
-            frame["in_app"] = False
-            continue
-
-        # if frame has no abs_path, skip further checks
-        abs_path = frame.get("abs_path")
-        if abs_path is None:
-            continue
-
-        if _is_external_source(abs_path):
-            frame["in_app"] = False
-            continue
-
-        if _is_in_project_root(abs_path, project_root):
-            frame["in_app"] = True
-            continue
-
-    return frames
-
-
-def exc_info_from_error(error):
-    # type: (Union[BaseException, ExcInfo]) -> ExcInfo
-    if isinstance(error, tuple) and len(error) == 3:
-        exc_type, exc_value, tb = error
-    elif isinstance(error, BaseException):
-        tb = getattr(error, "__traceback__", None)
-        if tb is not None:
-            exc_type = type(error)
-            exc_value = error
-        else:
-            exc_type, exc_value, tb = sys.exc_info()
-            if exc_value is not error:
-                tb = None
-                exc_value = error
-                exc_type = type(error)
-
-    else:
-        raise ValueError("Expected Exception object to report, got %s!" % type(error))
-
-    exc_info = (exc_type, exc_value, tb)
-
-    if TYPE_CHECKING:
-        # This cast is safe because exc_type and exc_value are either both
-        # None or both not None.
-        exc_info = cast(ExcInfo, exc_info)
-
-    return exc_info
-
-
-def merge_stack_frames(frames, full_stack, client_options):
-    # type: (List[Dict[str, Any]], List[Dict[str, Any]], Optional[Dict[str, Any]]) -> List[Dict[str, Any]]
-    """
-    Add the missing frames from full_stack to frames and return the merged list.
-    """
-    frame_ids = {
-        (
-            frame["abs_path"],
-            frame["context_line"],
-            frame["lineno"],
-            frame["function"],
-        )
-        for frame in frames
-    }
-
-    new_frames = [
-        stackframe
-        for stackframe in full_stack
-        if (
-            stackframe["abs_path"],
-            stackframe["context_line"],
-            stackframe["lineno"],
-            stackframe["function"],
-        )
-        not in frame_ids
-    ]
-    new_frames.extend(frames)
-
-    # Limit the number of frames
-    max_stack_frames = (
-        client_options.get("max_stack_frames", DEFAULT_MAX_STACK_FRAMES) if client_options else None
-    )
-    if max_stack_frames is not None:
-        new_frames = new_frames[len(new_frames) - max_stack_frames :]
-
-    return new_frames
-
-
-def event_from_exception(
-    exc_info,  # type: Union[BaseException, ExcInfo]
-    client_options=None,  # type: Optional[Dict[str, Any]]
-    mechanism=None,  # type: Optional[Dict[str, Any]]
-):
-    # type: (...) -> Tuple[Event, Dict[str, Any]]
-    exc_info = exc_info_from_error(exc_info)
-    hint = event_hint_with_exc_info(exc_info)
-
-    if client_options and client_options.get("add_full_stack", DEFAULT_ADD_FULL_STACK):
-        full_stack = current_stacktrace(
-            include_local_variables=client_options["include_local_variables"],
-            max_value_length=client_options["max_value_length"],
-        )["frames"]
-    else:
-        full_stack = None
-
-    return (
-        {
-            "level": "error",
-            "exception": {
-                "values": exceptions_from_error_tuple(
-                    exc_info, client_options, mechanism, full_stack
-                )
-            },
-        },
-        hint,
-    )
-
-
-def _module_in_list(name, items):
-    # type: (Optional[str], Optional[List[str]]) -> bool
-    if name is None:
-        return False
-
-    if not items:
-        return False
-
-    for item in items:
-        if item == name or name.startswith(item + "."):
-            return True
-
-    return False
-
-
-def _is_external_source(abs_path):
-    # type: (Optional[str]) -> bool
-    # check if frame is in 'site-packages' or 'dist-packages'
-    if abs_path is None:
-        return False
-
-    external_source = re.search(r"[\\/](?:dist|site)-packages[\\/]", abs_path) is not None
-    return external_source
-
-
-def _is_in_project_root(abs_path, project_root):
-    # type: (Optional[str], Optional[str]) -> bool
-    if abs_path is None or project_root is None:
-        return False
-
-    # check if path is in the project root
-    if abs_path.startswith(project_root):
-        return True
-
-    return False
-
-
-def _truncate_by_bytes(string, max_bytes):
-    # type: (str, int) -> str
-    """
-    Truncate a UTF-8-encodable string to the last full codepoint so that it fits in max_bytes.
-    """
-    truncated = string.encode("utf-8")[: max_bytes - 3].decode("utf-8", errors="ignore")
-
-    return truncated + "..."
-
-
-def _get_size_in_bytes(value):
-    # type: (str) -> Optional[int]
-    try:
-        return len(value.encode("utf-8"))
-    except (UnicodeEncodeError, UnicodeDecodeError):
-        return None
-
-
-def strip_string(value, max_length=None):
-    # type: (str, Optional[int]) -> Union[AnnotatedValue, str]
-    if not value:
-        return value
-
-    if max_length is None:
-        max_length = DEFAULT_MAX_VALUE_LENGTH
-
-    byte_size = _get_size_in_bytes(value)
-    text_size = len(value)
-
-    if byte_size is not None and byte_size > max_length:
-        # truncate to max_length bytes, preserving code points
-        truncated_value = _truncate_by_bytes(value, max_length)
-    elif text_size is not None and text_size > max_length:
-        # fallback to truncating by string length
-        truncated_value = value[: max_length - 3] + "..."
-    else:
-        return value
-
-    return AnnotatedValue(
-        value=truncated_value,
-        metadata={
-            "len": byte_size or text_size,
-            "rem": [["!limit", "x", max_length - 3, max_length]],
-        },
-    )
-
-
-def parse_version(version):
-    # type: (str) -> Optional[Tuple[int, ...]]
-    """
-    Parses a version string into a tuple of integers.
-    This uses the parsing loging from PEP 440:
-    https://peps.python.org/pep-0440/#appendix-b-parsing-version-strings-with-regular-expressions
-    """
-    VERSION_PATTERN = r"""  # noqa: N806
-        v?
-        (?:
-            (?:(?P<epoch>[0-9]+)!)?                           # epoch
-            (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
-            (?P<pre>                                          # pre-release
-                [-_\.]?
-                (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
-                [-_\.]?
-                (?P<pre_n>[0-9]+)?
-            )?
-            (?P<post>                                         # post release
-                (?:-(?P<post_n1>[0-9]+))
-                |
-                (?:
-                    [-_\.]?
-                    (?P<post_l>post|rev|r)
-                    [-_\.]?
-                    (?P<post_n2>[0-9]+)?
-                )
-            )?
-            (?P<dev>                                          # dev release
-                [-_\.]?
-                (?P<dev_l>dev)
-                [-_\.]?
-                (?P<dev_n>[0-9]+)?
-            )?
-        )
-        (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
-    """
-
-    pattern = re.compile(
-        r"^\s*" + VERSION_PATTERN + r"\s*$",
-        re.VERBOSE | re.IGNORECASE,
-    )
-
-    try:
-        release = pattern.match(version).groupdict()["release"]  # type: ignore
-        release_tuple = tuple(map(int, release.split(".")[:3]))  # type: Tuple[int, ...]
-    except (TypeError, ValueError, AttributeError):
-        return None
-
-    return release_tuple
-
-
-def _is_contextvars_broken():
-    # type: () -> bool
-    """
-    Returns whether gevent/eventlet have patched the stdlib in a way where thread locals are now more "correct" than contextvars.
-    """
-    try:
-        import gevent
-        from gevent.monkey import is_object_patched
-
-        # Get the MAJOR and MINOR version numbers of Gevent
-        version_tuple = tuple(
-            [int(part) for part in re.split(r"a|b|rc|\.", gevent.__version__)[:2]]
-        )
-        if is_object_patched("threading", "local"):
-            # Gevent 20.9.0 depends on Greenlet 0.4.17 which natively handles switching
-            # context vars when greenlets are switched, so, Gevent 20.9.0+ is all fine.
-            # Ref: https://github.com/gevent/gevent/blob/83c9e2ae5b0834b8f84233760aabe82c3ba065b4/src/gevent/monkey.py#L604-L609
-            # Gevent 20.5, that doesn't depend on Greenlet 0.4.17 with native support
-            # for contextvars, is able to patch both thread locals and contextvars, in
-            # that case, check if contextvars are effectively patched.
-            if (
-                # Gevent 20.9.0+
-                (sys.version_info >= (3, 7) and version_tuple >= (20, 9))
-                # Gevent 20.5.0+ or Python < 3.7
-                or (is_object_patched("contextvars", "ContextVar"))
-            ):
-                return False
-
-            return True
-    except ImportError:
-        pass
-
-    try:
-        import greenlet
-        from eventlet.patcher import is_monkey_patched  # type: ignore
-
-        greenlet_version = parse_version(greenlet.__version__)
-
-        if greenlet_version is None:
-            logger.error(
-                "Internal error in Sentry SDK: Could not parse Greenlet version from greenlet.__version__."
-            )
-            return False
-
-        if is_monkey_patched("thread") and greenlet_version < (0, 5):
-            return True
-    except ImportError:
-        pass
-
-    return False
-
-
-def _make_threadlocal_contextvars(local):
-    # type: (type) -> type
-    class ContextVar:
-        # Super-limited impl of ContextVar
-
-        def __init__(self, name, default=None):
-            # type: (str, Any) -> None
-            self._name = name
-            self._default = default
-            self._local = local()
-            self._original_local = local()
-
-        def get(self, default=None):
-            # type: (Any) -> Any
-            return getattr(self._local, "value", default or self._default)
-
-        def set(self, value):
-            # type: (Any) -> Any
-            token = str(random.getrandbits(64))
-            original_value = self.get()
-            setattr(self._original_local, token, original_value)
-            self._local.value = value
-            return token
-
-        def reset(self, token):
-            # type: (Any) -> None
-            self._local.value = getattr(self._original_local, token)
-            # delete the original value (this way it works in Python 3.6+)
-            del self._original_local.__dict__[token]
-
-    return ContextVar
-
-
-def _get_contextvars():
-    # type: () -> Tuple[bool, type]
-    """
-    Figure out the "right" contextvars installation to use. Returns a
-    `contextvars.ContextVar`-like class with a limited API.
-
-    See https://docs.sentry.io/platforms/python/contextvars/ for more information.
-    """
-    if not _is_contextvars_broken():
-        # On Python 3.7+ contextvars are functional.
-        try:
-            from contextvars import ContextVar
-
-            return True, ContextVar
-        except ImportError:
-            pass
-
-    # Fall back to basic thread-local usage.
-
-    from threading import local
-
-    return False, _make_threadlocal_contextvars(local)
-
-
-HAS_REAL_CONTEXTVARS, ContextVar = _get_contextvars()
-
-CONTEXTVARS_ERROR_MESSAGE = """
-
-With asyncio/ASGI applications, the Sentry SDK requires a functional
-installation of `contextvars` to avoid leaking scope/context data across
-requests.
-
-Please refer to https://docs.sentry.io/platforms/python/contextvars/ for more information.
-"""
-
-
-def qualname_from_function(func):
-    # type: (Callable[..., Any]) -> Optional[str]
-    """Return the qualified name of func. Works with regular function, lambda, partial and partialmethod."""
-    func_qualname = None  # type: Optional[str]
-
-    # Python 2
-    try:
-        return "{}.{}.{}".format(
-            func.im_class.__module__,  # type: ignore
-            func.im_class.__name__,  # type: ignore
-            func.__name__,
-        )
-    except Exception:
-        pass
-
-    prefix, suffix = "", ""
-
-    if isinstance(func, partial) and hasattr(func.func, "__name__"):
-        prefix, suffix = "partial(<", ">)"
-        func = func.func
-    else:
-        # The _partialmethod attribute of methods wrapped with partialmethod() was renamed to __partialmethod__ in CPython 3.13:
-        # https://github.com/python/cpython/pull/16600
-        partial_method = getattr(func, "_partialmethod", None) or getattr(
-            func, "__partialmethod__", None
-        )
-        if isinstance(partial_method, partialmethod):
-            prefix, suffix = "partialmethod(<", ">)"
-            func = partial_method.func
-
-    if hasattr(func, "__qualname__"):
-        func_qualname = func.__qualname__
-    elif hasattr(func, "__name__"):  # Python 2.7 has no __qualname__
-        func_qualname = func.__name__
-
-    # Python 3: methods, functions, classes
-    if func_qualname is not None:
-        if hasattr(func, "__module__") and isinstance(func.__module__, str):
-            func_qualname = func.__module__ + "." + func_qualname
-        func_qualname = prefix + func_qualname + suffix
-
-    return func_qualname
-
-
-def transaction_from_function(func):
-    # type: (Callable[..., Any]) -> Optional[str]
-    return qualname_from_function(func)
-
-
-disable_capture_event = ContextVar("disable_capture_event")
-
-
-class ServerlessTimeoutWarning(Exception):  # noqa: N818
-    """Raised when a serverless method is about to reach its timeout."""
-
-    pass
-
-
-class TimeoutThread(threading.Thread):
-    """Creates a Thread which runs (sleeps) for a time duration equal to
-    waiting_time and raises a custom ServerlessTimeout exception.
-    """
-
-    def __init__(self, waiting_time, configured_timeout):
-        # type: (float, int) -> None
-        threading.Thread.__init__(self)
-        self.waiting_time = waiting_time
-        self.configured_timeout = configured_timeout
-        self._stop_event = threading.Event()
-
-    def stop(self):
-        # type: () -> None
-        self._stop_event.set()
-
-    def run(self):
-        # type: () -> None
-
-        self._stop_event.wait(self.waiting_time)
-
-        if self._stop_event.is_set():
-            return
-
-        integer_configured_timeout = int(self.configured_timeout)
-
-        # Setting up the exact integer value of configured time(in seconds)
-        if integer_configured_timeout < self.configured_timeout:
-            integer_configured_timeout = integer_configured_timeout + 1
-
-        # Raising Exception after timeout duration is reached
-        raise ServerlessTimeoutWarning(
-            "WARNING : Function is expected to get timed out. Configured timeout duration = {} seconds.".format(
-                integer_configured_timeout
-            )
-        )
-
-
-def to_base64(original):
-    # type: (str) -> Optional[str]
-    """
-    Convert a string to base64, via UTF-8. Returns None on invalid input.
-    """
-    base64_string = None
-
-    try:
-        utf8_bytes = original.encode("UTF-8")
-        base64_bytes = base64.b64encode(utf8_bytes)
-        base64_string = base64_bytes.decode("UTF-8")
-    except Exception as err:
-        logger.warning(f"Unable to encode {original} to base64:", err)
-
-    return base64_string
-
-
-def from_base64(base64_string):
-    # type: (str) -> Optional[str]
-    """
-    Convert a string from base64, via UTF-8. Returns None on invalid input.
-    """
-    utf8_string = None
-
-    try:
-        only_valid_chars = BASE64_ALPHABET.match(base64_string)
-        assert only_valid_chars
-
-        base64_bytes = base64_string.encode("UTF-8")
-        utf8_bytes = base64.b64decode(base64_bytes)
-        utf8_string = utf8_bytes.decode("UTF-8")
-    except Exception as err:
-        logger.warning(f"Unable to decode {base64_string} from base64:", err)
-
-    return utf8_string
-
-
-Components = namedtuple("Components", ["scheme", "netloc", "path", "query", "fragment"])
-
-
-def sanitize_url(url, remove_authority=True, remove_query_values=True, split=False):
-    # type: (str, bool, bool, bool) -> Union[str, Components]
-    """
-    Removes the authority and query parameter values from a given URL.
-    """
-    parsed_url = urlsplit(url)
-    query_params = parse_qs(parsed_url.query, keep_blank_values=True)
-
-    # strip username:password (netloc can be usr:pwd@example.com)
-    if remove_authority:
-        netloc_parts = parsed_url.netloc.split("@")
-        if len(netloc_parts) > 1:
-            netloc = "{}:{}@{}".format(
-                SENSITIVE_DATA_SUBSTITUTE,
-                SENSITIVE_DATA_SUBSTITUTE,
-                netloc_parts[-1],
-            )
-        else:
-            netloc = parsed_url.netloc
-    else:
-        netloc = parsed_url.netloc
-
-    # strip values from query string
-    if remove_query_values:
-        query_string = unquote(urlencode({key: SENSITIVE_DATA_SUBSTITUTE for key in query_params}))
-    else:
-        query_string = parsed_url.query
-
-    components = Components(
-        scheme=parsed_url.scheme,
-        netloc=netloc,
-        query=query_string,
-        path=parsed_url.path,
-        fragment=parsed_url.fragment,
-    )
-
-    if split:
-        return components
-    else:
-        return urlunsplit(components)
-
-
-ParsedUrl = namedtuple("ParsedUrl", ["url", "query", "fragment"])
-
-
-def parse_url(url, sanitize=True):
-    # type: (str, bool) -> ParsedUrl
-    """
-    Splits a URL into a url (including path), query and fragment. If sanitize is True, the query
-    parameters will be sanitized to remove sensitive data. The autority (username and password)
-    in the URL will always be removed.
-    """
-    parsed_url = sanitize_url(url, remove_authority=True, remove_query_values=sanitize, split=True)
-
-    base_url = urlunsplit(
-        Components(
-            scheme=parsed_url.scheme,  # type: ignore
-            netloc=parsed_url.netloc,  # type: ignore
-            query="",
-            path=parsed_url.path,  # type: ignore
-            fragment="",
-        )
-    )
-
-    return ParsedUrl(
-        url=base_url,
-        query=parsed_url.query,  # type: ignore
-        fragment=parsed_url.fragment,  # type: ignore
-    )
-
-
-def is_valid_sample_rate(rate, source):
-    # type: (Any, str) -> bool
-    """
-    Checks the given sample rate to make sure it is valid type and value (a
-    boolean or a number between 0 and 1, inclusive).
-    """
-
-    # both booleans and NaN are instances of Real, so a) checking for Real
-    # checks for the possibility of a boolean also, and b) we have to check
-    # separately for NaN and Decimal does not derive from Real so need to check that too
-    if not isinstance(rate, (Real, Decimal)) or math.isnan(rate):
-        logger.warning(
-            "{source} Given sample rate is invalid. Sample rate must be a boolean or a number between 0 and 1. Got {rate} of type {type}.".format(
-                source=source, rate=rate, type=type(rate)
-            )
-        )
-        return False
-
-    # in case rate is a boolean, it will get cast to 1 if it's True and 0 if it's False
-    rate = float(rate)
-    if rate < 0 or rate > 1:
-        logger.warning(
-            "{source} Given sample rate is invalid. Sample rate must be between 0 and 1. Got {rate}.".format(
-                source=source, rate=rate
-            )
-        )
-        return False
-
-    return True
-
-
-def match_regex_list(item, regex_list=None, substring_matching=False):
-    # type: (str, Optional[List[str]], bool) -> bool
-    if regex_list is None:
-        return False
-
-    for item_matcher in regex_list:
-        if not substring_matching and item_matcher[-1] != "$":
-            item_matcher += "$"
-
-        matched = re.search(item_matcher, item)
-        if matched:
-            return True
-
-    return False
-
-
-def is_sentry_url(client, url):
-    # type: (sentry_sdk.client.BaseClient, str) -> bool
-    """
-    Determines whether the given URL matches the Sentry DSN.
-    """
-    return (
-        client is not None
-        and client.transport is not None
-        and client.transport.parsed_dsn is not None
-        and client.transport.parsed_dsn.netloc in url
-    )
-
-
-def _generate_installed_modules():
-    # type: () -> Iterator[Tuple[str, str]]
-    try:
-        from importlib import metadata
-
-        yielded = set()
-        for dist in metadata.distributions():
-            name = dist.metadata.get("Name", None)  # type: ignore[attr-defined]
-            # `metadata` values may be `None`, see:
-            # https://github.com/python/cpython/issues/91216
-            # and
-            # https://github.com/python/importlib_metadata/issues/371
-            if name is not None:
-                normalized_name = _normalize_module_name(name)
-                if dist.version is not None and normalized_name not in yielded:
-                    yield normalized_name, dist.version
-                    yielded.add(normalized_name)
-
-    except ImportError:
-        # < py3.8
-        try:
-            import pkg_resources
-        except ImportError:
-            return
-
-        for info in pkg_resources.working_set:
-            yield _normalize_module_name(info.key), info.version
-
-
-def _normalize_module_name(name):
-    # type: (str) -> str
-    return name.lower()
-
-
-def _get_installed_modules():
-    # type: () -> Dict[str, str]
-    global _installed_modules
-    if _installed_modules is None:
-        _installed_modules = dict(_generate_installed_modules())
-    return _installed_modules
-
-
-def package_version(package):
-    # type: (str) -> Optional[Tuple[int, ...]]
-    installed_packages = _get_installed_modules()
-    version = installed_packages.get(package)
-    if version is None:
-        return None
-
-    return parse_version(version)
-
-
-def reraise(tp, value, tb=None):
-    # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[Any]) -> NoReturn
-    assert value is not None
-    if value.__traceback__ is not tb:
-        raise value.with_traceback(tb)
-    raise value
-
-
-def _no_op(*_a, **_k):
-    # type: (*Any, **Any) -> None
-    """No-op function for ensure_integration_enabled."""
-    pass
-
-
-if TYPE_CHECKING:
-
-    @overload
-    def ensure_integration_enabled(
-        integration,  # type: type[sentry_sdk.integrations.Integration]
-        original_function,  # type: Callable[P, R]
-    ):
-        # type: (...) -> Callable[[Callable[P, R]], Callable[P, R]]
-        ...
-
-    @overload
-    def ensure_integration_enabled(
-        integration,  # type: type[sentry_sdk.integrations.Integration]
-    ):
-        # type: (...) -> Callable[[Callable[P, None]], Callable[P, None]]
-        ...
-
-
-def ensure_integration_enabled(
-    integration,  # type: type[sentry_sdk.integrations.Integration]
-    original_function=_no_op,  # type: Union[Callable[P, R], Callable[P, None]]
-):
-    # type: (...) -> Callable[[Callable[P, R]], Callable[P, R]]
-    """
-    Ensures a given integration is enabled prior to calling a Sentry-patched function.
-
-    The function takes as its parameters the integration that must be enabled and the original
-    function that the SDK is patching. The function returns a function that takes the
-    decorated (Sentry-patched) function as its parameter, and returns a function that, when
-    called, checks whether the given integration is enabled. If the integration is enabled, the
-    function calls the decorated, Sentry-patched function. If the integration is not enabled,
-    the original function is called.
-
-    The function also takes care of preserving the original function's signature and docstring.
-
-    Example usage:
-
-    ```python
-    @ensure_integration_enabled(MyIntegration, my_function)
-    def patch_my_function():
-        with sentry_sdk.start_span(...):
-            return my_function()
-    ```
-    """
-    if TYPE_CHECKING:
-        # Type hint to ensure the default function has the right typing. The overloads
-        # ensure the default _no_op function is only used when R is None.
-        original_function = cast(Callable[P, R], original_function)
-
-    def patcher(sentry_patched_function):
-        # type: (Callable[P, R]) -> Callable[P, R]
-        def runner(*args: "P.args", **kwargs: "P.kwargs"):
-            # type: (...) -> R
-            if sentry_sdk_alpha.get_client().get_integration(integration) is None:
-                return original_function(*args, **kwargs)
-
-            return sentry_patched_function(*args, **kwargs)
-
-        if original_function is _no_op:
-            return wraps(sentry_patched_function)(runner)
-
-        return wraps(original_function)(runner)
-
-    return patcher
-
-
-def now():
-    # type: () -> float
-    return time.perf_counter()
-
-
-try:
-    from gevent import get_hub as get_gevent_hub
-    from gevent.monkey import is_module_patched
-except ImportError:
-
-    # it's not great that the signatures are different, get_hub can't return None
-    # consider adding an if TYPE_CHECKING to change the signature to Optional[GeventHub]
-    def get_gevent_hub():  # type: ignore[misc]
-        # type: () -> Optional[GeventHub]
-        return None
-
-    def is_module_patched(mod_name):
-        # type: (str) -> bool
-        # unable to import from gevent means no modules have been patched
-        return False
-
-
-def is_gevent():
-    # type: () -> bool
-    return is_module_patched("threading") or is_module_patched("_thread")
-
-
-def get_current_thread_meta(thread=None):
-    # type: (Optional[threading.Thread]) -> Tuple[Optional[int], Optional[str]]
-    """
-    Try to get the id of the current thread, with various fall backs.
-    """
-
-    # if a thread is specified, that takes priority
-    if thread is not None:
-        try:
-            thread_id = thread.ident
-            thread_name = thread.name
-            if thread_id is not None:
-                return thread_id, thread_name
-        except AttributeError:
-            pass
-
-    # if the app is using gevent, we should look at the gevent hub first
-    # as the id there differs from what the threading module reports
-    if is_gevent():
-        gevent_hub = get_gevent_hub()
-        if gevent_hub is not None:
-            try:
-                # this is undocumented, so wrap it in try except to be safe
-                return gevent_hub.thread_ident, None
-            except AttributeError:
-                pass
-
-    # use the current thread's id if possible
-    try:
-        thread = threading.current_thread()
-        thread_id = thread.ident
-        thread_name = thread.name
-        if thread_id is not None:
-            return thread_id, thread_name
-    except AttributeError:
-        pass
-
-    # if we can't get the current thread id, fall back to the main thread id
-    try:
-        thread = threading.main_thread()
-        thread_id = thread.ident
-        thread_name = thread.name
-        if thread_id is not None:
-            return thread_id, thread_name
-    except AttributeError:
-        pass
-
-    # we've tried everything, time to give up
-    return None, None
-
-
-def _serialize_span_attribute(value):
-    # type: (Any) -> Optional[AttributeValue]
-    """Serialize an object so that it's OTel-compatible and displays nicely in Sentry."""
-    # check for allowed primitives
-    if isinstance(value, (int, str, float, bool)):
-        return value
-
-    # lists are allowed too, as long as they don't mix types
-    if isinstance(value, (list, tuple)):
-        for type_ in (int, str, float, bool):
-            if all(isinstance(item, type_) for item in value):
-                return list(value)
-
-    # if this is anything else, just try to coerce to string
-    # we prefer json.dumps since this makes things like dictionaries display
-    # nicely in the UI
-    try:
-        return json.dumps(value)
-    except TypeError:
-        try:
-            return str(value)
-        except Exception:
-            return None
-
-
-ISO_TZ_SEPARATORS = frozenset(("+", "-"))
-
-
-def datetime_from_isoformat(value):
-    # type: (str) -> datetime
-    try:
-        result = datetime.fromisoformat(value)
-    except (AttributeError, ValueError):
-        # py 3.6
-        timestamp_format = "%Y-%m-%dT%H:%M:%S.%f" if "." in value else "%Y-%m-%dT%H:%M:%S"
-        if value.endswith("Z"):
-            value = value[:-1] + "+0000"
-
-        if value[-6] in ISO_TZ_SEPARATORS:
-            timestamp_format += "%z"
-            value = value[:-3] + value[-2:]
-        elif value[-5] in ISO_TZ_SEPARATORS:
-            timestamp_format += "%z"
-
-        result = datetime.strptime(value, timestamp_format)
-    return result.astimezone(timezone.utc)
-
-
-def should_be_treated_as_error(ty, value):
-    # type: (Any, Any) -> bool
-    if ty == SystemExit and hasattr(value, "code") and value.code in (0, None):
-        # https://docs.python.org/3/library/exceptions.html#SystemExit
-        return False
-
-    return True
-
-
-def http_client_status_to_breadcrumb_level(status_code):
-    # type: (Optional[int]) -> str
-    if status_code is not None:
-        if 500 <= status_code <= 599:
-            return "error"
-        elif 400 <= status_code <= 499:
-            return "warning"
-
-    return "info"
-
-
-def set_thread_info_from_span(data, span):
-    # type: (Dict[str, Any], sentry_sdk.tracing.Span) -> None
-    if span.get_attribute(SPANDATA.THREAD_ID) is not None:
-        data[SPANDATA.THREAD_ID] = span.get_attribute(SPANDATA.THREAD_ID)
-        if span.get_attribute(SPANDATA.THREAD_NAME) is not None:
-            data[SPANDATA.THREAD_NAME] = span.get_attribute(SPANDATA.THREAD_NAME)
diff --git a/src/sentry_sdk_alpha/worker.py b/src/sentry_sdk_alpha/worker.py
deleted file mode 100644
index 09828bdefb2178..00000000000000
--- a/src/sentry_sdk_alpha/worker.py
+++ /dev/null
@@ -1,139 +0,0 @@
-import os
-import threading
-from time import sleep, time
-from typing import TYPE_CHECKING
-
-from sentry_sdk_alpha._queue import FullError, Queue
-from sentry_sdk_alpha.consts import DEFAULT_QUEUE_SIZE
-from sentry_sdk_alpha.utils import logger
-
-if TYPE_CHECKING:
-    from collections.abc import Callable
-    from typing import Any, Optional
-
-
-_TERMINATOR = object()
-
-
-class BackgroundWorker:
-    def __init__(self, queue_size=DEFAULT_QUEUE_SIZE):
-        # type: (int) -> None
-        self._queue = Queue(queue_size)  # type: Queue
-        self._lock = threading.Lock()
-        self._thread = None  # type: Optional[threading.Thread]
-        self._thread_for_pid = None  # type: Optional[int]
-
-    @property
-    def is_alive(self):
-        # type: () -> bool
-        if self._thread_for_pid != os.getpid():
-            return False
-        if not self._thread:
-            return False
-        return self._thread.is_alive()
-
-    def _ensure_thread(self):
-        # type: () -> None
-        if not self.is_alive:
-            self.start()
-
-    def _timed_queue_join(self, timeout):
-        # type: (float) -> bool
-        deadline = time() + timeout
-        queue = self._queue
-
-        queue.all_tasks_done.acquire()
-
-        try:
-            while queue.unfinished_tasks:
-                delay = deadline - time()
-                if delay <= 0:
-                    return False
-                queue.all_tasks_done.wait(timeout=delay)
-
-            return True
-        finally:
-            queue.all_tasks_done.release()
-
-    def start(self):
-        # type: () -> None
-        with self._lock:
-            if not self.is_alive:
-                self._thread = threading.Thread(
-                    target=self._target, name="sentry-sdk.BackgroundWorker"
-                )
-                self._thread.daemon = True
-                try:
-                    self._thread.start()
-                    self._thread_for_pid = os.getpid()
-                except RuntimeError:
-                    # At this point we can no longer start because the interpreter
-                    # is already shutting down.  Sadly at this point we can no longer
-                    # send out events.
-                    self._thread = None
-
-    def kill(self):
-        # type: () -> None
-        """
-        Kill worker thread. Returns immediately. Not useful for
-        waiting on shutdown for events, use `flush` for that.
-        """
-        logger.debug("background worker got kill request")
-        with self._lock:
-            if self._thread:
-                try:
-                    self._queue.put_nowait(_TERMINATOR)
-                except FullError:
-                    logger.debug("background worker queue full, kill failed")
-
-                self._thread = None
-                self._thread_for_pid = None
-
-    def flush(self, timeout, callback=None):
-        # type: (float, Optional[Any]) -> None
-        logger.debug("background worker got flush request")
-        with self._lock:
-            if self.is_alive and timeout > 0.0:
-                self._wait_flush(timeout, callback)
-        logger.debug("background worker flushed")
-
-    def full(self):
-        # type: () -> bool
-        return self._queue.full()
-
-    def _wait_flush(self, timeout, callback):
-        # type: (float, Optional[Any]) -> None
-        initial_timeout = min(0.1, timeout)
-        if not self._timed_queue_join(initial_timeout):
-            pending = self._queue.qsize() + 1
-            logger.debug("%d event(s) pending on flush", pending)
-            if callback is not None:
-                callback(pending, timeout)
-
-            if not self._timed_queue_join(timeout - initial_timeout):
-                pending = self._queue.qsize() + 1
-                logger.error("flush timed out, dropped %s events", pending)
-
-    def submit(self, callback):
-        # type: (Callable[[], None]) -> bool
-        self._ensure_thread()
-        try:
-            self._queue.put_nowait(callback)
-            return True
-        except FullError:
-            return False
-
-    def _target(self):
-        # type: () -> None
-        while True:
-            callback = self._queue.get()
-            try:
-                if callback is _TERMINATOR:
-                    break
-                try:
-                    callback()
-                except Exception:
-                    logger.error("Failed processing job", exc_info=True)
-            finally:
-                self._queue.task_done()
-            sleep(0)

From 895dc0765ce618a9ff2ec655a3d02f13ea4ec583 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 19 May 2025 12:33:59 +0200
Subject: [PATCH 14/22] new dummy alpha package from pypi

---
 requirements-dev-frozen.txt | 1 +
 requirements-frozen.txt     | 1 +
 2 files changed, 2 insertions(+)

diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt
index e0ee3093baa450..4fdf39514c6586 100644
--- a/requirements-dev-frozen.txt
+++ b/requirements-dev-frozen.txt
@@ -193,6 +193,7 @@ sentry-protos==0.2.0
 sentry-redis-tools==0.5.0
 sentry-relay==0.9.9
 sentry-sdk==2.27.0
+anton-testing-deleteme-123==3.0.0a1
 sentry-usage-accountant==0.0.10
 simplejson==3.17.6
 six==1.16.0
diff --git a/requirements-frozen.txt b/requirements-frozen.txt
index 16bc157de5f8a0..78e72e86b341cc 100644
--- a/requirements-frozen.txt
+++ b/requirements-frozen.txt
@@ -131,6 +131,7 @@ sentry-protos==0.2.0
 sentry-redis-tools==0.5.0
 sentry-relay==0.9.9
 sentry-sdk==2.27.0
+anton-testing-deleteme-123==3.0.0a1
 sentry-usage-accountant==0.0.10
 simplejson==3.17.6
 six==1.16.0

From 85b17020b9d3268a65fd42d7ed624adeb16cfc2d Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 19 May 2025 14:18:39 +0200
Subject: [PATCH 15/22] move import to call it later

---
 src/sentry/runner/commands/devserver.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/sentry/runner/commands/devserver.py b/src/sentry/runner/commands/devserver.py
index 5f5f23427df00a..b7b35caf9ab8d2 100644
--- a/src/sentry/runner/commands/devserver.py
+++ b/src/sentry/runner/commands/devserver.py
@@ -7,7 +7,6 @@
 from typing import NoReturn
 
 import click
-import sentry_sdk
 
 from sentry.runner.commands.devservices import get_docker_client
 from sentry.runner.decorators import configuration, log_options
@@ -174,6 +173,8 @@ def devserver(
     taskworker_scheduler: bool,
 ) -> NoReturn:
     "Starts a lightweight web server for development."
+    import sentry_sdk
+
     sentry_sdk.init(
         dsn=os.environ.get("SENTRY_DEVSERVICES_DSN", ""),
         traces_sample_rate=1.0,

From 7ed319d3b7e89277c2ef650d8734a5ffaab9515c Mon Sep 17 00:00:00 2001
From: "getsantry[bot]" <66042841+getsantry[bot]@users.noreply.github.com>
Date: Mon, 19 May 2025 12:19:53 +0000
Subject: [PATCH 16/22] :snowflake: re-freeze requirements

---
 requirements-dev-frozen.txt | 1 -
 requirements-frozen.txt     | 1 -
 2 files changed, 2 deletions(-)

diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt
index 4fdf39514c6586..e0ee3093baa450 100644
--- a/requirements-dev-frozen.txt
+++ b/requirements-dev-frozen.txt
@@ -193,7 +193,6 @@ sentry-protos==0.2.0
 sentry-redis-tools==0.5.0
 sentry-relay==0.9.9
 sentry-sdk==2.27.0
-anton-testing-deleteme-123==3.0.0a1
 sentry-usage-accountant==0.0.10
 simplejson==3.17.6
 six==1.16.0
diff --git a/requirements-frozen.txt b/requirements-frozen.txt
index 78e72e86b341cc..16bc157de5f8a0 100644
--- a/requirements-frozen.txt
+++ b/requirements-frozen.txt
@@ -131,7 +131,6 @@ sentry-protos==0.2.0
 sentry-redis-tools==0.5.0
 sentry-relay==0.9.9
 sentry-sdk==2.27.0
-anton-testing-deleteme-123==3.0.0a1
 sentry-usage-accountant==0.0.10
 simplejson==3.17.6
 six==1.16.0

From f4a847b84158fbe586caee1814c2d6f2301c3a9c Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 19 May 2025 16:14:53 +0200
Subject: [PATCH 17/22] Doing the import redirect earlier

---
 src/sentry/runner/initializer.py | 45 ++++++++++++++++++++++++++++++++
 src/sentry/utils/sdk.py          | 42 +++--------------------------
 2 files changed, 48 insertions(+), 39 deletions(-)

diff --git a/src/sentry/runner/initializer.py b/src/sentry/runner/initializer.py
index 2e5191fe35cd09..2f2e0ebf940b12 100644
--- a/src/sentry/runner/initializer.py
+++ b/src/sentry/runner/initializer.py
@@ -9,6 +9,7 @@
 import click
 from django.conf import settings
 
+from sentry.options.rollout import in_random_rollout
 from sentry.silo.patches.silo_aware_transaction_patch import patch_silo_aware_atomic
 from sentry.utils import warnings
 from sentry.utils.sdk import configure_sdk
@@ -373,6 +374,8 @@ def initialize_app(config: dict[str, Any], skip_service_validation: bool = False
 
     bind_cache_to_option_store()
 
+    redirect_imports_for_sentry_sdk_alpha()
+
     register_plugins(settings)
 
     initialize_receivers()
@@ -701,3 +704,45 @@ def import_grouptype() -> None:
     from sentry.issues.grouptype import import_grouptype
 
     import_grouptype()
+
+
+def redirect_imports_for_sentry_sdk_alpha():
+    """
+    Patch the Python import system to redirect imports of sentry_sdk to sentry_sdk_alpha based on a Sentry option.
+    This allows us to gradually roll out the alpha version of the SDK to a subset of users.
+    """
+
+    class ImportRedirector(importlib.abc.MetaPathFinder, importlib.abc.Loader):
+        def __init__(self, original_module, target_module):
+            self.original_module = original_module
+            self.target_module = target_module
+
+        def find_spec(self, fullname, path, target=None):
+            if fullname == self.original_module:
+                # Create a spec for the target module
+                spec = importlib.machinery.ModuleSpec(
+                    fullname,
+                    self,
+                    origin=f"redirected from {self.original_module} to {self.target_module}",
+                )
+                return spec
+            return None
+
+        def create_module(self, spec):
+            return importlib.import_module(self.target_module)
+
+        def exec_module(self, module):
+            pass
+
+    def redirect_import(original_module, target_module):
+        redirector = ImportRedirector(original_module, target_module)
+        sys.meta_path.insert(0, redirector)
+        # TODO: Not sure the original module should be deleted....
+        # iterating over a copy to be able to delete from the original
+        for cached_module in sys.modules.copy():
+            if cached_module.startswith(original_module):
+                # cleaning up cache if the module is already imported
+                del sys.modules[cached_module]
+
+    if in_random_rollout("sentry-sdk.use-python-sdk-alpha"):
+        redirect_import("sentry_sdk", "sentry_sdk_alpha")
diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py
index fc7a1eb6ab6560..e98b0f5a932f87 100644
--- a/src/sentry/utils/sdk.py
+++ b/src/sentry/utils/sdk.py
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import copy
-import importlib.machinery
 import logging
 import sys
 from collections.abc import Generator, Mapping, Sequence, Sized
@@ -311,6 +310,8 @@ def configure_sdk():
     """
     Setup and initialize the Sentry SDK.
     """
+    import sentry_sdk
+
     sdk_options, dsns = _get_sdk_options()
     if settings.SPOTLIGHT:
         sdk_options["spotlight"] = (
@@ -486,48 +487,11 @@ def flush(
             LoggingIntegration(event_level=None, sentry_logs_level=logging.INFO),
             RustInfoIntegration(),
             RedisIntegration(),
-            ThreadingIntegration(propagate_hub=True),
+            ThreadingIntegration(),
         ],
         **sdk_options,
     )
 
-    # monkey patch sentry
-    class ImportRedirector(importlib.abc.MetaPathFinder, importlib.abc.Loader):
-        def __init__(self, original_module, target_module):
-            self.original_module = original_module
-            self.target_module = target_module
-
-        def find_spec(self, fullname, path, target=None):
-            if fullname == self.original_module:
-                # Create a spec for the target module
-                spec = importlib.machinery.ModuleSpec(
-                    fullname,
-                    self,
-                    origin=f"redirected from {self.original_module} to {self.target_module}",
-                )
-                return spec
-            return None
-
-        def create_module(self, spec):
-            return importlib.import_module(self.target_module)
-
-        def exec_module(self, module):
-            pass
-
-    def redirect_import(original_module, target_module):
-        redirector = ImportRedirector(original_module, target_module)
-        sys.meta_path.insert(0, redirector)
-        # TODO: Not sure the original module should be deleted....
-        # iterating over a copy to be able to delete from the original
-        for cached_module in sys.modules.copy():
-            if cached_module.startswith(original_module):
-                # cleaning up cache if the module is already imported
-                del sys.modules[cached_module]
-
-    # monkey patch to anything but sentry_sdk
-    if in_random_rollout("sentry-sdk.use-python-sdk-alpha") or True:
-        redirect_import("sentry_sdk", "sentry_sdk_alpha")
-
 
 def check_tag_for_scope_bleed(
     tag_key: str, expected_value: str | int, add_to_scope: bool = True

From 7dfa36572292aa0e49ec472ad26bcd661fc4db44 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 19 May 2025 16:36:45 +0200
Subject: [PATCH 18/22] updated deps

---
 requirements-base.txt       | 1 +
 requirements-dev-frozen.txt | 7 +++++++
 requirements-dev.txt        | 1 +
 requirements-frozen.txt     | 8 ++++++++
 4 files changed, 17 insertions(+)

diff --git a/requirements-base.txt b/requirements-base.txt
index 519060224af0a7..143e22ca48efee 100644
--- a/requirements-base.txt
+++ b/requirements-base.txt
@@ -1,5 +1,6 @@
 --index-url https://pypi.devinfra.sentry.io/simple
 
+anton-testing-deleteme-123==3.0.0a1
 beautifulsoup4>=4.7.1
 boto3>=1.34.128
 botocore>=1.34.8
diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt
index e0ee3093baa450..50c55683a9bec5 100644
--- a/requirements-dev-frozen.txt
+++ b/requirements-dev-frozen.txt
@@ -7,6 +7,7 @@
 --index-url https://pypi.devinfra.sentry.io/simple
 
 amqp==5.3.1
+anton-testing-deleteme-123==3.0.0a1
 anyio==3.7.1
 asgiref==3.8.1
 attrs==24.2.0
@@ -37,6 +38,7 @@ cryptography==44.0.1
 cssselect==1.0.3
 cssutils==2.9.0
 datadog==0.49.1
+deprecated==1.2.18
 devservices==1.1.5
 distlib==0.3.8
 distro==1.8.0
@@ -82,6 +84,7 @@ httpx==0.25.2
 hyperframe==6.1.0
 identify==2.6.1
 idna==3.7
+importlib-metadata==8.6.1
 inflection==0.5.1
 iniconfig==1.1.1
 iso3166==2.1.1
@@ -114,6 +117,9 @@ openapi-core==0.18.2
 openapi-pydantic==0.4.0
 openapi-schema-validator==0.6.2
 openapi-spec-validator==0.7.1
+opentelemetry-api==1.33.1
+opentelemetry-sdk==1.33.1
+opentelemetry-semantic-conventions==0.54b1
 orjson==3.10.10
 outcome==1.2.0
 packaging==24.1
@@ -248,6 +254,7 @@ wheel==0.38.4
 wrapt==1.17.0
 wsproto==1.1.0
 xmlsec==1.3.14
+zipp==3.21.0
 zstandard==0.18.0
 
 # The following packages are considered to be unsafe in a requirements file:
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 81540aa1a9fef1..6629d395e65456 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,5 +1,6 @@
 --index-url https://pypi.devinfra.sentry.io/simple
 
+anton-testing-deleteme-123==3.0.0a1
 sentry-devenv>=1.20
 devservices>=1.1.5
 
diff --git a/requirements-frozen.txt b/requirements-frozen.txt
index 16bc157de5f8a0..dd9f367852d07f 100644
--- a/requirements-frozen.txt
+++ b/requirements-frozen.txt
@@ -7,6 +7,7 @@
 --index-url https://pypi.devinfra.sentry.io/simple
 
 amqp==5.3.1
+anton-testing-deleteme-123==3.0.0a1
 anyio==3.7.1
 asgiref==3.8.1
 attrs==24.2.0
@@ -32,6 +33,7 @@ cryptography==44.0.1
 cssselect==1.0.3
 cssutils==2.9.0
 datadog==0.49.1
+deprecated==1.2.18
 distro==1.8.0
 django==5.2.1
 django-crispy-forms==1.14.0
@@ -67,6 +69,7 @@ httpcore==1.0.2
 httpx==0.25.2
 hyperframe==6.1.0
 idna==3.7
+importlib-metadata==8.6.1
 inflection==0.5.1
 iso3166==2.1.1
 isodate==0.6.1
@@ -82,6 +85,9 @@ mmh3==4.0.0
 msgpack==1.1.0
 oauthlib==3.1.0
 openai==1.3.5
+opentelemetry-api==1.33.1
+opentelemetry-sdk==1.33.1
+opentelemetry-semantic-conventions==0.54b1
 orjson==3.10.10
 packaging==24.1
 parsimonious==0.10.0
@@ -156,7 +162,9 @@ urllib3==2.2.2
 vine==5.1.0
 vroomrs==0.1.4
 wcwidth==0.2.10
+wrapt==1.17.2
 xmlsec==1.3.14
+zipp==3.21.0
 zstandard==0.18.0
 
 # The following packages are considered to be unsafe in a requirements file:

From dd022bf59dce9055c16e3c6b670fb0110c2c9326 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Mon, 19 May 2025 16:49:30 +0200
Subject: [PATCH 19/22] cleanup

---
 src/sentry/api/endpoints/warmup.py | 3 ---
 src/sentry/utils/sdk.py            | 5 +++--
 2 files changed, 3 insertions(+), 5 deletions(-)

diff --git a/src/sentry/api/endpoints/warmup.py b/src/sentry/api/endpoints/warmup.py
index 4f417cabdf9390..0a86017d18cdd5 100644
--- a/src/sentry/api/endpoints/warmup.py
+++ b/src/sentry/api/endpoints/warmup.py
@@ -15,9 +15,6 @@
 
 logger = logging.getLogger(__name__)
 
-import sentry_sdk
-from sentry_sdk.consts import VERSION as SDK_VERSION
-
 
 @all_silo_endpoint
 class WarmupEndpoint(Endpoint):
diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py
index e98b0f5a932f87..3d49d5b97a8419 100644
--- a/src/sentry/utils/sdk.py
+++ b/src/sentry/utils/sdk.py
@@ -7,11 +7,12 @@
 from types import FrameType
 from typing import TYPE_CHECKING, Any, NamedTuple
 
-# Reexport sentry_sdk just in case we ever have to write another shim like we
-# did for raven
 import sentry_sdk
 from django.conf import settings
 from rest_framework.request import Request
+
+# Reexport sentry_sdk just in case we ever have to write another shim like we
+# did for raven
 from sentry_sdk import Scope, capture_exception, capture_message, isolation_scope
 from sentry_sdk._types import AnnotatedValue
 from sentry_sdk.client import get_options

From 4bfb2a2dab7b6adf4f316f2fcf0233470da56b9f Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Tue, 20 May 2025 09:50:56 +0200
Subject: [PATCH 20/22] fixed requirements

---
 requirements-dev.txt | 1 -
 1 file changed, 1 deletion(-)

diff --git a/requirements-dev.txt b/requirements-dev.txt
index 6629d395e65456..81540aa1a9fef1 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,6 +1,5 @@
 --index-url https://pypi.devinfra.sentry.io/simple
 
-anton-testing-deleteme-123==3.0.0a1
 sentry-devenv>=1.20
 devservices>=1.1.5
 

From 7f2395f6b4589c818b538d2c429cc305c5a4dde4 Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Tue, 20 May 2025 09:51:25 +0200
Subject: [PATCH 21/22] comment

---
 src/sentry/options/defaults.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/sentry/options/defaults.py b/src/sentry/options/defaults.py
index eee317ddada98d..dbfa94365ece92 100644
--- a/src/sentry/options/defaults.py
+++ b/src/sentry/options/defaults.py
@@ -2368,6 +2368,7 @@
     flags=FLAG_AUTOMATOR_MODIFIABLE,
 )
 # Gradually roll out Python SDK alpha version for dogfooding
+# TODO-anton: set to a sane value
 register(
     "sentry-sdk.use-python-sdk-alpha", default=1.0, type=Float, flags=FLAG_AUTOMATOR_MODIFIABLE
 )

From fb3db63f190aaed5eae31580fc0c1eac744a73ee Mon Sep 17 00:00:00 2001
From: Anton Pirker 
Date: Tue, 20 May 2025 09:55:28 +0200
Subject: [PATCH 22/22] metrics_noop not needed anymore, because there are no
 metrics, just span.data

---
 src/sentry/metrics/minimetrics.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/src/sentry/metrics/minimetrics.py b/src/sentry/metrics/minimetrics.py
index 446b878d002bbb..e83ed2673ab41d 100644
--- a/src/sentry/metrics/minimetrics.py
+++ b/src/sentry/metrics/minimetrics.py
@@ -2,7 +2,6 @@
 from datetime import datetime, timedelta, timezone
 
 import sentry_sdk
-from sentry_sdk.metrics import metrics_noop
 from sentry_sdk.tracing import Span
 
 from sentry.metrics.base import MetricsBackend, Tags
@@ -14,7 +13,6 @@ def _attach_tags(span: Span, tags: Tags | None) -> None:
             span.set_data(tag_key, tag_value)
 
 
-@metrics_noop
 def _set_metric_on_span(key: str, value: float | int, op: str, tags: Tags | None = None) -> None:
     span_or_tx = sentry_sdk.get_current_span()
     if span_or_tx is None: