diff --git a/pyproject.toml b/pyproject.toml index d1c9eb9b27961d..029ef7417a5f55 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,10 @@ filterwarnings = [ # pytest has not yet implemented the replacement for this yet "ignore:The --looponfail command line argument.*", + + # TODO-anton: just for testing locally, remove before merging + "ignore::DeprecationWarning:setuptools._distutils.version", + "ignore::DeprecationWarning:redis.connection" ] looponfailroots = ["src", "tests"] diff --git a/requirements-base.txt b/requirements-base.txt index 3f459947e240a9..c93a0a73adddec 100644 --- a/requirements-base.txt +++ b/requirements-base.txt @@ -70,7 +70,7 @@ sentry-ophio>=1.1.3 sentry-protos==0.2.0 sentry-redis-tools>=0.5.0 sentry-relay>=0.9.9 -sentry-sdk[http2]>=2.29.1 +sentry-sdk[http2]==3.0.0a1 slack-sdk>=3.27.2 snuba-sdk>=3.0.43 simplejson>=3.17.6 diff --git a/requirements-dev-frozen.txt b/requirements-dev-frozen.txt index af524686183db6..841930c11e449a 100644 --- a/requirements-dev-frozen.txt +++ b/requirements-dev-frozen.txt @@ -37,6 +37,7 @@ cryptography==44.0.1 cssselect==1.0.3 cssutils==2.9.0 datadog==0.49.1 +deprecated==1.2.18 devservices==1.1.6 distlib==0.3.8 distro==1.8.0 @@ -82,6 +83,7 @@ httpx==0.25.2 hyperframe==6.1.0 identify==2.6.1 idna==3.7 +importlib-metadata==8.6.1 inflection==0.5.1 iniconfig==1.1.1 iso3166==2.1.1 @@ -114,6 +116,9 @@ openapi-core==0.18.2 openapi-pydantic==0.4.0 openapi-schema-validator==0.6.2 openapi-spec-validator==0.7.1 +opentelemetry-api==1.33.1 +opentelemetry-sdk==1.33.1 +opentelemetry-semantic-conventions==0.54b1 orjson==3.10.10 outcome==1.2.0 packaging==24.1 @@ -151,7 +156,7 @@ pytest-fail-slow==0.3.0 pytest-json-report==1.5.0 pytest-metadata==3.1.1 pytest-rerunfailures==15.0 -pytest-sentry==0.3.0 +pytest-sentry==0.4.1 pytest-workaround-12888==1.0.0 pytest-xdist==3.0.2 python-dateutil==2.9.0.post0 @@ -190,7 +195,7 @@ sentry-ophio==1.1.3 sentry-protos==0.2.0 sentry-redis-tools==0.5.0 sentry-relay==0.9.9 
-sentry-sdk==2.29.1 +sentry-sdk==3.0.0a1 sentry-usage-accountant==0.0.10 simplejson==3.17.6 six==1.17.0 @@ -246,6 +251,7 @@ wheel==0.38.4 wrapt==1.17.0 wsproto==1.1.0 xmlsec==1.3.14 +zipp==3.21.0 zstandard==0.18.0 # The following packages are considered to be unsafe in a requirements file: diff --git a/requirements-dev.txt b/requirements-dev.txt index f73849148f1f64..f18ba808ca4e82 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -16,7 +16,7 @@ pytest-django>=4.9.0 pytest-fail-slow>=0.3.0 pytest-json-report>=1.5.0 pytest-rerunfailures>=15 -pytest-sentry>=0.3.0,<0.4.0 +pytest-sentry>=0.4.0 pytest-workaround-12888 pytest-xdist>=3 responses>=0.23.1 diff --git a/requirements-frozen.txt b/requirements-frozen.txt index 49e56b78df3146..17ef5c9f4f8b85 100644 --- a/requirements-frozen.txt +++ b/requirements-frozen.txt @@ -32,6 +32,7 @@ cryptography==44.0.1 cssselect==1.0.3 cssutils==2.9.0 datadog==0.49.1 +deprecated==1.2.18 distro==1.8.0 django==5.2.1 django-crispy-forms==1.14.0 @@ -67,6 +68,7 @@ httpcore==1.0.2 httpx==0.25.2 hyperframe==6.1.0 idna==3.7 +importlib-metadata==8.6.1 inflection==0.5.1 iso3166==2.1.1 isodate==0.6.1 @@ -82,6 +84,9 @@ mmh3==4.0.0 msgpack==1.1.0 oauthlib==3.1.0 openai==1.3.5 +opentelemetry-api==1.33.1 +opentelemetry-sdk==1.33.1 +opentelemetry-semantic-conventions==0.54b1 orjson==3.10.10 packaging==24.1 parsimonious==0.10.0 @@ -128,7 +133,7 @@ sentry-ophio==1.1.3 sentry-protos==0.2.0 sentry-redis-tools==0.5.0 sentry-relay==0.9.9 -sentry-sdk==2.29.1 +sentry-sdk==3.0.0a1 sentry-usage-accountant==0.0.10 simplejson==3.17.6 six==1.17.0 @@ -154,7 +159,9 @@ urllib3==2.2.2 vine==5.1.0 vroomrs==0.1.4 wcwidth==0.2.13 +wrapt==1.17.2 xmlsec==1.3.14 +zipp==3.21.0 zstandard==0.18.0 # The following packages are considered to be unsafe in a requirements file: diff --git a/src/sentry/api/base.py b/src/sentry/api/base.py index 18d1e1c00ac94c..cc4cae9ec6c600 100644 --- a/src/sentry/api/base.py +++ b/src/sentry/api/base.py @@ -441,7 +441,9 @@ def 
dispatch(self, request: Request, *args, **kwargs) -> Response: op="base.dispatch.sleep", name=type(self).__name__, ) as span: - span.set_data("SENTRY_API_RESPONSE_DELAY", settings.SENTRY_API_RESPONSE_DELAY) + span.set_attribute( + "SENTRY_API_RESPONSE_DELAY", settings.SENTRY_API_RESPONSE_DELAY + ) time.sleep(settings.SENTRY_API_RESPONSE_DELAY / 1000.0 - duration) # Only enforced in dev environment diff --git a/src/sentry/api/bases/organization.py b/src/sentry/api/bases/organization.py index 5fb069cde2dbdf..f5d43b88a117a3 100644 --- a/src/sentry/api/bases/organization.py +++ b/src/sentry/api/bases/organization.py @@ -408,7 +408,7 @@ def get_projects( with sentry_sdk.start_span(op="fetch_organization_projects") as span: projects = list(qs) - span.set_data("Project Count", len(projects)) + span.set_attribute("Project Count", len(projects)) filter_by_membership = not bool(ids) and not bool(slugs) filtered_projects = self._filter_projects_by_permissions( @@ -434,7 +434,7 @@ def _filter_projects_by_permissions( include_all_accessible: bool = False, ) -> list[Project]: with sentry_sdk.start_span(op="apply_project_permissions") as span: - span.set_data("Project Count", len(projects)) + span.set_attribute("Project Count", len(projects)) if force_global_perms: span.set_tag("mode", "force_global_perms") return projects diff --git a/src/sentry/api/endpoints/group_current_release.py b/src/sentry/api/endpoints/group_current_release.py index e8c93360f08d83..b48886fcd36d2b 100644 --- a/src/sentry/api/endpoints/group_current_release.py +++ b/src/sentry/api/endpoints/group_current_release.py @@ -65,8 +65,9 @@ def get(self, request: Request, group) -> Response: environments = get_environments(request, group.project.organization) with sentry_sdk.start_span(op="CurrentReleaseEndpoint.get.current_release") as span: - span.set_data("Environment Count", len(environments)) - span.set_data( + span.set_attribute("Environment Count", len(environments)) + # TODO-anton: split dict into multiple 
attributes + span.set_attribute( "Raw Parameters", { "group.id": group.id, diff --git a/src/sentry/api/endpoints/organization_events_facets.py b/src/sentry/api/endpoints/organization_events_facets.py index 42637597b0633e..d1ea0444144e15 100644 --- a/src/sentry/api/endpoints/organization_events_facets.py +++ b/src/sentry/api/endpoints/organization_events_facets.py @@ -56,7 +56,7 @@ def data_fn(offset, limit): ) with sentry_sdk.start_span(op="discover.endpoint", name="populate_results") as span: - span.set_data("facet_count", len(facets or [])) + span.set_attribute("facet_count", len(facets or [])) resp: dict[str, _KeyTopValues] resp = defaultdict(lambda: {"key": "", "topValues": []}) for row in facets: diff --git a/src/sentry/api/endpoints/organization_events_facets_performance.py b/src/sentry/api/endpoints/organization_events_facets_performance.py index d2ed58692709eb..c1476d9c189ad5 100644 --- a/src/sentry/api/endpoints/organization_events_facets_performance.py +++ b/src/sentry/api/endpoints/organization_events_facets_performance.py @@ -262,7 +262,7 @@ def query_tag_data( Returns None if query was not successful which causes the endpoint to return early """ with sentry_sdk.start_span(op="discover.discover", name="facets.filter_transform") as span: - span.set_data("query", filter_query) + span.set_attribute("query", filter_query) tag_query = DiscoverQueryBuilder( dataset=Dataset.Discover, params={}, @@ -391,7 +391,7 @@ def query_facet_performance( tag_key_limit = limit if tag_key else 1 with sentry_sdk.start_span(op="discover.discover", name="facets.filter_transform") as span: - span.set_data("query", filter_query) + span.set_attribute("query", filter_query) tag_query = DiscoverQueryBuilder( dataset=Dataset.Discover, params={}, @@ -417,8 +417,8 @@ def query_facet_performance( ) with sentry_sdk.start_span(op="discover.discover", name="facets.aggregate_tags"): - span.set_data("sample_rate", sample_rate) - span.set_data("target_sample", target_sample) + 
span.set_attribute("sample_rate", sample_rate) + span.set_attribute("target_sample", target_sample) aggregate_comparison = transaction_aggregate * 1.005 if transaction_aggregate else 0 aggregate_column = Function("avg", [translated_aggregate_column], "aggregate") tag_query.where.append(excluded_tags) diff --git a/src/sentry/api/endpoints/organization_events_meta.py b/src/sentry/api/endpoints/organization_events_meta.py index 3cb8c82408c885..0211844284448b 100644 --- a/src/sentry/api/endpoints/organization_events_meta.py +++ b/src/sentry/api/endpoints/organization_events_meta.py @@ -155,7 +155,7 @@ def get(self, request: Request, organization) -> Response: with sentry_sdk.start_span(op="discover.endpoint", name="serialize_results") as span: results = list(results_cursor) - span.set_data("result_length", len(results)) + span.set_attribute("result_length", len(results)) context = serialize( results, request.user, diff --git a/src/sentry/api/endpoints/organization_events_stats.py b/src/sentry/api/endpoints/organization_events_stats.py index 0c0aaae1365901..db1dab6a06f5ca 100644 --- a/src/sentry/api/endpoints/organization_events_stats.py +++ b/src/sentry/api/endpoints/organization_events_stats.py @@ -190,7 +190,8 @@ def get(self, request: Request, organization: Organization) -> Response: query_source = self.get_request_source(request) with sentry_sdk.start_span(op="discover.endpoint", name="filter_params") as span: - span.set_data("organization", organization) + # TODO-anton: split dict into multiple attributes + span.set_attribute("organization", organization) top_events = 0 diff --git a/src/sentry/api/endpoints/organization_events_timeseries.py b/src/sentry/api/endpoints/organization_events_timeseries.py index f5b36a2c6ed9d7..5d2d54ee5162f0 100644 --- a/src/sentry/api/endpoints/organization_events_timeseries.py +++ b/src/sentry/api/endpoints/organization_events_timeseries.py @@ -152,7 +152,8 @@ def get_comparison_delta(self, request: Request) -> timedelta | None: def 
get(self, request: Request, organization: Organization) -> Response: with sentry_sdk.start_span(op="discover.endpoint", name="filter_params") as span: - span.set_data("organization", organization) + # TODO-anton: split dict into multiple attributes + span.set_attribute("organization", organization) top_events = self.get_top_events(request) comparison_delta = self.get_comparison_delta(request) diff --git a/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py b/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py index cb2a9310a375e6..b16751fc006fc1 100644 --- a/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py +++ b/src/sentry/api/endpoints/organization_on_demand_metrics_estimation_stats.py @@ -67,7 +67,8 @@ def get(self, request: Request, organization: Organization) -> Response: return Response({"detail": "missing required parameter yAxis"}, status=400) with sentry_sdk.start_span(op="discover.metrics.endpoint", name="get_full_metrics") as span: - span.set_data("organization", organization) + # TODO-anton: split dict into multiple attributes + span.set_attribute("organization", organization) try: # the discover stats diff --git a/src/sentry/api/endpoints/organization_users.py b/src/sentry/api/endpoints/organization_users.py index c37e81d046f539..29f351feb6dcfc 100644 --- a/src/sentry/api/endpoints/organization_users.py +++ b/src/sentry/api/endpoints/organization_users.py @@ -47,8 +47,8 @@ def get(self, request: Request, organization) -> Response: organization_members = list(qs) - span.set_data("Project Count", len(projects)) - span.set_data("Member Count", len(organization_members)) + span.set_attribute("Project Count", len(projects)) + span.set_attribute("Member Count", len(organization_members)) return Response( serialize( diff --git a/src/sentry/api/serializers/base.py b/src/sentry/api/serializers/base.py index 6242e26c073868..3195cbe9726e6c 100644 --- a/src/sentry/api/serializers/base.py +++ 
b/src/sentry/api/serializers/base.py @@ -65,7 +65,7 @@ def serialize( else: return objects with sentry_sdk.start_span(op="serialize", name=type(serializer).__name__) as span: - span.set_data("Object Count", len(objects)) + span.set_attribute("Object Count", len(objects)) with sentry_sdk.start_span(op="serialize.get_attrs", name=type(serializer).__name__): attrs = serializer.get_attrs( diff --git a/src/sentry/api/serializers/models/project.py b/src/sentry/api/serializers/models/project.py index ff3f534a8bd506..248c992593f96f 100644 --- a/src/sentry/api/serializers/models/project.py +++ b/src/sentry/api/serializers/models/project.py @@ -339,7 +339,7 @@ def get_attrs( ) -> dict[Project, dict[str, Any]]: def measure_span(op_tag): span = sentry_sdk.start_span(op=f"serialize.get_attrs.project.{op_tag}") - span.set_data("Object Count", len(item_list)) + span.set_attribute("Object Count", len(item_list)) return span with measure_span("preamble"): diff --git a/src/sentry/auth/access.py b/src/sentry/auth/access.py index fd8a9e19e01335..3f50d95b683274 100644 --- a/src/sentry/auth/access.py +++ b/src/sentry/auth/access.py @@ -273,8 +273,8 @@ def project_ids_with_team_membership(self) -> frozenset[int]: .distinct() .values_list("id", flat=True) ) - span.set_data("Project Count", len(projects)) - span.set_data("Team Count", len(teams)) + span.set_attribute("Project Count", len(projects)) + span.set_attribute("Team Count", len(teams)) return projects @@ -353,7 +353,7 @@ def has_any_project_scope(self, project: Project, scopes: Collection[str]) -> bo ] span.set_tag("organization", self._member.organization.id) span.set_tag("organization.slug", self._member.organization.slug) - span.set_data("membership_count", len(memberships)) + span.set_attribute("membership_count", len(memberships)) for membership in memberships: team_scopes = membership.get_scopes() @@ -564,7 +564,7 @@ def has_any_project_scope(self, project: Project, scopes: Collection[str]) -> bo span.set_tag( 
"organization.slug", self.rpc_user_organization_context.organization.slug ) - span.set_data("membership_count", len(orgmember_teams)) + span.set_attribute("membership_count", len(orgmember_teams)) for member_team in orgmember_teams: if not member_team.role: diff --git a/src/sentry/cache/base.py b/src/sentry/cache/base.py index bbbf28ca961316..b608d09c51b5f6 100644 --- a/src/sentry/cache/base.py +++ b/src/sentry/cache/base.py @@ -46,9 +46,9 @@ def _mark_transaction(self, op): if not self.is_default_cache: return - scope = sentry_sdk.Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() # Do not set this tag if we're in the global scope (which roughly - # equates to having a transaction). - if scope.transaction: + # equates to having a root span). + if scope.root_span: sentry_sdk.set_tag(f"{op}_default_cache", "true") sentry_sdk.set_tag("used_default_cache", "true") diff --git a/src/sentry/consumers/validate_schema.py b/src/sentry/consumers/validate_schema.py index e5138b690b9c1d..0f2b21caad3e6c 100644 --- a/src/sentry/consumers/validate_schema.py +++ b/src/sentry/consumers/validate_schema.py @@ -46,8 +46,8 @@ def submit(self, message: Message[KafkaPayload]) -> None: now = time.time() if self.__last_record_time is None or self.__last_record_time + 1.0 < now: with sentry_sdk.isolation_scope() as scope: - scope.add_attachment(bytes=message.payload.value, filename="message.txt") scope.set_tag("topic", self.__topic) + sentry_sdk.add_attachment(bytes=message.payload.value, filename="message.txt") if self.__codec is None: logger.warning("No validator configured for topic") diff --git a/src/sentry/data_export/tasks.py b/src/sentry/data_export/tasks.py index d070749e84f87a..65657ee09b6e96 100644 --- a/src/sentry/data_export/tasks.py +++ b/src/sentry/data_export/tasks.py @@ -381,7 +381,7 @@ def merge_export_blobs(data_export_id, **kwargs): def _set_data_on_scope(data_export): - scope = sentry_sdk.Scope.get_isolation_scope() + scope = 
sentry_sdk.get_isolation_scope() if data_export.user_id: user = dict(id=data_export.user_id) scope.set_user(user) diff --git a/src/sentry/debug_files/upload.py b/src/sentry/debug_files/upload.py index cb5fde4bc84425..b28bba7d0ba939 100644 --- a/src/sentry/debug_files/upload.py +++ b/src/sentry/debug_files/upload.py @@ -10,7 +10,7 @@ def find_missing_chunks(organization_id: int, chunks: set[str]): """Returns a list of chunks which are missing for an org.""" with sentry_sdk.start_span(op="find_missing_chunks") as span: span.set_tag("organization_id", organization_id) - span.set_data("chunks_size", len(chunks)) + span.set_attribute("chunks_size", len(chunks)) if not chunks: return [] diff --git a/src/sentry/dynamic_sampling/rules/helpers/latest_releases.py b/src/sentry/dynamic_sampling/rules/helpers/latest_releases.py index c4e72b6c27a65e..d60892defe8716 100644 --- a/src/sentry/dynamic_sampling/rules/helpers/latest_releases.py +++ b/src/sentry/dynamic_sampling/rules/helpers/latest_releases.py @@ -385,8 +385,8 @@ def on_release_boosted() -> None: "dynamic_sampling.observe_release_status", "(release, environment) pair observed and boosted", ) - span.set_data("release", release.id) - span.set_data("environment", environment) + span.set_attribute("release", release.id) + span.set_attribute("environment", environment) schedule_invalidate_project_config( project_id=project.id, diff --git a/src/sentry/features/manager.py b/src/sentry/features/manager.py index 15f98d10998f96..baee3e505818cb 100644 --- a/src/sentry/features/manager.py +++ b/src/sentry/features/manager.py @@ -114,9 +114,9 @@ def has_for_batch( name=f"{type(handler).__name__} ({name})", ) as span: batch_size = len(remaining) - span.set_data("Batch Size", batch_size) - span.set_data("Feature Name", name) - span.set_data("Handler Type", type(handler).__name__) + span.set_attribute("Batch Size", batch_size) + span.set_attribute("Feature Name", name) + span.set_attribute("Handler Type", type(handler).__name__) batch 
= FeatureCheckBatch(self, name, organization, remaining, actor) handler_result = handler.has_for_batch(batch) @@ -124,7 +124,7 @@ def has_for_batch( if flag is not None: remaining.remove(obj) result[obj] = flag - span.set_data("Flags Found", batch_size - len(remaining)) + span.set_attribute("Flags Found", batch_size - len(remaining)) default_flag = settings.SENTRY_FEATURES.get(name, False) for obj in remaining: diff --git a/src/sentry/hybridcloud/models/outbox.py b/src/sentry/hybridcloud/models/outbox.py index 55d481d66a8b1e..8eefdd352ff270 100644 --- a/src/sentry/hybridcloud/models/outbox.py +++ b/src/sentry/hybridcloud/models/outbox.py @@ -289,8 +289,8 @@ def process_coalesced( def _set_span_data_for_coalesced_message(self, span: Span, message: OutboxBase) -> None: tag_for_outbox = OutboxScope.get_tag_name(message.shard_scope) span.set_tag(tag_for_outbox, message.shard_identifier) - span.set_data("outbox_id", message.id) - span.set_data("outbox_shard_id", message.shard_identifier) + span.set_attribute("outbox_id", message.id) + span.set_attribute("outbox_shard_id", message.shard_identifier) span.set_tag("outbox_category", OutboxCategory(message.category).name) span.set_tag("outbox_scope", OutboxScope(message.shard_scope).name) diff --git a/src/sentry/hybridcloud/rpc/service.py b/src/sentry/hybridcloud/rpc/service.py index d630c0ca9862ea..b117ccdebdf84e 100644 --- a/src/sentry/hybridcloud/rpc/service.py +++ b/src/sentry/hybridcloud/rpc/service.py @@ -596,7 +596,7 @@ def _remote_exception(self, message: str) -> RpcRemoteException: def _raise_from_response_status_error(self, response: requests.Response) -> NoReturn: rpc_method = f"{self.service_name}.{self.method_name}" - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_tag("rpc_method", rpc_method) scope.set_tag("rpc_status_code", response.status_code) diff --git a/src/sentry/ingest/consumer/processors.py b/src/sentry/ingest/consumer/processors.py index 
490708ab31c5e9..ac8cc765396984 100644 --- a/src/sentry/ingest/consumer/processors.py +++ b/src/sentry/ingest/consumer/processors.py @@ -54,12 +54,9 @@ def inner(*args, **kwargs): default=getattr(settings, "SENTRY_INGEST_CONSUMER_APM_SAMPLING", 0), ) ) - # New behavior is to add a custom `sample_rate` that is picked up by `traces_sampler` - span_kwargs.setdefault( - "custom_sampling_context", - {"sample_rate": sample_rate}, - ) - with sentry_sdk.start_transaction(**span_kwargs): + # New behavior is to add a custom `sentry.sample_rate` that is picked up by `traces_sampler` + span_kwargs.setdefault("attributes", {}).setdefault("sentry.sample_rate", sample_rate) + with sentry_sdk.start_span(**span_kwargs): return f(*args, **kwargs) return inner diff --git a/src/sentry/ingest/transaction_clusterer/rules.py b/src/sentry/ingest/transaction_clusterer/rules.py index 901694971dd7da..4d08193dcd2824 100644 --- a/src/sentry/ingest/transaction_clusterer/rules.py +++ b/src/sentry/ingest/transaction_clusterer/rules.py @@ -147,7 +147,7 @@ def _trim_rules(self, rules: RuleSet) -> RuleSet: if self.MERGE_MAX_RULES < len(rules): set_span_data("discarded_rules", len(rules) - self.MERGE_MAX_RULES) - sentry_sdk.Scope.get_isolation_scope().set_context( + sentry_sdk.get_isolation_scope().set_context( "clustering_rules_max", { "num_existing_rules": len(rules), diff --git a/src/sentry/ingest/transaction_clusterer/tasks.py b/src/sentry/ingest/transaction_clusterer/tasks.py index d61cbcd997abbf..9cfac20a8d86a7 100644 --- a/src/sentry/ingest/transaction_clusterer/tasks.py +++ b/src/sentry/ingest/transaction_clusterer/tasks.py @@ -89,7 +89,7 @@ def cluster_projects(project_ids: Sequence[int]) -> None: try: for project in projects: with sentry_sdk.start_span(op="txcluster_project") as span: - span.set_data("project_id", project.id) + span.set_attribute("project_id", project.id) tx_names = list(redis.get_transaction_names(project)) new_rules = [] if len(tx_names) >= MERGE_THRESHOLD: diff --git 
a/src/sentry/integrations/source_code_management/commit_context.py b/src/sentry/integrations/source_code_management/commit_context.py index b0ac2f586aa7c0..0aefd81f55ad4a 100644 --- a/src/sentry/integrations/source_code_management/commit_context.py +++ b/src/sentry/integrations/source_code_management/commit_context.py @@ -266,7 +266,7 @@ def queue_pr_comment_task_if_needed( ), extra={"organization_id": commit.organization_id, "merge_commit_sha": commit.key}, ) - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_tag("queue_comment_check.merge_commit_sha", commit.key) scope.set_tag("queue_comment_check.organization_id", commit.organization_id) diff --git a/src/sentry/integrations/source_code_management/repository.py b/src/sentry/integrations/source_code_management/repository.py index 91d97e36d5deb8..386b289d0cc021 100644 --- a/src/sentry/integrations/source_code_management/repository.py +++ b/src/sentry/integrations/source_code_management/repository.py @@ -178,7 +178,7 @@ def get_stacktrace_link( "organization_id": repo.organization_id, } ) - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_tag("stacktrace_link.tried_version", False) def encode_url(url: str) -> str: diff --git a/src/sentry/integrations/utils/scope.py b/src/sentry/integrations/utils/scope.py index 9a99e3349deaac..efccbe0f2fb44d 100644 --- a/src/sentry/integrations/utils/scope.py +++ b/src/sentry/integrations/utils/scope.py @@ -24,7 +24,7 @@ def clear_tags_and_context() -> None: """Clear certain tags and context since it should not be set.""" reset_values = False - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() for tag in ["organization", "organization.slug"]: if tag in scope._tags: diff --git a/src/sentry/integrations/web/organization_integration_setup.py b/src/sentry/integrations/web/organization_integration_setup.py index fc207e76199eee..7a25e9ae25a91e 
100644 --- a/src/sentry/integrations/web/organization_integration_setup.py +++ b/src/sentry/integrations/web/organization_integration_setup.py @@ -3,7 +3,7 @@ import sentry_sdk from django.http import Http404, HttpRequest from django.http.response import HttpResponseBase -from sentry_sdk.tracing import TransactionSource +from sentry_sdk.consts import TransactionSource from sentry import features from sentry.features.exceptions import FeatureNotRegistered @@ -21,7 +21,7 @@ class OrganizationIntegrationSetupView(ControlSiloOrganizationView): csrf_protect = False def handle(self, request: HttpRequest, organization, provider_id) -> HttpResponseBase: - scope = sentry_sdk.Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() scope.set_transaction_name(f"integration.{provider_id}", source=TransactionSource.VIEW) pipeline = IntegrationPipeline( diff --git a/src/sentry/issues/endpoints/organization_issues_count.py b/src/sentry/issues/endpoints/organization_issues_count.py index 8bd60902176c17..3f833ab4de18a0 100644 --- a/src/sentry/issues/endpoints/organization_issues_count.py +++ b/src/sentry/issues/endpoints/organization_issues_count.py @@ -63,7 +63,8 @@ def _count( query_kwargs["actor"] = request.user with start_span(op="start_search") as span: - span.set_data("query_kwargs", query_kwargs) + # TODO-anton: split dict into multiple attributes + span.set_attribute("query_kwargs", query_kwargs) result = search.backend.query(**query_kwargs) return result.hits diff --git a/src/sentry/issues/grouptype.py b/src/sentry/issues/grouptype.py index afb4b414cd66de..60e3acefa4674e 100644 --- a/src/sentry/issues/grouptype.py +++ b/src/sentry/issues/grouptype.py @@ -126,7 +126,8 @@ def get_visible( span.set_tag("has_batch_features", batch_features is not None) span.set_tag("released", released) span.set_tag("enabled", enabled) - span.set_data("feature_to_grouptype", feature_to_grouptype) + # TODO-anton: split dict into multiple attributes + 
span.set_attribute("feature_to_grouptype", feature_to_grouptype) return released + enabled def get_all_group_type_ids(self) -> set[int]: diff --git a/src/sentry/issues/occurrence_consumer.py b/src/sentry/issues/occurrence_consumer.py index db49bdb2989bb0..1999f69d3dd56a 100644 --- a/src/sentry/issues/occurrence_consumer.py +++ b/src/sentry/issues/occurrence_consumer.py @@ -394,7 +394,7 @@ def _process_message( :raises InvalidEventPayloadError: when the message is invalid :raises EventLookupError: when the provided event_id in the message couldn't be found. """ - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( op="_process_message", name="issues.occurrence_consumer", ) as txn: @@ -463,7 +463,7 @@ def process_occurrence_batch( # Number of groups we've collected to be processed in parallel metrics.gauge("occurrence_consumer.checkin.parallel_batch_groups", len(occcurrence_mapping)) # Submit occurrences & status changes for processing - with sentry_sdk.start_transaction(op="process_batch", name="occurrence.occurrence_consumer"): + with sentry_sdk.start_span(op="process_batch", name="occurrence.occurrence_consumer"): futures = [ worker.submit(process_occurrence_group, group) for group in occcurrence_mapping.values() ] diff --git a/src/sentry/metrics/minimetrics.py b/src/sentry/metrics/minimetrics.py index 446b878d002bbb..6eab805c72cbb3 100644 --- a/src/sentry/metrics/minimetrics.py +++ b/src/sentry/metrics/minimetrics.py @@ -2,7 +2,6 @@ from datetime import datetime, timedelta, timezone import sentry_sdk -from sentry_sdk.metrics import metrics_noop from sentry_sdk.tracing import Span from sentry.metrics.base import MetricsBackend, Tags @@ -11,16 +10,15 @@ def _attach_tags(span: Span, tags: Tags | None) -> None: if tags: for tag_key, tag_value in tags.items(): - span.set_data(tag_key, tag_value) + span.set_attribute(tag_key, tag_value) -@metrics_noop def _set_metric_on_span(key: str, value: float | int, op: str, tags: Tags | None = None) -> None: 
span_or_tx = sentry_sdk.get_current_span() if span_or_tx is None: return - span_or_tx.set_data(key, value) + span_or_tx.set_attribute(key, value) _attach_tags(span_or_tx, tags) diff --git a/src/sentry/middleware/integrations/classifications.py b/src/sentry/middleware/integrations/classifications.py index cf1dbf119f7f69..5200e0b492ec70 100644 --- a/src/sentry/middleware/integrations/classifications.py +++ b/src/sentry/middleware/integrations/classifications.py @@ -124,7 +124,7 @@ def get_response(self, request: HttpRequest) -> HttpResponseBase: parser_class = self.integration_parsers.get(provider) if not parser_class: - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_tag("provider", provider) scope.set_tag("path", request.path) sentry_sdk.capture_exception( diff --git a/src/sentry/models/project.py b/src/sentry/models/project.py index 81bb959f67029c..0e9a43a323a416 100644 --- a/src/sentry/models/project.py +++ b/src/sentry/models/project.py @@ -368,8 +368,8 @@ def next_short_id(self, delta: int = 1) -> int: sentry_sdk.start_span(op="project.next_short_id") as span, metrics.timer("project.next_short_id"), ): - span.set_data("project_id", self.id) - span.set_data("project_slug", self.slug) + span.set_attribute("project_id", self.id) + span.set_attribute("project_slug", self.slug) return Counter.increment(self, delta) def _save_project(self, *args, **kwargs): diff --git a/src/sentry/monitors/consumers/incident_occurrences_consumer.py b/src/sentry/monitors/consumers/incident_occurrences_consumer.py index 73e546b57da8c5..e20f5a63356ec7 100644 --- a/src/sentry/monitors/consumers/incident_occurrences_consumer.py +++ b/src/sentry/monitors/consumers/incident_occurrences_consumer.py @@ -126,7 +126,7 @@ def has_all(checkins: list[MonitorCheckIn | None]) -> TypeGuard[list[MonitorChec def process_incident_occurrence(message: Message[KafkaPayload | FilteredPayload]): - with sentry_sdk.start_transaction( + with 
sentry_sdk.start_span( op="_process_incident_occurrence", name="monitors.incident_occurrence_consumer", ) as txn: diff --git a/src/sentry/monitors/consumers/monitor_consumer.py b/src/sentry/monitors/consumers/monitor_consumer.py index a7a2f3462c77a6..1ed7ff6d41e7cb 100644 --- a/src/sentry/monitors/consumers/monitor_consumer.py +++ b/src/sentry/monitors/consumers/monitor_consumer.py @@ -934,7 +934,7 @@ def process_checkin(item: CheckinItem) -> None: Process an individual check-in """ try: - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( op="_process_checkin", name="monitors.monitor_consumer", ) as txn: @@ -1004,7 +1004,7 @@ def process_batch( metrics.gauge("monitors.checkin.parallel_batch_groups", len(checkin_mapping)) # Submit check-in groups for processing - with sentry_sdk.start_transaction(op="process_batch", name="monitors.monitor_consumer"): + with sentry_sdk.start_span(op="process_batch", name="monitors.monitor_consumer"): futures = [ executor.submit(process_checkin_group, group) for group in checkin_mapping.values() ] diff --git a/src/sentry/processing/backpressure/monitor.py b/src/sentry/processing/backpressure/monitor.py index f6a9e5e386908c..39f0bed5ef90c3 100644 --- a/src/sentry/processing/backpressure/monitor.py +++ b/src/sentry/processing/backpressure/monitor.py @@ -128,7 +128,7 @@ def start_service_monitoring() -> None: time.sleep(options.get("backpressure.monitoring.interval")) continue - with sentry_sdk.start_transaction(name="backpressure.monitoring", sampled=True): + with sentry_sdk.start_span(name="backpressure.monitoring", sampled=True): # first, check each base service and record its health unhealthy_services = check_service_health(services) diff --git a/src/sentry/relay/config/__init__.py b/src/sentry/relay/config/__init__.py index d09123114abb13..dfee8fdc03dcdf 100644 --- a/src/sentry/relay/config/__init__.py +++ b/src/sentry/relay/config/__init__.py @@ -294,7 +294,7 @@ def get_project_config( with 
sentry_sdk.isolation_scope() as scope: scope.set_tag("project", project.id) with ( - sentry_sdk.start_transaction(name="get_project_config"), + sentry_sdk.start_span(name="get_project_config"), metrics.timer("relay.config.get_project_config.duration"), ): return _get_project_config(project, project_keys=project_keys) diff --git a/src/sentry/relay/config/metric_extraction.py b/src/sentry/relay/config/metric_extraction.py index c17886d080a3ff..4158c933b2720e 100644 --- a/src/sentry/relay/config/metric_extraction.py +++ b/src/sentry/relay/config/metric_extraction.py @@ -793,7 +793,8 @@ def _convert_aggregate_and_query_to_metrics( } with sentry_sdk.start_span(op="converting_aggregate_and_query") as span: - span.set_data("widget_query_args", {"query": query, "aggregate": aggregate}) + # TODO-anton: split dict into multiple attributes + span.set_attribute("widget_query_args", {"query": query, "aggregate": aggregate}) # Create as many specs as we support for spec_version in OnDemandMetricSpecVersioning.get_spec_versions(): try: diff --git a/src/sentry/remote_subscriptions/consumers/result_consumer.py b/src/sentry/remote_subscriptions/consumers/result_consumer.py index 04098a9dfa8a2a..0feb4a72b3aff4 100644 --- a/src/sentry/remote_subscriptions/consumers/result_consumer.py +++ b/src/sentry/remote_subscriptions/consumers/result_consumer.py @@ -272,7 +272,7 @@ def process_batch(self, message: Message[ValuesBatch[KafkaPayload]]): partitioned_values = self.partition_message_batch(message) # Submit groups for processing - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( op="process_batch", name=f"monitors.{self.identifier}.result_consumer" ): futures = [ diff --git a/src/sentry/replays/consumers/recording.py b/src/sentry/replays/consumers/recording.py index 02e270d894bf62..bb611eecfa52e1 100644 --- a/src/sentry/replays/consumers/recording.py +++ b/src/sentry/replays/consumers/recording.py @@ -62,10 +62,10 @@ def create_with_partitions( def 
process_message(message: Message[KafkaPayload]) -> ProcessedRecordingMessage | FilteredPayload: - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( name="replays.consumer.recording_buffered.process_message", op="replays.consumer.recording_buffered.process_message", - custom_sampling_context={ + attributes={ "sample_rate": getattr(settings, "SENTRY_REPLAY_RECORDINGS_CONSUMER_APM_SAMPLING", 0) }, ): @@ -79,12 +79,12 @@ def process_message(message: Message[KafkaPayload]) -> ProcessedRecordingMessage def commit_message(message: Message[ProcessedRecordingMessage]) -> None: - isolation_scope = sentry_sdk.Scope.get_isolation_scope().fork() - with sentry_sdk.scope.use_isolation_scope(isolation_scope): - with sentry_sdk.start_transaction( + isolation_scope = sentry_sdk.get_isolation_scope().fork() + with sentry_sdk.use_isolation_scope(isolation_scope): + with sentry_sdk.start_span( name="replays.consumer.recording_buffered.commit_message", op="replays.consumer.recording_buffered.commit_message", - custom_sampling_context={ + attributes={ "sample_rate": getattr( settings, "SENTRY_REPLAY_RECORDINGS_CONSUMER_APM_SAMPLING", 0 ) diff --git a/src/sentry/replays/usecases/ingest/__init__.py b/src/sentry/replays/usecases/ingest/__init__.py index 1e44af9acde5e6..d1ebacbfd6f85c 100644 --- a/src/sentry/replays/usecases/ingest/__init__.py +++ b/src/sentry/replays/usecases/ingest/__init__.py @@ -92,13 +92,13 @@ class RecordingIngestMessage: def ingest_recording(message_bytes: bytes) -> None: """Ingest non-chunked recording messages.""" - isolation_scope = sentry_sdk.Scope.get_isolation_scope().fork() + isolation_scope = sentry_sdk.get_isolation_scope().fork() - with sentry_sdk.scope.use_isolation_scope(isolation_scope): - with sentry_sdk.start_transaction( + with sentry_sdk.use_isolation_scope(isolation_scope): + with sentry_sdk.start_span( name="replays.consumer.process_recording", op="replays.consumer", - custom_sampling_context={ + attributes={ "sample_rate": getattr( 
settings, "SENTRY_REPLAY_RECORDINGS_CONSUMER_APM_SAMPLING", 0 ) diff --git a/src/sentry/reprocessing2.py b/src/sentry/reprocessing2.py index ba3a99f5f29efc..73635481fa1606 100644 --- a/src/sentry/reprocessing2.py +++ b/src/sentry/reprocessing2.py @@ -220,7 +220,7 @@ def reprocess_event(project_id: int, event_id: str, start_time: float) -> None: for attachment_id, attachment in enumerate(attachments): with sentry_sdk.start_span(op="reprocess_event._copy_attachment_into_cache") as span: - span.set_data("attachment_id", attachment.id) + span.set_attribute("attachment_id", attachment.id) attachment_objects.append( _copy_attachment_into_cache( attachment_id=attachment_id, @@ -372,7 +372,7 @@ def buffered_delete_old_primary_hash( old_primary_hashes.add(old_primary_hash) reprocessing_store.add_hash(project_id, group_id, old_primary_hash) - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_tag("project_id", project_id) scope.set_tag("old_group_id", group_id) scope.set_tag("old_primary_hash", old_primary_hash) diff --git a/src/sentry/runner/commands/cleanup.py b/src/sentry/runner/commands/cleanup.py index 7e4bc6ce989b94..353b18e3c9fa81 100644 --- a/src/sentry/runner/commands/cleanup.py +++ b/src/sentry/runner/commands/cleanup.py @@ -178,7 +178,7 @@ def cleanup( transaction = None # Making sure we're not running in local dev to prevent a local error if not os.environ.get("SENTRY_DEVENV_HOME"): - transaction = sentry_sdk.start_transaction(op="cleanup", name="cleanup") + transaction = sentry_sdk.start_span(op="cleanup", name="cleanup") transaction.__enter__() transaction.set_tag("router", router) transaction.set_tag("model", model) diff --git a/src/sentry/runner/commands/devserver.py b/src/sentry/runner/commands/devserver.py index fb2ccf8779fc22..eac9fe138eed91 100644 --- a/src/sentry/runner/commands/devserver.py +++ b/src/sentry/runner/commands/devserver.py @@ -179,7 +179,7 @@ def devserver( 
dsn=os.environ.get("SENTRY_DEVSERVICES_DSN", ""), traces_sample_rate=1.0, ) - with sentry_sdk.start_transaction(op="command", name="sentry.devserver"): + with sentry_sdk.start_span(op="command", name="sentry.devserver"): passed_options = { p.name: ctx.params[p.name] for p in ctx.command.params diff --git a/src/sentry/runner/main.py b/src/sentry/runner/main.py index 9fdf90600d08ca..68351532163c2f 100644 --- a/src/sentry/runner/main.py +++ b/src/sentry/runner/main.py @@ -130,12 +130,12 @@ def main() -> None: # We do this here because `configure_structlog` executes later logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO) logger = logging.getLogger(__name__) - try: func(**kwargs) except Exception as e: # This reports errors sentry-devservices - with sentry_sdk.init(dsn=os.environ["SENTRY_DEVSERVICES_DSN"]): + with sentry_sdk.new_scope() as scope: + scope.set_client(sentry_sdk.Client(dsn=os.environ["SENTRY_DEVSERVICES_DSN"])) if os.environ.get("USER"): sentry_sdk.set_user({"username": os.environ.get("USER")}) sentry_sdk.capture_exception(e) diff --git a/src/sentry/scim/endpoints/members.py b/src/sentry/scim/endpoints/members.py index 147c68a9db8160..5cf88a7c4c6740 100644 --- a/src/sentry/scim/endpoints/members.py +++ b/src/sentry/scim/endpoints/members.py @@ -530,7 +530,7 @@ def post(self, request: Request, organization) -> Response: """ update_role = False - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() if "sentryOrgRole" in request.data and request.data["sentryOrgRole"]: role = request.data["sentryOrgRole"].lower() diff --git a/src/sentry/scim/endpoints/utils.py b/src/sentry/scim/endpoints/utils.py index b40562e373f7ec..54e9ddaa725aca 100644 --- a/src/sentry/scim/endpoints/utils.py +++ b/src/sentry/scim/endpoints/utils.py @@ -25,9 +25,9 @@ class SCIMApiError(APIException): def __init__(self, detail, status_code=400): - transaction = sentry_sdk.Scope.get_current_scope().transaction - if transaction is 
not None: - transaction.set_tag("http.status_code", status_code) + root_span = sentry_sdk.get_current_scope().root_span + if root_span is not None: + root_span.set_tag("http.status_code", status_code) super().__init__({"schemas": [SCIM_API_ERROR], "detail": detail}) self.status_code = status_code diff --git a/src/sentry/search/events/builder/base.py b/src/sentry/search/events/builder/base.py index 829a1225abcfdc..186fb6ea5f8033 100644 --- a/src/sentry/search/events/builder/base.py +++ b/src/sentry/search/events/builder/base.py @@ -1562,7 +1562,7 @@ def run_query( def process_results(self, results: Any) -> EventsResponse: with sentry_sdk.start_span(op="QueryBuilder", name="process_results") as span: - span.set_data("result_count", len(results.get("data", []))) + span.set_attribute("result_count", len(results.get("data", []))) translated_columns = self.alias_to_typed_tag_map if self.builder_config.transform_alias_to_input_format: translated_columns.update( diff --git a/src/sentry/search/snuba/executors.py b/src/sentry/search/snuba/executors.py index 4ab58f147c8f43..f33abd00e91a05 100644 --- a/src/sentry/search/snuba/executors.py +++ b/src/sentry/search/snuba/executors.py @@ -885,8 +885,8 @@ def query( group_ids = list( group_queryset.using_replica().values_list("id", flat=True)[: max_candidates + 1] ) - span.set_data("Max Candidates", max_candidates) - span.set_data("Result Size", len(group_ids)) + span.set_attribute("Max Candidates", max_candidates) + span.set_attribute("Result Size", len(group_ids)) metrics.distribution("snuba.search.num_candidates", len(group_ids)) too_many_candidates = False if not group_ids: @@ -1702,8 +1702,8 @@ def query( : max_candidates + 1 ] ) - span.set_data("Max Candidates", max_candidates) - span.set_data("Result Size", len(group_ids_to_pass_to_snuba)) + span.set_attribute("Max Candidates", max_candidates) + span.set_attribute("Result Size", len(group_ids_to_pass_to_snuba)) if too_many_candidates := (len(group_ids_to_pass_to_snuba) > 
max_candidates): metrics.incr( diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py index 00126c0d5dcf07..a9cefb385e6856 100644 --- a/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py +++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_authorizations.py @@ -39,7 +39,7 @@ class SentryAppAuthorizationsEndpoint(SentryAppAuthorizationsBaseEndpoint): } def post(self, request: Request, installation) -> Response: - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() scope.set_tag("organization", installation.organization_id) scope.set_tag("sentry_app_id", installation.sentry_app.id) diff --git a/src/sentry/sentry_apps/api/endpoints/sentry_app_components.py b/src/sentry/sentry_apps/api/endpoints/sentry_app_components.py index efef233bbee2ec..e0b9ae7052acb2 100644 --- a/src/sentry/sentry_apps/api/endpoints/sentry_app_components.py +++ b/src/sentry/sentry_apps/api/endpoints/sentry_app_components.py @@ -61,7 +61,7 @@ def get( components = [] errors = {} - with sentry_sdk.start_transaction(name="sentry.api.sentry_app_components.get"): + with sentry_sdk.start_span(name="sentry.api.sentry_app_components.get"): with sentry_sdk.start_span(op="sentry-app-components.get_installs"): installs = SentryAppInstallation.objects.get_installed_for_organization( organization.id diff --git a/src/sentry/sentry_metrics/consumers/indexer/processing.py b/src/sentry/sentry_metrics/consumers/indexer/processing.py index 43b04ad8a5cb25..2bd435d773942f 100644 --- a/src/sentry/sentry_metrics/consumers/indexer/processing.py +++ b/src/sentry/sentry_metrics/consumers/indexer/processing.py @@ -78,9 +78,9 @@ def process_messages(self, outer_message: Message[MessageBatch]) -> IndexerOutpu settings.SENTRY_METRICS_INDEXER_TRANSACTIONS_SAMPLE_RATE * settings.SENTRY_BACKEND_APM_SAMPLING ) - with sentry_sdk.start_transaction( + with sentry_sdk.start_span( 
name="sentry.sentry_metrics.consumers.indexer.processing.process_messages", - custom_sampling_context={"sample_rate": sample_rate}, + attributes={"sample_rate": sample_rate}, ): return self._process_messages_impl(outer_message) diff --git a/src/sentry/shared_integrations/client/base.py b/src/sentry/shared_integrations/client/base.py index 11f0a6cc32d701..e8560f8cf06102 100644 --- a/src/sentry/shared_integrations/client/base.py +++ b/src/sentry/shared_integrations/client/base.py @@ -231,7 +231,7 @@ def _request( ) if self.integration_type: - sentry_sdk.Scope.get_isolation_scope().set_tag(self.integration_type, self.name) + sentry_sdk.get_isolation_scope().set_tag(self.integration_type, self.name) request = Request( method=method.upper(), diff --git a/src/sentry/snuba/discover.py b/src/sentry/snuba/discover.py index d83480c276db80..1e0fdf7ca9a83f 100644 --- a/src/sentry/snuba/discover.py +++ b/src/sentry/snuba/discover.py @@ -560,7 +560,7 @@ def top_events_timeseries( rollup, ) with sentry_sdk.start_span(op="discover.discover", name="top_events.transform_results") as span: - span.set_data("result_count", len(result.get("data", []))) + span.set_attribute("result_count", len(result.get("data", []))) result = top_events_builder.process_results(result) issues: Mapping[int, str | None] = {} @@ -734,7 +734,7 @@ def get_facets( individual_tags.append(tag) with sentry_sdk.start_span(op="discover.discover", name="facets.individual_tags") as span: - span.set_data("tag_count", len(individual_tags)) + span.set_attribute("tag_count", len(individual_tags)) for tag_name in individual_tags: tag = f"tags[{tag_name}]" tag_value_builder = DiscoverQueryBuilder( diff --git a/src/sentry/snuba/errors.py b/src/sentry/snuba/errors.py index 50c6868226199e..d63f6d7df07f78 100644 --- a/src/sentry/snuba/errors.py +++ b/src/sentry/snuba/errors.py @@ -322,7 +322,7 @@ def top_events_timeseries( rollup, ) with sentry_sdk.start_span(op="discover.errors", name="top_events.transform_results") as span: 
- span.set_data("result_count", len(result.get("data", []))) + span.set_attribute("result_count", len(result.get("data", []))) result = top_events_builder.process_results(result) issues: Mapping[int, str | None] = {} diff --git a/src/sentry/snuba/functions.py b/src/sentry/snuba/functions.py index 43fbf6279a530e..85cfd0c0c7ad3a 100644 --- a/src/sentry/snuba/functions.py +++ b/src/sentry/snuba/functions.py @@ -238,7 +238,7 @@ def format_top_events_timeseries_results( with sentry_sdk.start_span(op="discover.discover", name="top_events.transform_results") as span: result = query_builder.strip_alias_prefix(result) - span.set_data("result_count", len(result.get("data", []))) + span.set_attribute("result_count", len(result.get("data", []))) processed_result = query_builder.process_results(result) if result_key_order is None: diff --git a/src/sentry/snuba/metrics/fields/histogram.py b/src/sentry/snuba/metrics/fields/histogram.py index b5a3be0b8eab3b..5418a89067b212 100644 --- a/src/sentry/snuba/metrics/fields/histogram.py +++ b/src/sentry/snuba/metrics/fields/histogram.py @@ -52,8 +52,8 @@ def rebucket_histogram( with sentry_sdk.start_span( op="sentry.snuba.metrics.fields.histogram.rebucket_histogram" ) as span: - span.set_data("len_data", len(data)) - span.set_data("len_rv", len(rv)) + span.set_attribute("len_data", len(data)) + span.set_attribute("len_rv", len(rv)) # XXX: quadratic function assert len(data) < 300 diff --git a/src/sentry/snuba/query_subscriptions/consumer.py b/src/sentry/snuba/query_subscriptions/consumer.py index f05291ede2262a..4fa7525881a077 100644 --- a/src/sentry/snuba/query_subscriptions/consumer.py +++ b/src/sentry/snuba/query_subscriptions/consumer.py @@ -163,15 +163,16 @@ def handle_message( tags={"dataset": dataset}, ), ): - span.set_data("payload", contents) - span.set_data("subscription_dataset", subscription.snuba_query.dataset) - span.set_data("subscription_query", subscription.snuba_query.query) - span.set_data("subscription_aggregation", 
subscription.snuba_query.aggregate) - span.set_data("subscription_time_window", subscription.snuba_query.time_window) - span.set_data("subscription_resolution", subscription.snuba_query.resolution) - span.set_data("message_offset", message_offset) - span.set_data("message_partition", message_partition) - span.set_data("message_value", message_value) + # TODO-anton: split dict into multiple attributes + span.set_attribute("payload", contents) + span.set_attribute("subscription_dataset", subscription.snuba_query.dataset) + span.set_attribute("subscription_query", subscription.snuba_query.query) + span.set_attribute("subscription_aggregation", subscription.snuba_query.aggregate) + span.set_attribute("subscription_time_window", subscription.snuba_query.time_window) + span.set_attribute("subscription_resolution", subscription.snuba_query.resolution) + span.set_attribute("message_offset", message_offset) + span.set_attribute("message_partition", message_partition) + span.set_attribute("message_value", message_value) callback(contents, subscription) diff --git a/src/sentry/snuba/query_subscriptions/run.py b/src/sentry/snuba/query_subscriptions/run.py index a2e157a26fbdca..3e6d34ce29a237 100644 --- a/src/sentry/snuba/query_subscriptions/run.py +++ b/src/sentry/snuba/query_subscriptions/run.py @@ -78,10 +78,10 @@ def process_message( from sentry.utils import metrics with ( - sentry_sdk.start_transaction( + sentry_sdk.start_span( op="handle_message", name="query_subscription_consumer_process_message", - custom_sampling_context={"sample_rate": options.get("subscriptions-query.sample-rate")}, + attributes={"sample_rate": options.get("subscriptions-query.sample-rate")}, ), metrics.timer("snuba_query_subscriber.handle_message", tags={"dataset": dataset.value}), ): diff --git a/src/sentry/snuba/spans_indexed.py b/src/sentry/snuba/spans_indexed.py index 5cbab6d25172cf..d47c1372fb8e45 100644 --- a/src/sentry/snuba/spans_indexed.py +++ b/src/sentry/snuba/spans_indexed.py @@ -245,7 
+245,7 @@ def top_events_timeseries( rollup, ) with sentry_sdk.start_span(op="spans_indexed", name="top_events.transform_results") as span: - span.set_data("result_count", len(result.get("data", []))) + span.set_attribute("result_count", len(result.get("data", []))) result = top_events_builder.process_results(result) issues: dict[int, str | None] = {} diff --git a/src/sentry/snuba/tasks.py b/src/sentry/snuba/tasks.py index 16c87f07a54858..804c9b8fd90d14 100644 --- a/src/sentry/snuba/tasks.py +++ b/src/sentry/snuba/tasks.py @@ -359,8 +359,8 @@ def subscription_checker(**kwargs): date_updated__lt=timezone.now() - SUBSCRIPTION_STATUS_MAX_AGE, ): with sentry_sdk.start_span(op="repair_subscription") as span: - span.set_data("subscription_id", subscription.id) - span.set_data("status", subscription.status) + span.set_attribute("subscription_id", subscription.id) + span.set_attribute("status", subscription.status) count += 1 if subscription.status == QuerySubscription.Status.CREATING.value: create_subscription_in_snuba.delay(query_subscription_id=subscription.id) diff --git a/src/sentry/stacktraces/processing.py b/src/sentry/stacktraces/processing.py index efb2368a6212c6..20bd892decc189 100644 --- a/src/sentry/stacktraces/processing.py +++ b/src/sentry/stacktraces/processing.py @@ -620,10 +620,10 @@ def process_stacktraces( with sentry_sdk.start_span( op="stacktraces.processing.process_stacktraces.preprocess_step" ) as span: - span.set_data("processor", processor.__class__.__name__) + span.set_attribute("processor", processor.__class__.__name__) if processor.preprocess_step(processing_task): changed = True - span.set_data("data_changed", True) + span.set_attribute("data_changed", True) # Process all stacktraces for stacktrace_info, processable_frames in processing_task.iter_processable_stacktraces(): @@ -633,10 +633,10 @@ def process_stacktraces( with sentry_sdk.start_span( op="stacktraces.processing.process_stacktraces.process_exception" ) as span: - 
span.set_data("processor", processor.__class__.__name__) + span.set_attribute("processor", processor.__class__.__name__) if processor.process_exception(stacktrace_info.container): changed = True - span.set_data("data_changed", True) + span.set_attribute("data_changed", True) # If the stacktrace is empty we skip it for processing if not stacktrace_info.stacktrace: @@ -650,7 +650,7 @@ def process_stacktraces( if new_frames is not None: stacktrace_info.stacktrace["frames"] = new_frames changed = True - span.set_data("data_changed", True) + span.set_attribute("data_changed", True) if ( set_raw_stacktrace and new_raw_frames is not None diff --git a/src/sentry/tagstore/snuba/backend.py b/src/sentry/tagstore/snuba/backend.py index 11a00972820058..be9def0ece12ee 100644 --- a/src/sentry/tagstore/snuba/backend.py +++ b/src/sentry/tagstore/snuba/backend.py @@ -335,14 +335,15 @@ def __get_tag_keys_for_projects( ) as span: result = cache.get(cache_key, None) - span.set_data("cache.key", [cache_key]) + # TODO-anton: are array attributes really supported? + span.set_attribute("cache.key", [cache_key]) if result is not None: - span.set_data("cache.hit", True) - span.set_data("cache.item_size", len(str(result))) + span.set_attribute("cache.hit", True) + span.set_attribute("cache.item_size", len(str(result))) metrics.incr("testing.tagstore.cache_tag_key.hit") else: - span.set_data("cache.hit", False) + span.set_attribute("cache.hit", False) metrics.incr("testing.tagstore.cache_tag_key.miss") if result is None: @@ -364,8 +365,9 @@ def __get_tag_keys_for_projects( op="cache.put", name="sentry.tagstore.cache.__get_tag_keys_for_projects" ) as span: cache.set(cache_key, result, 300) - span.set_data("cache.key", [cache_key]) - span.set_data("cache.item_size", len(str(result))) + # TODO-anton: are array attributes really supported? 
+ span.set_attribute("cache.key", [cache_key]) + span.set_attribute("cache.item_size", len(str(result))) metrics.incr("testing.tagstore.cache_tag_key.len", amount=len(result)) ctor: _KeyCallable[TagKey, Never] | _KeyCallable[GroupTagKey, Never] diff --git a/src/sentry/tasks/store.py b/src/sentry/tasks/store.py index d688edcb4ce45c..b10580f7610356 100644 --- a/src/sentry/tasks/store.py +++ b/src/sentry/tasks/store.py @@ -386,8 +386,8 @@ def _continue_to_save_event() -> None: # Default event processors. for plugin in plugins.all(version=2): with sentry_sdk.start_span(op="task.store.process_event.preprocessors") as span: - span.set_data("plugin", plugin.slug) - span.set_data("from_symbolicate", from_symbolicate) + span.set_attribute("plugin", plugin.slug) + span.set_attribute("from_symbolicate", from_symbolicate) processors = safe_execute(plugin.get_event_preprocessors, data=data) for processor in processors or (): try: diff --git a/src/sentry/tasks/symbolication.py b/src/sentry/tasks/symbolication.py index 68c4d0dd4001b9..8f688549977a01 100644 --- a/src/sentry/tasks/symbolication.py +++ b/src/sentry/tasks/symbolication.py @@ -185,7 +185,7 @@ def on_symbolicator_request(): ): try: symbolicated_data = symbolication_function(symbolicator, data) - span.set_data("symbolicated_data", bool(symbolicated_data)) + span.set_attribute("symbolicated_data", bool(symbolicated_data)) if symbolicated_data: data = symbolicated_data diff --git a/src/sentry/taskworker/registry.py b/src/sentry/taskworker/registry.py index b78aedb239c42f..d970791db814d5 100644 --- a/src/sentry/taskworker/registry.py +++ b/src/sentry/taskworker/registry.py @@ -151,9 +151,9 @@ def send_task(self, activation: TaskActivation, wait_for_delivery: bool = False) origin="taskworker", ) as span: # TODO(taskworker) add monitor headers - span.set_data(SPANDATA.MESSAGING_DESTINATION_NAME, activation.namespace) - span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, activation.id) - span.set_data(SPANDATA.MESSAGING_SYSTEM, 
"taskworker") + span.set_attribute(SPANDATA.MESSAGING_DESTINATION_NAME, activation.namespace) + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_ID, activation.id) + span.set_attribute(SPANDATA.MESSAGING_SYSTEM, "taskworker") produce_future = self._producer(topic).produce( ArroyoTopic(name=topic.value), diff --git a/src/sentry/taskworker/workerchild.py b/src/sentry/taskworker/workerchild.py index e3406c534ec128..d21f5770e69119 100644 --- a/src/sentry/taskworker/workerchild.py +++ b/src/sentry/taskworker/workerchild.py @@ -292,48 +292,48 @@ def _execute_activation(task_func: Task[Any, Any], activation: TaskActivation) - kwargs = parameters.get("kwargs", {}) headers = {k: v for k, v in activation.headers.items()} - transaction = sentry_sdk.continue_trace( - environ_or_headers=headers, - op="queue.task.taskworker", - name=f"{activation.namespace}:{activation.taskname}", - origin="taskworker", - ) - with ( - track_memory_usage("taskworker.worker.memory_change"), - sentry_sdk.start_transaction(transaction), - ): - transaction.set_data( - "taskworker-task", {"args": args, "kwargs": kwargs, "id": activation.id} - ) - task_added_time = activation.received_at.ToDatetime().timestamp() - latency = time.time() - task_added_time - - with sentry_sdk.start_span( - op=OP.QUEUE_PROCESS, - name=activation.taskname, - origin="taskworker", - ) as span: - span.set_data(SPANDATA.MESSAGING_DESTINATION_NAME, activation.namespace) - span.set_data(SPANDATA.MESSAGING_MESSAGE_ID, activation.id) - span.set_data(SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency) - span.set_data( - SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, activation.retry_state.attempts + with sentry_sdk.continue_trace(headers): + with ( + track_memory_usage("taskworker.worker.memory_change"), + sentry_sdk.start_span( + op="queue.task.taskworker", + name=f"{activation.namespace}:{activation.taskname}", + origin="taskworker", + ) as root_span, + ): + # TODO-anton: split dict into multiple attributes + root_span.set_attribute( + 
"taskworker-task", {"args": args, "kwargs": kwargs, "id": activation.id} ) - span.set_data(SPANDATA.MESSAGING_SYSTEM, "taskworker") - - # TODO(taskworker) remove this when doing cleanup - # The `__start_time` parameter is spliced into task parameters by - # sentry.celery.SentryTask._add_metadata and needs to be removed - # from kwargs like sentry.tasks.base.instrumented_task does. - if "__start_time" in kwargs: - kwargs.pop("__start_time") - - try: - task_func(*args, **kwargs) - transaction.set_status(SPANSTATUS.OK) - except Exception: - transaction.set_status(SPANSTATUS.INTERNAL_ERROR) - raise + task_added_time = activation.received_at.ToDatetime().timestamp() + latency = time.time() - task_added_time + + with sentry_sdk.start_span( + op=OP.QUEUE_PROCESS, + name=activation.taskname, + origin="taskworker", + ) as span: + span.set_attribute(SPANDATA.MESSAGING_DESTINATION_NAME, activation.namespace) + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_ID, activation.id) + span.set_attribute(SPANDATA.MESSAGING_MESSAGE_RECEIVE_LATENCY, latency) + span.set_attribute( + SPANDATA.MESSAGING_MESSAGE_RETRY_COUNT, activation.retry_state.attempts + ) + span.set_attribute(SPANDATA.MESSAGING_SYSTEM, "taskworker") + + # TODO(taskworker) remove this when doing cleanup + # The `__start_time` parameter is spliced into task parameters by + # sentry.celery.SentryTask._add_metadata and needs to be removed + # from kwargs like sentry.tasks.base.instrumented_task does. 
+ if "__start_time" in kwargs: + kwargs.pop("__start_time") + + try: + task_func(*args, **kwargs) + root_span.set_status(SPANSTATUS.OK) + except Exception: + root_span.set_status(SPANSTATUS.INTERNAL_ERROR) + raise def record_task_execution( activation: TaskActivation, diff --git a/src/sentry/tempest/tasks.py b/src/sentry/tempest/tasks.py index 9bbe07984c84d3..ab70770c994794 100644 --- a/src/sentry/tempest/tasks.py +++ b/src/sentry/tempest/tasks.py @@ -193,7 +193,7 @@ def fetch_latest_id_from_tempest( span = sentry_sdk.get_current_span() if span is not None: - span.set_data("response_text", response.text) + span.set_attribute("response_text", response.text) return response @@ -231,6 +231,6 @@ def fetch_items_from_tempest( span = sentry_sdk.get_current_span() if span is not None: - span.set_data("response_text", response.text) + span.set_attribute("response_text", response.text) return response diff --git a/src/sentry/testutils/pytest/sentry.py b/src/sentry/testutils/pytest/sentry.py index 09fc1412fab558..fde3c33130819e 100644 --- a/src/sentry/testutils/pytest/sentry.py +++ b/src/sentry/testutils/pytest/sentry.py @@ -286,7 +286,7 @@ def pytest_configure(config: pytest.Config) -> None: from sentry.runner.initializer import initialize_app initialize_app({"settings": settings, "options": None}) - sentry_sdk.Scope.get_global_scope().set_client(None) + sentry_sdk.get_global_scope().set_client(None) register_extensions() from sentry.utils.redis import clusters @@ -362,7 +362,7 @@ def pytest_runtest_teardown(item: pytest.Item) -> None: ProjectOption.objects.clear_local_cache() UserOption.objects.clear_local_cache() - sentry_sdk.Scope.get_global_scope().set_client(None) + sentry_sdk.get_global_scope().set_client(None) def _shuffle(items: list[pytest.Item], r: random.Random) -> None: diff --git a/src/sentry/utils/concurrent.py b/src/sentry/utils/concurrent.py index 82e1b706d2fa47..4f3ebab76b6077 100644 --- a/src/sentry/utils/concurrent.py +++ b/src/sentry/utils/concurrent.py 
@@ -202,8 +202,8 @@ def __worker(self): while True: priority, item = queue.get(True) thread_isolation_scope, thread_current_scope, function, future = item - with sentry_sdk.scope.use_isolation_scope(thread_isolation_scope): - with sentry_sdk.scope.use_scope(thread_current_scope): + with sentry_sdk.use_isolation_scope(thread_isolation_scope): + with sentry_sdk.use_scope(thread_current_scope): if not future.set_running_or_notify_cancel(): continue try: @@ -247,8 +247,8 @@ def submit[ task = PriorityTask( priority, ( - sentry_sdk.Scope.get_isolation_scope(), - sentry_sdk.Scope.get_current_scope(), + sentry_sdk.get_isolation_scope(), + sentry_sdk.get_current_scope(), callable, future, ), diff --git a/src/sentry/utils/db.py b/src/sentry/utils/db.py index e6035f46b5acfe..2b8977556edea3 100644 --- a/src/sentry/utils/db.py +++ b/src/sentry/utils/db.py @@ -59,7 +59,7 @@ def setup_once(): def _enter(self): self._sentry_sdk_span = sentry_sdk.start_span(op="transaction.atomic") - self._sentry_sdk_span.set_data("using", self.using) + self._sentry_sdk_span.set_attribute("using", self.using) self._sentry_sdk_span.__enter__() return original_enter(self) diff --git a/src/sentry/utils/pagination_factory.py b/src/sentry/utils/pagination_factory.py index 7c59c0a96546bf..1a105c3c6e7023 100644 --- a/src/sentry/utils/pagination_factory.py +++ b/src/sentry/utils/pagination_factory.py @@ -67,6 +67,6 @@ def get_paginator( def annotate_span_with_pagination_args(span: Span, per_page: int) -> None: - span.set_data("Limit", per_page) + span.set_attribute("Limit", per_page) sentry_sdk.set_tag("query.per_page", per_page) sentry_sdk.set_tag("query.per_page.grouped", format_grouped_length(per_page, [1, 10, 50, 100])) diff --git a/src/sentry/utils/performance_issues/performance_detection.py b/src/sentry/utils/performance_issues/performance_detection.py index d7f4c6daa4e7a0..f3026a4d99749a 100644 --- a/src/sentry/utils/performance_issues/performance_detection.py +++ 
b/src/sentry/utils/performance_issues/performance_detection.py @@ -503,8 +503,8 @@ def report_metrics_for_detectors( sdk_name = get_sdk_name(event) try: - # Setting a tag isn't critical, the transaction doesn't exist sometimes, if it's called outside prod code (eg. load-mocks / tests) - set_tag = sdk_span.containing_transaction.set_tag + # Setting a tag isn't critical, the root span doesn't exist sometimes, if it's called outside prod code (eg. load-mocks / tests) + set_tag = sdk_span.root_span.set_tag except AttributeError: set_tag = lambda *args: None diff --git a/src/sentry/utils/sdk.py b/src/sentry/utils/sdk.py index 4383dbad606e97..9ebf1564dbe15a 100644 --- a/src/sentry/utils/sdk.py +++ b/src/sentry/utils/sdk.py @@ -181,6 +181,11 @@ def traces_sampler(sampling_context): if wsgi_path and wsgi_path in SAMPLED_ROUTES: return SAMPLED_ROUTES[wsgi_path] + # make it fail so I know what tests use this. + custom_sample_rate = sampling_context.get("attributes", {}).get("sentry.sample_rate") + if custom_sample_rate is not None: + raise Exception("some error") + # Apply sample_rate from custom_sampling_context custom_sample_rate = sampling_context.get("sample_rate") if custom_sample_rate is not None: @@ -237,9 +242,13 @@ def before_send_transaction(event: Event, _: Hint) -> Event | None: num_of_spans = len(event["spans"]) event["tags"]["spans_over_limit"] = str(num_of_spans >= 1000) - if not event["measurements"]: - event["measurements"] = {} - event["measurements"]["num_of_spans"] = { + + # `measurements` are deprecated and have already been removed from the Python SDK. + # We ignore those lines in the mypy check because the SDK's Event type does not have `measurements` anymore.
+ # (on ingest and in the product measurements are still there so it is fine to set them for the time being) + if not event["measurements"]: # type: ignore[typeddict-item] + event["measurements"] = {} # type: ignore[typeddict-unknown-key] + event["measurements"]["num_of_spans"] = { # type: ignore[typeddict-item] "value": num_of_spans, "unit": None, } @@ -484,7 +493,7 @@ def flush( LoggingIntegration(event_level=None, sentry_logs_level=logging.INFO), RustInfoIntegration(), RedisIntegration(), - ThreadingIntegration(propagate_hub=True), + ThreadingIntegration(), ], **sdk_options, ) @@ -578,7 +587,7 @@ def check_current_scope_transaction( Note: Ignores scope `transaction` values with `source = "custom"`, indicating a value which has been set maunually. """ - scope = sentry_sdk.Scope.get_current_scope() + scope = sentry_sdk.get_current_scope() transaction_from_request = get_transaction_name_from_request(request) if ( @@ -693,19 +702,10 @@ def parse_org_slug(x: Organization | RpcOrganization | str) -> str: ) -def set_measurement(measurement_name, value, unit=None): - try: - transaction = sentry_sdk.Scope.get_current_scope().transaction - if transaction is not None: - transaction.set_measurement(measurement_name, value, unit) - except Exception: - pass - - def set_span_data(data_name, value): span = sentry_sdk.get_current_span() if span is not None: - span.set_data(data_name, value) + span.set_attribute(data_name, value) def merge_context_into_scope( @@ -744,6 +744,5 @@ def merge_context_into_scope( "patch_transport_for_instrumentation", "isolation_scope", "set_current_event_project", - "set_measurement", "traces_sampler", ) diff --git a/src/sentry/utils/snuba.py b/src/sentry/utils/snuba.py index d112d1e3902668..fc5ed54270ccb6 100644 --- a/src/sentry/utils/snuba.py +++ b/src/sentry/utils/snuba.py @@ -1091,9 +1091,9 @@ def _apply_cache_and_build_results( use_cache: bool | None = False, ) -> ResultSet: parent_api: str = "" - scope = sentry_sdk.Scope.get_current_scope() - 
if scope.transaction: - parent_api = scope.transaction.name + scope = sentry_sdk.get_current_scope() + if scope.root_span and scope.root_span.name: + parent_api = scope.root_span.name # Store the original position of the query so that we can maintain the order snuba_requests_list: list[tuple[int, SnubaRequest]] = [] @@ -1161,8 +1161,8 @@ def _bulk_snuba_query(snuba_requests: Sequence[SnubaRequest]) -> ResultSet: _snuba_query, [ ( - sentry_sdk.Scope.get_isolation_scope(), - sentry_sdk.Scope.get_current_scope(), + sentry_sdk.get_isolation_scope(), + sentry_sdk.get_current_scope(), snuba_request, ) for snuba_request in snuba_requests_list @@ -1174,8 +1174,8 @@ def _bulk_snuba_query(snuba_requests: Sequence[SnubaRequest]) -> ResultSet: query_results = [ _snuba_query( ( - sentry_sdk.Scope.get_isolation_scope(), - sentry_sdk.Scope.get_current_scope(), + sentry_sdk.get_isolation_scope(), + sentry_sdk.get_current_scope(), snuba_requests_list[0], ) ) @@ -1208,7 +1208,7 @@ def _bulk_snuba_query(snuba_requests: Sequence[SnubaRequest]) -> ResultSet: allocation_policy_prefix = "allocation_policy." bytes_scanned = body.get("profile", {}).get("progress_bytes", None) if bytes_scanned is not None: - span.set_data(f"{allocation_policy_prefix}.bytes_scanned", bytes_scanned) + span.set_attribute(f"{allocation_policy_prefix}.bytes_scanned", bytes_scanned) if _is_rejected_query(body): quota_allowance_summary = body["quota_allowance"]["summary"] for k, v in quota_allowance_summary.items(): @@ -1285,8 +1285,8 @@ def _snuba_query( # Eventually we can get rid of this wrapper, but for now it's cleaner to unwrap # the params here than in the calling function. 
(bc of thread .map) thread_isolation_scope, thread_current_scope, snuba_request = params - with sentry_sdk.scope.use_isolation_scope(thread_isolation_scope): - with sentry_sdk.scope.use_scope(thread_current_scope): + with sentry_sdk.use_isolation_scope(thread_isolation_scope): + with sentry_sdk.use_scope(thread_current_scope): headers = snuba_request.headers request = snuba_request.request try: diff --git a/src/sentry/utils/snuba_rpc.py b/src/sentry/utils/snuba_rpc.py index 42c92e4400f74f..eda933f969b55f 100644 --- a/src/sentry/utils/snuba_rpc.py +++ b/src/sentry/utils/snuba_rpc.py @@ -122,8 +122,8 @@ def _make_rpc_requests( # Sets the thread parameters once so we're not doing it in the map repeatedly partial_request = partial( _make_rpc_request, - thread_isolation_scope=sentry_sdk.Scope.get_isolation_scope(), - thread_current_scope=sentry_sdk.Scope.get_current_scope(), + thread_isolation_scope=sentry_sdk.get_isolation_scope(), + thread_current_scope=sentry_sdk.get_current_scope(), ) response = [ result @@ -248,25 +248,23 @@ def _make_rpc_request( thread_current_scope: sentry_sdk.Scope | None = None, ) -> BaseHTTPResponse: thread_isolation_scope = ( - sentry_sdk.Scope.get_isolation_scope() + sentry_sdk.get_isolation_scope() if thread_isolation_scope is None else thread_isolation_scope ) thread_current_scope = ( - sentry_sdk.Scope.get_current_scope() - if thread_current_scope is None - else thread_current_scope + sentry_sdk.get_current_scope() if thread_current_scope is None else thread_current_scope ) if SNUBA_INFO: from google.protobuf.json_format import MessageToJson log_snuba_info(f"{referrer}.body:\n{MessageToJson(req)}") # type: ignore[arg-type] - with sentry_sdk.scope.use_isolation_scope(thread_isolation_scope): - with sentry_sdk.scope.use_scope(thread_current_scope): + with sentry_sdk.use_isolation_scope(thread_isolation_scope): + with sentry_sdk.use_scope(thread_current_scope): with sentry_sdk.start_span(op="snuba_rpc.run", name=req.__class__.__name__) as 
span: if referrer: span.set_tag("snuba.referrer", referrer) - span.set_data("snuba.query", req) + span.set_attribute("snuba.query", req) try: http_resp = _snuba_pool.urlopen( "POST", diff --git a/tests/relay_integration/test_sdk.py b/tests/relay_integration/test_sdk.py index d9853f27447b21..ee8bac7c907956 100644 --- a/tests/relay_integration/test_sdk.py +++ b/tests/relay_integration/test_sdk.py @@ -46,8 +46,8 @@ def inner(*args, **kwargs): assert event_id is not None sentry_sdk.flush() - with sentry_sdk.scope.use_scope(current_scope): - with sentry_sdk.scope.use_isolation_scope(isolation_scope): + with sentry_sdk.use_scope(current_scope): + with sentry_sdk.use_isolation_scope(isolation_scope): return wait_for_ingest_consumer( lambda: eventstore.backend.get_event_by_id(settings.SENTRY_PROJECT, event_id) ) @@ -108,7 +108,7 @@ def test_bind_organization_context(default_organization): bind_organization_context(default_organization) - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() assert scope._tags["organization"] == default_organization.id assert scope._tags["organization.slug"] == default_organization.slug assert scope._contexts["organization"] == { @@ -130,7 +130,7 @@ def add_context(scope, organization, **kwargs): with override_settings(SENTRY_ORGANIZATION_CONTEXT_HELPER=add_context): bind_organization_context(default_organization) - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() assert scope._tags["organization.test"] == "1" @@ -146,5 +146,5 @@ def add_context(scope, organization, **kwargs): with override_settings(SENTRY_ORGANIZATION_CONTEXT_HELPER=add_context): bind_organization_context(default_organization) - scope = sentry_sdk.Scope.get_isolation_scope() + scope = sentry_sdk.get_isolation_scope() assert scope._tags["organization"] == default_organization.id diff --git a/tests/sentry/event_manager/test_event_manager.py b/tests/sentry/event_manager/test_event_manager.py index 
21a3697745a24f..d40275079366c9 100644 --- a/tests/sentry/event_manager/test_event_manager.py +++ b/tests/sentry/event_manager/test_event_manager.py @@ -2425,7 +2425,7 @@ def test_category_match_group(self) -> None: @override_options({"performance.issues.all.problem-detection": 1.0}) @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0}) def test_perf_issue_creation(self) -> None: - with mock.patch("sentry_sdk.tracing.Span.containing_transaction"): + with mock.patch("sentry_sdk.tracing.Span.root_span"): event = self.create_performance_issue( event_data=make_event(**get_event("n-plus-one-in-django-index-view")) ) @@ -2519,7 +2519,7 @@ def test_perf_issue_creation(self) -> None: @override_options({"performance.issues.all.problem-detection": 1.0}) @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0}) def test_perf_issue_update(self) -> None: - with mock.patch("sentry_sdk.tracing.Span.containing_transaction"): + with mock.patch("sentry_sdk.tracing.Span.root_span"): event = self.create_performance_issue( event_data=make_event(**get_event("n-plus-one-in-django-index-view")) ) @@ -2560,7 +2560,7 @@ def test_perf_issue_update(self) -> None: @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0}) def test_error_issue_no_associate_perf_event(self) -> None: """Test that you can't associate a performance event with an error issue""" - with mock.patch("sentry_sdk.tracing.Span.containing_transaction"): + with mock.patch("sentry_sdk.tracing.Span.root_span"): event = self.create_performance_issue( event_data=make_event(**get_event("n-plus-one-in-django-index-view")) ) @@ -2581,7 +2581,7 @@ def test_error_issue_no_associate_perf_event(self) -> None: @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0}) def test_perf_issue_no_associate_error_event(self) -> None: """Test that you can't associate an error event with a performance issue""" - with 
mock.patch("sentry_sdk.tracing.Span.containing_transaction"): + with mock.patch("sentry_sdk.tracing.Span.root_span"): manager = EventManager(make_event()) manager.normalize() event = manager.save(self.project.id) @@ -2601,7 +2601,7 @@ def test_perf_issue_no_associate_error_event(self) -> None: @override_options({"performance.issues.all.problem-detection": 1.0}) @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0}) def test_perf_issue_creation_ignored(self) -> None: - with mock.patch("sentry_sdk.tracing.Span.containing_transaction"): + with mock.patch("sentry_sdk.tracing.Span.root_span"): event = self.create_performance_issue( event_data=make_event(**get_event("n-plus-one-in-django-index-view")), noise_limit=2, @@ -2612,7 +2612,7 @@ def test_perf_issue_creation_ignored(self) -> None: @override_options({"performance.issues.all.problem-detection": 1.0}) @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0}) def test_perf_issue_creation_over_ignored_threshold(self) -> None: - with mock.patch("sentry_sdk.tracing.Span.containing_transaction"): + with mock.patch("sentry_sdk.tracing.Span.root_span"): event_1 = self.create_performance_issue( event_data=make_event(**get_event("n-plus-one-in-django-index-view")), noise_limit=3 ) diff --git a/tests/sentry/integrations/api/bases/test_integration.py b/tests/sentry/integrations/api/bases/test_integration.py index 362cc430fa90dd..451dc6437caf15 100644 --- a/tests/sentry/integrations/api/bases/test_integration.py +++ b/tests/sentry/integrations/api/bases/test_integration.py @@ -15,7 +15,7 @@ class IntegrationEndpointTest(TestCase): # Since both `IntegrationEndpoint.handle_exception_with_details` and `Endpoint.handle_exception_with_details` potentially # run, and they both call their own module's copy of `capture_exception`, in order to prove that # neither one is not called, we assert on the underlying method from the SDK - @patch("sentry_sdk.Scope.capture_exception") + 
@patch("sentry_sdk.capture_exception") def test_handle_rest_framework_exception( self, mock_capture_exception: MagicMock, mock_stderror_write: MagicMock ): diff --git a/tests/sentry/metrics/test_minimetrics.py b/tests/sentry/metrics/test_minimetrics.py index 4091b7f4d464ac..f7739b703c70b3 100644 --- a/tests/sentry/metrics/test_minimetrics.py +++ b/tests/sentry/metrics/test_minimetrics.py @@ -51,7 +51,7 @@ def scope(): traces_sample_rate=1.0, ), ) - with sentry_sdk.scope.use_scope(scope): + with sentry_sdk.use_scope(scope): yield scope @@ -67,7 +67,7 @@ def backend(): def test_incr(backend, scope): - with scope.start_transaction(): + with scope.start_span(): with scope.start_span(op="test"): backend.incr(key="foo") @@ -79,7 +79,7 @@ def test_incr(backend, scope): def test_incr_with_tag(backend, scope): - with scope.start_transaction(): + with scope.start_span(): with scope.start_span(op="test"): backend.incr(key="foo", tags={"x": "y"}) @@ -92,7 +92,7 @@ def test_incr_with_tag(backend, scope): def test_incr_multi(backend, scope): - with scope.start_transaction(): + with scope.start_span(): with scope.start_span(op="test"): backend.incr(key="foo", tags={"x": "y"}) backend.incr(key="foo", tags={"x": "z"}) @@ -106,7 +106,7 @@ def test_incr_multi(backend, scope): def test_gauge(backend, scope): - with scope.start_transaction(): + with scope.start_span(): with scope.start_span(op="test"): backend.gauge(key="foo", value=0) backend.gauge(key="foo", value=42.0) @@ -119,7 +119,7 @@ def test_gauge(backend, scope): def test_distribution(backend, scope): - with scope.start_transaction(): + with scope.start_span(): with scope.start_span(op="test"): backend.distribution(key="foo", value=0) backend.distribution(key="foo", value=42.0) @@ -132,7 +132,7 @@ def test_distribution(backend, scope): def test_timing(backend, scope): - with scope.start_transaction(): + with scope.start_span(): with scope.start_span(op="test"): backend.timing(key="foo", value=42.1, tags={"x": "y"}) @@ -150,7 
+150,7 @@ def test_timing(backend, scope): def test_timing_duplicate(backend, scope): - with scope.start_transaction(): + with scope.start_span(): # We often manually track a span + a timer with same name. In this case # we want no additional span. with scope.start_span(op="test"): @@ -188,7 +188,7 @@ def __getattr__(self, name): # make sure the backend feeds back to itself with mock.patch("sentry.utils.metrics.backend", new=TrackingCompositeBackend()) as backend: - with scope.start_transaction(): + with scope.start_span(): with scope.start_span(op="test"): backend.incr(key="sentrytest.composite", tags={"x": "bar"}) full_flush(scope) diff --git a/tests/sentry/taskworker/test_task.py b/tests/sentry/taskworker/test_task.py index e435771be0e213..626f9c6176801e 100644 --- a/tests/sentry/taskworker/test_task.py +++ b/tests/sentry/taskworker/test_task.py @@ -258,7 +258,7 @@ def test_create_activation_tracing(task_namespace: TaskNamespace) -> None: def with_parameters(one: str, two: int, org_id: int) -> None: raise NotImplementedError - with sentry_sdk.start_transaction(op="test.task"): + with sentry_sdk.start_span(op="test.task"): activation = with_parameters.create_activation(["one", 22], {"org_id": 99}) headers = activation.headers @@ -271,7 +271,7 @@ def test_create_activation_tracing_headers(task_namespace: TaskNamespace) -> Non def with_parameters(one: str, two: int, org_id: int) -> None: raise NotImplementedError - with sentry_sdk.start_transaction(op="test.task"): + with sentry_sdk.start_span(op="test.task"): activation = with_parameters.create_activation( ["one", 22], {"org_id": 99}, {"key": "value"} ) diff --git a/tests/sentry/utils/performance_issues/experiments/test_m_n_plus_one_db_detector.py b/tests/sentry/utils/performance_issues/experiments/test_m_n_plus_one_db_detector.py index 6c87d64415bc87..12f2acc978e179 100644 --- a/tests/sentry/utils/performance_issues/experiments/test_m_n_plus_one_db_detector.py +++ 
b/tests/sentry/utils/performance_issues/experiments/test_m_n_plus_one_db_detector.py @@ -213,7 +213,7 @@ def test_m_n_plus_one_detector_enabled(self): event = get_event("m-n-plus-one-db/m-n-plus-one-graphql") sdk_span_mock = Mock() _detect_performance_problems(event, sdk_span_mock, self.create_project()) - sdk_span_mock.containing_transaction.set_tag.assert_has_calls( + sdk_span_mock.root_span.set_tag.assert_has_calls( [ # Current + Experimental Detector call("_pi_all_issue_count", 2), diff --git a/tests/sentry/utils/performance_issues/test_m_n_plus_one_db_detector.py b/tests/sentry/utils/performance_issues/test_m_n_plus_one_db_detector.py index 7668cfb0e7ad1b..734dfaddcfac01 100644 --- a/tests/sentry/utils/performance_issues/test_m_n_plus_one_db_detector.py +++ b/tests/sentry/utils/performance_issues/test_m_n_plus_one_db_detector.py @@ -127,7 +127,7 @@ def test_m_n_plus_one_detector_enabled(self): event = get_event("m-n-plus-one-db/m-n-plus-one-graphql") sdk_span_mock = Mock() _detect_performance_problems(event, sdk_span_mock, self.create_project()) - sdk_span_mock.containing_transaction.set_tag.assert_has_calls( + sdk_span_mock.root_span.set_tag.assert_has_calls( [ call("_pi_all_issue_count", 1), call("_pi_sdk_name", "sentry.javascript.node"), diff --git a/tests/sentry/utils/performance_issues/test_performance_detection.py b/tests/sentry/utils/performance_issues/test_performance_detection.py index 7b6b07ab49c518..ce31c2b4dcf1ce 100644 --- a/tests/sentry/utils/performance_issues/test_performance_detection.py +++ b/tests/sentry/utils/performance_issues/test_performance_detection.py @@ -414,8 +414,8 @@ def test_detects_performance_issues_in_n_plus_one_query(self): perf_problems = _detect_performance_problems(n_plus_one_event, sdk_span_mock, self.project) - assert sdk_span_mock.containing_transaction.set_tag.call_count == 6 - sdk_span_mock.containing_transaction.set_tag.assert_has_calls( + assert sdk_span_mock.root_span.set_tag.call_count == 6 + 
sdk_span_mock.root_span.set_tag.assert_has_calls( [ call( "_pi_all_issue_count", diff --git a/tests/sentry/utils/test_sdk.py b/tests/sentry/utils/test_sdk.py index 1609caa4ed86d4..592547b863d681 100644 --- a/tests/sentry/utils/test_sdk.py +++ b/tests/sentry/utils/test_sdk.py @@ -216,7 +216,7 @@ def test_scope_has_correct_transaction(self, mock_resolve: MagicMock): mock_scope = Scope() mock_scope._transaction = "/dogs/{name}/" - with patch("sentry.utils.sdk.sentry_sdk.Scope.get_current_scope", return_value=mock_scope): + with patch("sentry.utils.sdk.sentry_sdk.get_current_scope", return_value=mock_scope): mismatch = check_current_scope_transaction(Request(HttpRequest())) assert mismatch is None @@ -225,7 +225,7 @@ def test_scope_has_wrong_transaction(self, mock_resolve: MagicMock): mock_scope = Scope() mock_scope._transaction = "/tricks/{trick_name}/" - with patch("sentry.utils.sdk.sentry_sdk.Scope.get_current_scope", return_value=mock_scope): + with patch("sentry.utils.sdk.sentry_sdk.get_current_scope", return_value=mock_scope): mismatch = check_current_scope_transaction(Request(HttpRequest())) assert mismatch == { "scope_transaction": "/tricks/{trick_name}/", diff --git a/tests/sentry/web/frontend/test_newest_performance_issue.py b/tests/sentry/web/frontend/test_newest_performance_issue.py index ff56059ad67ce8..71a59f48536eb0 100644 --- a/tests/sentry/web/frontend/test_newest_performance_issue.py +++ b/tests/sentry/web/frontend/test_newest_performance_issue.py @@ -46,7 +46,7 @@ def setUp(self): self.login_as(self.user) def test_simple(self): - with mock.patch("sentry_sdk.tracing.Span.containing_transaction"): + with mock.patch("sentry_sdk.tracing.Span.root_span"): latest_event_time = time() older_event_time = latest_event_time - 300 @@ -80,7 +80,7 @@ def test_simple(self): @override_options({"performance.issues.n_plus_one_db.problem-creation": 1.0}) @with_feature("system:multi-region") def test_simple_customer_domains(self): - with 
mock.patch("sentry_sdk.tracing.Span.containing_transaction"): + with mock.patch("sentry_sdk.tracing.Span.root_span"): latest_event_time = time() older_event_time = latest_event_time - 300