feat: upgrade sentry python sdk to 2.28.0 #91717

Closed · wants to merge 4 commits (not merged)

This PR bumps sentry-sdk from 2.27.0 to 2.28.0 in both frozen requirements files and migrates the remaining direct sentry_sdk.set_measurement() calls to the internal sentry.utils.sdk.set_measurement() helper. The helper now records values through transaction.set_data() instead of the SDK's now-deprecated set_measurement() API, and it no longer accepts a unit argument.
2 changes: 1 addition & 1 deletion requirements-dev-frozen.txt

@@ -192,7 +192,7 @@ sentry-ophio==1.1.3
 sentry-protos==0.2.0
 sentry-redis-tools==0.5.0
 sentry-relay==0.9.9
-sentry-sdk==2.27.0
+sentry-sdk==2.28.0
 sentry-usage-accountant==0.0.10
 simplejson==3.17.6
 six==1.16.0
2 changes: 1 addition & 1 deletion requirements-frozen.txt

@@ -130,7 +130,7 @@ sentry-ophio==1.1.3
 sentry-protos==0.2.0
 sentry-redis-tools==0.5.0
 sentry-relay==0.9.9
-sentry-sdk==2.27.0
+sentry-sdk==2.28.0
 sentry-usage-accountant==0.0.10
 simplejson==3.17.6
 six==1.16.0
6 changes: 3 additions & 3 deletions src/sentry/api/endpoints/organization_event_details.py

@@ -2,7 +2,6 @@
 from datetime import datetime, timedelta
 from typing import Any

-import sentry_sdk
 from rest_framework.request import Request
 from rest_framework.response import Response
 from snuba_sdk import Column, Condition, Function, Op

@@ -23,6 +22,7 @@
 from sentry.snuba.dataset import Dataset
 from sentry.snuba.query_sources import QuerySource
 from sentry.snuba.referrer import Referrer
+from sentry.utils.sdk import set_measurement

 VALID_AVERAGE_COLUMNS = {"span.self_time", "span.duration"}

@@ -39,7 +39,7 @@ def add_comparison_to_event(event, average_columns, request: Request):
         group_to_span_map[group].append(span)

     # Nothing to add comparisons to
-    sentry_sdk.set_measurement("query.groups", len(group_to_span_map))
+    set_measurement("query.groups", len(group_to_span_map))
     if len(group_to_span_map) == 0:
         return

@@ -77,7 +77,7 @@ def add_comparison_to_event(event, average_columns, request: Request):
             ),
         )
     )
-    sentry_sdk.set_measurement("query.groups_found", len(result["data"]))
+    set_measurement("query.groups_found", len(result["data"]))
     for row in result["data"]:
         group = row["span.group"]
         for span in group_to_span_map[group]:
8 changes: 4 additions & 4 deletions src/sentry/api/endpoints/organization_events_trace.py

@@ -761,7 +761,7 @@ def build_span_query(trace_id: str, spans_params: SnubaParams, query_spans: list
     # Performance improvement, snuba's parser is extremely slow when we're sending thousands of
     # span_ids here, using a `splitByChar` means that snuba will not parse the giant list of spans
     span_minimum = options.get("performance.traces.span_query_minimum_spans")
-    sentry_sdk.set_measurement("trace_view.spans.span_minimum", span_minimum)
+    set_measurement("trace_view.spans.span_minimum", span_minimum)
     sentry_sdk.set_tag("trace_view.split_by_char.optimization", len(query_spans) > span_minimum)
     if len(query_spans) > span_minimum:
         # TODO: because we're not doing an IN on a list of literals, snuba will not optimize the query with the HexInt

@@ -813,14 +813,14 @@ def augment_transactions_with_spans(
             projects.add(error["project.id"])
     ts_params = find_timestamp_params(transactions)
     time_buffer = options.get("performance.traces.span_query_timebuffer_hours")
-    sentry_sdk.set_measurement("trace_view.spans.time_buffer", time_buffer)
+    set_measurement("trace_view.spans.time_buffer", time_buffer)
     if ts_params["min"]:
         params.start = ts_params["min"] - timedelta(hours=time_buffer)
     if ts_params["max"]:
         params.end = ts_params["max"] + timedelta(hours=time_buffer)

     if ts_params["max"] and ts_params["min"]:
-        sentry_sdk.set_measurement(
+        set_measurement(
             "trace_view.trace_duration", (ts_params["max"] - ts_params["min"]).total_seconds()
         )
         sentry_sdk.set_tag("trace_view.missing_timestamp_constraints", False)

@@ -899,7 +899,7 @@ def augment_transactions_with_spans(
         total_chunks = 3
     else:
         total_chunks = 4
-    sentry_sdk.set_measurement("trace_view.span_query.total_chunks", total_chunks)
+    set_measurement("trace_view.span_query.total_chunks", total_chunks)
     chunks = chunked(list_spans, (len(list_spans) // total_chunks) + 1)
     queries = [build_span_query(trace_id, spans_params, chunk) for chunk in chunks]
     results = bulk_snuba_queries(
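
The `splitByChar` trick mentioned in the first hunk above works by shipping one comma-joined string instead of interpolating thousands of span-id literals into an IN clause, which snuba's parser handles slowly; ClickHouse splits the string back into an array server-side. A minimal sketch of what such a condition can look like with snuba_sdk; the column name and exact shape are illustrative assumptions, not this PR's code:

from snuba_sdk import Column, Condition, Function, Op

def span_id_condition(span_ids: list[str]) -> Condition:
    # One long string instead of thousands of literals; splitByChar turns
    # it back into an array inside ClickHouse, so snuba's parser never
    # sees the giant list.
    joined = ",".join(span_ids)
    return Condition(
        Function("in", [Column("span_id"), Function("splitByChar", [",", joined])]),
        Op.EQ,
        1,
    )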
4 changes: 2 additions & 2 deletions src/sentry/api/utils.py

@@ -44,7 +44,7 @@
 from sentry.silo.base import SiloMode
 from sentry.types.region import get_local_region
 from sentry.utils.dates import parse_stats_period
-from sentry.utils.sdk import capture_exception, merge_context_into_scope
+from sentry.utils.sdk import capture_exception, merge_context_into_scope, set_measurement
 from sentry.utils.snuba import (
     DatasetSelectionError,
     QueryConnectionFailed,

@@ -454,7 +454,7 @@ def update_snuba_params_with_timestamp(
         # While possible, the majority of traces shouldn't take more than a week
         # Starting with 3d for now, but potentially something we can increase if this becomes a problem
         time_buffer = options.get("performance.traces.transaction_query_timebuffer_days")
-        sentry_sdk.set_measurement("trace_view.transactions.time_buffer", time_buffer)
+        set_measurement("trace_view.transactions.time_buffer", time_buffer)
         example_start = example_timestamp - timedelta(days=time_buffer)
         example_end = example_timestamp + timedelta(days=time_buffer)
         # If timestamp is being passed it should always overwrite the statsperiod or start & end
3 changes: 2 additions & 1 deletion src/sentry/ingest/transaction_clusterer/rules.py

@@ -9,6 +9,7 @@
 from sentry.ingest.transaction_clusterer.rule_validator import RuleValidator
 from sentry.models.project import Project
 from sentry.utils import metrics
+from sentry.utils.sdk import set_measurement

 from .base import ReplacementRule

@@ -145,7 +146,7 @@ def _trim_rules(self, rules: RuleSet) -> RuleSet:
         sorted_rules = [rule for rule in sorted_rules if rule[1] >= last_seen_deadline]

         if self.MERGE_MAX_RULES < len(rules):
-            sentry_sdk.set_measurement("discarded_rules", len(rules) - self.MERGE_MAX_RULES)
+            set_measurement("discarded_rules", len(rules) - self.MERGE_MAX_RULES)
             sentry_sdk.Scope.get_isolation_scope().set_context(
                 "clustering_rules_max",
                 {
3 changes: 2 additions & 1 deletion src/sentry/models/release.py

@@ -45,6 +45,7 @@
 from sentry.utils.db import atomic_transaction
 from sentry.utils.hashlib import hash_values, md5_text
 from sentry.utils.numbers import validate_bigint
+from sentry.utils.sdk import set_measurement

 logger = logging.getLogger(__name__)

@@ -644,7 +645,7 @@ def set_commits(self, commit_list):
         This will clear any existing commit log and replace it with the given
         commits.
         """
-        sentry_sdk.set_measurement("release.set_commits", len(commit_list))
+        set_measurement("release.set_commits", len(commit_list))
         from sentry.models.releases.set_commits import set_commits

         set_commits(self, commit_list)
2 changes: 1 addition & 1 deletion src/sentry/profiles/utils.py

@@ -114,7 +114,7 @@ def get_from_profiling_service(
     with sentry_sdk.start_span(op="json.dumps"):
         data = json.dumps(json_data).encode("utf-8")

-    set_measurement("payload.size", len(data), unit="byte")
+    set_measurement("payload.size", len(data))
     if metric:
         metric_name, metric_tags = metric
         metrics.distribution(metric_name, len(data), tags=metric_tags)
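
This call site already used the internal helper, so the only change is dropping unit="byte": the slimmed-down signature no longer forwards a unit, since set_data() stores plain key/value pairs. A hedged workaround, not something this PR does, would be to encode the unit in the key:

# Hypothetical, for illustration only: keep the unit in the key name.
set_measurement("payload.size.bytes", len(data))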
7 changes: 3 additions & 4 deletions src/sentry/snuba/rpc_dataset_common.py

@@ -31,6 +31,7 @@
 from sentry.search.events.fields import get_function_alias
 from sentry.search.events.types import SAMPLING_MODES, EventsMeta, SnubaData, SnubaParams
 from sentry.utils import json, snuba_rpc
+from sentry.utils.sdk import set_measurement
 from sentry.utils.snuba import process_value

 logger = logging.getLogger("sentry.snuba.spans_rpc")

@@ -313,9 +314,7 @@ def process_table_response(
             assert len(column_value.results) == len(column_value.reliabilities), Exception(
                 "Length of rpc results do not match length of rpc reliabilities"
             )
-            sentry_sdk.set_measurement(
-                f"SearchResolver.result_size.{attribute}", len(column_value.results)
-            )
+            set_measurement(f"SearchResolver.result_size.{attribute}", len(column_value.results))

             while len(final_data) < len(column_value.results):
                 final_data.append({})

@@ -333,7 +332,7 @@ def process_table_response(
                 final_confidence[index][attribute] = CONFIDENCES.get(
                     column_value.reliabilities[index], None
                 )
-        sentry_sdk.set_measurement("SearchResolver.result_size.final_data", len(final_data))
+        set_measurement("SearchResolver.result_size.final_data", len(final_data))

         if debug:
             final_meta["query"] = json.loads(MessageToJson(table_request.rpc_request))
4 changes: 2 additions & 2 deletions src/sentry/utils/sdk.py

@@ -695,11 +695,11 @@ def parse_org_slug(x: Organization | RpcOrganization | str) -> str:
     )


-def set_measurement(measurement_name, value, unit=None):
+def set_measurement(measurement_name, value):
     try:
         transaction = sentry_sdk.Scope.get_current_scope().transaction
         if transaction is not None:
-            transaction.set_measurement(measurement_name, value, unit)
+            transaction.set_data(measurement_name, value)
     except Exception:
         pass
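
The net effect of the PR is concentrated in this helper: call sites keep the same signature (minus unit), the helper still no-ops silently when there is no active transaction, and the value now lands on the transaction as data rather than as a measurement. A rough usage sketch, with a made-up transaction name and key:

import sentry_sdk

from sentry.utils.sdk import set_measurement

with sentry_sdk.start_transaction(op="task", name="example-task"):
    # Inside a transaction the value is attached via Span.set_data();
    # outside of one, the helper silently does nothing.
    set_measurement("spans.processed", 42)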
