From 1c886e623f7cbb941acb4dc2ec508d684ce8b442 Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Tue, 29 Nov 2022 09:37:48 -0800 Subject: [PATCH 01/12] fix(profiling): Resolve inherited method class names (#1756) Methods may be inherited from a parent class. If multiple classes inherit from the same class and uses the inherited method, we'd want it to report the parent class's name instead of the individual child classes since they'd have the same filename and lineno of the parent class and not the children. --- sentry_sdk/profiler.py | 8 ++++-- tests/test_profiler.py | 56 +++++++++++++++++++++++++++++++++++++++++- 2 files changed, 61 insertions(+), 3 deletions(-) diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py index 28e96016ca..3d3b7cf5a0 100644 --- a/sentry_sdk/profiler.py +++ b/sentry_sdk/profiler.py @@ -211,7 +211,9 @@ def get_frame_name(frame): and f_code.co_varnames[0] == "self" and "self" in frame.f_locals ): - return "{}.{}".format(frame.f_locals["self"].__class__.__name__, name) + for cls in frame.f_locals["self"].__class__.__mro__: + if name in cls.__dict__: + return "{}.{}".format(cls.__name__, name) except AttributeError: pass @@ -225,7 +227,9 @@ def get_frame_name(frame): and f_code.co_varnames[0] == "cls" and "cls" in frame.f_locals ): - return "{}.{}".format(frame.f_locals["cls"].__name__, name) + for cls in frame.f_locals["cls"].__mro__: + if name in cls.__dict__: + return "{}.{}".format(cls.__name__, name) except AttributeError: pass diff --git a/tests/test_profiler.py b/tests/test_profiler.py index 11e92630cf..42721044ce 100644 --- a/tests/test_profiler.py +++ b/tests/test_profiler.py @@ -82,7 +82,35 @@ def get_frame(depth=1): return inspect.currentframe() -class GetFrame: +class GetFrameBase: + def inherited_instance_method(self): + return inspect.currentframe() + + def inherited_instance_method_wrapped(self): + def wrapped(): + self + return inspect.currentframe() + + return wrapped + + @classmethod + def inherited_class_method(cls): + return inspect.currentframe() + + @classmethod + def inherited_class_method_wrapped(cls): + def wrapped(): + cls + return inspect.currentframe() + + return wrapped + + @staticmethod + def inherited_static_method(): + return inspect.currentframe() + + +class GetFrame(GetFrameBase): def instance_method(self): return inspect.currentframe() @@ -149,6 +177,32 @@ def static_method(): id="static_method", marks=pytest.mark.skip(reason="unsupported"), ), + pytest.param( + GetFrame().inherited_instance_method(), + "GetFrameBase.inherited_instance_method", + id="inherited_instance_method", + ), + pytest.param( + GetFrame().inherited_instance_method_wrapped()(), + "wrapped", + id="instance_method_wrapped", + ), + pytest.param( + GetFrame().inherited_class_method(), + "GetFrameBase.inherited_class_method", + id="inherited_class_method", + ), + pytest.param( + GetFrame().inherited_class_method_wrapped()(), + "wrapped", + id="inherited_class_method_wrapped", + ), + pytest.param( + GetFrame().inherited_static_method(), + "GetFrameBase.static_method", + id="inherited_static_method", + marks=pytest.mark.skip(reason="unsupported"), + ), ], ) def test_get_frame_name(frame, frame_name): From 905b3fdd4282120d18dab9137807e83746d28577 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 30 Nov 2022 16:22:25 +0100 Subject: [PATCH 02/12] Add constants for sentry-trace and baggage headers (#1765) * Introduced SENTRY_TRACE_HEADER_NAME variable * Introduced +BAGGAGE_HEADER_NAME variable --- .vscode/settings.json | 6 ++-- sentry_sdk/consts.py | 50 
+++++++++++++++---------------- sentry_sdk/integrations/flask.py | 9 ++++-- sentry_sdk/integrations/stdlib.py | 1 - sentry_sdk/tracing.py | 21 ++++++++----- 5 files changed, 49 insertions(+), 38 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index c167a13dc2..ba2472c4c9 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,4 +1,6 @@ { "python.pythonPath": ".venv/bin/python", - "python.formatting.provider": "black" -} \ No newline at end of file + "python.formatting.provider": "black", + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true +} diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 6d463f3dc5..6fd61d395b 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -44,6 +44,31 @@ DEFAULT_MAX_BREADCRUMBS = 100 +class OP: + DB = "db" + DB_REDIS = "db.redis" + EVENT_DJANGO = "event.django" + FUNCTION = "function" + FUNCTION_AWS = "function.aws" + FUNCTION_GCP = "function.gcp" + HTTP_CLIENT = "http.client" + HTTP_CLIENT_STREAM = "http.client.stream" + HTTP_SERVER = "http.server" + MIDDLEWARE_DJANGO = "middleware.django" + MIDDLEWARE_STARLETTE = "middleware.starlette" + MIDDLEWARE_STARLETTE_RECEIVE = "middleware.starlette.receive" + MIDDLEWARE_STARLETTE_SEND = "middleware.starlette.send" + QUEUE_SUBMIT_CELERY = "queue.submit.celery" + QUEUE_TASK_CELERY = "queue.task.celery" + QUEUE_TASK_RQ = "queue.task.rq" + SUBPROCESS = "subprocess" + SUBPROCESS_WAIT = "subprocess.wait" + SUBPROCESS_COMMUNICATE = "subprocess.communicate" + TEMPLATE_RENDER = "template.render" + VIEW_RENDER = "view.render" + WEBSOCKET_SERVER = "websocket.server" + + # This type exists to trick mypy and PyCharm into thinking `init` and `Client` # take these arguments (even though they take opaque **kwargs) class ClientConstructor(object): @@ -106,28 +131,3 @@ def _get_default_options(): VERSION = "1.11.1" - - -class OP: - DB = "db" - DB_REDIS = "db.redis" - EVENT_DJANGO = "event.django" - FUNCTION = "function" - FUNCTION_AWS = "function.aws" - FUNCTION_GCP = "function.gcp" - HTTP_CLIENT = "http.client" - HTTP_CLIENT_STREAM = "http.client.stream" - HTTP_SERVER = "http.server" - MIDDLEWARE_DJANGO = "middleware.django" - MIDDLEWARE_STARLETTE = "middleware.starlette" - MIDDLEWARE_STARLETTE_RECEIVE = "middleware.starlette.receive" - MIDDLEWARE_STARLETTE_SEND = "middleware.starlette.send" - QUEUE_SUBMIT_CELERY = "queue.submit.celery" - QUEUE_TASK_CELERY = "queue.task.celery" - QUEUE_TASK_RQ = "queue.task.rq" - SUBPROCESS = "subprocess" - SUBPROCESS_WAIT = "subprocess.wait" - SUBPROCESS_COMMUNICATE = "subprocess.communicate" - TEMPLATE_RENDER = "template.render" - VIEW_RENDER = "view.render" - WEBSOCKET_SERVER = "websocket.server" diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py index 52cce0b4b4..67c87b64f6 100644 --- a/sentry_sdk/integrations/flask.py +++ b/sentry_sdk/integrations/flask.py @@ -6,7 +6,7 @@ from sentry_sdk.integrations._wsgi_common import RequestExtractor from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware from sentry_sdk.scope import Scope -from sentry_sdk.tracing import SOURCE_FOR_STYLE +from sentry_sdk.tracing import SENTRY_TRACE_HEADER_NAME, SOURCE_FOR_STYLE from sentry_sdk.utils import ( capture_internal_exceptions, event_from_exception, @@ -101,8 +101,11 @@ def _add_sentry_trace(sender, template, context, **extra): sentry_span = Hub.current.scope.span context["sentry_trace"] = ( Markup( - '' - % (sentry_span.to_traceparent(),) + '' + % ( + SENTRY_TRACE_HEADER_NAME, + 
sentry_span.to_traceparent(), + ) ) if sentry_span else "" diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py index 3b81b6c2c5..687d9dd2c1 100644 --- a/sentry_sdk/integrations/stdlib.py +++ b/sentry_sdk/integrations/stdlib.py @@ -187,7 +187,6 @@ def sentry_patched_popen_init(self, *a, **kw): env = None with hub.start_span(op=OP.SUBPROCESS, description=description) as span: - for k, v in hub.iter_trace_propagation_headers(span): if env is None: env = _init_argument( diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index aacb3a5bb3..8be9028aa5 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -6,7 +6,6 @@ from datetime import datetime, timedelta import sentry_sdk - from sentry_sdk.utils import logger from sentry_sdk._types import MYPY @@ -24,6 +23,9 @@ import sentry_sdk.profiler from sentry_sdk._types import Event, SamplingContext, MeasurementUnit +BAGGAGE_HEADER_NAME = "baggage" +SENTRY_TRACE_HEADER_NAME = "sentry-trace" + # Transaction source # see https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations @@ -278,10 +280,12 @@ def continue_from_headers( # TODO-neel move away from this kwargs stuff, it's confusing and opaque # make more explicit - baggage = Baggage.from_incoming_header(headers.get("baggage")) - kwargs.update({"baggage": baggage}) + baggage = Baggage.from_incoming_header(headers.get(BAGGAGE_HEADER_NAME)) + kwargs.update({BAGGAGE_HEADER_NAME: baggage}) - sentrytrace_kwargs = extract_sentrytrace_data(headers.get("sentry-trace")) + sentrytrace_kwargs = extract_sentrytrace_data( + headers.get(SENTRY_TRACE_HEADER_NAME) + ) if sentrytrace_kwargs is not None: kwargs.update(sentrytrace_kwargs) @@ -308,7 +312,7 @@ def iter_headers(self): `sentry_tracestate` value, this will cause one to be generated and stored. """ - yield "sentry-trace", self.to_traceparent() + yield SENTRY_TRACE_HEADER_NAME, self.to_traceparent() tracestate = self.to_tracestate() if has_tracestate_enabled(self) else None # `tracestate` will only be `None` if there's no client or no DSN @@ -320,7 +324,7 @@ def iter_headers(self): if self.containing_transaction: baggage = self.containing_transaction.get_baggage().serialize() if baggage: - yield "baggage", baggage + yield BAGGAGE_HEADER_NAME, baggage @classmethod def from_traceparent( @@ -344,7 +348,9 @@ def from_traceparent( if not traceparent: return None - return cls.continue_from_headers({"sentry-trace": traceparent}, **kwargs) + return cls.continue_from_headers( + {SENTRY_TRACE_HEADER_NAME: traceparent}, **kwargs + ) def to_traceparent(self): # type: () -> str @@ -653,6 +659,7 @@ def finish(self, hub=None): # to a concrete decision. if self.sampled is None: logger.warning("Discarding transaction without sampling decision.") + return None finished_spans = [ From 01dc7ee45c93ff3193b5fc28ea6ce51d0d74c700 Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Wed, 30 Nov 2022 08:51:24 -0800 Subject: [PATCH 03/12] ref(profiling): Eagerly hash stack for profiles (#1755) Hashing the stack is an expensive operation and the same stack is used for parallel transactions happening on various threads. Instead of hashing it each time it's used. 
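The hash is now computed once when the sample is written to the buffer and carried alongside the stack, so slicing the profile can reuse it as a cheap key. A minimal sketch of the idea (illustrative helper names, not the SDK's actual SampleBuffer API; hash collisions are ignored here, matching the trade-off the buffer itself makes):

    from collections import Counter

    def write_sample(buffer, ts, raw_sample):
        # raw_sample is an iterable of (thread_id, stack) pairs, where each
        # stack is a hashable tuple of frames. The stack is hashed once here,
        # at write time, and the hash travels with the sample.
        buffer.append((ts, [(tid, hash(stack), stack) for tid, stack in raw_sample]))

    def count_stacks(buffer):
        # Aggregation reuses the pre-computed hash as the dictionary key;
        # each unique stack is stored only once and never re-hashed.
        counts = Counter()
        stacks = {}
        for _ts, sample in buffer:
            for _tid, hashed_stack, stack in sample:
                stacks.setdefault(hashed_stack, stack)
                counts[hashed_stack] += 1
        return {stacks[h]: n for h, n in counts.items()}

    buffer = []
    write_sample(buffer, 1, [("1", ("main", "handler")), ("2", ("main", "worker"))])
    write_sample(buffer, 2, [("1", ("main", "handler"))])
    print(count_stacks(buffer))  # {('main', 'handler'): 2, ('main', 'worker'): 1}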
--- sentry_sdk/profiler.py | 61 +++++++++++++++++++++++------------------- tests/test_profiler.py | 8 +++--- 2 files changed, 37 insertions(+), 32 deletions(-) diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py index 3d3b7cf5a0..b38b7af962 100644 --- a/sentry_sdk/profiler.py +++ b/sentry_sdk/profiler.py @@ -53,7 +53,9 @@ from typing_extensions import TypedDict import sentry_sdk.tracing - RawSampleData = Tuple[int, Sequence[Tuple[str, Sequence[RawFrameData]]]] + RawStack = Tuple[RawFrameData, ...] + RawSample = Sequence[Tuple[str, RawStack]] + RawSampleWithId = Sequence[Tuple[str, int, RawStack]] ProcessedStack = Tuple[int, ...] @@ -153,7 +155,7 @@ def teardown_profiler(): def extract_stack(frame, max_stack_depth=MAX_STACK_DEPTH): - # type: (Optional[FrameType], int) -> Sequence[RawFrameData] + # type: (Optional[FrameType], int) -> Tuple[RawFrameData, ...] """ Extracts the stack starting the specified frame. The extracted stack assumes the specified frame is the top of the stack, and works back @@ -328,12 +330,14 @@ class SampleBuffer(object): def __init__(self, capacity): # type: (int) -> None - self.buffer = [None] * capacity # type: List[Optional[RawSampleData]] + self.buffer = [ + None + ] * capacity # type: List[Optional[Tuple[int, RawSampleWithId]]] self.capacity = capacity # type: int self.idx = 0 # type: int - def write(self, sample): - # type: (RawSampleData) -> None + def write(self, ts, raw_sample): + # type: (int, RawSample) -> None """ Writing to the buffer is not thread safe. There is the possibility that parallel writes will overwrite one another. @@ -346,7 +350,24 @@ def write(self, sample): any synchronization mechanisms here like locks. """ idx = self.idx - self.buffer[idx] = sample + + sample = [ + ( + thread_id, + # Instead of mapping the stack into frame ids and hashing + # that as a tuple, we can directly hash the stack. + # This saves us from having to generate yet another list. + # Additionally, using the stack as the key directly is + # costly because the stack can be large, so we pre-hash + # the stack, and use the hash as the key as this will be + # needed a few times to improve performance. + hash(stack), + stack, + ) + for thread_id, stack in raw_sample + ] + + self.buffer[idx] = (ts, sample) self.idx = (idx + 1) % self.capacity def slice_profile(self, start_ns, stop_ns): @@ -357,27 +378,13 @@ def slice_profile(self, start_ns, stop_ns): frames = dict() # type: Dict[RawFrameData, int] frames_list = list() # type: List[ProcessedFrame] - # TODO: This is doing an naive iteration over the - # buffer and extracting the appropriate samples. - # - # Is it safe to assume that the samples are always in - # chronological order and binary search the buffer? for ts, sample in filter(None, self.buffer): if start_ns > ts or ts > stop_ns: continue elapsed_since_start_ns = str(ts - start_ns) - for tid, stack in sample: - # Instead of mapping the stack into frame ids and hashing - # that as a tuple, we can directly hash the stack. - # This saves us from having to generate yet another list. - # Additionally, using the stack as the key directly is - # costly because the stack can be large, so we pre-hash - # the stack, and use the hash as the key as this will be - # needed a few times to improve performance. 
- hashed_stack = hash(stack) - + for tid, hashed_stack, stack in sample: # Check if the stack is indexed first, this lets us skip # indexing frames if it's not necessary if hashed_stack not in stacks: @@ -433,13 +440,11 @@ def _sample_stack(*args, **kwargs): """ self.write( - ( - nanosecond_time(), - [ - (str(tid), extract_stack(frame)) - for tid, frame in sys._current_frames().items() - ], - ) + nanosecond_time(), + [ + (str(tid), extract_stack(frame)) + for tid, frame in sys._current_frames().items() + ], ) return _sample_stack diff --git a/tests/test_profiler.py b/tests/test_profiler.py index 42721044ce..9a268713c8 100644 --- a/tests/test_profiler.py +++ b/tests/test_profiler.py @@ -249,8 +249,8 @@ def __init__(self, capacity, sample_data=None): def make_sampler(self): def _sample_stack(*args, **kwargs): - print("writing", self.sample_data[0]) - self.write(self.sample_data.pop(0)) + ts, sample = self.sample_data.pop(0) + self.write(ts, sample) return _sample_stack @@ -760,7 +760,7 @@ def test_thread_scheduler_single_background_thread(scheduler_class): ) def test_sample_buffer(capacity, start_ns, stop_ns, samples, profile): buffer = SampleBuffer(capacity) - for sample in samples: - buffer.write(sample) + for ts, sample in samples: + buffer.write(ts, sample) result = buffer.slice_profile(start_ns, stop_ns) assert result == profile From 46697ddeb19f2d5989c8bae88dbad41f68797dca Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 1 Dec 2022 12:04:41 +0100 Subject: [PATCH 04/12] Add instrumenter config to switch between Otel and Sentry instrumentation. (#1766) * Add instrumenter config to switch between Sentry and OTel instrumentation. * Add API to set arbitrary context in Transaction. (#1769) * Add API to set custom Span timestamps (#1770) --- sentry_sdk/api.py | 3 +- sentry_sdk/client.py | 4 ++ sentry_sdk/consts.py | 6 +++ sentry_sdk/hub.py | 17 +++++++- sentry_sdk/tracing.py | 90 +++++++++++++++++++++++++++++++++++++------ 5 files changed, 106 insertions(+), 14 deletions(-) diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py index cec914aca1..ffa017cfc1 100644 --- a/sentry_sdk/api.py +++ b/sentry_sdk/api.py @@ -4,6 +4,7 @@ from sentry_sdk.scope import Scope from sentry_sdk._types import MYPY +from sentry_sdk.tracing import NoOpSpan if MYPY: from typing import Any @@ -210,5 +211,5 @@ def start_transaction( transaction=None, # type: Optional[Transaction] **kwargs # type: Any ): - # type: (...) -> Transaction + # type: (...) 
-> Union[Transaction, NoOpSpan] return Hub.current.start_transaction(transaction, **kwargs) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index bf1e483634..8af7003156 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -20,6 +20,7 @@ from sentry_sdk.transport import make_transport from sentry_sdk.consts import ( DEFAULT_OPTIONS, + INSTRUMENTER, VERSION, ClientConstructor, ) @@ -86,6 +87,9 @@ def _get_options(*args, **kwargs): if rv["server_name"] is None and hasattr(socket, "gethostname"): rv["server_name"] = socket.gethostname() + if rv["instrumenter"] is None: + rv["instrumenter"] = INSTRUMENTER.SENTRY + return rv diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 6fd61d395b..47d630dee3 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -44,6 +44,11 @@ DEFAULT_MAX_BREADCRUMBS = 100 +class INSTRUMENTER: + SENTRY = "sentry" + OTEL = "otel" + + class OP: DB = "db" DB_REDIS = "db.redis" @@ -107,6 +112,7 @@ def __init__( send_client_reports=True, # type: bool _experiments={}, # type: Experiments # noqa: B006 proxy_headers=None, # type: Optional[Dict[str, str]] + instrumenter=INSTRUMENTER.SENTRY, # type: Optional[str] ): # type: (...) -> None pass diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py index 3d4a28d526..df9de10fe4 100644 --- a/sentry_sdk/hub.py +++ b/sentry_sdk/hub.py @@ -5,9 +5,10 @@ from contextlib import contextmanager from sentry_sdk._compat import with_metaclass +from sentry_sdk.consts import INSTRUMENTER from sentry_sdk.scope import Scope from sentry_sdk.client import Client -from sentry_sdk.tracing import Span, Transaction +from sentry_sdk.tracing import NoOpSpan, Span, Transaction from sentry_sdk.session import Session from sentry_sdk.utils import ( exc_info_from_error, @@ -450,6 +451,7 @@ def add_breadcrumb( def start_span( self, span=None, # type: Optional[Span] + instrumenter=INSTRUMENTER.SENTRY, # type: str **kwargs # type: Any ): # type: (...) -> Span @@ -464,6 +466,11 @@ def start_span( for every incoming HTTP request. Use `start_transaction` to start a new transaction when one is not already in progress. """ + configuration_instrumenter = self.client and self.client.options["instrumenter"] + + if instrumenter != configuration_instrumenter: + return NoOpSpan() + # TODO: consider removing this in a future release. # This is for backwards compatibility with releases before # start_transaction existed, to allow for a smoother transition. @@ -494,9 +501,10 @@ def start_span( def start_transaction( self, transaction=None, # type: Optional[Transaction] + instrumenter=INSTRUMENTER.SENTRY, # type: str **kwargs # type: Any ): - # type: (...) -> Transaction + # type: (...) -> Union[Transaction, NoOpSpan] """ Start and return a transaction. @@ -519,6 +527,11 @@ def start_transaction( When the transaction is finished, it will be sent to Sentry with all its finished child spans. 
""" + configuration_instrumenter = self.client and self.client.options["instrumenter"] + + if instrumenter != configuration_instrumenter: + return NoOpSpan() + custom_sampling_context = kwargs.pop("custom_sampling_context", {}) # if we haven't been given a transaction, make one diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 8be9028aa5..93d22dc758 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -6,6 +6,7 @@ from datetime import datetime, timedelta import sentry_sdk +from sentry_sdk.consts import INSTRUMENTER from sentry_sdk.utils import logger from sentry_sdk._types import MYPY @@ -125,6 +126,7 @@ def __init__( status=None, # type: Optional[str] transaction=None, # type: Optional[str] # deprecated containing_transaction=None, # type: Optional[Transaction] + start_timestamp=None, # type: Optional[datetime] ): # type: (...) -> None self.trace_id = trace_id or uuid.uuid4().hex @@ -139,7 +141,7 @@ def __init__( self._tags = {} # type: Dict[str, str] self._data = {} # type: Dict[str, Any] self._containing_transaction = containing_transaction - self.start_timestamp = datetime.utcnow() + self.start_timestamp = start_timestamp or datetime.utcnow() try: # TODO: For Python 3.7+, we could use a clock with ns resolution: # self._start_timestamp_monotonic = time.perf_counter_ns() @@ -206,8 +208,8 @@ def containing_transaction(self): # referencing themselves) return self._containing_transaction - def start_child(self, **kwargs): - # type: (**Any) -> Span + def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): + # type: (str, **Any) -> Span """ Start a sub-span from the current span or transaction. @@ -215,6 +217,13 @@ def start_child(self, **kwargs): trace id, sampling decision, transaction pointer, and span recorder are inherited from the current span/transaction. """ + hub = self.hub or sentry_sdk.Hub.current + client = hub.client + configuration_instrumenter = client and client.options["instrumenter"] + + if instrumenter != configuration_instrumenter: + return NoOpSpan() + kwargs.setdefault("sampled", self.sampled) child = Span( @@ -461,8 +470,8 @@ def is_success(self): # type: () -> bool return self.status == "ok" - def finish(self, hub=None): - # type: (Optional[sentry_sdk.Hub]) -> Optional[str] + def finish(self, hub=None, end_timestamp=None): + # type: (Optional[sentry_sdk.Hub], Optional[datetime]) -> Optional[str] # XXX: would be type: (Optional[sentry_sdk.Hub]) -> None, but that leads # to incompatible return types for Span.finish and Transaction.finish. if self.timestamp is not None: @@ -472,8 +481,13 @@ def finish(self, hub=None): hub = hub or self.hub or sentry_sdk.Hub.current try: - duration_seconds = time.perf_counter() - self._start_timestamp_monotonic - self.timestamp = self.start_timestamp + timedelta(seconds=duration_seconds) + if end_timestamp: + self.timestamp = end_timestamp + else: + duration_seconds = time.perf_counter() - self._start_timestamp_monotonic + self.timestamp = self.start_timestamp + timedelta( + seconds=duration_seconds + ) except AttributeError: self.timestamp = datetime.utcnow() @@ -550,6 +564,7 @@ class Transaction(Span): # tracestate data from other vendors, of the form `dogs=yes,cats=maybe` "_third_party_tracestate", "_measurements", + "_contexts", "_profile", "_baggage", "_active_thread_id", @@ -575,7 +590,9 @@ def __init__( "instead of Span(transaction=...)." 
) name = kwargs.pop("transaction") + Span.__init__(self, **kwargs) + self.name = name self.source = source self.sample_rate = None # type: Optional[float] @@ -586,6 +603,7 @@ def __init__( self._sentry_tracestate = sentry_tracestate self._third_party_tracestate = third_party_tracestate self._measurements = {} # type: Dict[str, Any] + self._contexts = {} # type: Dict[str, Any] self._profile = None # type: Optional[sentry_sdk.profiler.Profile] self._baggage = baggage # for profiling, we want to know on which thread a transaction is started @@ -619,8 +637,8 @@ def containing_transaction(self): # reference. return self - def finish(self, hub=None): - # type: (Optional[sentry_sdk.Hub]) -> Optional[str] + def finish(self, hub=None, end_timestamp=None): + # type: (Optional[sentry_sdk.Hub], Optional[datetime]) -> Optional[str] if self.timestamp is not None: # This transaction is already finished, ignore. return None @@ -652,7 +670,7 @@ def finish(self, hub=None): ) self.name = "" - Span.finish(self, hub) + Span.finish(self, hub, end_timestamp) if not self.sampled: # At this point a `sampled = None` should have already been resolved @@ -674,11 +692,15 @@ def finish(self, hub=None): # to be garbage collected self._span_recorder = None + contexts = {} + contexts.update(self._contexts) + contexts.update({"trace": self.get_trace_context()}) + event = { "type": "transaction", "transaction": self.name, "transaction_info": {"source": self.source}, - "contexts": {"trace": self.get_trace_context()}, + "contexts": contexts, "tags": self._tags, "timestamp": self.timestamp, "start_timestamp": self.start_timestamp, @@ -703,6 +725,10 @@ def set_measurement(self, name, value, unit=""): self._measurements[name] = {"value": value, "unit": unit} + def set_context(self, key, value): + # type: (str, Any) -> None + self._contexts[key] = value + def to_json(self): # type: () -> Dict[str, Any] rv = super(Transaction, self).to_json() @@ -828,6 +854,48 @@ def _set_initial_sampling_decision(self, sampling_context): ) +class NoOpSpan(Span): + def __repr__(self): + # type: () -> Any + return self.__class__.__name__ + + def __enter__(self): + # type: () -> Any + return self + + def __exit__(self, ty, value, tb): + # type: (Any, Any, Any) -> Any + pass + + def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): + # type: (str, **Any) -> Any + pass + + def new_span(self, **kwargs): + # type: (**Any) -> Any + pass + + def set_tag(self, key, value): + # type: (Any, Any) -> Any + pass + + def set_data(self, key, value): + # type: (Any, Any) -> Any + pass + + def set_status(self, value): + # type: (Any) -> Any + pass + + def set_http_status(self, http_status): + # type: (Any) -> Any + pass + + def finish(self, hub=None, end_timestamp=None): + # type: (Any, Any) -> Any + pass + + # Circular imports from sentry_sdk.tracing_utils import ( From b1290c60208997b082287c724454949ae0166b54 Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Wed, 7 Dec 2022 06:11:24 -0800 Subject: [PATCH 05/12] feat(profiling): Introduce active thread id on scope (#1764) Up to this point, simply taking the current thread when the transaction/profile was started was good enough. When using ASGI apps with non async handlers, the request is received on the main thread. This is also where the transaction or profile was started. However, the request is handled on another thread using a thread pool. 
To support this use case, we want to be able to set the active thread id on the scope where we can read it when we need it to allow the active thread id to be set elsewhere. --- sentry_sdk/client.py | 4 +++- sentry_sdk/profiler.py | 14 +++++++++++--- sentry_sdk/scope.py | 21 +++++++++++++++++++++ 3 files changed, 35 insertions(+), 4 deletions(-) diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py index 8af7003156..d32d014d96 100644 --- a/sentry_sdk/client.py +++ b/sentry_sdk/client.py @@ -433,7 +433,9 @@ def capture_event( if is_transaction: if profile is not None: - envelope.add_profile(profile.to_json(event_opt, self.options)) + envelope.add_profile( + profile.to_json(event_opt, self.options, scope) + ) envelope.add_transaction(event_opt) else: envelope.add_event(event_opt) diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py index b38b7af962..21313c9f73 100644 --- a/sentry_sdk/profiler.py +++ b/sentry_sdk/profiler.py @@ -51,6 +51,7 @@ from typing import Sequence from typing import Tuple from typing_extensions import TypedDict + import sentry_sdk.scope import sentry_sdk.tracing RawStack = Tuple[RawFrameData, ...] @@ -267,8 +268,8 @@ def __exit__(self, ty, value, tb): self.scheduler.stop_profiling() self._stop_ns = nanosecond_time() - def to_json(self, event_opt, options): - # type: (Any, Dict[str, Any]) -> Dict[str, Any] + def to_json(self, event_opt, options, scope): + # type: (Any, Dict[str, Any], Optional[sentry_sdk.scope.Scope]) -> Dict[str, Any] assert self._start_ns is not None assert self._stop_ns is not None @@ -280,6 +281,9 @@ def to_json(self, event_opt, options): profile["frames"], options["in_app_exclude"], options["in_app_include"] ) + # the active thread id from the scope always take priorty if it exists + active_thread_id = None if scope is None else scope.active_thread_id + return { "environment": event_opt.get("environment"), "event_id": uuid.uuid4().hex, @@ -311,7 +315,11 @@ def to_json(self, event_opt, options): # because we end the transaction after the profile "relative_end_ns": str(self._stop_ns - self._start_ns), "trace_id": self.transaction.trace_id, - "active_thread_id": str(self.transaction._active_thread_id), + "active_thread_id": str( + self.transaction._active_thread_id + if active_thread_id is None + else active_thread_id + ), } ], } diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py index e0a2dc7a8d..f5ac270914 100644 --- a/sentry_sdk/scope.py +++ b/sentry_sdk/scope.py @@ -94,6 +94,10 @@ class Scope(object): "_session", "_attachments", "_force_auto_session_tracking", + # The thread that is handling the bulk of the work. This can just + # be the main thread, but that's not always true. For web frameworks, + # this would be the thread handling the request. 
+ "_active_thread_id", ) def __init__(self): @@ -125,6 +129,8 @@ def clear(self): self._session = None # type: Optional[Session] self._force_auto_session_tracking = None # type: Optional[bool] + self._active_thread_id = None # type: Optional[int] + @_attr_setter def level(self, value): # type: (Optional[str]) -> None @@ -228,6 +234,17 @@ def span(self, span): if transaction.name: self._transaction = transaction.name + @property + def active_thread_id(self): + # type: () -> Optional[int] + """Get/set the current active thread id.""" + return self._active_thread_id + + def set_active_thread_id(self, active_thread_id): + # type: (Optional[int]) -> None + """Set the current active thread id.""" + self._active_thread_id = active_thread_id + def set_tag( self, key, # type: str @@ -447,6 +464,8 @@ def update_from_scope(self, scope): self._span = scope._span if scope._attachments: self._attachments.extend(scope._attachments) + if scope._active_thread_id is not None: + self._active_thread_id = scope._active_thread_id def update_from_kwargs( self, @@ -496,6 +515,8 @@ def __copy__(self): rv._force_auto_session_tracking = self._force_auto_session_tracking rv._attachments = list(self._attachments) + rv._active_thread_id = self._active_thread_id + return rv def __repr__(self): From dd26fbe757854dc2bac62742ed6dbc0710c19642 Mon Sep 17 00:00:00 2001 From: Tony Xiao Date: Wed, 14 Dec 2022 03:44:32 -0500 Subject: [PATCH 06/12] fix(ci): Fix Github action checks (#1780) The checks are failing for 2 reasons: 1. GitHub actions dropped python3.7 support on the latest hosted runners. https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 2. New release of Tox was validation the python version in the environment name and the trailing framework version being used in the environment name was being treated as a python version and validated causing an issue. Further changes: * Added one GitHub job to check if all tests have passed. Makes it easier to configure required checks in GitHub. 
* Pinning Tox to <4 Co-authored-by: Anton Pirker --- .github/workflows/test-common.yml | 11 +- .../workflows/test-integration-aiohttp.yml | 25 +- .github/workflows/test-integration-asgi.yml | 25 +- .../workflows/test-integration-aws_lambda.yml | 25 +- .github/workflows/test-integration-beam.yml | 25 +- .github/workflows/test-integration-boto3.yml | 25 +- .github/workflows/test-integration-bottle.yml | 25 +- .github/workflows/test-integration-celery.yml | 25 +- .../workflows/test-integration-chalice.yml | 25 +- .github/workflows/test-integration-django.yml | 25 +- .github/workflows/test-integration-falcon.yml | 25 +- .../workflows/test-integration-fastapi.yml | 25 +- .github/workflows/test-integration-flask.yml | 25 +- .github/workflows/test-integration-gcp.yml | 25 +- .github/workflows/test-integration-httpx.yml | 25 +- .../workflows/test-integration-pure_eval.yml | 25 +- .../workflows/test-integration-pymongo.yml | 25 +- .../workflows/test-integration-pyramid.yml | 25 +- .github/workflows/test-integration-quart.yml | 25 +- .github/workflows/test-integration-redis.yml | 25 +- .../test-integration-rediscluster.yml | 25 +- .../workflows/test-integration-requests.yml | 25 +- .github/workflows/test-integration-rq.yml | 25 +- .github/workflows/test-integration-sanic.yml | 25 +- .../workflows/test-integration-sqlalchemy.yml | 25 +- .../workflows/test-integration-starlette.yml | 25 +- .../workflows/test-integration-tornado.yml | 25 +- .../workflows/test-integration-trytond.yml | 25 +- scripts/split-tox-gh-actions/ci-yaml.txt | 18 +- .../split-tox-gh-actions.py | 11 +- tox.ini | 347 +++++++++--------- 31 files changed, 715 insertions(+), 347 deletions(-) diff --git a/.github/workflows/test-common.yml b/.github/workflows/test-common.yml index 2c8964d4ae..d3922937fe 100644 --- a/.github/workflows/test-common.yml +++ b/.github/workflows/test-common.yml @@ -24,7 +24,11 @@ jobs: continue-on-error: true strategy: matrix: - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] python-version: ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10"] services: postgres: @@ -51,9 +55,6 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | pip install codecov tox @@ -69,4 +70,4 @@ jobs: ./scripts/runtox.sh "py${{ matrix.python-version }}$" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch --ignore=tests/integrations coverage combine .coverage* coverage xml -i - codecov --file coverage.xml \ No newline at end of file + codecov --file coverage.xml diff --git a/.github/workflows/test-integration-aiohttp.yml b/.github/workflows/test-integration-aiohttp.yml index 62f0a48ebf..73483454c2 100644 --- a/.github/workflows/test-integration-aiohttp.yml +++ b/.github/workflows/test-integration-aiohttp.yml @@ -27,12 +27,16 @@ jobs: name: aiohttp, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["3.7","3.8","3.9","3.10"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see 
https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test aiohttp env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All aiohttp tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-asgi.yml b/.github/workflows/test-integration-asgi.yml index 069ebbf3aa..16715ca230 100644 --- a/.github/workflows/test-integration-asgi.yml +++ b/.github/workflows/test-integration-asgi.yml @@ -27,12 +27,16 @@ jobs: name: asgi, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["3.7","3.8","3.9","3.10"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test asgi env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All asgi tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." 
&& exit 1 diff --git a/.github/workflows/test-integration-aws_lambda.yml b/.github/workflows/test-integration-aws_lambda.yml index 5e40fed7e6..4d795a642d 100644 --- a/.github/workflows/test-integration-aws_lambda.yml +++ b/.github/workflows/test-integration-aws_lambda.yml @@ -27,12 +27,16 @@ jobs: name: aws_lambda, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["3.7"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test aws_lambda env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All aws_lambda tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-beam.yml b/.github/workflows/test-integration-beam.yml index 55f8e015be..0f6df2df0b 100644 --- a/.github/workflows/test-integration-beam.yml +++ b/.github/workflows/test-integration-beam.yml @@ -27,12 +27,16 @@ jobs: name: beam, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["3.7"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test beam env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All beam tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." 
&& exit 1 diff --git a/.github/workflows/test-integration-boto3.yml b/.github/workflows/test-integration-boto3.yml index 9b8747c5f8..8f390fb309 100644 --- a/.github/workflows/test-integration-boto3.yml +++ b/.github/workflows/test-integration-boto3.yml @@ -27,12 +27,16 @@ jobs: name: boto3, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["2.7","3.6","3.7","3.8"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test boto3 env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All boto3 tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-bottle.yml b/.github/workflows/test-integration-bottle.yml index 834638213b..b2c3fcc92b 100644 --- a/.github/workflows/test-integration-bottle.yml +++ b/.github/workflows/test-integration-bottle.yml @@ -27,12 +27,16 @@ jobs: name: bottle, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test bottle env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All bottle tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." 
&& exit 1 diff --git a/.github/workflows/test-integration-celery.yml b/.github/workflows/test-integration-celery.yml index 17feb5a4ba..927a0371cd 100644 --- a/.github/workflows/test-integration-celery.yml +++ b/.github/workflows/test-integration-celery.yml @@ -27,12 +27,16 @@ jobs: name: celery, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test celery env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All celery tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-chalice.yml b/.github/workflows/test-integration-chalice.yml index 36067fc7ca..44fe01e19f 100644 --- a/.github/workflows/test-integration-chalice.yml +++ b/.github/workflows/test-integration-chalice.yml @@ -27,12 +27,16 @@ jobs: name: chalice, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["3.6","3.7","3.8"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test chalice env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All chalice tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." 
&& exit 1 diff --git a/.github/workflows/test-integration-django.yml b/.github/workflows/test-integration-django.yml index db659728a8..93c792b7b7 100644 --- a/.github/workflows/test-integration-django.yml +++ b/.github/workflows/test-integration-django.yml @@ -27,12 +27,16 @@ jobs: name: django, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] services: postgres: image: postgres @@ -58,11 +62,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test django env: @@ -77,3 +78,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All django tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-falcon.yml b/.github/workflows/test-integration-falcon.yml index af4c701e1a..956e8d5ba7 100644 --- a/.github/workflows/test-integration-falcon.yml +++ b/.github/workflows/test-integration-falcon.yml @@ -27,12 +27,16 @@ jobs: name: falcon, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test falcon env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All falcon tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." 
&& exit 1 diff --git a/.github/workflows/test-integration-fastapi.yml b/.github/workflows/test-integration-fastapi.yml index 6352d134e4..2dc8f1e171 100644 --- a/.github/workflows/test-integration-fastapi.yml +++ b/.github/workflows/test-integration-fastapi.yml @@ -27,12 +27,16 @@ jobs: name: fastapi, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["3.7","3.8","3.9","3.10"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test fastapi env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All fastapi tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-flask.yml b/.github/workflows/test-integration-flask.yml index 8e353814ff..96263508da 100644 --- a/.github/workflows/test-integration-flask.yml +++ b/.github/workflows/test-integration-flask.yml @@ -27,12 +27,16 @@ jobs: name: flask, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test flask env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All flask tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." 
&& exit 1 diff --git a/.github/workflows/test-integration-gcp.yml b/.github/workflows/test-integration-gcp.yml index 8aa4e12b7a..eefdfe1aae 100644 --- a/.github/workflows/test-integration-gcp.yml +++ b/.github/workflows/test-integration-gcp.yml @@ -27,12 +27,16 @@ jobs: name: gcp, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["3.7"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test gcp env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All gcp tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-httpx.yml b/.github/workflows/test-integration-httpx.yml index f9e1b4ec31..9f5ac92a3f 100644 --- a/.github/workflows/test-integration-httpx.yml +++ b/.github/workflows/test-integration-httpx.yml @@ -27,12 +27,16 @@ jobs: name: httpx, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["3.6","3.7","3.8","3.9","3.10"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test httpx env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All httpx tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." 
&& exit 1 diff --git a/.github/workflows/test-integration-pure_eval.yml b/.github/workflows/test-integration-pure_eval.yml index ef39704c43..1d8f7e1beb 100644 --- a/.github/workflows/test-integration-pure_eval.yml +++ b/.github/workflows/test-integration-pure_eval.yml @@ -27,12 +27,16 @@ jobs: name: pure_eval, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["3.5","3.6","3.7","3.8","3.9","3.10"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test pure_eval env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All pure_eval tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-pymongo.yml b/.github/workflows/test-integration-pymongo.yml index b2e82b7fb3..fb961558ac 100644 --- a/.github/workflows/test-integration-pymongo.yml +++ b/.github/workflows/test-integration-pymongo.yml @@ -27,12 +27,16 @@ jobs: name: pymongo, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["2.7","3.6","3.7","3.8","3.9","3.10"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test pymongo env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All pymongo tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." 
&& exit 1 diff --git a/.github/workflows/test-integration-pyramid.yml b/.github/workflows/test-integration-pyramid.yml index bbd017b66f..ad7bc43e85 100644 --- a/.github/workflows/test-integration-pyramid.yml +++ b/.github/workflows/test-integration-pyramid.yml @@ -27,12 +27,16 @@ jobs: name: pyramid, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test pyramid env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All pyramid tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-quart.yml b/.github/workflows/test-integration-quart.yml index de7671dbda..b9d82e53bc 100644 --- a/.github/workflows/test-integration-quart.yml +++ b/.github/workflows/test-integration-quart.yml @@ -27,12 +27,16 @@ jobs: name: quart, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["3.7","3.8","3.9","3.10"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test quart env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All quart tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." 
&& exit 1 diff --git a/.github/workflows/test-integration-redis.yml b/.github/workflows/test-integration-redis.yml index 60352088cd..074c41fe5b 100644 --- a/.github/workflows/test-integration-redis.yml +++ b/.github/workflows/test-integration-redis.yml @@ -27,12 +27,16 @@ jobs: name: redis, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["2.7","3.7","3.8","3.9"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test redis env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All redis tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-rediscluster.yml b/.github/workflows/test-integration-rediscluster.yml index 5866637176..06962926fa 100644 --- a/.github/workflows/test-integration-rediscluster.yml +++ b/.github/workflows/test-integration-rediscluster.yml @@ -27,12 +27,16 @@ jobs: name: rediscluster, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["2.7","3.7","3.8","3.9"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test rediscluster env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All rediscluster tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." 
&& exit 1 diff --git a/.github/workflows/test-integration-requests.yml b/.github/workflows/test-integration-requests.yml index 7e33b446db..5650121a51 100644 --- a/.github/workflows/test-integration-requests.yml +++ b/.github/workflows/test-integration-requests.yml @@ -27,12 +27,16 @@ jobs: name: requests, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["2.7","3.8","3.9"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test requests env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All requests tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-rq.yml b/.github/workflows/test-integration-rq.yml index e2a0ebaff8..3e3ead8118 100644 --- a/.github/workflows/test-integration-rq.yml +++ b/.github/workflows/test-integration-rq.yml @@ -27,12 +27,16 @@ jobs: name: rq, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["2.7","3.5","3.6","3.7","3.8","3.9","3.10"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test rq env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All rq tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." 
&& exit 1 diff --git a/.github/workflows/test-integration-sanic.yml b/.github/workflows/test-integration-sanic.yml index aa99f54a90..37ffd84bb9 100644 --- a/.github/workflows/test-integration-sanic.yml +++ b/.github/workflows/test-integration-sanic.yml @@ -27,12 +27,16 @@ jobs: name: sanic, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["3.5","3.6","3.7","3.8","3.9","3.10"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test sanic env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All sanic tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-sqlalchemy.yml b/.github/workflows/test-integration-sqlalchemy.yml index ea36e0f562..c57fc950b7 100644 --- a/.github/workflows/test-integration-sqlalchemy.yml +++ b/.github/workflows/test-integration-sqlalchemy.yml @@ -27,12 +27,16 @@ jobs: name: sqlalchemy, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["2.7","3.7","3.8","3.9","3.10"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test sqlalchemy env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All sqlalchemy tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." 
&& exit 1 diff --git a/.github/workflows/test-integration-starlette.yml b/.github/workflows/test-integration-starlette.yml index a35544e9e9..e4083f72d5 100644 --- a/.github/workflows/test-integration-starlette.yml +++ b/.github/workflows/test-integration-starlette.yml @@ -27,12 +27,16 @@ jobs: name: starlette, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["3.7","3.8","3.9","3.10"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test starlette env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All starlette tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/.github/workflows/test-integration-tornado.yml b/.github/workflows/test-integration-tornado.yml index 17c1f18a8e..de5d02f6e7 100644 --- a/.github/workflows/test-integration-tornado.yml +++ b/.github/workflows/test-integration-tornado.yml @@ -27,12 +27,16 @@ jobs: name: tornado, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["3.7","3.8","3.9","3.10"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test tornado env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All tornado tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." 
&& exit 1 diff --git a/.github/workflows/test-integration-trytond.yml b/.github/workflows/test-integration-trytond.yml index 12771ffd21..10853341e2 100644 --- a/.github/workflows/test-integration-trytond.yml +++ b/.github/workflows/test-integration-trytond.yml @@ -27,12 +27,16 @@ jobs: name: trytond, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true strategy: + fail-fast: false matrix: python-version: ["3.5","3.6","3.7","3.8","3.9","3.10"] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] steps: - uses: actions/checkout@v3 @@ -41,11 +45,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test trytond env: @@ -60,3 +61,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All trytond tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." && exit 1 diff --git a/scripts/split-tox-gh-actions/ci-yaml.txt b/scripts/split-tox-gh-actions/ci-yaml.txt index 2e14cb5062..f2b6f97c27 100644 --- a/scripts/split-tox-gh-actions/ci-yaml.txt +++ b/scripts/split-tox-gh-actions/ci-yaml.txt @@ -27,7 +27,6 @@ jobs: name: {{ framework }}, python ${{ matrix.python-version }}, ${{ matrix.os }} runs-on: ${{ matrix.os }} timeout-minutes: 45 - continue-on-error: true {{ strategy_matrix }} {{ services }} @@ -38,11 +37,8 @@ jobs: python-version: ${{ matrix.python-version }} - name: Setup Test Env - env: - PGHOST: localhost - PGPASSWORD: sentry run: | - pip install codecov tox + pip install codecov "tox>=3,<4" - name: Test {{ framework }} env: @@ -57,3 +53,15 @@ jobs: coverage combine .coverage* coverage xml -i codecov --file coverage.xml + + check_required_tests: + name: All {{ framework }} tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." 
&& exit 1 diff --git a/scripts/split-tox-gh-actions/split-tox-gh-actions.py b/scripts/split-tox-gh-actions/split-tox-gh-actions.py index 6e0018d0ff..2458fe06af 100755 --- a/scripts/split-tox-gh-actions/split-tox-gh-actions.py +++ b/scripts/split-tox-gh-actions/split-tox-gh-actions.py @@ -32,9 +32,14 @@ MATRIX_DEFINITION = """ strategy: + fail-fast: false matrix: python-version: [{{ python-version }}] - os: [ubuntu-latest] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] """ @@ -77,7 +82,7 @@ def get_yaml_files_hash(): """Calculate a hash of all the yaml configuration files""" hasher = hashlib.md5() - path_pattern = (OUT_DIR / f"test-integration-*.yml").as_posix() + path_pattern = (OUT_DIR / "test-integration-*.yml").as_posix() for file in glob(path_pattern): with open(file, "rb") as f: buf = f.read() @@ -127,7 +132,7 @@ def main(fail_on_changes): if python_version not in python_versions[framework]: python_versions[framework].append(python_version) - except ValueError as err: + except ValueError: print(f"ERROR reading line {line}") for framework in python_versions: diff --git a/tox.ini b/tox.ini index 98505caab1..22eac59db8 100644 --- a/tox.ini +++ b/tox.ini @@ -9,97 +9,97 @@ envlist = py{2.7,3.4,3.5,3.6,3.7,3.8,3.9,3.10} # === Integrations === - # General format is {pythonversion}-{integrationname}-{frameworkversion} + # General format is {pythonversion}-{integrationname}-v{frameworkversion} # 1 blank line between different integrations # Each framework version should only be mentioned once. I.e: - # {py3.7,py3.10}-django-{3.2} - # {py3.10}-django-{4.0} + # {py3.7,py3.10}-django-v{3.2} + # {py3.10}-django-v{4.0} # instead of: - # {py3.7}-django-{3.2} - # {py3.7,py3.10}-django-{3.2,4.0} + # {py3.7}-django-v{3.2} + # {py3.7,py3.10}-django-v{3.2,4.0} # Django 1.x - {py2.7,py3.5}-django-{1.8,1.9,1.10} - {py2.7,py3.5,py3.6,py3.7}-django-{1.11} + {py2.7,py3.5}-django-v{1.8,1.9,1.10} + {py2.7,py3.5,py3.6,py3.7}-django-v{1.11} # Django 2.x - {py3.5,py3.6,py3.7}-django-{2.0,2.1} - {py3.5,py3.6,py3.7,py3.8,py3.9}-django-{2.2} + {py3.5,py3.6,py3.7}-django-v{2.0,2.1} + {py3.5,py3.6,py3.7,py3.8,py3.9}-django-v{2.2} # Django 3.x - {py3.6,py3.7,py3.8,py3.9}-django-{3.0,3.1} - {py3.6,py3.7,py3.8,py3.9,py3.10}-django-{3.2} + {py3.6,py3.7,py3.8,py3.9}-django-v{3.0,3.1} + {py3.6,py3.7,py3.8,py3.9,py3.10}-django-v{3.2} # Django 4.x - {py3.8,py3.9,py3.10}-django-{4.0,4.1} + {py3.8,py3.9,py3.10}-django-v{4.0,4.1} - {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.11,0.12,1.0} - {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-flask-1.1 - {py3.6,py3.8,py3.9,py3.10}-flask-2.0 + {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-v{0.11,0.12,1.0} + {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-flask-v1.1 + {py3.6,py3.8,py3.9,py3.10}-flask-v2.0 {py3.7,py3.8,py3.9,py3.10}-asgi - {py3.7,py3.8,py3.9,py3.10}-starlette-{0.19.1,0.20,0.21} + {py3.7,py3.8,py3.9,py3.10}-starlette-v{0.19.1,0.20,0.21} {py3.7,py3.8,py3.9,py3.10}-fastapi {py3.7,py3.8,py3.9,py3.10}-quart - {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-bottle-0.12 + {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-bottle-v0.12 - {py2.7,py3.5,py3.6,py3.7}-falcon-1.4 - {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-falcon-2.0 + {py2.7,py3.5,py3.6,py3.7}-falcon-v1.4 + {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-falcon-v2.0 - {py3.5,py3.6,py3.7}-sanic-{0.8,18} 
- {py3.6,py3.7}-sanic-19 - {py3.6,py3.7,py3.8}-sanic-20 - {py3.7,py3.8,py3.9,py3.10}-sanic-21 - {py3.7,py3.8,py3.9,py3.10}-sanic-22 + {py3.5,py3.6,py3.7}-sanic-v{0.8,18} + {py3.6,py3.7}-sanic-v19 + {py3.6,py3.7,py3.8}-sanic-v20 + {py3.7,py3.8,py3.9,py3.10}-sanic-v21 + {py3.7,py3.8,py3.9,py3.10}-sanic-v22 - {py2.7}-celery-3 - {py2.7,py3.5,py3.6}-celery-{4.1,4.2} - {py2.7,py3.5,py3.6,py3.7,py3.8}-celery-{4.3,4.4} - {py3.6,py3.7,py3.8}-celery-{5.0} - {py3.7,py3.8,py3.9,py3.10}-celery-{5.1,5.2} + {py2.7}-celery-v3 + {py2.7,py3.5,py3.6}-celery-v{4.1,4.2} + {py2.7,py3.5,py3.6,py3.7,py3.8}-celery-v{4.3,4.4} + {py3.6,py3.7,py3.8}-celery-v{5.0} + {py3.7,py3.8,py3.9,py3.10}-celery-v{5.1,5.2} - py3.7-beam-{2.12,2.13,2.32,2.33} + py3.7-beam-v{2.12,2.13,2.32,2.33} # The aws_lambda tests deploy to the real AWS and have their own matrix of Python versions. py3.7-aws_lambda py3.7-gcp - {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pyramid-{1.6,1.7,1.8,1.9,1.10} + {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pyramid-v{1.6,1.7,1.8,1.9,1.10} - {py2.7,py3.5,py3.6}-rq-{0.6,0.7,0.8,0.9,0.10,0.11} - {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-rq-{0.12,0.13,1.0,1.1,1.2,1.3} - {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-rq-{1.4,1.5} + {py2.7,py3.5,py3.6}-rq-v{0.6,0.7,0.8,0.9,0.10,0.11} + {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-rq-v{0.12,0.13,1.0,1.1,1.2,1.3} + {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-rq-v{1.4,1.5} - py3.7-aiohttp-3.5 - {py3.7,py3.8,py3.9,py3.10}-aiohttp-3.6 + py3.7-aiohttp-v3.5 + {py3.7,py3.8,py3.9,py3.10}-aiohttp-v3.6 - {py3.7,py3.8,py3.9}-tornado-{5} - {py3.7,py3.8,py3.9,py3.10}-tornado-{6} + {py3.7,py3.8,py3.9}-tornado-v{5} + {py3.7,py3.8,py3.9,py3.10}-tornado-v{6} - {py3.5,py3.6,py3.7,py3.8,py3.9}-trytond-{4.6,5.0,5.2} - {py3.6,py3.7,py3.8,py3.9,py3.10}-trytond-{5.4} + {py3.5,py3.6,py3.7,py3.8,py3.9}-trytond-v{4.6,5.0,5.2} + {py3.6,py3.7,py3.8,py3.9,py3.10}-trytond-v{5.4} {py2.7,py3.8,py3.9}-requests {py2.7,py3.7,py3.8,py3.9}-redis - {py2.7,py3.7,py3.8,py3.9}-rediscluster-{1,2.1.0,2} + {py2.7,py3.7,py3.8,py3.9}-rediscluster-v{1,2.1.0,2} - {py2.7,py3.7,py3.8,py3.9,py3.10}-sqlalchemy-{1.2,1.3} + {py2.7,py3.7,py3.8,py3.9,py3.10}-sqlalchemy-v{1.2,1.3} {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pure_eval - {py3.6,py3.7,py3.8}-chalice-{1.16,1.17,1.18,1.19,1.20} + {py3.6,py3.7,py3.8}-chalice-v{1.16,1.17,1.18,1.19,1.20} - {py2.7,py3.6,py3.7,py3.8}-boto3-{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16} + {py2.7,py3.6,py3.7,py3.8}-boto3-v{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16} - {py3.6,py3.7,py3.8,py3.9,py3.10}-httpx-{0.16,0.17} + {py3.6,py3.7,py3.8,py3.9,py3.10}-httpx-v{0.16,0.17} - {py2.7,py3.6}-pymongo-{3.1} - {py2.7,py3.6,py3.7,py3.8,py3.9}-pymongo-{3.12} - {py3.6,py3.7,py3.8,py3.9,py3.10}-pymongo-{4.0} - {py3.7,py3.8,py3.9,py3.10}-pymongo-{4.1,4.2} + {py2.7,py3.6}-pymongo-v{3.1} + {py2.7,py3.6,py3.7,py3.8,py3.9}-pymongo-v{3.12} + {py3.6,py3.7,py3.8,py3.9,py3.10}-pymongo-v{4.0} + {py3.7,py3.8,py3.9,py3.10}-pymongo-v{4.1,4.2} [testenv] deps = @@ -111,41 +111,41 @@ deps = py3.4: colorama==0.4.1 py3.4: watchdog==0.10.7 - django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0 - - {py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels[daphne]>2 - {py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: pytest-asyncio - {py2.7,py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.2,3.0,3.1,3.2}: psycopg2-binary - - django-{1.8,1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0 - django-{2.2,3.0,3.1,3.2}: pytest-django>=4.0 - django-{2.2,3.0,3.1,3.2}: Werkzeug<2.0 - - django-{4.0,4.1}: djangorestframework - 
django-{4.0,4.1}: pytest-asyncio - django-{4.0,4.1}: psycopg2-binary - django-{4.0,4.1}: pytest-django - django-{4.0,4.1}: Werkzeug - - django-1.8: Django>=1.8,<1.9 - django-1.9: Django>=1.9,<1.10 - django-1.10: Django>=1.10,<1.11 - django-1.11: Django>=1.11,<1.12 - django-2.0: Django>=2.0,<2.1 - django-2.1: Django>=2.1,<2.2 - django-2.2: Django>=2.2,<2.3 - django-3.0: Django>=3.0,<3.1 - django-3.1: Django>=3.1,<3.2 - django-3.2: Django>=3.2,<3.3 - django-4.0: Django>=4.0,<4.1 - django-4.1: Django>=4.1,<4.2 + django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0 + + {py3.7,py3.8,py3.9,py3.10}-django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels[daphne]>2 + {py3.7,py3.8,py3.9,py3.10}-django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: pytest-asyncio + {py2.7,py3.7,py3.8,py3.9,py3.10}-django-v{1.11,2.2,3.0,3.1,3.2}: psycopg2-binary + + django-v{1.8,1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0 + django-v{2.2,3.0,3.1,3.2}: pytest-django>=4.0 + django-v{2.2,3.0,3.1,3.2}: Werkzeug<2.0 + + django-v{4.0,4.1}: djangorestframework + django-v{4.0,4.1}: pytest-asyncio + django-v{4.0,4.1}: psycopg2-binary + django-v{4.0,4.1}: pytest-django + django-v{4.0,4.1}: Werkzeug + + django-v1.8: Django>=1.8,<1.9 + django-v1.9: Django>=1.9,<1.10 + django-v1.10: Django>=1.10,<1.11 + django-v1.11: Django>=1.11,<1.12 + django-v2.0: Django>=2.0,<2.1 + django-v2.1: Django>=2.1,<2.2 + django-v2.2: Django>=2.2,<2.3 + django-v3.0: Django>=3.0,<3.1 + django-v3.1: Django>=3.1,<3.2 + django-v3.2: Django>=3.2,<3.3 + django-v4.0: Django>=4.0,<4.1 + django-v4.1: Django>=4.1,<4.2 flask: flask-login - flask-0.11: Flask>=0.11,<0.12 - flask-0.12: Flask>=0.12,<0.13 - flask-1.0: Flask>=1.0,<1.1 - flask-1.1: Flask>=1.1,<1.2 - flask-2.0: Flask>=2.0,<2.1 + flask-v0.11: Flask>=0.11,<0.12 + flask-v0.12: Flask>=0.12,<0.13 + flask-v1.0: Flask>=1.0,<1.1 + flask-v1.1: Flask>=1.1,<1.2 + flask-v2.0: Flask>=2.0,<2.1 asgi: pytest-asyncio asgi: async-asgi-testclient @@ -157,10 +157,10 @@ deps = starlette: pytest-asyncio starlette: python-multipart starlette: requests - starlette-0.21: httpx - starlette-0.19.1: starlette==0.19.1 - starlette-0.20: starlette>=0.20.0,<0.21.0 - starlette-0.21: starlette>=0.21.0,<0.22.0 + starlette-v0.21: httpx + starlette-v0.19.1: starlette==0.19.1 + starlette-v0.20: starlette>=0.20.0,<0.21.0 + starlette-v0.21: starlette>=0.21.0,<0.22.0 fastapi: fastapi fastapi: httpx @@ -168,42 +168,42 @@ deps = fastapi: python-multipart fastapi: requests - bottle-0.12: bottle>=0.12,<0.13 + bottle-v0.12: bottle>=0.12,<0.13 - falcon-1.4: falcon>=1.4,<1.5 - falcon-2.0: falcon>=2.0.0rc3,<3.0 + falcon-v1.4: falcon>=1.4,<1.5 + falcon-v2.0: falcon>=2.0.0rc3,<3.0 - sanic-0.8: sanic>=0.8,<0.9 - sanic-18: sanic>=18.0,<19.0 - sanic-19: sanic>=19.0,<20.0 - sanic-20: sanic>=20.0,<21.0 - sanic-21: sanic>=21.0,<22.0 - sanic-22: sanic>=22.0,<22.9.0 + sanic-v0.8: sanic>=0.8,<0.9 + sanic-v18: sanic>=18.0,<19.0 + sanic-v19: sanic>=19.0,<20.0 + sanic-v20: sanic>=20.0,<21.0 + sanic-v21: sanic>=21.0,<22.0 + sanic-v22: sanic>=22.0,<22.9.0 sanic: aiohttp - sanic-21: sanic_testing<22 - sanic-22: sanic_testing<22.9.0 + sanic-v21: sanic_testing<22 + sanic-v22: sanic_testing<22.9.0 {py3.5,py3.6}-sanic: aiocontextvars==0.2.1 py3.5-sanic: ujson<4 - beam-2.12: apache-beam>=2.12.0, <2.13.0 - beam-2.13: apache-beam>=2.13.0, <2.14.0 - beam-2.32: apache-beam>=2.32.0, <2.33.0 - beam-2.33: apache-beam>=2.33.0, <2.34.0 + beam-v2.12: apache-beam>=2.12.0, <2.13.0 + beam-v2.13: apache-beam>=2.13.0, <2.14.0 + beam-v2.32: apache-beam>=2.32.0, <2.33.0 + beam-v2.33: 
apache-beam>=2.33.0, <2.34.0 beam-master: git+https://github.com/apache/beam#egg=apache-beam&subdirectory=sdks/python celery: redis - celery-3: Celery>=3.1,<4.0 - celery-4.1: Celery>=4.1,<4.2 - celery-4.2: Celery>=4.2,<4.3 - celery-4.3: Celery>=4.3,<4.4 + celery-v3: Celery>=3.1,<4.0 + celery-v4.1: Celery>=4.1,<4.2 + celery-v4.2: Celery>=4.2,<4.3 + celery-v4.3: Celery>=4.3,<4.4 # https://github.com/celery/vine/pull/29#issuecomment-689498382 celery-4.3: vine<5.0.0 # https://github.com/celery/celery/issues/6153 - celery-4.4: Celery>=4.4,<4.5,!=4.4.4 - celery-5.0: Celery>=5.0,<5.1 - celery-5.1: Celery>=5.1,<5.2 - celery-5.2: Celery>=5.2,<5.3 + celery-v4.4: Celery>=4.4,<4.5,!=4.4.4 + celery-v5.0: Celery>=5.0,<5.1 + celery-v5.1: Celery>=5.1,<5.2 + celery-v5.2: Celery>=5.2,<5.3 py3.5-celery: newrelic<6.0.0 {py3.7}-celery: importlib-metadata<5.0 @@ -213,85 +213,85 @@ deps = aws_lambda: boto3 - pyramid-1.6: pyramid>=1.6,<1.7 - pyramid-1.7: pyramid>=1.7,<1.8 - pyramid-1.8: pyramid>=1.8,<1.9 - pyramid-1.9: pyramid>=1.9,<1.10 - pyramid-1.10: pyramid>=1.10,<1.11 + pyramid-v1.6: pyramid>=1.6,<1.7 + pyramid-v1.7: pyramid>=1.7,<1.8 + pyramid-v1.8: pyramid>=1.8,<1.9 + pyramid-v1.9: pyramid>=1.9,<1.10 + pyramid-v1.10: pyramid>=1.10,<1.11 # https://github.com/jamesls/fakeredis/issues/245 - rq-{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: fakeredis<1.0 - rq-{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: redis<3.2.2 - rq-{0.13,1.0,1.1,1.2,1.3,1.4,1.5}: fakeredis>=1.0,<1.7.4 - - rq-0.6: rq>=0.6,<0.7 - rq-0.7: rq>=0.7,<0.8 - rq-0.8: rq>=0.8,<0.9 - rq-0.9: rq>=0.9,<0.10 - rq-0.10: rq>=0.10,<0.11 - rq-0.11: rq>=0.11,<0.12 - rq-0.12: rq>=0.12,<0.13 - rq-0.13: rq>=0.13,<0.14 - rq-1.0: rq>=1.0,<1.1 - rq-1.1: rq>=1.1,<1.2 - rq-1.2: rq>=1.2,<1.3 - rq-1.3: rq>=1.3,<1.4 - rq-1.4: rq>=1.4,<1.5 - rq-1.5: rq>=1.5,<1.6 - - aiohttp-3.4: aiohttp>=3.4.0,<3.5.0 - aiohttp-3.5: aiohttp>=3.5.0,<3.6.0 + rq-v{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: fakeredis<1.0 + rq-v{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: redis<3.2.2 + rq-v{0.13,1.0,1.1,1.2,1.3,1.4,1.5}: fakeredis>=1.0,<1.7.4 + + rq-v0.6: rq>=0.6,<0.7 + rq-v0.7: rq>=0.7,<0.8 + rq-v0.8: rq>=0.8,<0.9 + rq-v0.9: rq>=0.9,<0.10 + rq-v0.10: rq>=0.10,<0.11 + rq-v0.11: rq>=0.11,<0.12 + rq-v0.12: rq>=0.12,<0.13 + rq-v0.13: rq>=0.13,<0.14 + rq-v1.0: rq>=1.0,<1.1 + rq-v1.1: rq>=1.1,<1.2 + rq-v1.2: rq>=1.2,<1.3 + rq-v1.3: rq>=1.3,<1.4 + rq-v1.4: rq>=1.4,<1.5 + rq-v1.5: rq>=1.5,<1.6 + + aiohttp-v3.4: aiohttp>=3.4.0,<3.5.0 + aiohttp-v3.5: aiohttp>=3.5.0,<3.6.0 aiohttp: pytest-aiohttp - tornado-5: tornado>=5,<6 - tornado-6: tornado>=6.0a1 + tornado-v5: tornado>=5,<6 + tornado-v6: tornado>=6.0a1 - trytond-5.4: trytond>=5.4,<5.5 - trytond-5.2: trytond>=5.2,<5.3 - trytond-5.0: trytond>=5.0,<5.1 - trytond-4.6: trytond>=4.6,<4.7 + trytond-v5.4: trytond>=5.4,<5.5 + trytond-v5.2: trytond>=5.2,<5.3 + trytond-v5.0: trytond>=5.0,<5.1 + trytond-v4.6: trytond>=4.6,<4.7 - trytond-{4.6,4.8,5.0,5.2,5.4}: werkzeug<2.0 + trytond-v{4.6,4.8,5.0,5.2,5.4}: werkzeug<2.0 redis: fakeredis<1.7.4 - rediscluster-1: redis-py-cluster>=1.0.0,<2.0.0 - rediscluster-2.1.0: redis-py-cluster>=2.0.0,<2.1.1 - rediscluster-2: redis-py-cluster>=2.1.1,<3.0.0 + rediscluster-v1: redis-py-cluster>=1.0.0,<2.0.0 + rediscluster-v2.1.0: redis-py-cluster>=2.0.0,<2.1.1 + rediscluster-v2: redis-py-cluster>=2.1.1,<3.0.0 - sqlalchemy-1.2: sqlalchemy>=1.2,<1.3 - sqlalchemy-1.3: sqlalchemy>=1.3,<1.4 + sqlalchemy-v1.2: sqlalchemy>=1.2,<1.3 + sqlalchemy-v1.3: sqlalchemy>=1.3,<1.4 linters: -r linter-requirements.txt py3.8: hypothesis pure_eval: pure_eval - chalice-1.16: 
chalice>=1.16.0,<1.17.0 - chalice-1.17: chalice>=1.17.0,<1.18.0 - chalice-1.18: chalice>=1.18.0,<1.19.0 - chalice-1.19: chalice>=1.19.0,<1.20.0 - chalice-1.20: chalice>=1.20.0,<1.21.0 + chalice-v1.16: chalice>=1.16.0,<1.17.0 + chalice-v1.17: chalice>=1.17.0,<1.18.0 + chalice-v1.18: chalice>=1.18.0,<1.19.0 + chalice-v1.19: chalice>=1.19.0,<1.20.0 + chalice-v1.20: chalice>=1.20.0,<1.21.0 chalice: pytest-chalice==0.0.5 - boto3-1.9: boto3>=1.9,<1.10 - boto3-1.10: boto3>=1.10,<1.11 - boto3-1.11: boto3>=1.11,<1.12 - boto3-1.12: boto3>=1.12,<1.13 - boto3-1.13: boto3>=1.13,<1.14 - boto3-1.14: boto3>=1.14,<1.15 - boto3-1.15: boto3>=1.15,<1.16 - boto3-1.16: boto3>=1.16,<1.17 + boto3-v1.9: boto3>=1.9,<1.10 + boto3-v1.10: boto3>=1.10,<1.11 + boto3-v1.11: boto3>=1.11,<1.12 + boto3-v1.12: boto3>=1.12,<1.13 + boto3-v1.13: boto3>=1.13,<1.14 + boto3-v1.14: boto3>=1.14,<1.15 + boto3-v1.15: boto3>=1.15,<1.16 + boto3-v1.16: boto3>=1.16,<1.17 - httpx-0.16: httpx>=0.16,<0.17 - httpx-0.17: httpx>=0.17,<0.18 + httpx-v0.16: httpx>=0.16,<0.17 + httpx-v0.17: httpx>=0.17,<0.18 pymongo: mockupdb - pymongo-3.1: pymongo>=3.1,<3.2 - pymongo-3.12: pymongo>=3.12,<4.0 - pymongo-4.0: pymongo>=4.0,<4.1 - pymongo-4.1: pymongo>=4.1,<4.2 - pymongo-4.2: pymongo>=4.2,<4.3 + pymongo-v3.1: pymongo>=3.1,<3.2 + pymongo-v3.12: pymongo>=3.12,<4.0 + pymongo-v4.0: pymongo>=4.0,<4.1 + pymongo-v4.1: pymongo>=4.1,<4.2 + pymongo-v4.2: pymongo>=4.2,<4.3 setenv = PYTHONDONTWRITEBYTECODE=1 @@ -359,19 +359,22 @@ basepython = commands = ; https://github.com/pytest-dev/pytest/issues/5532 - {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.11,0.12}: pip install pytest<5 - {py3.6,py3.7,py3.8,py3.9}-flask-{0.11}: pip install Werkzeug<2 + {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-v{0.11,0.12}: pip install pytest<5 + {py3.6,py3.7,py3.8,py3.9}-flask-v{0.11}: pip install Werkzeug<2 ; https://github.com/pallets/flask/issues/4455 - {py3.7,py3.8,py3.9,py3.10}-flask-{0.11,0.12,1.0,1.1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1" + {py3.7,py3.8,py3.9,py3.10}-flask-v{0.11,0.12,1.0,1.1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1" ; https://github.com/more-itertools/more-itertools/issues/578 - py3.5-flask-{0.11,0.12}: pip install more-itertools<8.11.0 + py3.5-flask-v{0.11,0.12}: pip install more-itertools<8.11.0 ; use old pytest for old Python versions: {py2.7,py3.4,py3.5}: pip install pytest-forked==1.1.3 - py.test --durations=5 {env:TESTPATH} {posargs} + ; Running `py.test` as an executable suffers from an import error + ; when loading tests in scenarios. In particular, django fails to + ; load the settings from the test module. 
+ python -m pytest --durations=5 -vvv {env:TESTPATH} {posargs} [testenv:linters] commands = From eb0db0a86d7e0584d80d73ac29f5188305971ab9 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 14 Dec 2022 13:28:23 +0100 Subject: [PATCH 07/12] Tox Cleanup (#1749) * Removed dead code from runtox shell script * Removed unused CI_PYTHON_VERSION --- .github/workflows/test-common.yml | 2 - .../workflows/test-integration-aiohttp.yml | 2 - .github/workflows/test-integration-asgi.yml | 2 - .../workflows/test-integration-aws_lambda.yml | 2 - .github/workflows/test-integration-beam.yml | 2 - .github/workflows/test-integration-boto3.yml | 2 - .github/workflows/test-integration-bottle.yml | 2 - .github/workflows/test-integration-celery.yml | 2 - .../workflows/test-integration-chalice.yml | 2 - .github/workflows/test-integration-django.yml | 2 - .github/workflows/test-integration-falcon.yml | 2 - .../workflows/test-integration-fastapi.yml | 2 - .github/workflows/test-integration-flask.yml | 2 - .github/workflows/test-integration-gcp.yml | 2 - .github/workflows/test-integration-httpx.yml | 2 - .../workflows/test-integration-pure_eval.yml | 2 - .../workflows/test-integration-pymongo.yml | 2 - .../workflows/test-integration-pyramid.yml | 2 - .github/workflows/test-integration-quart.yml | 2 - .github/workflows/test-integration-redis.yml | 2 - .../test-integration-rediscluster.yml | 2 - .../workflows/test-integration-requests.yml | 2 - .github/workflows/test-integration-rq.yml | 2 - .github/workflows/test-integration-sanic.yml | 2 - .../workflows/test-integration-sqlalchemy.yml | 2 - .../workflows/test-integration-starlette.yml | 2 - .../workflows/test-integration-tornado.yml | 2 - .../workflows/test-integration-trytond.yml | 2 - scripts/runtox.sh | 23 ++----- scripts/split-tox-gh-actions/ci-yaml.txt | 2 - tox.ini | 65 +++++++++++++------ 31 files changed, 51 insertions(+), 95 deletions(-) diff --git a/.github/workflows/test-common.yml b/.github/workflows/test-common.yml index d3922937fe..06a5b1f80f 100644 --- a/.github/workflows/test-common.yml +++ b/.github/workflows/test-common.yml @@ -59,8 +59,6 @@ jobs: pip install codecov tox - name: Run Tests - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-aiohttp.yml b/.github/workflows/test-integration-aiohttp.yml index 73483454c2..5d67bc70ce 100644 --- a/.github/workflows/test-integration-aiohttp.yml +++ b/.github/workflows/test-integration-aiohttp.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test aiohttp - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-asgi.yml b/.github/workflows/test-integration-asgi.yml index 16715ca230..a84a0cf8d1 100644 --- a/.github/workflows/test-integration-asgi.yml +++ b/.github/workflows/test-integration-asgi.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test asgi - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-aws_lambda.yml b/.github/workflows/test-integration-aws_lambda.yml index 4d795a642d..22ed7f4945 100644 --- a/.github/workflows/test-integration-aws_lambda.yml +++ b/.github/workflows/test-integration-aws_lambda.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test aws_lambda - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git 
a/.github/workflows/test-integration-beam.yml b/.github/workflows/test-integration-beam.yml index 0f6df2df0b..03a484537c 100644 --- a/.github/workflows/test-integration-beam.yml +++ b/.github/workflows/test-integration-beam.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test beam - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-boto3.yml b/.github/workflows/test-integration-boto3.yml index 8f390fb309..cbb4ec7db1 100644 --- a/.github/workflows/test-integration-boto3.yml +++ b/.github/workflows/test-integration-boto3.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test boto3 - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-bottle.yml b/.github/workflows/test-integration-bottle.yml index b2c3fcc92b..2fee720f4d 100644 --- a/.github/workflows/test-integration-bottle.yml +++ b/.github/workflows/test-integration-bottle.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test bottle - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-celery.yml b/.github/workflows/test-integration-celery.yml index 927a0371cd..7042f8d493 100644 --- a/.github/workflows/test-integration-celery.yml +++ b/.github/workflows/test-integration-celery.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test celery - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-chalice.yml b/.github/workflows/test-integration-chalice.yml index 44fe01e19f..d8240fe024 100644 --- a/.github/workflows/test-integration-chalice.yml +++ b/.github/workflows/test-integration-chalice.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test chalice - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-django.yml b/.github/workflows/test-integration-django.yml index 93c792b7b7..b309b3fec5 100644 --- a/.github/workflows/test-integration-django.yml +++ b/.github/workflows/test-integration-django.yml @@ -66,8 +66,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test django - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-falcon.yml b/.github/workflows/test-integration-falcon.yml index 956e8d5ba7..6141dc2917 100644 --- a/.github/workflows/test-integration-falcon.yml +++ b/.github/workflows/test-integration-falcon.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test falcon - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-fastapi.yml b/.github/workflows/test-integration-fastapi.yml index 2dc8f1e171..838cc43e4a 100644 --- a/.github/workflows/test-integration-fastapi.yml +++ b/.github/workflows/test-integration-fastapi.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test fastapi - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-flask.yml b/.github/workflows/test-integration-flask.yml index 96263508da..16e318cedc 100644 --- a/.github/workflows/test-integration-flask.yml +++ 
b/.github/workflows/test-integration-flask.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test flask - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-gcp.yml b/.github/workflows/test-integration-gcp.yml index eefdfe1aae..ca6275a537 100644 --- a/.github/workflows/test-integration-gcp.yml +++ b/.github/workflows/test-integration-gcp.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test gcp - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-httpx.yml b/.github/workflows/test-integration-httpx.yml index 9f5ac92a3f..05347aa5a4 100644 --- a/.github/workflows/test-integration-httpx.yml +++ b/.github/workflows/test-integration-httpx.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test httpx - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-pure_eval.yml b/.github/workflows/test-integration-pure_eval.yml index 1d8f7e1beb..4118ce7ecc 100644 --- a/.github/workflows/test-integration-pure_eval.yml +++ b/.github/workflows/test-integration-pure_eval.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test pure_eval - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-pymongo.yml b/.github/workflows/test-integration-pymongo.yml index fb961558ac..a691e69d1c 100644 --- a/.github/workflows/test-integration-pymongo.yml +++ b/.github/workflows/test-integration-pymongo.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test pymongo - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-pyramid.yml b/.github/workflows/test-integration-pyramid.yml index ad7bc43e85..59fbaf88ee 100644 --- a/.github/workflows/test-integration-pyramid.yml +++ b/.github/workflows/test-integration-pyramid.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test pyramid - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-quart.yml b/.github/workflows/test-integration-quart.yml index b9d82e53bc..aae555648e 100644 --- a/.github/workflows/test-integration-quart.yml +++ b/.github/workflows/test-integration-quart.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test quart - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-redis.yml b/.github/workflows/test-integration-redis.yml index 074c41fe5b..7d5eb18fb9 100644 --- a/.github/workflows/test-integration-redis.yml +++ b/.github/workflows/test-integration-redis.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test redis - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-rediscluster.yml b/.github/workflows/test-integration-rediscluster.yml index 06962926fa..453d4984a9 100644 --- a/.github/workflows/test-integration-rediscluster.yml +++ b/.github/workflows/test-integration-rediscluster.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test rediscluster - env: - CI_PYTHON_VERSION: ${{ matrix.python-version 
}} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-requests.yml b/.github/workflows/test-integration-requests.yml index 5650121a51..d07b8a7ec1 100644 --- a/.github/workflows/test-integration-requests.yml +++ b/.github/workflows/test-integration-requests.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test requests - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-rq.yml b/.github/workflows/test-integration-rq.yml index 3e3ead8118..0a1b1da443 100644 --- a/.github/workflows/test-integration-rq.yml +++ b/.github/workflows/test-integration-rq.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test rq - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-sanic.yml b/.github/workflows/test-integration-sanic.yml index 37ffd84bb9..a3966087c6 100644 --- a/.github/workflows/test-integration-sanic.yml +++ b/.github/workflows/test-integration-sanic.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test sanic - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-sqlalchemy.yml b/.github/workflows/test-integration-sqlalchemy.yml index c57fc950b7..a1a535089f 100644 --- a/.github/workflows/test-integration-sqlalchemy.yml +++ b/.github/workflows/test-integration-sqlalchemy.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test sqlalchemy - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-starlette.yml b/.github/workflows/test-integration-starlette.yml index e4083f72d5..0e34d851a4 100644 --- a/.github/workflows/test-integration-starlette.yml +++ b/.github/workflows/test-integration-starlette.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test starlette - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-tornado.yml b/.github/workflows/test-integration-tornado.yml index de5d02f6e7..cfe39f06d1 100644 --- a/.github/workflows/test-integration-tornado.yml +++ b/.github/workflows/test-integration-tornado.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test tornado - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/.github/workflows/test-integration-trytond.yml b/.github/workflows/test-integration-trytond.yml index 10853341e2..bb5997f27d 100644 --- a/.github/workflows/test-integration-trytond.yml +++ b/.github/workflows/test-integration-trytond.yml @@ -49,8 +49,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test trytond - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/scripts/runtox.sh b/scripts/runtox.sh index a658da4132..8b4c4a1bef 100755 --- a/scripts/runtox.sh +++ b/scripts/runtox.sh @@ -1,4 +1,8 @@ #!/bin/bash + +# Usage: sh scripts/runtox.sh py3.7 +# Runs all environments with substring py3.7 and the given arguments for pytest + set -ex if [ -n "$TOXPATH" ]; then @@ -9,22 +13,7 @@ else TOXPATH=./.venv/bin/tox fi -# Usage: sh scripts/runtox.sh py3.7 -# Runs all environments with substring py3.7 and the given arguments for pytest - -if [ -n "$1" ]; then - searchstring="$1" -elif [ -n 
"$CI_PYTHON_VERSION" ]; then - searchstring="$(echo py$CI_PYTHON_VERSION | sed -e 's/pypypy/pypy/g' -e 's/-dev//g')" - if [ "$searchstring" = "pypy-2.7" ]; then - searchstring=pypy - fi -elif [ -n "$AZURE_PYTHON_VERSION" ]; then - searchstring="$(echo py$AZURE_PYTHON_VERSION | sed -e 's/pypypy/pypy/g' -e 's/-dev//g')" - if [ "$searchstring" = pypy2 ]; then - searchstring=pypy - fi -fi +searchstring="$1" export TOX_PARALLEL_NO_SPINNER=1 -exec $TOXPATH -p auto -e $($TOXPATH -l | grep "$searchstring" | tr $'\n' ',') -- "${@:2}" +exec $TOXPATH -p auto -e "$($TOXPATH -l | grep "$searchstring" | tr $'\n' ',')" -- "${@:2}" diff --git a/scripts/split-tox-gh-actions/ci-yaml.txt b/scripts/split-tox-gh-actions/ci-yaml.txt index f2b6f97c27..b9ecdf39e7 100644 --- a/scripts/split-tox-gh-actions/ci-yaml.txt +++ b/scripts/split-tox-gh-actions/ci-yaml.txt @@ -41,8 +41,6 @@ jobs: pip install codecov "tox>=3,<4" - name: Test {{ framework }} - env: - CI_PYTHON_VERSION: ${{ matrix.python-version }} timeout-minutes: 45 shell: bash run: | diff --git a/tox.ini b/tox.ini index 22eac59db8..51a92a07c9 100644 --- a/tox.ini +++ b/tox.ini @@ -30,77 +30,104 @@ envlist = # Django 4.x {py3.8,py3.9,py3.10}-django-v{4.0,4.1} + # Flask {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-v{0.11,0.12,1.0} {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-flask-v1.1 {py3.6,py3.8,py3.9,py3.10}-flask-v2.0 - {py3.7,py3.8,py3.9,py3.10}-asgi + # FastAPI + {py3.7,py3.8,py3.9,py3.10}-fastapi + # Starlette {py3.7,py3.8,py3.9,py3.10}-starlette-v{0.19.1,0.20,0.21} - {py3.7,py3.8,py3.9,py3.10}-fastapi - + # Quart {py3.7,py3.8,py3.9,py3.10}-quart + # Bottle {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-bottle-v0.12 + # Falcon {py2.7,py3.5,py3.6,py3.7}-falcon-v1.4 {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-falcon-v2.0 + # Sanic {py3.5,py3.6,py3.7}-sanic-v{0.8,18} {py3.6,py3.7}-sanic-v19 {py3.6,py3.7,py3.8}-sanic-v20 {py3.7,py3.8,py3.9,py3.10}-sanic-v21 {py3.7,py3.8,py3.9,py3.10}-sanic-v22 + # Beam + py3.7-beam-v{2.12,2.13,2.32,2.33} + + # Celery {py2.7}-celery-v3 {py2.7,py3.5,py3.6}-celery-v{4.1,4.2} {py2.7,py3.5,py3.6,py3.7,py3.8}-celery-v{4.3,4.4} {py3.6,py3.7,py3.8}-celery-v{5.0} {py3.7,py3.8,py3.9,py3.10}-celery-v{5.1,5.2} - py3.7-beam-v{2.12,2.13,2.32,2.33} + # Chalice + {py3.6,py3.7,py3.8}-chalice-v{1.16,1.17,1.18,1.19,1.20} + + # Asgi + {py3.7,py3.8,py3.9,py3.10}-asgi + # AWS Lambda # The aws_lambda tests deploy to the real AWS and have their own matrix of Python versions. 
py3.7-aws_lambda + # GCP py3.7-gcp + # Pyramid {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pyramid-v{1.6,1.7,1.8,1.9,1.10} - {py2.7,py3.5,py3.6}-rq-v{0.6,0.7,0.8,0.9,0.10,0.11} - {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-rq-v{0.12,0.13,1.0,1.1,1.2,1.3} - {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-rq-v{1.4,1.5} - + # AIOHTTP py3.7-aiohttp-v3.5 {py3.7,py3.8,py3.9,py3.10}-aiohttp-v3.6 + # Tornado {py3.7,py3.8,py3.9}-tornado-v{5} {py3.7,py3.8,py3.9,py3.10}-tornado-v{6} + # Trytond {py3.5,py3.6,py3.7,py3.8,py3.9}-trytond-v{4.6,5.0,5.2} {py3.6,py3.7,py3.8,py3.9,py3.10}-trytond-v{5.4} - {py2.7,py3.8,py3.9}-requests - + # Redis {py2.7,py3.7,py3.8,py3.9}-redis - {py2.7,py3.7,py3.8,py3.9}-rediscluster-v{1,2.1.0,2} - {py2.7,py3.7,py3.8,py3.9,py3.10}-sqlalchemy-v{1.2,1.3} - - {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pure_eval - - {py3.6,py3.7,py3.8}-chalice-v{1.16,1.17,1.18,1.19,1.20} + # Redis Cluster + {py2.7,py3.7,py3.8,py3.9}-rediscluster-v{1,2.1.0,2} - {py2.7,py3.6,py3.7,py3.8}-boto3-v{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16} + # RQ (Redis Queue) + {py2.7,py3.5,py3.6}-rq-v{0.6,0.7,0.8,0.9,0.10,0.11} + {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-rq-v{0.12,0.13,1.0,1.1,1.2,1.3} + {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-rq-v{1.4,1.5} - {py3.6,py3.7,py3.8,py3.9,py3.10}-httpx-v{0.16,0.17} + # SQL Alchemy + {py2.7,py3.7,py3.8,py3.9,py3.10}-sqlalchemy-v{1.2,1.3} + # Mongo DB {py2.7,py3.6}-pymongo-v{3.1} {py2.7,py3.6,py3.7,py3.8,py3.9}-pymongo-v{3.12} {py3.6,py3.7,py3.8,py3.9,py3.10}-pymongo-v{4.0} {py3.7,py3.8,py3.9,py3.10}-pymongo-v{4.1,4.2} + # HTTPX + {py3.6,py3.7,py3.8,py3.9,py3.10}-httpx-v{0.16,0.17} + + # Requests + {py2.7,py3.8,py3.9}-requests + + # pure_eval + {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pure_eval + + # Boto3 + {py2.7,py3.6,py3.7,py3.8}-boto3-v{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16} + [testenv] deps = # if you change test-requirements.txt and your change is not being reflected @@ -361,10 +388,8 @@ commands = ; https://github.com/pytest-dev/pytest/issues/5532 {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-v{0.11,0.12}: pip install pytest<5 {py3.6,py3.7,py3.8,py3.9}-flask-v{0.11}: pip install Werkzeug<2 - ; https://github.com/pallets/flask/issues/4455 {py3.7,py3.8,py3.9,py3.10}-flask-v{0.11,0.12,1.0,1.1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1" - ; https://github.com/more-itertools/more-itertools/issues/578 py3.5-flask-v{0.11,0.12}: pip install more-itertools<8.11.0 From d0eed0ee828684f22fe2a2b28b02cf7f4ce8c74a Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Wed, 14 Dec 2022 16:12:04 +0100 Subject: [PATCH 08/12] Basic OTel support (#1772) Adding basic OpenTelementry (OTel) support to the Sentry SDK: - Adding a OTel SpanProcessor that can receive spans form OTel and then convert them into Sentry Spans and send them to Sentry. - Adding a OTel Propagator that can receive and propagate trace headers (Baggage) to keep distributed tracing intact. 
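For readers of this patch, a minimal usage sketch of how the two new pieces could be wired together in an application. This is illustrative only and not part of the change itself: the DSN and span name are placeholders, and the exact `sentry_sdk.init()` options required alongside OTel may vary by SDK version.

    import sentry_sdk
    from sentry_sdk.integrations.opentelemetry import SentryPropagator, SentrySpanProcessor

    from opentelemetry import trace
    from opentelemetry.propagate import set_global_textmap
    from opentelemetry.sdk.trace import TracerProvider

    # Initialize Sentry as usual; tracing must be enabled for spans to be sent.
    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        traces_sample_rate=1.0,
    )

    # Register the Sentry span processor so OTel spans are converted and forwarded.
    provider = TracerProvider()
    provider.add_span_processor(SentrySpanProcessor())
    trace.set_tracer_provider(provider)

    # Use the Sentry propagator so sentry-trace / baggage headers are extracted and injected.
    set_global_textmap(SentryPropagator())

    # From here on, spans created through OTel also show up in Sentry.
    tracer = trace.get_tracer(__name__)
    with tracer.start_as_current_span("example-span"):
        pass

With this wiring, OTel stays the instrumentation source of truth: spans flow from OTel into SentrySpanProcessor, which maps them onto Sentry transactions and spans, while SentryPropagator keeps the sentry-trace and baggage headers intact across service boundaries.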
--- .../test-integration-opentelemetry.yml | 73 ++++ .../integrations/opentelemetry/__init__.py | 7 + .../integrations/opentelemetry/consts.py | 6 + .../integrations/opentelemetry/propagator.py | 113 +++++ .../opentelemetry/span_processor.py | 236 ++++++++++ sentry_sdk/tracing.py | 22 +- setup.py | 1 + tests/integrations/opentelemetry/__init__.py | 3 + .../opentelemetry/test_propagator.py | 248 +++++++++++ .../opentelemetry/test_span_processor.py | 405 ++++++++++++++++++ tests/tracing/test_noop_span.py | 46 ++ tox.ini | 5 + 12 files changed, 1154 insertions(+), 11 deletions(-) create mode 100644 .github/workflows/test-integration-opentelemetry.yml create mode 100644 sentry_sdk/integrations/opentelemetry/__init__.py create mode 100644 sentry_sdk/integrations/opentelemetry/consts.py create mode 100644 sentry_sdk/integrations/opentelemetry/propagator.py create mode 100644 sentry_sdk/integrations/opentelemetry/span_processor.py create mode 100644 tests/integrations/opentelemetry/__init__.py create mode 100644 tests/integrations/opentelemetry/test_propagator.py create mode 100644 tests/integrations/opentelemetry/test_span_processor.py create mode 100644 tests/tracing/test_noop_span.py diff --git a/.github/workflows/test-integration-opentelemetry.yml b/.github/workflows/test-integration-opentelemetry.yml new file mode 100644 index 0000000000..73a16098e4 --- /dev/null +++ b/.github/workflows/test-integration-opentelemetry.yml @@ -0,0 +1,73 @@ +name: Test opentelemetry + +on: + push: + branches: + - master + - release/** + + pull_request: + +# Cancel in progress workflows on pull_requests. +# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + +env: + BUILD_CACHE_KEY: ${{ github.sha }} + CACHED_BUILD_PATHS: | + ${{ github.workspace }}/dist-serverless + +jobs: + test: + name: opentelemetry, python ${{ matrix.python-version }}, ${{ matrix.os }} + runs-on: ${{ matrix.os }} + timeout-minutes: 45 + + strategy: + fail-fast: false + matrix: + python-version: ["3.7","3.8","3.9","3.10"] + # python3.6 reached EOL and is no longer being supported on + # new versions of hosted runners on Github Actions + # ubuntu-20.04 is the last version that supported python3.6 + # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877 + os: [ubuntu-20.04] + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Setup Test Env + run: | + pip install codecov "tox>=3,<4" + + - name: Test opentelemetry + timeout-minutes: 45 + shell: bash + run: | + set -x # print commands that are executed + coverage erase + + ./scripts/runtox.sh "${{ matrix.python-version }}-opentelemetry" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + coverage combine .coverage* + coverage xml -i + codecov --file coverage.xml + + check_required_tests: + name: All opentelemetry tests passed or skipped + needs: test + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test.result, 'failure') + run: | + echo "One of the dependent jobs have failed. You may need to re-run it." 
&& exit 1 diff --git a/sentry_sdk/integrations/opentelemetry/__init__.py b/sentry_sdk/integrations/opentelemetry/__init__.py new file mode 100644 index 0000000000..e0020204d5 --- /dev/null +++ b/sentry_sdk/integrations/opentelemetry/__init__.py @@ -0,0 +1,7 @@ +from sentry_sdk.integrations.opentelemetry.span_processor import ( # noqa: F401 + SentrySpanProcessor, +) + +from sentry_sdk.integrations.opentelemetry.propagator import ( # noqa: F401 + SentryPropagator, +) diff --git a/sentry_sdk/integrations/opentelemetry/consts.py b/sentry_sdk/integrations/opentelemetry/consts.py new file mode 100644 index 0000000000..79663dd670 --- /dev/null +++ b/sentry_sdk/integrations/opentelemetry/consts.py @@ -0,0 +1,6 @@ +from opentelemetry.context import ( # type: ignore + create_key, +) + +SENTRY_TRACE_KEY = create_key("sentry-trace") +SENTRY_BAGGAGE_KEY = create_key("sentry-baggage") diff --git a/sentry_sdk/integrations/opentelemetry/propagator.py b/sentry_sdk/integrations/opentelemetry/propagator.py new file mode 100644 index 0000000000..7b2a88e347 --- /dev/null +++ b/sentry_sdk/integrations/opentelemetry/propagator.py @@ -0,0 +1,113 @@ +from opentelemetry import trace # type: ignore +from opentelemetry.context import ( # type: ignore + Context, + get_current, + set_value, +) +from opentelemetry.propagators.textmap import ( # type: ignore + CarrierT, + Getter, + Setter, + TextMapPropagator, + default_getter, + default_setter, +) +from opentelemetry.trace import ( # type: ignore + TraceFlags, + NonRecordingSpan, + SpanContext, +) +from sentry_sdk.integrations.opentelemetry.consts import ( + SENTRY_BAGGAGE_KEY, + SENTRY_TRACE_KEY, +) +from sentry_sdk.integrations.opentelemetry.span_processor import ( + SentrySpanProcessor, +) + +from sentry_sdk.tracing import ( + BAGGAGE_HEADER_NAME, + SENTRY_TRACE_HEADER_NAME, +) +from sentry_sdk.tracing_utils import Baggage, extract_sentrytrace_data +from sentry_sdk._types import MYPY + +if MYPY: + from typing import Optional + from typing import Set + + +class SentryPropagator(TextMapPropagator): # type: ignore + """ + Propagates tracing headers for Sentry's tracing system in a way OTel understands. + """ + + def extract(self, carrier, context=None, getter=default_getter): + # type: (CarrierT, Optional[Context], Getter) -> Context + if context is None: + context = get_current() + + sentry_trace = getter.get(carrier, SENTRY_TRACE_HEADER_NAME) + if not sentry_trace: + return context + + sentrytrace = extract_sentrytrace_data(sentry_trace[0]) + if not sentrytrace: + return context + + context = set_value(SENTRY_TRACE_KEY, sentrytrace, context) + + trace_id, span_id = sentrytrace["trace_id"], sentrytrace["parent_span_id"] + + span_context = SpanContext( + trace_id=int(trace_id, 16), # type: ignore + span_id=int(span_id, 16), # type: ignore + # we simulate a sampled trace on the otel side and leave the sampling to sentry + trace_flags=TraceFlags(TraceFlags.SAMPLED), + is_remote=True, + ) + + baggage_header = getter.get(carrier, BAGGAGE_HEADER_NAME) + + if baggage_header: + baggage = Baggage.from_incoming_header(baggage_header[0]) + else: + # If there's an incoming sentry-trace but no incoming baggage header, + # for instance in traces coming from older SDKs, + # baggage will be empty and frozen and won't be populated as head SDK. 
+ baggage = Baggage(sentry_items={}) + + baggage.freeze() + context = set_value(SENTRY_BAGGAGE_KEY, baggage, context) + + span = NonRecordingSpan(span_context) + modified_context = trace.set_span_in_context(span, context) + return modified_context + + def inject(self, carrier, context=None, setter=default_setter): + # type: (CarrierT, Optional[Context], Setter) -> None + if context is None: + context = get_current() + + current_span = trace.get_current_span(context) + + if not current_span.context.is_valid: + return + + span_id = trace.format_span_id(current_span.context.span_id) + + span_map = SentrySpanProcessor().otel_span_map + sentry_span = span_map.get(span_id, None) + if not sentry_span: + return + + setter.set(carrier, SENTRY_TRACE_HEADER_NAME, sentry_span.to_traceparent()) + + baggage = sentry_span.containing_transaction.get_baggage() + if baggage: + setter.set(carrier, BAGGAGE_HEADER_NAME, baggage.serialize()) + + @property + def fields(self): + # type: () -> Set[str] + return {SENTRY_TRACE_HEADER_NAME, BAGGAGE_HEADER_NAME} diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py new file mode 100644 index 0000000000..0ec9c620af --- /dev/null +++ b/sentry_sdk/integrations/opentelemetry/span_processor.py @@ -0,0 +1,236 @@ +from datetime import datetime + +from opentelemetry.context import get_value # type: ignore +from opentelemetry.sdk.trace import SpanProcessor # type: ignore +from opentelemetry.semconv.trace import SpanAttributes # type: ignore +from opentelemetry.trace import ( # type: ignore + format_span_id, + format_trace_id, + SpanContext, + Span as OTelSpan, + SpanKind, +) +from sentry_sdk.consts import INSTRUMENTER +from sentry_sdk.hub import Hub +from sentry_sdk.integrations.opentelemetry.consts import ( + SENTRY_BAGGAGE_KEY, + SENTRY_TRACE_KEY, +) +from sentry_sdk.tracing import Transaction, Span as SentrySpan +from sentry_sdk.utils import Dsn +from sentry_sdk._types import MYPY + +from urllib3.util import parse_url as urlparse # type: ignore + +if MYPY: + from typing import Any + from typing import Dict + from typing import Union + +OPEN_TELEMETRY_CONTEXT = "otel" + + +class SentrySpanProcessor(SpanProcessor): # type: ignore + """ + Converts OTel spans into Sentry spans so they can be sent to the Sentry backend. 
+ """ + + # The mapping from otel span ids to sentry spans + otel_span_map = {} # type: Dict[str, Union[Transaction, OTelSpan]] + + def __new__(cls): + # type: () -> SentrySpanProcessor + if not hasattr(cls, "instance"): + cls.instance = super(SentrySpanProcessor, cls).__new__(cls) + + return cls.instance + + def on_start(self, otel_span, parent_context=None): + # type: (OTelSpan, SpanContext) -> None + hub = Hub.current + if not hub: + return + + if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL: + return + + if not otel_span.context.is_valid: + return + + if self._is_sentry_span(hub, otel_span): + return + + trace_data = self._get_trace_data(otel_span, parent_context) + + parent_span_id = trace_data["parent_span_id"] + sentry_parent_span = ( + self.otel_span_map.get(parent_span_id, None) if parent_span_id else None + ) + + sentry_span = None + if sentry_parent_span: + sentry_span = sentry_parent_span.start_child( + span_id=trace_data["span_id"], + description=otel_span.name, + start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9), + instrumenter=INSTRUMENTER.OTEL, + ) + else: + sentry_span = hub.start_transaction( + name=otel_span.name, + span_id=trace_data["span_id"], + parent_span_id=parent_span_id, + trace_id=trace_data["trace_id"], + baggage=trace_data["baggage"], + start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9), + instrumenter=INSTRUMENTER.OTEL, + ) + + self.otel_span_map[trace_data["span_id"]] = sentry_span + + def on_end(self, otel_span): + # type: (OTelSpan) -> None + hub = Hub.current + if not hub: + return + + if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL: + return + + if not otel_span.context.is_valid: + return + + span_id = format_span_id(otel_span.context.span_id) + sentry_span = self.otel_span_map.pop(span_id, None) + if not sentry_span: + return + + sentry_span.op = otel_span.name + + if isinstance(sentry_span, Transaction): + sentry_span.name = otel_span.name + sentry_span.set_context( + OPEN_TELEMETRY_CONTEXT, self._get_otel_context(otel_span) + ) + + else: + self._update_span_with_otel_data(sentry_span, otel_span) + + sentry_span.finish( + end_timestamp=datetime.fromtimestamp(otel_span.end_time / 1e9) + ) + + def _is_sentry_span(self, hub, otel_span): + # type: (Hub, OTelSpan) -> bool + """ + Break infinite loop: + HTTP requests to Sentry are caught by OTel and send again to Sentry. + """ + otel_span_url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None) + dsn_url = hub.client and Dsn(hub.client.dsn or "").netloc + + if otel_span_url and dsn_url in otel_span_url: + return True + + return False + + def _get_otel_context(self, otel_span): + # type: (OTelSpan) -> Dict[str, Any] + """ + Returns the OTel context for Sentry. + See: https://develop.sentry.dev/sdk/performance/opentelemetry/#step-5-add-opentelemetry-context + """ + ctx = {} + + if otel_span.attributes: + ctx["attributes"] = dict(otel_span.attributes) + + if otel_span.resource.attributes: + ctx["resource"] = dict(otel_span.resource.attributes) + + return ctx + + def _get_trace_data(self, otel_span, parent_context): + # type: (OTelSpan, SpanContext) -> Dict[str, Any] + """ + Extracts tracing information from one OTel span and its parent OTel context. 
+ """ + trace_data = {} + + span_id = format_span_id(otel_span.context.span_id) + trace_data["span_id"] = span_id + + trace_id = format_trace_id(otel_span.context.trace_id) + trace_data["trace_id"] = trace_id + + parent_span_id = ( + format_span_id(otel_span.parent.span_id) if otel_span.parent else None + ) + trace_data["parent_span_id"] = parent_span_id + + sentry_trace_data = get_value(SENTRY_TRACE_KEY, parent_context) + trace_data["parent_sampled"] = ( + sentry_trace_data[2] if sentry_trace_data else None + ) + + baggage = get_value(SENTRY_BAGGAGE_KEY, parent_context) + trace_data["baggage"] = baggage + + return trace_data + + def _update_span_with_otel_data(self, sentry_span, otel_span): + # type: (SentrySpan, OTelSpan) -> None + """ + Convert OTel span data and update the Sentry span with it. + This should eventually happen on the server when ingesting the spans. + """ + for key, val in otel_span.attributes.items(): + sentry_span.set_data(key, val) + + sentry_span.set_data("otel.kind", otel_span.kind) + + op = otel_span.name + description = otel_span.name + + http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD, None) + db_query = otel_span.attributes.get(SpanAttributes.DB_SYSTEM, None) + + if http_method: + op = "http" + + if otel_span.kind == SpanKind.SERVER: + op += ".server" + elif otel_span.kind == SpanKind.CLIENT: + op += ".client" + + description = http_method + + peer_name = otel_span.attributes.get(SpanAttributes.NET_PEER_NAME, None) + if peer_name: + description += " {}".format(peer_name) + + target = otel_span.attributes.get(SpanAttributes.HTTP_TARGET, None) + if target: + description += " {}".format(target) + + if not peer_name and not target: + url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None) + if url: + parsed_url = urlparse(url) + url = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}" + description += " {}".format(url) + + status_code = otel_span.attributes.get( + SpanAttributes.HTTP_STATUS_CODE, None + ) + if status_code: + sentry_span.set_http_status(status_code) + + elif db_query: + op = "db" + statement = otel_span.attributes.get(SpanAttributes.DB_STATEMENT, None) + if statement: + description = statement + + sentry_span.op = op + sentry_span.description = description diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py index 93d22dc758..dc65ea5fd7 100644 --- a/sentry_sdk/tracing.py +++ b/sentry_sdk/tracing.py @@ -856,43 +856,43 @@ def _set_initial_sampling_decision(self, sampling_context): class NoOpSpan(Span): def __repr__(self): - # type: () -> Any + # type: () -> str return self.__class__.__name__ def __enter__(self): - # type: () -> Any + # type: () -> NoOpSpan return self def __exit__(self, ty, value, tb): - # type: (Any, Any, Any) -> Any + # type: (Optional[Any], Optional[Any], Optional[Any]) -> None pass def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs): - # type: (str, **Any) -> Any - pass + # type: (str, **Any) -> NoOpSpan + return NoOpSpan() def new_span(self, **kwargs): - # type: (**Any) -> Any + # type: (**Any) -> NoOpSpan pass def set_tag(self, key, value): - # type: (Any, Any) -> Any + # type: (str, Any) -> None pass def set_data(self, key, value): - # type: (Any, Any) -> Any + # type: (str, Any) -> None pass def set_status(self, value): - # type: (Any) -> Any + # type: (str) -> None pass def set_http_status(self, http_status): - # type: (Any) -> Any + # type: (int) -> None pass def finish(self, hub=None, end_timestamp=None): - # type: (Any, Any) -> Any + # type: (Optional[sentry_sdk.Hub], 
Optional[datetime]) -> Optional[str] pass diff --git a/setup.py b/setup.py index 687111566b..318c9dc837 100644 --- a/setup.py +++ b/setup.py @@ -63,6 +63,7 @@ def get_file_text(file_name): "starlette": ["starlette>=0.19.1"], "fastapi": ["fastapi>=0.79.0"], "pymongo": ["pymongo>=3.1"], + "opentelemetry": ["opentelemetry-distro>=0.350b0"], }, classifiers=[ "Development Status :: 5 - Production/Stable", diff --git a/tests/integrations/opentelemetry/__init__.py b/tests/integrations/opentelemetry/__init__.py new file mode 100644 index 0000000000..39ecc610d5 --- /dev/null +++ b/tests/integrations/opentelemetry/__init__.py @@ -0,0 +1,3 @@ +import pytest + +django = pytest.importorskip("opentelemetry") diff --git a/tests/integrations/opentelemetry/test_propagator.py b/tests/integrations/opentelemetry/test_propagator.py new file mode 100644 index 0000000000..529aa99c09 --- /dev/null +++ b/tests/integrations/opentelemetry/test_propagator.py @@ -0,0 +1,248 @@ +from mock import MagicMock +import mock + +from opentelemetry.context import get_current +from opentelemetry.trace.propagation import get_current_span +from opentelemetry.trace import ( + set_span_in_context, + TraceFlags, + SpanContext, +) +from sentry_sdk.integrations.opentelemetry.consts import ( + SENTRY_BAGGAGE_KEY, + SENTRY_TRACE_KEY, +) + +from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator +from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor +from sentry_sdk.tracing_utils import Baggage + + +def test_extract_no_context_no_sentry_trace_header(): + """ + No context and NO Sentry trace data in getter. + Extract should return empty context. + """ + carrier = None + context = None + getter = MagicMock() + getter.get.return_value = None + + modified_context = SentryPropagator().extract(carrier, context, getter) + + assert modified_context == {} + + +def test_extract_context_no_sentry_trace_header(): + """ + Context but NO Sentry trace data in getter. + Extract should return context as is. + """ + carrier = None + context = {"some": "value"} + getter = MagicMock() + getter.get.return_value = None + + modified_context = SentryPropagator().extract(carrier, context, getter) + + assert modified_context == context + + +def test_extract_empty_context_sentry_trace_header_no_baggage(): + """ + Empty context but Sentry trace data but NO Baggage in getter. + Extract should return context that has empty baggage in it and also a NoopSpan with span_id and trace_id. + """ + carrier = None + context = {} + getter = MagicMock() + getter.get.side_effect = [ + ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"], + None, + ] + + modified_context = SentryPropagator().extract(carrier, context, getter) + + assert len(modified_context.keys()) == 3 + + assert modified_context[SENTRY_TRACE_KEY] == { + "trace_id": "1234567890abcdef1234567890abcdef", + "parent_span_id": "1234567890abcdef", + "parent_sampled": True, + } + assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == "" + + span_context = get_current_span(modified_context).get_span_context() + assert span_context.span_id == int("1234567890abcdef", 16) + assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16) + + +def test_extract_context_sentry_trace_header_baggage(): + """ + Empty context but Sentry trace data and Baggage in getter. + Extract should return context that has baggage in it and also a NoopSpan with span_id and trace_id. 
+ """ + baggage_header = ( + "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, " + "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, " + "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;" + ) + + carrier = None + context = {"some": "value"} + getter = MagicMock() + getter.get.side_effect = [ + ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"], + [baggage_header], + ] + + modified_context = SentryPropagator().extract(carrier, context, getter) + + assert len(modified_context.keys()) == 4 + + assert modified_context[SENTRY_TRACE_KEY] == { + "trace_id": "1234567890abcdef1234567890abcdef", + "parent_span_id": "1234567890abcdef", + "parent_sampled": True, + } + + assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == ( + "sentry-trace_id=771a43a4192642f0b136d5159a501700," + "sentry-public_key=49d0f7386ad645858ae85020e393bef3," + "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie" + ) + + span_context = get_current_span(modified_context).get_span_context() + assert span_context.span_id == int("1234567890abcdef", 16) + assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16) + + +def test_inject_empty_otel_span_map(): + """ + Empty otel_span_map. + So there is no sentry_span to be found in inject() + and the function is returned early and no setters are called. + """ + carrier = None + context = get_current() + setter = MagicMock() + setter.set = MagicMock() + + span_context = SpanContext( + trace_id=int("1234567890abcdef1234567890abcdef", 16), + span_id=int("1234567890abcdef", 16), + trace_flags=TraceFlags(TraceFlags.SAMPLED), + is_remote=True, + ) + span = MagicMock() + span.context = span_context + + with mock.patch( + "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span", + return_value=span, + ): + full_context = set_span_in_context(span, context) + SentryPropagator().inject(carrier, full_context, setter) + + setter.set.assert_not_called() + + +def test_inject_sentry_span_no_baggage(): + """ + Inject a sentry span with no baggage. + """ + carrier = None + context = get_current() + setter = MagicMock() + setter.set = MagicMock() + + trace_id = "1234567890abcdef1234567890abcdef" + span_id = "1234567890abcdef" + + span_context = SpanContext( + trace_id=int(trace_id, 16), + span_id=int(span_id, 16), + trace_flags=TraceFlags(TraceFlags.SAMPLED), + is_remote=True, + ) + span = MagicMock() + span.context = span_context + + sentry_span = MagicMock() + sentry_span.to_traceparent = mock.Mock( + return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1" + ) + sentry_span.containing_transaction.get_baggage = mock.Mock(return_value=None) + + span_processor = SentrySpanProcessor() + span_processor.otel_span_map[span_id] = sentry_span + + with mock.patch( + "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span", + return_value=span, + ): + full_context = set_span_in_context(span, context) + SentryPropagator().inject(carrier, full_context, setter) + + setter.set.assert_called_once_with( + carrier, + "sentry-trace", + "1234567890abcdef1234567890abcdef-1234567890abcdef-1", + ) + + +def test_inject_sentry_span_baggage(): + """ + Inject a sentry span with baggage. 
+ """ + carrier = None + context = get_current() + setter = MagicMock() + setter.set = MagicMock() + + trace_id = "1234567890abcdef1234567890abcdef" + span_id = "1234567890abcdef" + + span_context = SpanContext( + trace_id=int(trace_id, 16), + span_id=int(span_id, 16), + trace_flags=TraceFlags(TraceFlags.SAMPLED), + is_remote=True, + ) + span = MagicMock() + span.context = span_context + + sentry_span = MagicMock() + sentry_span.to_traceparent = mock.Mock( + return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1" + ) + sentry_items = { + "sentry-trace_id": "771a43a4192642f0b136d5159a501700", + "sentry-public_key": "49d0f7386ad645858ae85020e393bef3", + "sentry-sample_rate": 0.01337, + "sentry-user_id": "Amélie", + } + baggage = Baggage(sentry_items=sentry_items) + sentry_span.containing_transaction.get_baggage = MagicMock(return_value=baggage) + + span_processor = SentrySpanProcessor() + span_processor.otel_span_map[span_id] = sentry_span + + with mock.patch( + "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span", + return_value=span, + ): + full_context = set_span_in_context(span, context) + SentryPropagator().inject(carrier, full_context, setter) + + setter.set.assert_any_call( + carrier, + "sentry-trace", + "1234567890abcdef1234567890abcdef-1234567890abcdef-1", + ) + + setter.set.assert_any_call( + carrier, + "baggage", + baggage.serialize(), + ) diff --git a/tests/integrations/opentelemetry/test_span_processor.py b/tests/integrations/opentelemetry/test_span_processor.py new file mode 100644 index 0000000000..6d151c9cfe --- /dev/null +++ b/tests/integrations/opentelemetry/test_span_processor.py @@ -0,0 +1,405 @@ +from datetime import datetime +from mock import MagicMock +import mock +import time +from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor +from sentry_sdk.tracing import Span, Transaction + +from opentelemetry.trace import SpanKind + + +def test_is_sentry_span(): + otel_span = MagicMock() + + hub = MagicMock() + hub.client = None + + span_processor = SentrySpanProcessor() + assert not span_processor._is_sentry_span(hub, otel_span) + + client = MagicMock() + client.options = {"instrumenter": "otel"} + client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" + + hub.client = client + assert not span_processor._is_sentry_span(hub, otel_span) + + otel_span.attributes = { + "http.url": "https://example.com", + } + assert not span_processor._is_sentry_span(hub, otel_span) + + otel_span.attributes = { + "http.url": "https://o123456.ingest.sentry.io/api/123/envelope", + } + assert span_processor._is_sentry_span(hub, otel_span) + + +def test_get_otel_context(): + otel_span = MagicMock() + otel_span.attributes = {"foo": "bar"} + otel_span.resource = MagicMock() + otel_span.resource.attributes = {"baz": "qux"} + + span_processor = SentrySpanProcessor() + otel_context = span_processor._get_otel_context(otel_span) + + assert otel_context == { + "attributes": {"foo": "bar"}, + "resource": {"baz": "qux"}, + } + + +def test_get_trace_data_with_span_and_trace(): + otel_span = MagicMock() + otel_span.context = MagicMock() + otel_span.context.trace_id = int("1234567890abcdef1234567890abcdef", 16) + otel_span.context.span_id = int("1234567890abcdef", 16) + otel_span.parent = None + + parent_context = {} + + span_processor = SentrySpanProcessor() + sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) + assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" + assert 
sentry_trace_data["span_id"] == "1234567890abcdef" + assert sentry_trace_data["parent_span_id"] is None + assert sentry_trace_data["parent_sampled"] is None + assert sentry_trace_data["baggage"] is None + + +def test_get_trace_data_with_span_and_trace_and_parent(): + otel_span = MagicMock() + otel_span.context = MagicMock() + otel_span.context.trace_id = int("1234567890abcdef1234567890abcdef", 16) + otel_span.context.span_id = int("1234567890abcdef", 16) + otel_span.parent = MagicMock() + otel_span.parent.span_id = int("abcdef1234567890", 16) + + parent_context = {} + + span_processor = SentrySpanProcessor() + sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) + assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" + assert sentry_trace_data["span_id"] == "1234567890abcdef" + assert sentry_trace_data["parent_span_id"] == "abcdef1234567890" + assert sentry_trace_data["parent_sampled"] is None + assert sentry_trace_data["baggage"] is None + + +def test_get_trace_data_with_sentry_trace(): + otel_span = MagicMock() + otel_span.context = MagicMock() + otel_span.context.trace_id = int("1234567890abcdef1234567890abcdef", 16) + otel_span.context.span_id = int("1234567890abcdef", 16) + otel_span.parent = MagicMock() + otel_span.parent.span_id = int("abcdef1234567890", 16) + + parent_context = {} + + with mock.patch( + "sentry_sdk.integrations.opentelemetry.span_processor.get_value", + side_effect=[ + ("1234567890abcdef1234567890abcdef", "1234567890abcdef", True), + None, + ], + ): + span_processor = SentrySpanProcessor() + sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) + assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" + assert sentry_trace_data["span_id"] == "1234567890abcdef" + assert sentry_trace_data["parent_span_id"] == "abcdef1234567890" + assert sentry_trace_data["parent_sampled"] is True + assert sentry_trace_data["baggage"] is None + + with mock.patch( + "sentry_sdk.integrations.opentelemetry.span_processor.get_value", + side_effect=[ + ("1234567890abcdef1234567890abcdef", "1234567890abcdef", False), + None, + ], + ): + span_processor = SentrySpanProcessor() + sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) + assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" + assert sentry_trace_data["span_id"] == "1234567890abcdef" + assert sentry_trace_data["parent_span_id"] == "abcdef1234567890" + assert sentry_trace_data["parent_sampled"] is False + assert sentry_trace_data["baggage"] is None + + +def test_get_trace_data_with_sentry_trace_and_baggage(): + otel_span = MagicMock() + otel_span.context = MagicMock() + otel_span.context.trace_id = int("1234567890abcdef1234567890abcdef", 16) + otel_span.context.span_id = int("1234567890abcdef", 16) + otel_span.parent = MagicMock() + otel_span.parent.span_id = int("abcdef1234567890", 16) + + parent_context = {} + + baggage = ( + "sentry-trace_id=771a43a4192642f0b136d5159a501700," + "sentry-public_key=49d0f7386ad645858ae85020e393bef3," + "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie" + ) + + with mock.patch( + "sentry_sdk.integrations.opentelemetry.span_processor.get_value", + side_effect=[ + ("1234567890abcdef1234567890abcdef", "1234567890abcdef", True), + baggage, + ], + ): + span_processor = SentrySpanProcessor() + sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context) + assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef" + assert 
sentry_trace_data["span_id"] == "1234567890abcdef" + assert sentry_trace_data["parent_span_id"] == "abcdef1234567890" + assert sentry_trace_data["parent_sampled"] + assert sentry_trace_data["baggage"] == baggage + + +def test_update_span_with_otel_data_http_method(): + sentry_span = Span() + + otel_span = MagicMock() + otel_span.name = "Test OTel Span" + otel_span.kind = SpanKind.CLIENT + otel_span.attributes = { + "http.method": "GET", + "http.status_code": 429, + "http.status_text": "xxx", + "http.user_agent": "curl/7.64.1", + "net.peer.name": "example.com", + "http.target": "/", + } + + span_processor = SentrySpanProcessor() + span_processor._update_span_with_otel_data(sentry_span, otel_span) + + assert sentry_span.op == "http.client" + assert sentry_span.description == "GET example.com /" + assert sentry_span._tags["http.status_code"] == "429" + assert sentry_span.status == "resource_exhausted" + + assert sentry_span._data["http.method"] == "GET" + assert sentry_span._data["http.status_code"] == 429 + assert sentry_span._data["http.status_text"] == "xxx" + assert sentry_span._data["http.user_agent"] == "curl/7.64.1" + assert sentry_span._data["net.peer.name"] == "example.com" + assert sentry_span._data["http.target"] == "/" + + +def test_update_span_with_otel_data_http_method2(): + sentry_span = Span() + + otel_span = MagicMock() + otel_span.name = "Test OTel Span" + otel_span.kind = SpanKind.SERVER + otel_span.attributes = { + "http.method": "GET", + "http.status_code": 429, + "http.status_text": "xxx", + "http.user_agent": "curl/7.64.1", + "http.url": "https://httpbin.org/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef", + } + + span_processor = SentrySpanProcessor() + span_processor._update_span_with_otel_data(sentry_span, otel_span) + + assert sentry_span.op == "http.server" + assert sentry_span.description == "GET https://httpbin.org/status/403" + assert sentry_span._tags["http.status_code"] == "429" + assert sentry_span.status == "resource_exhausted" + + assert sentry_span._data["http.method"] == "GET" + assert sentry_span._data["http.status_code"] == 429 + assert sentry_span._data["http.status_text"] == "xxx" + assert sentry_span._data["http.user_agent"] == "curl/7.64.1" + assert ( + sentry_span._data["http.url"] + == "https://httpbin.org/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef" + ) + + +def test_update_span_with_otel_data_db_query(): + sentry_span = Span() + + otel_span = MagicMock() + otel_span.name = "Test OTel Span" + otel_span.attributes = { + "db.system": "postgresql", + "db.statement": "SELECT * FROM table where pwd = '123456'", + } + + span_processor = SentrySpanProcessor() + span_processor._update_span_with_otel_data(sentry_span, otel_span) + + assert sentry_span.op == "db" + assert sentry_span.description == "SELECT * FROM table where pwd = '123456'" + + assert sentry_span._data["db.system"] == "postgresql" + assert ( + sentry_span._data["db.statement"] == "SELECT * FROM table where pwd = '123456'" + ) + + +def test_on_start_transaction(): + otel_span = MagicMock() + otel_span.name = "Sample OTel Span" + otel_span.start_time = time.time_ns() + otel_span.context = MagicMock() + otel_span.context.trace_id = int("1234567890abcdef1234567890abcdef", 16) + otel_span.context.span_id = int("1234567890abcdef", 16) + otel_span.parent = MagicMock() + otel_span.parent.span_id = int("abcdef1234567890", 16) + + parent_context = {} + + fake_client = MagicMock() + fake_client.options = 
{"instrumenter": "otel"} + fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" + + current_hub = MagicMock() + current_hub.client = fake_client + + fake_hub = MagicMock() + fake_hub.current = current_hub + + with mock.patch( + "sentry_sdk.integrations.opentelemetry.span_processor.Hub", fake_hub + ): + span_processor = SentrySpanProcessor() + span_processor.on_start(otel_span, parent_context) + + fake_hub.current.start_transaction.assert_called_once_with( + name="Sample OTel Span", + span_id="1234567890abcdef", + parent_span_id="abcdef1234567890", + trace_id="1234567890abcdef1234567890abcdef", + baggage=None, + start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9), + instrumenter="otel", + ) + + assert len(span_processor.otel_span_map.keys()) == 1 + assert list(span_processor.otel_span_map.keys())[0] == "1234567890abcdef" + + +def test_on_start_child(): + otel_span = MagicMock() + otel_span.name = "Sample OTel Span" + otel_span.start_time = time.time_ns() + otel_span.context = MagicMock() + otel_span.context.trace_id = int("1234567890abcdef1234567890abcdef", 16) + otel_span.context.span_id = int("1234567890abcdef", 16) + otel_span.parent = MagicMock() + otel_span.parent.span_id = int("abcdef1234567890", 16) + + parent_context = {} + + fake_client = MagicMock() + fake_client.options = {"instrumenter": "otel"} + fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456" + + current_hub = MagicMock() + current_hub.client = fake_client + + fake_hub = MagicMock() + fake_hub.current = current_hub + + with mock.patch( + "sentry_sdk.integrations.opentelemetry.span_processor.Hub", fake_hub + ): + fake_span = MagicMock() + + span_processor = SentrySpanProcessor() + span_processor.otel_span_map["abcdef1234567890"] = fake_span + span_processor.on_start(otel_span, parent_context) + + fake_span.start_child.assert_called_once_with( + span_id="1234567890abcdef", + description="Sample OTel Span", + start_timestamp=datetime.fromtimestamp(otel_span.start_time / 1e9), + instrumenter="otel", + ) + + assert len(span_processor.otel_span_map.keys()) == 2 + assert "abcdef1234567890" in span_processor.otel_span_map.keys() + assert "1234567890abcdef" in span_processor.otel_span_map.keys() + + +def test_on_end_no_sentry_span(): + """ + If on_end is called on a span that is not in the otel_span_map, it should be a no-op. + """ + otel_span = MagicMock() + otel_span.name = "Sample OTel Span" + otel_span.end_time = time.time_ns() + otel_span.context = MagicMock() + otel_span.context.span_id = int("1234567890abcdef", 16) + + span_processor = SentrySpanProcessor() + span_processor.otel_span_map = {} + span_processor._get_otel_context = MagicMock() + span_processor._update_span_with_otel_data = MagicMock() + + span_processor.on_end(otel_span) + + span_processor._get_otel_context.assert_not_called() + span_processor._update_span_with_otel_data.assert_not_called() + + +def test_on_end_sentry_transaction(): + """ + Test on_end for a sentry Transaction. 
+ """ + otel_span = MagicMock() + otel_span.name = "Sample OTel Span" + otel_span.end_time = time.time_ns() + otel_span.context = MagicMock() + otel_span.context.span_id = int("1234567890abcdef", 16) + + fake_sentry_span = MagicMock(spec=Transaction) + fake_sentry_span.set_context = MagicMock() + fake_sentry_span.finish = MagicMock() + + span_processor = SentrySpanProcessor() + span_processor._get_otel_context = MagicMock() + span_processor._update_span_with_otel_data = MagicMock() + span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span + + span_processor.on_end(otel_span) + + fake_sentry_span.set_context.assert_called_once() + span_processor._update_span_with_otel_data.assert_not_called() + fake_sentry_span.finish.assert_called_once() + + +def test_on_end_sentry_span(): + """ + Test on_end for a sentry Span. + """ + otel_span = MagicMock() + otel_span.name = "Sample OTel Span" + otel_span.end_time = time.time_ns() + otel_span.context = MagicMock() + otel_span.context.span_id = int("1234567890abcdef", 16) + + fake_sentry_span = MagicMock(spec=Span) + fake_sentry_span.set_context = MagicMock() + fake_sentry_span.finish = MagicMock() + + span_processor = SentrySpanProcessor() + span_processor._get_otel_context = MagicMock() + span_processor._update_span_with_otel_data = MagicMock() + span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span + + span_processor.on_end(otel_span) + + fake_sentry_span.set_context.assert_not_called() + span_processor._update_span_with_otel_data.assert_called_once_with( + fake_sentry_span, otel_span + ) + fake_sentry_span.finish.assert_called_once() diff --git a/tests/tracing/test_noop_span.py b/tests/tracing/test_noop_span.py new file mode 100644 index 0000000000..3dc148f848 --- /dev/null +++ b/tests/tracing/test_noop_span.py @@ -0,0 +1,46 @@ +import sentry_sdk +from sentry_sdk.tracing import NoOpSpan + +# This tests make sure, that the examples from the documentation [1] +# are working when OTel (OpenTelementry) instrumentation is turned on +# and therefore the Senntry tracing should not do anything. 
+# +# 1: https://docs.sentry.io/platforms/python/performance/instrumentation/custom-instrumentation/ + + +def test_noop_start_transaction(sentry_init): + sentry_init(instrumenter="otel", debug=True) + + transaction = sentry_sdk.start_transaction(op="task", name="test_transaction_name") + assert isinstance(transaction, NoOpSpan) + + transaction.name = "new name" + + +def test_noop_start_span(sentry_init): + sentry_init(instrumenter="otel", debug=True) + + with sentry_sdk.start_span(op="http", description="GET /") as span: + assert isinstance(span, NoOpSpan) + + span.set_tag("http.status_code", "418") + span.set_data("http.entity_type", "teapot") + + +def test_noop_transaction_start_child(sentry_init): + sentry_init(instrumenter="otel", debug=True) + + transaction = sentry_sdk.start_transaction(name="task") + assert isinstance(transaction, NoOpSpan) + + with transaction.start_child(op="child_task") as child: + assert isinstance(child, NoOpSpan) + + +def test_noop_span_start_child(sentry_init): + sentry_init(instrumenter="otel", debug=True) + span = sentry_sdk.start_span(name="task") + assert isinstance(span, NoOpSpan) + + with span.start_child(op="child_task") as child: + assert isinstance(child, NoOpSpan) diff --git a/tox.ini b/tox.ini index 51a92a07c9..d2e87cb1f7 100644 --- a/tox.ini +++ b/tox.ini @@ -128,6 +128,9 @@ envlist = # Boto3 {py2.7,py3.6,py3.7,py3.8}-boto3-v{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16} + # OpenTelemetry (OTel) + {py3.7,py3.8,py3.9,py3.10}-opentelemetry + [testenv] deps = # if you change test-requirements.txt and your change is not being reflected @@ -320,6 +323,8 @@ deps = pymongo-v4.1: pymongo>=4.1,<4.2 pymongo-v4.2: pymongo>=4.2,<4.3 + opentelemetry: opentelemetry-distro + setenv = PYTHONDONTWRITEBYTECODE=1 TESTPATH=tests From 0a029155c9e3b222cb4f6a447dcf2a1d3d01625b Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Wed, 14 Dec 2022 15:20:32 +0000 Subject: [PATCH 09/12] release: 1.12.0 --- CHANGELOG.md | 14 ++++++++++++++ docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 4 files changed, 17 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0a03c0104b..2185c2fe14 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## 1.12.0 + +### Basic OTel support (ongoing) + +By: @antonpirker (#1772, #1766, #1765) + +### Various fixes & improvements + +- Tox Cleanup (#1749) by @antonpirker +- fix(ci): Fix Github action checks (#1780) by @Zylphrex +- feat(profiling): Introduce active thread id on scope (#1764) by @Zylphrex +- ref(profiling): Eagerly hash stack for profiles (#1755) by @Zylphrex +- fix(profiling): Resolve inherited method class names (#1756) by @Zylphrex + ## 1.11.1 ### Various fixes & improvements diff --git a/docs/conf.py b/docs/conf.py index 0d60cb6656..93eb542d59 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = "2019, Sentry Team and Contributors" author = "Sentry Team and Contributors" -release = "1.11.1" +release = "1.12.0" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 47d630dee3..9b76cd9072 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -136,4 +136,4 @@ def _get_default_options(): del _get_default_options -VERSION = "1.11.1" +VERSION = "1.12.0" diff --git a/setup.py b/setup.py index 318c9dc837..6eed498332 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.11.1", + version="1.12.0", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python", From abfdce8118768b78db608bc4be15b655b95fc6d5 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Thu, 15 Dec 2022 09:08:58 +0100 Subject: [PATCH 10/12] Updated changelog --- CHANGELOG.md | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2185c2fe14..2a182032b8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,17 +2,24 @@ ## 1.12.0 -### Basic OTel support (ongoing) +### Basic OTel support + +This adds support to automatically integrate OpenTelemetry performance tracing with Sentry. + +See the documentation on how to set it up: +https://docs.sentry.io/platforms/python/performance/instrumentation/opentelemetry/ + +Give it a try and let us know if you have any feedback or problems with using it. By: @antonpirker (#1772, #1766, #1765) ### Various fixes & improvements - Tox Cleanup (#1749) by @antonpirker -- fix(ci): Fix Github action checks (#1780) by @Zylphrex -- feat(profiling): Introduce active thread id on scope (#1764) by @Zylphrex -- ref(profiling): Eagerly hash stack for profiles (#1755) by @Zylphrex -- fix(profiling): Resolve inherited method class names (#1756) by @Zylphrex +- CI: Fix Github action checks (#1780) by @Zylphrex +- Profiling: Introduce active thread id on scope (#1764) by @Zylphrex +- Profiling: Eagerly hash stack for profiles (#1755) by @Zylphrex +- Profiling: Resolve inherited method class names (#1756) by @Zylphrex ## 1.11.1 From 6959941afc0f9bf3c13ffdc7069fabba1b47bc10 Mon Sep 17 00:00:00 2001 From: Anton Pirker Date: Mon, 19 Dec 2022 10:08:51 +0100 Subject: [PATCH 11/12] Link errors to OTel spans (#1787) Link Sentry captured issue events to performance events from Otel. 
(This makes Sentry issues visible in Otel performance data) --- .../opentelemetry/span_processor.py | 47 +++++++++++++++ .../opentelemetry/test_span_processor.py | 60 ++++++++++++++++++- 2 files changed, 105 insertions(+), 2 deletions(-) diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py index 0ec9c620af..5b80efbca5 100644 --- a/sentry_sdk/integrations/opentelemetry/span_processor.py +++ b/sentry_sdk/integrations/opentelemetry/span_processor.py @@ -6,16 +6,22 @@ from opentelemetry.trace import ( # type: ignore format_span_id, format_trace_id, + get_current_span, SpanContext, Span as OTelSpan, SpanKind, ) +from opentelemetry.trace.span import ( # type: ignore + INVALID_SPAN_ID, + INVALID_TRACE_ID, +) from sentry_sdk.consts import INSTRUMENTER from sentry_sdk.hub import Hub from sentry_sdk.integrations.opentelemetry.consts import ( SENTRY_BAGGAGE_KEY, SENTRY_TRACE_KEY, ) +from sentry_sdk.scope import add_global_event_processor from sentry_sdk.tracing import Transaction, Span as SentrySpan from sentry_sdk.utils import Dsn from sentry_sdk._types import MYPY @@ -26,10 +32,44 @@ from typing import Any from typing import Dict from typing import Union + from sentry_sdk._types import Event, Hint OPEN_TELEMETRY_CONTEXT = "otel" +def link_trace_context_to_error_event(event, otel_span_map): + # type: (Event, Dict[str, Union[Transaction, OTelSpan]]) -> Event + hub = Hub.current + if not hub: + return event + + if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL: + return event + + if hasattr(event, "type") and event["type"] == "transaction": + return event + + otel_span = get_current_span() + if not otel_span: + return event + + ctx = otel_span.get_span_context() + trace_id = format_trace_id(ctx.trace_id) + span_id = format_span_id(ctx.span_id) + + if trace_id == INVALID_TRACE_ID or span_id == INVALID_SPAN_ID: + return event + + sentry_span = otel_span_map.get(span_id, None) + if not sentry_span: + return event + + contexts = event.setdefault("contexts", {}) + contexts.setdefault("trace", {}).update(sentry_span.get_trace_context()) + + return event + + class SentrySpanProcessor(SpanProcessor): # type: ignore """ Converts OTel spans into Sentry spans so they can be sent to the Sentry backend. 
@@ -45,6 +85,13 @@ def __new__(cls): return cls.instance + def __init__(self): + # type: () -> None + @add_global_event_processor + def global_event_processor(event, hint): + # type: (Event, Hint) -> Event + return link_trace_context_to_error_event(event, self.otel_span_map) + def on_start(self, otel_span, parent_context=None): # type: (OTelSpan, SpanContext) -> None hub = Hub.current diff --git a/tests/integrations/opentelemetry/test_span_processor.py b/tests/integrations/opentelemetry/test_span_processor.py index 6d151c9cfe..7ba6f59e6c 100644 --- a/tests/integrations/opentelemetry/test_span_processor.py +++ b/tests/integrations/opentelemetry/test_span_processor.py @@ -2,10 +2,13 @@ from mock import MagicMock import mock import time -from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor +from sentry_sdk.integrations.opentelemetry.span_processor import ( + SentrySpanProcessor, + link_trace_context_to_error_event, +) from sentry_sdk.tracing import Span, Transaction -from opentelemetry.trace import SpanKind +from opentelemetry.trace import SpanKind, SpanContext def test_is_sentry_span(): @@ -403,3 +406,56 @@ def test_on_end_sentry_span(): fake_sentry_span, otel_span ) fake_sentry_span.finish.assert_called_once() + + +def test_link_trace_context_to_error_event(): + """ + Test that the trace context is added to the error event. + """ + fake_client = MagicMock() + fake_client.options = {"instrumenter": "otel"} + fake_client + + current_hub = MagicMock() + current_hub.client = fake_client + + fake_hub = MagicMock() + fake_hub.current = current_hub + + span_id = "1234567890abcdef" + trace_id = "1234567890abcdef1234567890abcdef" + + fake_trace_context = { + "bla": "blub", + "foo": "bar", + "baz": 123, + } + + sentry_span = MagicMock() + sentry_span.get_trace_context = MagicMock(return_value=fake_trace_context) + + otel_span_map = { + span_id: sentry_span, + } + + span_context = SpanContext( + trace_id=int(trace_id, 16), + span_id=int(span_id, 16), + is_remote=True, + ) + otel_span = MagicMock() + otel_span.get_span_context = MagicMock(return_value=span_context) + + fake_event = {"event_id": "1234567890abcdef1234567890abcdef"} + + with mock.patch( + "sentry_sdk.integrations.opentelemetry.span_processor.get_current_span", + return_value=otel_span, + ): + event = link_trace_context_to_error_event(fake_event, otel_span_map) + + assert event + assert event == fake_event # the event is changed in place inside the function + assert "contexts" in event + assert "trace" in event["contexts"] + assert event["contexts"]["trace"] == fake_trace_context From ab1496fdf2a899715fbad9f4a4144cf1dfcac651 Mon Sep 17 00:00:00 2001 From: getsentry-bot Date: Mon, 19 Dec 2022 09:10:12 +0000 Subject: [PATCH 12/12] release: 1.12.1 --- CHANGELOG.md | 6 ++++++ docs/conf.py | 2 +- sentry_sdk/consts.py | 2 +- setup.py | 2 +- 4 files changed, 9 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2a182032b8..42ce1a1848 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # Changelog +## 1.12.1 + +### Various fixes & improvements + +- Link errors to OTel spans (#1787) by @antonpirker + ## 1.12.0 ### Basic OTel support diff --git a/docs/conf.py b/docs/conf.py index 93eb542d59..44180fade1 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ copyright = "2019, Sentry Team and Contributors" author = "Sentry Team and Contributors" -release = "1.12.0" +release = "1.12.1" version = ".".join(release.split(".")[:2]) # The short X.Y version. 
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py index 9b76cd9072..afb4b975bb 100644 --- a/sentry_sdk/consts.py +++ b/sentry_sdk/consts.py @@ -136,4 +136,4 @@ def _get_default_options(): del _get_default_options -VERSION = "1.12.0" +VERSION = "1.12.1" diff --git a/setup.py b/setup.py index 6eed498332..86680690ce 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ def get_file_text(file_name): setup( name="sentry-sdk", - version="1.12.0", + version="1.12.1", author="Sentry Team and Contributors", author_email="hello@sentry.io", url="https://github.com/getsentry/sentry-python",