diff --git a/.craft.yml b/.craft.yml
index 353b02f77e..70875d5404 100644
--- a/.craft.yml
+++ b/.craft.yml
@@ -1,4 +1,4 @@
-minVersion: 0.28.1
+minVersion: 0.34.1
 targets:
   - name: pypi
     includeNames: /^sentry[_\-]sdk.*$/
@@ -8,20 +8,25 @@ targets:
       pypi:sentry-sdk:
   - name: github
   - name: aws-lambda-layer
-    includeNames: /^sentry-python-serverless-\d+(\.\d+)*\.zip$/
+    # This regex that matches the version is taken from craft:
+    # https://github.com/getsentry/craft/blob/8d77c38ddbe4be59f98f61b6e42952ca087d3acd/src/utils/version.ts#L11
+    includeNames: /^sentry-python-serverless-\bv?(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)(?:-?([\da-z-]+(?:\.[\da-z-]+)*))?(?:\+([\da-z-]+(?:\.[\da-z-]+)*))?\b.zip$/
     layerName: SentryPythonServerlessSDK
     compatibleRuntimes:
       - name: python
         versions:
           # The number of versions must be, at most, the maximum number of
-          # runtimes AWS Lambda permits for a layer.
+          # runtimes AWS Lambda permits for a layer (currently 15).
           # On the other hand, AWS Lambda does not support every Python runtime.
           # The supported runtimes are available in the following link:
           # https://docs.aws.amazon.com/lambda/latest/dg/lambda-python.html
-          - python3.6
           - python3.7
           - python3.8
           - python3.9
+          - python3.10
+          - python3.11
     license: MIT
+  - name: sentry-pypi
+    internalPypiRepo: getsentry/pypi
 changelog: CHANGELOG.md
 changelogPolicy: auto
diff --git a/.flake8 b/.flake8
index 0bb586b18e..8610e09241 100644
--- a/.flake8
+++ b/.flake8
@@ -1,16 +1,21 @@
 [flake8]
-ignore =
-  E203,  // Handled by black (Whitespace before ':' -- handled by black)
-  E266,  // Handled by black (Too many leading '#' for block comment)
-  E501,  // Handled by black (Line too long)
-  W503,  // Handled by black (Line break occured before a binary operator)
-  E402,  // Sometimes not possible due to execution order (Module level import is not at top of file)
-  E731,  // I don't care (Do not assign a lambda expression, use a def)
-  B950,  // Handled by black (Line too long by flake8-bugbear)
-  B011,  // I don't care (Do not call assert False)
-  B014,  // does not apply to Python 2 (redundant exception types by flake8-bugbear)
-  N812,  // I don't care (Lowercase imported as non-lowercase by pep8-naming)
-  N804   // is a worse version of and conflicts with B902 (first argument of a classmethod should be named cls)
-max-line-length = 80
-select = N,B,C,E,F,W,T4,B9
-exclude=checkouts,lol*,.tox
+extend-ignore =
+  # Handled by black (Whitespace before ':' -- handled by black)
+  E203,
+  # Handled by black (Line too long)
+  E501,
+  # Sometimes not possible due to execution order (Module level import is not at top of file)
+  E402,
+  # I don't care (Do not assign a lambda expression, use a def)
+  E731,
+  # does not apply to Python 2 (redundant exception types by flake8-bugbear)
+  B014,
+  # I don't care (Lowercase imported as non-lowercase by pep8-naming)
+  N812,
+  # is a worse version of and conflicts with B902 (first argument of a classmethod should be named cls)
+  N804,
+extend-exclude=checkouts,lol*
+exclude =
+  # gRPC generated files
+  grpc_test_service_pb2.py
+  grpc_test_service_pb2_grpc.py
diff --git a/.github/ISSUE_TEMPLATE/bug.yml b/.github/ISSUE_TEMPLATE/bug.yml
index f6e47929eb..78f1e03d21 100644
--- a/.github/ISSUE_TEMPLATE/bug.yml
+++ b/.github/ISSUE_TEMPLATE/bug.yml
@@ -27,6 +27,8 @@ body:
         1. What
         2. you
         3. did.
+
+        Extra points for also including the output of `pip freeze --all`.
     validations:
       required: true
   - type: textarea
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
index 7f40ddc56d..17d8a34dc5 100644
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -3,4 +3,3 @@ contact_links:
   - name: Support Request
     url: https://sentry.io/support
     about: Use our dedicated support channel for paid accounts.
-
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000000..41dfc484ff
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,17 @@
+
+
+---
+
+## General Notes
+
+Thank you for contributing to `sentry-python`!
+
+Please add tests to validate your changes, and lint your code using `tox -e linters`.
+
+Running the test suite on your PR might require maintainer approval. Some tests (AWS Lambda) additionally require a maintainer to add a special label to run and will fail if the label is not present.
+
+#### For maintainers
+
+Sensitive test suites require maintainer review to ensure that tests do not compromise our secrets. This review must be repeated after any code revisions.
+
+Before running sensitive test suites, please carefully check the PR. Then, apply the `Trigger: tests using secrets` label. The label will be removed after any code changes to enforce our policy requiring maintainers to review all code revisions before running sensitive tests.
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index eadcd59879..d375588780 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -12,6 +12,12 @@ updates:
       - dependency-name: pytest
         versions:
           - "> 3.7.3"
+      - dependency-name: flake8 # Later versions dropped Python 2 support
+        versions:
+          - "> 5.0.4"
+      - dependency-name: jsonschema # Later versions dropped Python 2 support
+        versions:
+          - "> 3.2.0"
       - dependency-name: pytest-cov
         versions:
           - "> 2.8.1"
@@ -43,6 +49,6 @@ updates:
     open-pull-requests-limit: 10
   - package-ecosystem: "github-actions"
     directory: "/"
-    schedule: 
+    schedule:
       interval: weekly
     open-pull-requests-limit: 10
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 4b6de8e4d6..b6bd0724c3 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,149 +1,99 @@
-name: ci
+name: CI
 
 on:
   push:
     branches:
       - master
       - release/**
+      - 1.x
 
   pull_request:
 
 permissions:
   contents: read
 
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+
 jobs:
-  dist:
-    name: distribution packages
+  lint:
+    name: Lint Sources
+    runs-on: ubuntu-latest
     timeout-minutes: 10
+
+    steps:
+      - uses: actions/checkout@v4.1.1
+      - uses: actions/setup-python@v5
+        with:
+          python-version: 3.12
+
+      - run: |
+          pip install tox
+          tox -e linters
+
+  check-ci-config:
+    name: Check CI config
     runs-on: ubuntu-latest
+    timeout-minutes: 10
 
     steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-node@v3
-      - uses: actions/setup-python@v3
+      - uses: actions/checkout@v4.1.1
+      - uses: actions/setup-python@v5
        with:
-          python-version: 3.9
+          python-version: 3.12
 
       - run: |
-          pip install virtualenv
-          make aws-lambda-layer-build
+          pip install jinja2
+          python scripts/split-tox-gh-actions/split-tox-gh-actions.py --fail-on-changes
+
+  build_lambda_layer:
+    name: Build Package
+    runs-on: ubuntu-latest
+    timeout-minutes: 10
 
-      - uses: actions/upload-artifact@v3
+    steps:
+      - uses: actions/checkout@v4.1.1
+      - uses: actions/setup-python@v5
+        with:
+          python-version: 3.12
+      - name: Setup build cache
+        uses: actions/cache@v4
+        id: build_cache
+        with:
+          path: ${{ env.CACHED_BUILD_PATHS }}
+          key: ${{ env.BUILD_CACHE_KEY }}
+      - name: Build Packages
+        run: |
+          echo "Creating directory containing Python SDK Lambda Layer"
+          pip install virtualenv
+          # This will also trigger "make dist" that creates the Python packages
+          make aws-lambda-layer
+      - name: Upload Python Packages
+        uses: actions/upload-artifact@v3
         with:
           name: ${{ github.sha }}
           path: |
             dist/*
-            dist-serverless/*
 
   docs:
-    timeout-minutes: 10
-    name: build documentation
+    name: Build SDK API Doc
     runs-on: ubuntu-latest
+    timeout-minutes: 10
 
     steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-node@v3
-      - uses: actions/setup-python@v3
+      - uses: actions/checkout@v4.1.1
+      - uses: actions/setup-python@v5
         with:
-          python-version: 3.9
+          python-version: 3.12
 
       - run: |
           pip install virtualenv
           make apidocs
           cd docs/_build && zip -r gh-pages ./
 
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v3.1.1
         with:
           name: ${{ github.sha }}
           path: docs/_build/gh-pages.zip
-
-  lint:
-    timeout-minutes: 10
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v3
-        with:
-          python-version: 3.9
-
-      - run: |
-          pip install tox
-          tox -e linters
-
-  test:
-    continue-on-error: true
-    timeout-minutes: 45
-    runs-on: ${{ matrix.linux-version }}
-    strategy:
-      matrix:
-        linux-version: [ubuntu-latest]
-        python-version: ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10"]
-        include:
-          # GHA doesn't host the combo of python 3.4 and ubuntu-latest (which is
-          # currently 20.04), so run just that one under 18.04. (See
-          # https://raw.githubusercontent.com/actions/python-versions/main/versions-manifest.json
-          # for a listing of supported python/os combos.)
-          - linux-version: ubuntu-18.04
-            python-version: "3.4"
-
-    services:
-      # Label used to access the service container
-      redis:
-        # Docker Hub image
-        image: redis
-        # Set health checks to wait until redis has started
-        options: >-
-          --health-cmd "redis-cli ping"
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
-        ports:
-          # Maps port 6379 on service container to the host
-          - 6379:6379
-
-      postgres:
-        image: postgres
-        env:
-          POSTGRES_PASSWORD: sentry
-        # Set health checks to wait until postgres has started
-        options: >-
-          --health-cmd pg_isready
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
-        # Maps tcp port 5432 on service container to the host
-        ports:
-          - 5432:5432
-
-    env:
-      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
-      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
-      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
-
-    steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-node@v3
-      - uses: actions/setup-python@v3
-        with:
-          python-version: ${{ matrix.python-version }}
-
-      - name: setup
-        env:
-          PGHOST: localhost
-          PGPASSWORD: sentry
-        run: |
-          psql -c 'create database travis_ci_test;' -U postgres
-          psql -c 'create database test_travis_ci_test;' -U postgres
-          pip install codecov tox
-
-      - name: run tests
-        env:
-          CI_PYTHON_VERSION: ${{ matrix.python-version }}
-        timeout-minutes: 45
-        run: |
-          coverage erase
-          ./scripts/runtox.sh '' --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
-          coverage combine .coverage*
-          coverage xml -i
-          codecov --file coverage.xml
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 1d88a97406..1c8422c7ee 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -13,10 +13,14 @@ name: "CodeQL"
 
 on:
   push:
-    branches: [ master ]
+    branches:
+      - master
+      - sentry-sdk-2.0
   pull_request:
     # The branches below must be a subset of the branches above
-    branches: [ master ]
+    branches:
+      - master
+      - sentry-sdk-2.0
   schedule:
     - cron: '18 18 * * 3'
 
@@ -42,11 +46,11 @@ jobs:
 
     steps:
     - name: Checkout repository
-      uses: actions/checkout@v3
+      uses: actions/checkout@v4.1.1
 
     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
-      uses: github/codeql-action/init@v2
+      uses: github/codeql-action/init@v3
       with:
         languages: ${{ matrix.language }}
         # If you wish to specify custom queries, you can do so here or in a config file.
@@ -57,7 +61,7 @@ jobs:
     # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
     # If this step fails, then you should remove it and run the build manually (see below)
     - name: Autobuild
-      uses: github/codeql-action/autobuild@v2
+      uses: github/codeql-action/autobuild@v3
 
     # ℹ️ Command-line programs to run using the OS shell.
     # 📚 https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions
@@ -71,4 +75,4 @@ jobs:
     #   make release
 
     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v2
+      uses: github/codeql-action/analyze@v3
diff --git a/.github/workflows/enforce-license-compliance.yml b/.github/workflows/enforce-license-compliance.yml
index b331974711..01e02ccb8b 100644
--- a/.github/workflows/enforce-license-compliance.yml
+++ b/.github/workflows/enforce-license-compliance.yml
@@ -2,9 +2,16 @@ name: Enforce License Compliance
 
 on:
   push:
-    branches: [master, main, release/*]
+    branches:
+      - master
+      - main
+      - release/*
+      - sentry-sdk-2.0
   pull_request:
-    branches: [master, main]
+    branches:
+      - master
+      - main
+      - sentry-sdk-2.0
 
 jobs:
   enforce-license-compliance:
diff --git a/.github/workflows/jira.yml b/.github/workflows/jira.yml
deleted file mode 100644
index 485915ba5e..0000000000
--- a/.github/workflows/jira.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-name: Create JIRA issue
-
-on:
-  issues:
-    types: [labeled]
-
-jobs:
-  createIssue:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: getsentry/ga-jira-integration@main
-        with:
-          JIRA_API_HOST: ${{secrets.JIRA_BASEURL}}
-          JIRA_API_TOKEN: ${{secrets.JIRA_APITOKEN}}
-          JIRA_EMAIL: ${{secrets.JIRA_USEREMAIL}}
-          TRIGGER_LABEL: "Jira"
-          JIRA_PROJECT_ID: WEBBACKEND
-          JIRA_ISSUE_NAME: Story
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 139fe29007..f55ec12407 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -9,13 +9,16 @@ on:
       force:
         description: Force a release even when there are release-blockers (optional)
         required: false
+      merge_target:
+        description: Target branch to merge into. Uses the default branch as a fallback (optional)
+        required: false
 
 jobs:
   release:
     runs-on: ubuntu-latest
     name: "Release a new version"
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4.1.1
         with:
           token: ${{ secrets.GH_RELEASE_PAT }}
           fetch-depth: 0
@@ -26,3 +29,4 @@ jobs:
         with:
           version: ${{ github.event.inputs.version }}
           force: ${{ github.event.inputs.force }}
+          merge_target: ${{ github.event.inputs.merge_target }}
diff --git a/.github/workflows/scripts/trigger_tests_on_label.py b/.github/workflows/scripts/trigger_tests_on_label.py
new file mode 100644
index 0000000000..f6039fd16a
--- /dev/null
+++ b/.github/workflows/scripts/trigger_tests_on_label.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python3
+import argparse
+import json
+import os
+from urllib.parse import quote
+from urllib.request import Request, urlopen
+
+LABEL = "Trigger: tests using secrets"
+
+
+def _has_write(repo_id: int, username: str, *, token: str) -> bool:
+    req = Request(
+        f"https://api.github.com/repositories/{repo_id}/collaborators/{username}/permission",
+        headers={"Authorization": f"token {token}"},
+    )
+    contents = json.load(urlopen(req, timeout=10))
+
+    return contents["permission"] in {"admin", "write"}
+
+
+def _remove_label(repo_id: int, pr: int, label: str, *, token: str) -> None:
+    quoted_label = quote(label)
+    req = Request(
+        f"https://api.github.com/repositories/{repo_id}/issues/{pr}/labels/{quoted_label}",
+        method="DELETE",
+        headers={"Authorization": f"token {token}"},
+    )
+    urlopen(req)
+
+
+def main() -> int:
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--repo-id", type=int, required=True)
+    parser.add_argument("--pr", type=int, required=True)
+    parser.add_argument("--event", required=True)
+    parser.add_argument("--username", required=True)
+    parser.add_argument("--label-names", type=json.loads, required=True)
+    args = parser.parse_args()
+
+    token = os.environ["GITHUB_TOKEN"]
+
+    write_permission = _has_write(args.repo_id, args.username, token=token)
+
+    if (
+        not write_permission
+        # `reopened` is included here due to close => push => reopen
+        and args.event in {"synchronize", "reopened"}
+        and LABEL in args.label_names
+    ):
+        print(f"Invalidating label [{LABEL}] due to code change...")
+        _remove_label(args.repo_id, args.pr, LABEL, token=token)
+        args.label_names.remove(LABEL)
+
+    if write_permission or LABEL in args.label_names:
+        print("Permissions passed!")
+        print(f"- has write permission: {write_permission}")
+        print(f"- has [{LABEL}] label: {LABEL in args.label_names}")
+        return 0
+    else:
+        print("Permissions failed!")
+        print(f"- has write permission: {write_permission}")
+        print(f"- has [{LABEL}] label: {LABEL in args.label_names}")
+        print(f"- args.label_names: {args.label_names}")
+        print(
+            f"Please have a collaborator add the [{LABEL}] label once they "
+            f"have reviewed the code to trigger tests."
+        )
+        return 1
+
+
+if __name__ == "__main__":
+    raise SystemExit(main())
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
deleted file mode 100644
index e195d701a0..0000000000
--- a/.github/workflows/stale.yml
+++ /dev/null
@@ -1,51 +0,0 @@
-name: 'close stale issues/PRs'
-on:
-  schedule:
-    - cron: '0 0 * * *'
-  workflow_dispatch:
-permissions:
-  contents: read
-
-jobs:
-  stale:
-    permissions:
-      issues: write # for actions/stale to close stale issues
-      pull-requests: write # for actions/stale to close stale PRs
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/stale@v5
-        with:
-          repo-token: ${{ github.token }}
-          days-before-stale: 21
-          days-before-close: 7
-          only-labels: ""
-          operations-per-run: 100
-          remove-stale-when-updated: true
-          debug-only: false
-          ascending: false
-
-          exempt-issue-labels: "Status: Backlog,Status: In Progress"
-          stale-issue-label: "Status: Stale"
-          stale-issue-message: |-
-            This issue has gone three weeks without activity. In another week, I will close it.
-
-            But! If you comment or otherwise update it, I will reset the clock, and if you label it `Status: Backlog` or `Status: In Progress`, I will leave it alone ... forever!
-
-            ---
-
-            "A weed is but an unloved flower." ― _Ella Wheeler Wilcox_ 🥀
-          close-issue-label: ""
-          close-issue-message: ""
-
-          exempt-pr-labels: "Status: Backlog,Status: In Progress"
-          stale-pr-label: "Status: Stale"
-          stale-pr-message: |-
-            This pull request has gone three weeks without activity. In another week, I will close it.
-
-            But! If you comment or otherwise update it, I will reset the clock, and if you label it `Status: Backlog` or `Status: In Progress`, I will leave it alone ... forever!
-
-            ---
-
-            "A weed is but an unloved flower." ― _Ella Wheeler Wilcox_ 🥀
-          close-pr-label:
-          close-pr-message: ""
diff --git a/.github/workflows/test-integrations-aws-lambda.yml b/.github/workflows/test-integrations-aws-lambda.yml
new file mode 100644
index 0000000000..192f32e4c4
--- /dev/null
+++ b/.github/workflows/test-integrations-aws-lambda.yml
@@ -0,0 +1,104 @@
+name: Test AWS Lambda
+on:
+  push:
+    branches:
+      - master
+      - release/**
+      - 1.x
+  # XXX: We are using `pull_request_target` instead of `pull_request` because we want
+  # this to run on forks with access to the secrets necessary to run the test suite.
+  # Prefer to use `pull_request` when possible.
+  pull_request_target:
+    types: [labeled, opened, reopened, synchronize]
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+permissions:
+  contents: read
+  # `write` is needed to remove the `Trigger: tests using secrets` label
+  pull-requests: write
+env:
+  SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID: ${{ secrets.SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID }}
+  SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY: ${{ secrets.SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY }}
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+jobs:
+  check-permissions:
+    name: permissions check
+    runs-on: ubuntu-20.04
+    steps:
+      - uses: actions/checkout@v4.1.1
+        with:
+          persist-credentials: false
+      - name: Check permissions on PR
+        if: github.event_name == 'pull_request_target'
+        run: |
+          python3 -uS .github/workflows/scripts/trigger_tests_on_label.py \
+            --repo-id ${{ github.event.repository.id }} \
+            --pr ${{ github.event.number }} \
+            --event ${{ github.event.action }} \
+            --username "$ARG_USERNAME" \
+            --label-names "$ARG_LABEL_NAMES"
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          # these can contain special characters
+          ARG_USERNAME: ${{ github.event.pull_request.user.login }}
+          ARG_LABEL_NAMES: ${{ toJSON(github.event.pull_request.labels.*.name) }}
+      - name: Check permissions on repo branch
+        if: github.event_name == 'push'
+        run: true
+  test-aws_lambda-pinned:
+    name: AWS Lambda (pinned)
+    timeout-minutes: 30
+    needs: check-permissions
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.9"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+    steps:
+      - uses: actions/checkout@v4.1.1
+        with:
+          ref: ${{ github.event.pull_request.head.sha || github.ref }}
+      - uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+        env:
+          PIP_TRUSTED_HOST: "pypi.python.org pypi.org files.pythonhosted.org"
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+      - name: Erase coverage
+        run: |
+          coverage erase
+      - name: Test aws_lambda pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-aws_lambda" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Generate coverage XML
+        run: |
+          coverage combine .coverage*
+          coverage xml -i
+      - uses: codecov/codecov-action@v4
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+  check_required_tests:
+    name: All AWS Lambda tests passed
+    needs: test-aws_lambda-pinned
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test-aws_lambda-pinned.result, 'failure') || contains(needs.test-aws_lambda-pinned.result, 'skipped')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integrations-cloud-computing.yml b/.github/workflows/test-integrations-cloud-computing.yml
new file mode 100644
index 0000000000..0efc4a6ffb
--- /dev/null
+++ b/.github/workflows/test-integrations-cloud-computing.yml
@@ -0,0 +1,122 @@
+name: Test Cloud Computing
+on:
+  push:
+    branches:
+      - master
+      - release/**
+      - 1.x
+  pull_request:
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+permissions:
+  contents: read
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+jobs:
+  test-cloud_computing-pinned:
+    name: Cloud Computing (pinned)
+    timeout-minutes: 30
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.6","3.7","3.9","3.11","3.12"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+    steps:
+      - uses: actions/checkout@v4.1.1
+      - uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+        env:
+          PIP_TRUSTED_HOST: "pypi.python.org pypi.org files.pythonhosted.org"
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+      - name: Erase coverage
+        run: |
+          coverage erase
+      - name: Test boto3 pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-boto3" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test chalice pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-chalice" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test cloud_resource_context pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-cloud_resource_context" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test gcp pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-gcp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Generate coverage XML
+        run: |
+          coverage combine .coverage*
+          coverage xml -i
+      - uses: codecov/codecov-action@v4
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+  test-cloud_computing-py27:
+    name: Cloud Computing (py27)
+    timeout-minutes: 30
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    steps:
+      - uses: actions/checkout@v4.1.1
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+      - name: Erase coverage
+        run: |
+          coverage erase
+      - name: Test boto3 py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-boto3" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test chalice py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-chalice" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test cloud_resource_context py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-cloud_resource_context" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test gcp py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-gcp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Generate coverage XML
+        run: |
+          coverage combine .coverage*
+          coverage xml -i
+      - uses: codecov/codecov-action@v4
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+  check_required_tests:
+    name: All Cloud Computing tests passed
+    needs: [test-cloud_computing-pinned, test-cloud_computing-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test-cloud_computing-pinned.result, 'failure') || contains(needs.test-cloud_computing-pinned.result, 'skipped')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-cloud_computing-py27.result, 'failure') || contains(needs.test-cloud_computing-py27.result, 'skipped')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integrations-common.yml b/.github/workflows/test-integrations-common.yml
new file mode 100644
index 0000000000..a617bace48
--- /dev/null
+++ b/.github/workflows/test-integrations-common.yml
@@ -0,0 +1,98 @@
+name: Test Common
+on:
+  push:
+    branches:
+      - master
+      - release/**
+      - 1.x
+  pull_request:
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+permissions:
+  contents: read
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+jobs:
+  test-common-pinned:
+    name: Common (pinned)
+    timeout-minutes: 30
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+    steps:
+      - uses: actions/checkout@v4.1.1
+      - uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+        env:
+          PIP_TRUSTED_HOST: "pypi.python.org pypi.org files.pythonhosted.org"
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+      - name: Erase coverage
+        run: |
+          coverage erase
+      - name: Test common pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-common" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Generate coverage XML
+        run: |
+          coverage combine .coverage*
+          coverage xml -i
+      - uses: codecov/codecov-action@v4
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+  test-common-py27:
+    name: Common (py27)
+    timeout-minutes: 30
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    steps:
+      - uses: actions/checkout@v4.1.1
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+      - name: Erase coverage
+        run: |
+          coverage erase
+      - name: Test common py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-common" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Generate coverage XML
+        run: |
+          coverage combine .coverage*
+          coverage xml -i
+      - uses: codecov/codecov-action@v4
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+  check_required_tests:
+    name: All Common tests passed
+    needs: [test-common-pinned, test-common-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test-common-pinned.result, 'failure') || contains(needs.test-common-pinned.result, 'skipped')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-common-py27.result, 'failure') || contains(needs.test-common-py27.result, 'skipped')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integrations-data-processing.yml b/.github/workflows/test-integrations-data-processing.yml
new file mode 100644
index 0000000000..6e52aa1c6a
--- /dev/null
+++ b/.github/workflows/test-integrations-data-processing.yml
@@ -0,0 +1,138 @@
+name: Test Data Processing
+on:
+  push:
+    branches:
+      - master
+      - release/**
+      - 1.x
+  pull_request:
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+permissions:
+  contents: read
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+jobs:
+  test-data_processing-pinned:
+    name: Data Processing (pinned)
+    timeout-minutes: 30
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+    steps:
+      - uses: actions/checkout@v4.1.1
+      - uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+        env:
+          PIP_TRUSTED_HOST: "pypi.python.org pypi.org files.pythonhosted.org"
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+      - name: Erase coverage
+        run: |
+          coverage erase
+      - name: Test arq pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-arq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test beam pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-beam" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test celery pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-celery" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test huey pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-huey" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test openai pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-openai" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test rq pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-rq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Generate coverage XML
+        run: |
+          coverage combine .coverage*
+          coverage xml -i
+      - uses: codecov/codecov-action@v4
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+  test-data_processing-py27:
+    name: Data Processing (py27)
+    timeout-minutes: 30
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    steps:
+      - uses: actions/checkout@v4.1.1
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+      - name: Erase coverage
+        run: |
+          coverage erase
+      - name: Test arq py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-arq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test beam py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-beam" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test celery py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-celery" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test huey py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-huey" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test openai py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-openai" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test rq py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-rq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Generate coverage XML
+        run: |
+          coverage combine .coverage*
+          coverage xml -i
+      - uses: codecov/codecov-action@v4
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+  check_required_tests:
+    name: All Data Processing tests passed
+    needs: [test-data_processing-pinned, test-data_processing-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test-data_processing-pinned.result, 'failure') || contains(needs.test-data_processing-pinned.result, 'skipped')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-data_processing-py27.result, 'failure') || contains(needs.test-data_processing-py27.result, 'skipped')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integrations-databases.yml b/.github/workflows/test-integrations-databases.yml
new file mode 100644
index 0000000000..1263955b0a
--- /dev/null
+++ b/.github/workflows/test-integrations-databases.yml
@@ -0,0 +1,182 @@
+name: Test Databases
+on:
+  push:
+    branches:
+      - master
+      - release/**
+      - 1.x
+  pull_request:
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+permissions:
+  contents: read
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+jobs:
+  test-databases-pinned:
+    name: Databases (pinned)
+    timeout-minutes: 30
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+    services:
+      postgres:
+        image: postgres
+        env:
+          POSTGRES_PASSWORD: sentry
+        # Set health checks to wait until postgres has started
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        # Maps tcp port 5432 on service container to the host
+        ports:
+          - 5432:5432
+    env:
+      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
+      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
+      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
+      SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
+    steps:
+      - uses: actions/checkout@v4.1.1
+      - uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+        env:
+          PIP_TRUSTED_HOST: "pypi.python.org pypi.org files.pythonhosted.org"
+      - uses: getsentry/action-clickhouse-in-ci@v1
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+          psql postgresql://postgres:sentry@localhost:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
+          psql postgresql://postgres:sentry@localhost:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
+      - name: Erase coverage
+        run: |
+          coverage erase
+      - name: Test asyncpg pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-asyncpg" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test clickhouse_driver pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-clickhouse_driver" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test pymongo pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-pymongo" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test redis pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-redis" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test rediscluster pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-rediscluster" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test sqlalchemy pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-sqlalchemy" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Generate coverage XML
+        run: |
+          coverage combine .coverage*
+          coverage xml -i
+      - uses: codecov/codecov-action@v4
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+  test-databases-py27:
+    name: Databases (py27)
+    timeout-minutes: 30
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    services:
+      postgres:
+        image: postgres
+        env:
+          POSTGRES_PASSWORD: sentry
+        # Set health checks to wait until postgres has started
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        # Maps tcp port 5432 on service container to the host
+        ports:
+          - 5432:5432
+    env:
+      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
+      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
+      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
+      SENTRY_PYTHON_TEST_POSTGRES_HOST: postgres
+    steps:
+      - uses: actions/checkout@v4.1.1
+      - uses: getsentry/action-clickhouse-in-ci@v1
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+          psql postgresql://postgres:sentry@postgres:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
+          psql postgresql://postgres:sentry@postgres:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
+      - name: Erase coverage
+        run: |
+          coverage erase
+      - name: Test asyncpg py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-asyncpg" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test clickhouse_driver py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-clickhouse_driver" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test pymongo py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-pymongo" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test redis py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-redis" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test rediscluster py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-rediscluster" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test sqlalchemy py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-sqlalchemy" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Generate coverage XML
+        run: |
+          coverage combine .coverage*
+          coverage xml -i
+      - uses: codecov/codecov-action@v4
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+  check_required_tests:
+    name: All Databases tests passed
+    needs: [test-databases-pinned, test-databases-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test-databases-pinned.result, 'failure') || contains(needs.test-databases-pinned.result, 'skipped')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-databases-py27.result, 'failure') || contains(needs.test-databases-py27.result, 'skipped')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integrations-graphql.yml b/.github/workflows/test-integrations-graphql.yml
new file mode 100644
index 0000000000..1550187812
--- /dev/null
+++ b/.github/workflows/test-integrations-graphql.yml
@@ -0,0 +1,81 @@
+name: Test GraphQL
+on:
+  push:
+    branches:
+      - master
+      - release/**
+      - 1.x
+  pull_request:
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+permissions:
+  contents: read
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+jobs:
+  test-graphql-pinned:
+    name: GraphQL (pinned)
+    timeout-minutes: 30
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.7","3.8","3.11","3.12"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+    steps:
+      - uses: actions/checkout@v4.1.1
+      - uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+        env:
+          PIP_TRUSTED_HOST: "pypi.python.org pypi.org files.pythonhosted.org"
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+      - name: Erase coverage
+        run: |
+          coverage erase
+      - name: Test ariadne pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-ariadne" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test gql pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-gql" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test graphene pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-graphene" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test strawberry pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-strawberry" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Generate coverage XML
+        run: |
+          coverage combine .coverage*
+          coverage xml -i
+      - uses: codecov/codecov-action@v4
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+  check_required_tests:
+    name: All GraphQL tests passed
+    needs: test-graphql-pinned
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test-graphql-pinned.result, 'failure') || contains(needs.test-graphql-pinned.result, 'skipped')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integrations-miscellaneous.yml b/.github/workflows/test-integrations-miscellaneous.yml
new file mode 100644
index 0000000000..8472a17035
--- /dev/null
+++ b/.github/workflows/test-integrations-miscellaneous.yml
@@ -0,0 +1,81 @@
+name: Test Miscellaneous
+on:
+  push:
+    branches:
+      - master
+      - release/**
+      - 1.x
+  pull_request:
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+permissions:
+  contents: read
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+jobs:
+  test-miscellaneous-pinned:
+    name: Miscellaneous (pinned)
+    timeout-minutes: 30
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.11","3.12"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+    steps:
+      - uses: actions/checkout@v4.1.1
+      - uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+        env:
+          PIP_TRUSTED_HOST: "pypi.python.org pypi.org files.pythonhosted.org"
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+      - name: Erase coverage
+        run: |
+          coverage erase
+      - name: Test loguru pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-loguru" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test opentelemetry pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-opentelemetry" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test pure_eval pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-pure_eval" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test trytond pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-trytond" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Generate coverage XML
+        run: |
+          coverage combine .coverage*
+          coverage xml -i
+      - uses: codecov/codecov-action@v4
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+  check_required_tests:
+    name: All Miscellaneous tests passed
+    needs: test-miscellaneous-pinned
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test-miscellaneous-pinned.result, 'failure') || contains(needs.test-miscellaneous-pinned.result, 'skipped')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integrations-networking.yml b/.github/workflows/test-integrations-networking.yml
new file mode 100644
index 0000000000..2cabce57ad
--- /dev/null
+++ b/.github/workflows/test-integrations-networking.yml
@@ -0,0 +1,122 @@
+name: Test Networking
+on:
+  push:
+    branches:
+      - master
+      - release/**
+      - 1.x
+  pull_request:
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+permissions:
+  contents: read
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+jobs:
+  test-networking-pinned:
+    name: Networking (pinned)
+    timeout-minutes: 30
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+    steps:
+      - uses: actions/checkout@v4.1.1
+      - uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+        env:
+          PIP_TRUSTED_HOST: "pypi.python.org pypi.org files.pythonhosted.org"
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+      - name: Erase coverage
+        run: |
+          coverage erase
+      - name: Test gevent pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-gevent" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test grpc pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-grpc" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test httpx pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-httpx" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test requests pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-requests" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Generate coverage XML
+        run: |
+          coverage combine .coverage*
+          coverage xml -i
+      - uses: codecov/codecov-action@v4
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+  test-networking-py27:
+    name: Networking (py27)
+    timeout-minutes: 30
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    steps:
+      - uses: actions/checkout@v4.1.1
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+      - name: Erase coverage
+        run: |
+          coverage erase
+      - name: Test gevent py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-gevent" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test grpc py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-grpc" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test httpx py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-httpx" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test requests py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-requests" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Generate coverage XML
+        run: |
+          coverage combine .coverage*
+          coverage xml -i
+      - uses: codecov/codecov-action@v4
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+  check_required_tests:
+    name: All Networking tests passed
+    needs: [test-networking-pinned, test-networking-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test-networking-pinned.result, 'failure') || contains(needs.test-networking-pinned.result, 'skipped')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-networking-py27.result, 'failure') || contains(needs.test-networking-py27.result, 'skipped')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integrations-web-frameworks-1.yml b/.github/workflows/test-integrations-web-frameworks-1.yml
new file mode 100644
index 0000000000..b9ba7f8bc8
--- /dev/null
+++ b/.github/workflows/test-integrations-web-frameworks-1.yml
@@ -0,0 +1,164 @@
+name: Test Web Frameworks 1
+on:
+  push:
+    branches:
+      - master
+      - release/**
+      - 1.x
+  pull_request:
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+permissions:
+  contents: read
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+jobs:
+  test-web_frameworks_1-pinned:
+    name: Web Frameworks 1 (pinned)
+    timeout-minutes: 30
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+    services:
+      postgres:
+        image: postgres
+        env:
+          POSTGRES_PASSWORD: sentry
+        # Set health checks to wait until postgres has started
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        # Maps tcp port 5432 on service container to the host
+        ports:
+          - 5432:5432
+    env:
+      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
+      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
+      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
+      SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
+    steps:
+      - uses: actions/checkout@v4.1.1
+      - uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+        env:
+          PIP_TRUSTED_HOST: "pypi.python.org pypi.org files.pythonhosted.org"
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+          psql postgresql://postgres:sentry@localhost:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
+          psql postgresql://postgres:sentry@localhost:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
+      - name: Erase coverage
+        run: |
+          coverage erase
+      - name: Test django pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-django" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test fastapi pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-fastapi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test flask pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-flask" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test starlette pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-starlette" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Generate coverage XML
+        run: |
+          coverage combine .coverage*
+          coverage xml -i
+      - uses: codecov/codecov-action@v4
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+  test-web_frameworks_1-py27:
+    name: Web Frameworks 1 (py27)
+    timeout-minutes: 30
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    services:
+      postgres:
+        image: postgres
+        env:
+          POSTGRES_PASSWORD: sentry
+        # Set health checks to wait until postgres has started
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        # Maps tcp port 5432 on service container to the host
+        ports:
+          - 5432:5432
+    env:
+      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
+      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
+      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
+      SENTRY_PYTHON_TEST_POSTGRES_HOST: postgres
+    steps:
+      - uses: actions/checkout@v4.1.1
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+          psql postgresql://postgres:sentry@postgres:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
+          psql postgresql://postgres:sentry@postgres:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
+      - name: Erase coverage
+        run: |
+          coverage erase
+      - name: Test django py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-django" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test fastapi py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-fastapi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test flask py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-flask" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test starlette py27
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py2.7-starlette" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Generate coverage XML
+        run: |
+          coverage combine .coverage*
+          coverage xml -i
+      - uses: codecov/codecov-action@v4
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
+          files: coverage.xml
+  check_required_tests:
+    name: All Web Frameworks 1 tests passed
+    needs: [test-web_frameworks_1-pinned, test-web_frameworks_1-py27]
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test-web_frameworks_1-pinned.result, 'failure') || contains(needs.test-web_frameworks_1-pinned.result, 'skipped')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      - name: Check for 2.7 failures
+        if: contains(needs.test-web_frameworks_1-py27.result, 'failure') || contains(needs.test-web_frameworks_1-py27.result, 'skipped')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integrations-web-frameworks-2.yml b/.github/workflows/test-integrations-web-frameworks-2.yml
new file mode 100644
index 0000000000..0bb4828d94
--- /dev/null
+++ b/.github/workflows/test-integrations-web-frameworks-2.yml
@@ -0,0 +1,162 @@
+name: Test Web Frameworks 2
+on:
+  push:
+    branches:
+      - master
+      - release/**
+      - 1.x
+  pull_request:
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+permissions:
+  contents: read
+env:
+  BUILD_CACHE_KEY: ${{ github.sha }}
+  CACHED_BUILD_PATHS: |
+    ${{ github.workspace }}/dist-serverless
+jobs:
+  test-web_frameworks_2-pinned:
+    name: Web Frameworks 2 (pinned)
+    timeout-minutes: 30
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.5","3.6","3.7","3.8","3.9","3.11","3.12"]
+        # python3.6 reached EOL and is no longer being supported on
+        # new versions of hosted runners on Github Actions
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+    steps:
+      - uses: actions/checkout@v4.1.1
+      - uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python-version }}
+        env:
+          PIP_TRUSTED_HOST: "pypi.python.org pypi.org files.pythonhosted.org"
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+      - name: Erase coverage
+        run: |
+          coverage erase
+      - name: Test aiohttp pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-aiohttp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test asgi pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-asgi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test bottle pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-bottle" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test falcon pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-falcon" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test pyramid pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-pyramid" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test quart pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-quart" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test sanic pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-sanic" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test starlite pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-starlite" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+      - name: Test tornado pinned
+        run: |
+          set -x # print commands that are executed
+          ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-tornado" --cov=tests --cov=sentry_sdk
--cov-report= --cov-branch + - name: Generate coverage XML + run: | + coverage combine .coverage* + coverage xml -i + - uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: coverage.xml + test-web_frameworks_2-py27: + name: Web Frameworks 2 (py27) + timeout-minutes: 30 + runs-on: ubuntu-20.04 + container: python:2.7 + steps: + - uses: actions/checkout@v4.1.1 + - name: Setup Test Env + run: | + pip install coverage "tox>=3,<4" + - name: Erase coverage + run: | + coverage erase + - name: Test aiohttp py27 + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py2.7-aiohttp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + - name: Test asgi py27 + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py2.7-asgi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + - name: Test bottle py27 + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py2.7-bottle" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + - name: Test falcon py27 + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py2.7-falcon" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + - name: Test pyramid py27 + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py2.7-pyramid" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + - name: Test quart py27 + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py2.7-quart" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + - name: Test sanic py27 + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py2.7-sanic" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + - name: Test starlite py27 + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py2.7-starlite" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + - name: Test tornado py27 + run: | + set -x # print commands that are executed + ./scripts/runtox.sh --exclude-latest "py2.7-tornado" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch + - name: Generate coverage XML + run: | + coverage combine .coverage* + coverage xml -i + - uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: coverage.xml + check_required_tests: + name: All Web Frameworks 2 tests passed + needs: [test-web_frameworks_2-pinned, test-web_frameworks_2-py27] + # Always run this, even if a dependent job failed + if: always() + runs-on: ubuntu-20.04 + steps: + - name: Check for failures + if: contains(needs.test-web_frameworks_2-pinned.result, 'failure') || contains(needs.test-web_frameworks_2-pinned.result, 'skipped') + run: | + echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1 + - name: Check for 2.7 failures + if: contains(needs.test-web_frameworks_2-py27.result, 'failure') || contains(needs.test-web_frameworks_2-py27.result, 'skipped') + run: | + echo "One of the dependent jobs has failed. You may need to re-run it." 
&& exit 1 diff --git a/.gitignore b/.gitignore index e23931921e..9dcdf030d3 100644 --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,7 @@ pip-log.txt /build /dist /dist-serverless +sentry-python-serverless*.zip .cache .idea .eggs @@ -23,3 +24,4 @@ venv relay pip-wheel-metadata .mypy_cache +.vscode/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 753558186f..775167c10f 100644 @@ -2,18 +2,19 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.2.0 + rev: v4.3.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - repo: https://github.com/psf/black - rev: stable + rev: 24.1.0 hooks: - id: black + exclude: ^(.*_pb2.py|.*_pb2_grpc.py) -- repo: https://gitlab.com/pycqa/flake8 - rev: 4.0.1 +- repo: https://github.com/pycqa/flake8 + rev: 5.0.4 hooks: - id: flake8 diff --git a/.tool-versions b/.tool-versions new file mode 100644 index 0000000000..d316e6d5f1 --- /dev/null +++ b/.tool-versions @@ -0,0 +1 @@ +python 3.7.12 diff --git a/.vscode/settings.json b/.vscode/settings.json deleted file mode 100644 index c167a13dc2..0000000000 --- a/.vscode/settings.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "python.pythonPath": ".venv/bin/python", - "python.formatting.provider": "black" -} \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 41a1dcb045..7257c3d34c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,1824 @@ # Changelog +## 1.45.1 + +**This is a security backport release.** + +- Don't send full env to subprocess (892dd800) by @kmichel-aiven + + See also https://github.com/getsentry/sentry-python/security/advisories/GHSA-g92j-qhmh-64v2 + +## 1.45.0 + +This is the final 1.x release for the foreseeable future. Development will continue on the 2.x release line. The first 2.x version will be available in the next few weeks. + +### Various fixes & improvements + +- Allow to upsert monitors (#2929) by @sentrivana + + It's now possible to provide `monitor_config` to the `monitor` decorator/context manager directly: + + ```python + from sentry_sdk.crons import monitor + + # All keys except `schedule` are optional + monitor_config = { + "schedule": {"type": "crontab", "value": "0 0 * * *"}, + "timezone": "Europe/Vienna", + "checkin_margin": 10, + "max_runtime": 10, + "failure_issue_threshold": 5, + "recovery_threshold": 5, + } + + @monitor(monitor_slug='', monitor_config=monitor_config) + def tell_the_world(): + print('My scheduled task...') + ``` + + Check out [the cron docs](https://docs.sentry.io/platforms/python/crons/) for details. + +- Add Django `signals_denylist` to filter signals that are attached to by `signals_spans` (#2758) by @lieryan + + If you want to exclude some Django signals from performance tracking, you can use the new `signals_denylist` Django option: + + ```python + import django.db.models.signals + import sentry_sdk + + sentry_sdk.init( + ... + integrations=[ + DjangoIntegration( + ... + signals_denylist=[ + django.db.models.signals.pre_init, + django.db.models.signals.post_init, + ], + ), + ], + ) + ``` + +- `increment` for metrics (#2588) by @mitsuhiko + + `increment` and `inc` are equivalent, so you can pick whichever you like more. + +- Add `value`, `unit` to `before_emit_metric` (#2958) by @sentrivana + + If you add a custom `before_emit_metric`, it'll now accept 4 arguments (the `key`, `value`, `unit` and `tags`) instead of just `key` and `tags`.
+ + ```python + def before_emit(key, value, unit, tags): + if key == "removed-metric": + return False + tags["extra"] = "foo" + del tags["release"] + return True + + sentry_sdk.init( + ... + _experiments={ + "before_emit_metric": before_emit, + } + ) + ``` + +- Remove experimental metric summary options (#2957) by @sentrivana + + The `_experiments` options `metrics_summary_sample_rate` and `should_summarize_metric` have been removed. + +- New normalization rules for metric keys, names, units, tags (#2946) by @sentrivana +- Change `data_category` from `statsd` to `metric_bucket` (#2954) by @cleptric +- Accessing `__mro__` might throw a `ValueError` (#2952) by @sentrivana +- Suppress prompt spawned by subprocess when using `pythonw` (#2936) by @collinbanko +- Handle `None` in GraphQL query #2715 (#2762) by @czyber +- Do not send "quiet" Sanic exceptions to Sentry (#2821) by @hamedsh +- Implement `metric_bucket` rate limits (#2933) by @cleptric +- Fix type hints for `monitor` decorator (#2944) by @szokeasaurusrex +- Remove deprecated `typing` imports in crons (#2945) by @szokeasaurusrex +- Make `monitor_config` a `TypedDict` (#2931) by @sentrivana +- Add `devenv-requirements.txt` and update env setup instructions (#2761) by @arr-ee +- Bump `types-protobuf` from `4.24.0.20240311` to `4.24.0.20240408` (#2941) by @dependabot +- Disable Codecov check run annotations (#2537) by @eliatcodecov + +## 1.44.1 + +### Various fixes & improvements + +- Make `monitor` async friendly (#2912) by @sentrivana + + You can now decorate your async functions with the `monitor` + decorator and they will correctly report their duration + and completion status. + +- Fixed `Event | None` runtime `TypeError` (#2928) by @szokeasaurusrex + +## 1.44.0 + +### Various fixes & improvements + +- ref: Define types at runtime (#2914) by @szokeasaurusrex +- Explicit reexport of types (#2866) (#2913) by @szokeasaurusrex +- feat(profiling): Add thread data to spans (#2843) by @Zylphrex + +## 1.43.0 + +### Various fixes & improvements + +- Add optional `keep_alive` (#2842) by @sentrivana + + If you're experiencing frequent network issues between the SDK and Sentry, + you can try turning on TCP keep-alive: + + ```python + import sentry_sdk + + sentry_sdk.init( + # ...your usual settings... + keep_alive=True, + ) + ``` + +- Add support for Celery Redbeat cron tasks (#2643) by @kwigley + + The SDK now supports the Redbeat scheduler in addition to the default + Celery Beat scheduler for auto instrumenting crons. See + [the docs](https://docs.sentry.io/platforms/python/integrations/celery/crons/) + for more information about how to set this up. + +- `aws_event` can be an empty list (#2849) by @sentrivana +- Re-export `Event` in `types.py` (#2829) by @szokeasaurusrex +- Small API docs improvement (#2828) by @antonpirker +- Fixed OpenAI tests (#2834) by @antonpirker +- Bump `checkouts/data-schemas` from `ed078ed` to `8232f17` (#2832) by @dependabot + +## 1.42.0 + +### Various fixes & improvements + +- **New integration:** [OpenAI integration](https://docs.sentry.io/platforms/python/integrations/openai/) (#2791) by @colin-sentry + + We added an integration for OpenAI to capture errors and also performance data when using the OpenAI Python SDK. + + Usage: + + This integration is auto-enabling, so if you have the `openai` package in your project it will be enabled. Just initialize Sentry before you create your OpenAI client.
+ + ```python + from openai import OpenAI + + import sentry_sdk + + sentry_sdk.init( + dsn="___PUBLIC_DSN___", + enable_tracing=True, + traces_sample_rate=1.0, + ) + + client = OpenAI() + ``` + + For more information, see the documentation for [OpenAI integration](https://docs.sentry.io/platforms/python/integrations/openai/). + +- Discard open OpenTelemetry spans after 10 minutes (#2801) by @antonpirker +- Propagate sentry-trace and baggage headers to Huey tasks (#2792) by @cnschn +- Added Event type (#2753) by @szokeasaurusrex +- Improve scrub_dict typing (#2768) by @szokeasaurusrex +- Dependencies: bump types-protobuf from 4.24.0.20240302 to 4.24.0.20240311 (#2797) by @dependabot + +## 1.41.0 + +### Various fixes & improvements + +- Add recursive scrubbing to `EventScrubber` (#2755) by @Cheapshot003 + + By default, the `EventScrubber` will not search your events for potential + PII recursively. With this release, you can enable this behavior with: + + ```python + import sentry_sdk + from sentry_sdk.scrubber import EventScrubber + + sentry_sdk.init( + # ...your usual settings... + event_scrubber=EventScrubber(recursive=True), + ) + ``` + +- Expose `socket_options` (#2786) by @sentrivana + + If the SDK is experiencing connection issues (connection resets, server + closing connection without response, etc.) while sending events to Sentry, + tweaking the default `urllib3` socket options to the following can help: + + ```python + import socket + from urllib3.connection import HTTPConnection + import sentry_sdk + + sentry_sdk.init( + # ...your usual settings... + socket_options=HTTPConnection.default_socket_options + [ + (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), + # note: skip the following line if you're on MacOS since TCP_KEEPIDLE doesn't exist there + (socket.SOL_TCP, socket.TCP_KEEPIDLE, 45), + (socket.SOL_TCP, socket.TCP_KEEPINTVL, 10), + (socket.SOL_TCP, socket.TCP_KEEPCNT, 6), + ], + ) + ``` + +- Allow to configure merge target for releases (#2777) by @sentrivana +- Allow empty character in metric tags values (#2775) by @viglia +- Replace invalid tag values with an empty string instead of _ (#2773) by @markushi +- Add documentation comment to `scrub_list` (#2769) by @szokeasaurusrex +- Fixed regex to parse version in lambda package file (#2767) by @antonpirker +- xfail broken AWS Lambda tests for now (#2794) by @sentrivana +- Removed print statements because it messes with the tests (#2789) by @antonpirker +- Bump `types-protobuf` from 4.24.0.20240129 to 4.24.0.20240302 (#2782) by @dependabot +- Bump `checkouts/data-schemas` from `eb941c2` to `ed078ed` (#2781) by @dependabot + +## 1.40.6 + +### Various fixes & improvements + +- Fix compatibility with `greenlet`/`gevent` (#2756) by @sentrivana +- Fix query source relative filepath (#2717) by @gggritso +- Support `clickhouse-driver==0.2.7` (#2752) by @sentrivana +- Bump `checkouts/data-schemas` from `6121fd3` to `eb941c2` (#2747) by @dependabot + +## 1.40.5 + +### Various fixes & improvements + +- Deprecate `last_event_id()`. (#2749) by @antonpirker +- Warn if uWSGI is set up without proper thread support (#2738) by @sentrivana + + uWSGI has to be run in threaded mode for the SDK to run properly. If this is + not the case, the consequences could range from features not working unexpectedly + to uWSGI workers crashing. + + Please make sure to run uWSGI with both `--enable-threads` and `--py-call-uwsgi-fork-hooks`. 
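For example, a minimal invocation with both flags might look like this (a sketch only; the HTTP socket, WSGI module, and `--master` are placeholders for your own app's settings):

```
uwsgi --http :8000 --wsgi-file app.py --master --enable-threads --py-call-uwsgi-fork-hooks
```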
+ +- `parsed_url` can be `None` (#2734) by @sentrivana +- Python 3.7 is not supported anymore by Lambda, so removed it and added 3.12 (#2729) by @antonpirker + +## 1.40.4 + +### Various fixes & improvements + +- Only start metrics flusher thread on demand (#2727) by @sentrivana +- Bump checkouts/data-schemas from `aa7058c` to `6121fd3` (#2724) by @dependabot + +## 1.40.3 + +### Various fixes & improvements + +- Turn off metrics for uWSGI (#2720) by @sentrivana +- Minor improvements (#2714) by @antonpirker + +## 1.40.2 + +### Various fixes & improvements + +- test: Fix `pytest` error (#2712) by @szokeasaurusrex +- build(deps): bump types-protobuf from 4.24.0.4 to 4.24.0.20240129 (#2691) by @dependabot + +## 1.40.1 + +### Various fixes & improvements + +- Fix uWSGI workers hanging (#2694) by @sentrivana +- Make metrics work with `gevent` (#2694) by @sentrivana +- Guard against `engine.url` being `None` (#2708) by @sentrivana +- Fix performance regression in `sentry_sdk.utils._generate_installed_modules` (#2703) by @GlenWalker +- Guard against Sentry initialization mid SQLAlchemy cursor (#2702) by @apmorton +- Fix yaml generation script (#2695) by @sentrivana +- Fix AWS Lambda workflow (#2710) by @sentrivana +- Bump `codecov/codecov-action` from 3 to 4 (#2706) by @dependabot +- Bump `actions/cache` from 3 to 4 (#2661) by @dependabot +- Bump `actions/checkout` from 3.1.0 to 4.1.1 (#2561) by @dependabot +- Bump `github/codeql-action` from 2 to 3 (#2603) by @dependabot +- Bump `actions/setup-python` from 4 to 5 (#2577) by @dependabot + +## 1.40.0 + +### Various fixes & improvements + +- Enable metrics related settings by default (#2685) by @iambriccardo +- Fix `UnicodeDecodeError` on Python 2 (#2657) by @sentrivana +- Enable DB query source by default (#2629) by @sentrivana +- Fix query source duration check (#2675) by @sentrivana +- Reformat with `black==24.1.0` (#2680) by @sentrivana +- Cleaning up existing code to prepare for new Scopes API (#2611) by @antonpirker +- Moved redis related tests to databases (#2674) by @antonpirker +- Improve `sentry_sdk.trace` type hints (#2633) by @szokeasaurusrex +- Bump `checkouts/data-schemas` from `e9f7d58` to `aa7058c` (#2639) by @dependabot + +## 1.39.2 + +### Various fixes & improvements + +- Fix timestamp in transaction created by OTel (#2627) by @antonpirker +- Fix relative path in DB query source (#2624) by @antonpirker +- Run more CI checks on 2.0 branch (#2625) by @sentrivana +- Fix tracing `TypeError` for static and class methods (#2559) by @szokeasaurusrex +- Fix missing `ctx` in Arq integration (#2600) by @ivanovart +- Change `data_category` from `check_in` to `monitor` (#2598) by @sentrivana + +## 1.39.1 + +### Various fixes & improvements + +- Fix psycopg2 detection in the Django integration (#2593) by @sentrivana +- Filter out empty string releases (#2591) by @sentrivana +- Fixed local var not present when there is an error in a user's `error_sampler` function (#2511) by @antonpirker +- Fixed typing in `aiohttp` (#2590) by @antonpirker + +## 1.39.0 + +### Various fixes & improvements + +- Add support for cluster clients from Redis SDK (#2394) by @md384 +- Improve location reporting for timer metrics (#2552) by @mitsuhiko +- Fix Celery `TypeError` with no-argument `apply_async` (#2575) by @szokeasaurusrex +- Fix Lambda integration with EventBridge source (#2546) by @davidcroda +- Add max tries to Spotlight (#2571) by @hazAT +- Handle `os.path.devnull` access issues (#2579) by @sentrivana +- Change `code.filepath` frame picking logic (#2568) by 
@sentrivana +- Trigger AWS Lambda tests on label (#2538) by @sentrivana +- Run permissions step on pull_request_target but not push (#2548) by @sentrivana +- Hash AWS Lambda test functions based on current revision (#2557) by @sentrivana +- Update Django version in tests (#2562) by @sentrivana +- Make metrics tests non-flaky (#2572) by @antonpirker + +## 1.38.0 + +### Various fixes & improvements + +- Only add trace context to checkins and do not run `event_processors` for checkins (#2536) by @antonpirker +- Metric span summaries (#2522) by @mitsuhiko +- Add source context to code locations (#2539) by @jan-auer +- Use in-app filepath instead of absolute path (#2541) by @antonpirker +- Switch to `jinja2` for generating CI yamls (#2534) by @sentrivana + +## 1.37.1 + +### Various fixes & improvements + +- Fix `NameError` on `parse_version` with eventlet (#2532) by @sentrivana +- build(deps): bump checkouts/data-schemas from `68def1e` to `e9f7d58` (#2501) by @dependabot + +## 1.37.0 + +### Various fixes & improvements + +- Move installed modules code to utils (#2429) by @sentrivana + + Note: We moved the internal function `_get_installed_modules` from `sentry_sdk.integrations.modules` to `sentry_sdk.utils`. + So if you use this function, you have to update your imports accordingly (e.g. `from sentry_sdk.utils import _get_installed_modules`). + +- Add code locations for metrics (#2526) by @jan-auer +- Add query source to DB spans (#2521) by @antonpirker +- Send events to Spotlight sidecar (#2524) by @HazAT +- Run integration tests with newest `pytest` (#2518) by @sentrivana +- Bring tests up to date (#2512) by @sentrivana +- Fix: Prevent global var from being discarded at shutdown (#2530) by @antonpirker +- Fix: Scope transaction source not being updated in scope.span setter (#2519) by @sl0thentr0py + +## 1.36.0 + +### Various fixes & improvements + +- Django: Support Django 5.0 (#2490) by @sentrivana +- Django: Handling ASGI body in the right way. (#2513) by @antonpirker +- Flask: Test with Flask 3.0 (#2506) by @sentrivana +- Celery: Do not create a span when task is triggered by Celery Beat (#2510) by @antonpirker +- Redis: Ensure `RedisIntegration` is disabled, unless `redis` is installed (#2504) by @szokeasaurusrex +- Quart: Fix Quart integration for Quart 0.19.4 (#2516) by @antonpirker +- gRPC: Make async gRPC less noisy (#2507) by @jyggen + +## 1.35.0 + +### Various fixes & improvements + +- **Updated gRPC integration:** Asyncio interceptors and easier setup (#2369) by @fdellekart + + Our gRPC integration now instruments incoming unary-unary grpc requests and outgoing unary-unary, unary-stream grpc requests using grpcio channels. Everything now works for both sync and async code. + + Before this release you had to add Sentry interceptors by hand to your gRPC code; now the only thing you need to do is add the `GRPCIntegration` to your `sentry_sdk.init()` call. (See [documentation](https://docs.sentry.io/platforms/python/integrations/grpc/) for more information): + + ```python + import sentry_sdk + from sentry_sdk.integrations.grpc import GRPCIntegration + + sentry_sdk.init( + dsn="___PUBLIC_DSN___", + enable_tracing=True, + integrations=[ + GRPCIntegration(), + ], + ) + ``` + The old way still works, but we strongly encourage you to update your code to the way described above.
+ +- Python 3.12: Replace deprecated datetime functions (#2502) by @sentrivana +- Metrics: Unify datetime format (#2409) by @mitsuhiko +- Celery: Set correct data in `check_in`s (#2500) by @antonpirker +- Celery: Read timezone for Crons monitors from `celery_schedule` if existing (#2497) by @antonpirker +- Django: Removing redundant code in Django tests (#2491) by @vagi8 +- Django: Make reading the request body work in Django ASGI apps. (#2495) by @antonpirker +- FastAPI: Use wraps on fastapi request call wrapper (#2476) by @nkaras +- Fix: Probe for psycopg2 and psycopg3 parameters function. (#2492) by @antonpirker +- Fix: Remove unnecessary TYPE_CHECKING alias (#2467) by @rafrafek + +## 1.34.0 + +### Various fixes & improvements +- Added Python 3.12 support (#2471, #2483) +- Handle missing `connection_kwargs` in `patch_redis_client` (#2482) by @szokeasaurusrex +- Run common test suite on Python 3.12 (#2479) by @sentrivana + +## 1.33.1 + +### Various fixes & improvements + +- Make parse_version work in utils.py itself. (#2474) by @antonpirker + +## 1.33.0 + +### Various fixes & improvements + +- New: Added `error_sampler` option (#2456) by @szokeasaurusrex +- Python 3.12: Detect interpreter in shutdown state on thread spawn (#2468) by @mitsuhiko +- Patch eventlet under Sentry SDK (#2464) by @szokeasaurusrex +- Mitigate CPU spikes when sending lots of events with lots of data (#2449) by @antonpirker +- Make `debug` option also configurable via environment (#2450) by @antonpirker +- Make sure `get_dsn_parameters` is an actual function (#2441) by @sentrivana +- Bump pytest-localserver, add compat comment (#2448) by @sentrivana +- AWS Lambda: Update compatible runtimes for AWS Lambda layer (#2453) by @antonpirker +- AWS Lambda: Load AWS Lambda secrets in Github CI (#2153) by @antonpirker +- Redis: Connection attributes in `redis` database spans (#2398) by @antonpirker +- Falcon: Falcon integration checks response status before reporting error (#2465) by @szokeasaurusrex +- Quart: Support Quart 0.19 onwards (#2403) by @pgjones +- Sanic: Sanic integration initial version (#2419) by @szokeasaurusrex +- Django: Fix parsing of Django `path` patterns (#2452) by @sentrivana +- Django: Add Django 4.2 to test suite (#2462) by @sentrivana +- Polish changelog (#2434) by @sentrivana +- Update CONTRIBUTING.md (#2443) by @krishvsoni +- Update README.md (#2435) by @sentrivana + +## 1.32.0 + +### Various fixes & improvements + +- **New:** Error monitoring for some of the most popular Python GraphQL libraries: + - Add [GQL GraphQL integration](https://docs.sentry.io/platforms/python/integrations/gql/) (#2368) by @szokeasaurusrex + + Usage: + + ```python + import sentry_sdk + from sentry_sdk.integrations.gql import GQLIntegration + + sentry_sdk.init( + dsn='___PUBLIC_DSN___', + integrations=[ + GQLIntegration(), + ], + ) + ``` + + - Add [Graphene GraphQL error integration](https://docs.sentry.io/platforms/python/integrations/graphene/) (#2389) by @sentrivana + + Usage: + + ```python + import sentry_sdk + from sentry_sdk.integrations.graphene import GrapheneIntegration + + sentry_sdk.init( + dsn='___PUBLIC_DSN___', + integrations=[ + GrapheneIntegration(), + ], + ) + ``` + + - Add [Strawberry GraphQL error & tracing integration](https://docs.sentry.io/platforms/python/integrations/strawberry/) (#2393) by @sentrivana + + Usage: + + ```python + import sentry_sdk + from sentry_sdk.integrations.strawberry import StrawberryIntegration + + sentry_sdk.init( + dsn='___PUBLIC_DSN___', + integrations=[ + # make sure to 
set async_execution to False if you're executing + # GraphQL queries synchronously + StrawberryIntegration(async_execution=True), + ], + traces_sample_rate=1.0, + ) + ``` + + - Add [Ariadne GraphQL error integration](https://docs.sentry.io/platforms/python/integrations/ariadne/) (#2387) by @sentrivana + + Usage: + + ```python + import sentry_sdk + from sentry_sdk.integrations.ariadne import AriadneIntegration + + sentry_sdk.init( + dsn='___PUBLIC_DSN___', + integrations=[ + AriadneIntegration(), + ], + ) + ``` + +- Capture multiple named groups again (#2432) by @sentrivana +- Don't fail when upstream scheme is unusual (#2371) by @vanschelven +- Support new RQ version (#2405) by @antonpirker +- Remove `utcnow`, `utcfromtimestamp` deprecated in Python 3.12 (#2415) by @rmad17 +- Add `trace` to `__all__` in top-level `__init__.py` (#2401) by @lobsterkatie +- Move minimetrics code to the SDK (#2385) by @mitsuhiko +- Add configurable compression levels (#2382) by @mitsuhiko +- Shift flushing by up to a rollup window (#2396) by @mitsuhiko +- Make a consistent noop flush behavior (#2428) by @mitsuhiko +- Stronger recursion protection (#2426) by @mitsuhiko +- Remove `OpenTelemetryIntegration` from `__init__.py` (#2379) by @sentrivana +- Update API docs (#2397) by @antonpirker +- Pin some test requirements because new majors break our tests (#2404) by @antonpirker +- Run more `requests`, `celery`, `falcon` tests (#2414) by @sentrivana +- Move `importorskip`s in tests to `__init__.py` files (#2412) by @sentrivana +- Fix `mypy` errors (#2433) by @sentrivana +- Fix pre-commit issues (#2424) by @bukzor-sentryio +- Update [CONTRIBUTING.md](https://github.com/getsentry/sentry-python/blob/master/CONTRIBUTING.md) (#2411) by @sentrivana +- Bump `sphinx` from 7.2.5 to 7.2.6 (#2378) by @dependabot +- [Experimental] Add explain plan to DB spans (#2315) by @antonpirker + +## 1.31.0 + +### Various fixes & improvements + +- **New:** Add integration for `clickhouse-driver` (#2167) by @mimre25 + + See the documentation for [clickhouse-driver](https://docs.sentry.io/platforms/python/configuration/integrations/clickhouse-driver) for more information. + + Usage: + + ```python + import sentry_sdk + from sentry_sdk.integrations.clickhouse_driver import ClickhouseDriverIntegration + + sentry_sdk.init( + dsn='___PUBLIC_DSN___', + integrations=[ + ClickhouseDriverIntegration(), + ], + ) + ``` + +- **New:** Add integration for `asyncpg` (#2314) by @mimre25 + + See the documentation for [asyncpg](https://docs.sentry.io/platforms/python/configuration/integrations/asyncpg/) for more information. + + Usage: + + ```python + import sentry_sdk + from sentry_sdk.integrations.asyncpg import AsyncPGIntegration + + sentry_sdk.init( + dsn='___PUBLIC_DSN___', + integrations=[ + AsyncPGIntegration(), + ], + ) + ``` + +- **New:** Allow to override `propagate_traces` in `Celery` per task (#2331) by @jan-auer + + See the documentation for [Celery](https://docs.sentry.io/platforms/python/guides/celery/#distributed-traces) for more information. + + Usage: + ```python + import sentry_sdk + from sentry_sdk.integrations.celery import CeleryIntegration + + # Enable global distributed traces (this is the default, just to be explicit.) + sentry_sdk.init( + dsn='___PUBLIC_DSN___', + integrations=[ + CeleryIntegration(propagate_traces=True), + ], + ) + + ... + + # This will NOT propagate the trace.
(The task will start its own trace): + my_task_b.apply_async( + args=("some_parameter", ), + headers={"sentry-propagate-traces": False}, + ) + ``` + +- Prevent Falcon integration from breaking ASGI apps (#2359) by @szokeasaurusrex +- Backpressure: only downsample a max of 10 times (#2347) by @sl0thentr0py +- Made NoOpSpan compatible to Transactions. (#2364) by @antonpirker +- Cleanup ASGI integration (#2335) by @antonpirker +- Pin anyio in tests (dep of httpx), because new major 4.0.0 breaks tests. (#2336) by @antonpirker +- Added link to backpressure section in docs. (#2354) by @antonpirker +- Add .vscode to .gitignore (#2317) by @shoaib-mohd +- Documenting Spans and Transactions (#2358) by @antonpirker +- Fix in profiler: do not call getcwd from module root (#2329) by @Zylphrex +- Fix deprecated version attribute (#2338) by @vagi8 +- Fix transaction name in Starlette and FastAPI (#2341) by @antonpirker +- Fix tests using Postgres (#2362) by @antonpirker +- build(deps): Updated linting tooling (#2350) by @antonpirker +- build(deps): bump sphinx from 7.2.4 to 7.2.5 (#2344) by @dependabot +- build(deps): bump actions/checkout from 2 to 4 (#2352) by @dependabot +- build(deps): bump checkouts/data-schemas from `ebc77d3` to `68def1e` (#2351) by @dependabot + +## 1.30.0 + +### Various fixes & improvements + +- Officially support Python 3.11 (#2300) by @sentrivana +- Context manager monitor (#2290) by @szokeasaurusrex +- Set response status code in transaction `response` context. (#2312) by @antonpirker +- Add missing context kwarg to `_sentry_task_factory` (#2267) by @JohnnyDeuss +- In Postgres take the connection params from the connection (#2308) by @antonpirker +- Experimental: Allow using OTel for performance instrumentation (#2272) by @sentrivana + + This release includes experimental support for replacing Sentry's default + performance monitoring solution with one powered by OpenTelemetry without having + to do any manual setup. + + Try it out by installing `pip install sentry-sdk[opentelemetry-experimental]` and + then initializing the SDK with: + + ```python + sentry_sdk.init( + # ...your usual options... + _experiments={"otel_powered_performance": True}, + ) + ``` + + This enables OpenTelemetry performance monitoring support for some of the most + popular frameworks and libraries (Flask, Django, FastAPI, requests...). + + We're looking forward to your feedback! Please let us know about your experience + in this discussion: https://github.com/getsentry/sentry/discussions/55023 + + **Important note:** Please note that this feature is experimental and in a + proof-of-concept stage and is not meant for production use. It may be changed or + removed at any point. + +- Enable backpressure handling by default (#2298) by @sl0thentr0py + + The SDK now dynamically downsamples transactions to reduce backpressure in high + throughput systems. It starts a new `Monitor` thread to perform some health checks + which decide to downsample (halved each time) in 10 second intervals till the system + is healthy again. + + To disable this behavior, use: + + ```python + sentry_sdk.init( + # ...your usual options... + enable_backpressure_handling=False, + ) + ``` + + If your system serves heavy load, please let us know how this feature works for you! + + Check out the [documentation](https://docs.sentry.io/platforms/python/configuration/options/#enable-backpressure-handling) for more information. 
+ +- Stop recording spans for internal web requests to Sentry (#2297) by @szokeasaurusrex +- Add test for `ThreadPoolExecutor` (#2259) by @gggritso +- Add docstrings for `Scope.update_from_*` (#2311) by @sentrivana +- Moved `is_sentry_url` to utils (#2304) by @szokeasaurusrex +- Fix: arq attribute error on settings, support worker args (#2260) by @rossmacarthur +- Fix: Exceptions include detail property for their value (#2193) by @nicolassanmar +- build(deps): bump mypy from 1.4.1 to 1.5.1 (#2319) by @dependabot +- build(deps): bump sphinx from 7.1.2 to 7.2.4 (#2322) by @dependabot +- build(deps): bump sphinx from 7.0.1 to 7.1.2 (#2296) by @dependabot +- build(deps): bump checkouts/data-schemas from `1b85152` to `ebc77d3` (#2254) by @dependabot + +## 1.29.2 + +### Various fixes & improvements + +- Revert GraphQL integration (#2287) by @sentrivana + +## 1.29.1 + +### Various fixes & improvements + +- Fix GraphQL integration swallowing responses (#2286) by @sentrivana +- Fix typo (#2283) by @sentrivana + +## 1.29.0 + +### Various fixes & improvements + +- Capture GraphQL client errors (#2243) by @sentrivana + - The SDK will now create dedicated errors whenever an HTTP client makes a request to a `/graphql` endpoint and the response contains an error. You can opt out of this by providing `capture_graphql_errors=False` to the HTTP client integration. +- Read MAX_VALUE_LENGTH from client options (#2121) (#2171) by @puittenbroek +- Rename `request_bodies` to `max_request_body_size` (#2247) by @mgaligniana +- Always sample checkin regardless of `sample_rate` (#2279) by @szokeasaurusrex +- Add information to short-interval cron error message (#2246) by @lobsterkatie +- Add DB connection attributes in spans (#2274) by @antonpirker +- Add `db.system` to remaining Redis spans (#2271) by @AbhiPrasad +- Clarified the procedure for running tests (#2276) by @szokeasaurusrex +- Fix Chalice tests (#2278) by @sentrivana +- Bump Black from 23.3.0 to 23.7.0 (#2256) by @dependabot +- Remove py3.4 from tox.ini (#2248) by @sentrivana + +## 1.28.1 + +### Various fixes & improvements + +- Redis: Add support for redis.asyncio (#1933) by @Zhenay +- Make sure each task that is started by Celery Beat has its own trace. (#2249) by @antonpirker +- Add Sampling Decision to Trace Envelope Header (#2239) by @antonpirker +- Do not add trace headers (`sentry-trace` and `baggage`) to HTTP requests to Sentry (#2240) by @antonpirker +- Prevent adding `sentry-trace` header multiple times (#2235) by @antonpirker +- Skip distributions with incomplete metadata (#2231) by @rominf +- Remove stale.yml (#2245) by @hubertdeng123 +- Django: Fix 404 Handler handler being labeled as "generic ASGI request" (#1277) by @BeryJu + +## 1.28.0 + +### Various fixes & improvements + +- Add support for cron jobs in ARQ integration (#2088) by @lewazo +- Backpressure handling prototype (#2189) by @sl0thentr0py +- Add "replay" context to event payload (#2234) by @antonpirker +- Update test Django app to be compatible for Django 4.x (#1794) by @DilLip-Chowdary-Codes + +## 1.27.1 + +### Various fixes & improvements + +- Add Starlette/FastAPI template tag for adding Sentry tracing information (#2225) by @antonpirker + - By adding `{{ sentry_trace_meta }}` to your Starlette/FastAPI Jinja2 templates we will include Sentry trace information as a meta tag in the rendered HTML to allow your frontend to pick up and continue the trace started in the backend. 
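As a sketch of where the tag goes (the surrounding markup is illustrative, not prescriptive), your base template's `<head>` is the natural place:

```html
<head>
  <!-- renders the Sentry trace information as a <meta> tag -->
  {{ sentry_trace_meta }}
</head>
```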
+- Fixed generation of baggage when a DSC is already in propagation context (#2232) by @antonpirker +- Handle explicitly passing `None` for `trace_configs` in `aiohttp` (#2230) by @Harmon758 +- Support newest Starlette versions (#2227) by @antonpirker + +## 1.27.0 + +### Various fixes & improvements + +- Support for SQLAlchemy 2.0 (#2200) by @antonpirker +- Add instrumentation of `aiohttp` client requests (#1761) by @md384 +- Add Django template tag for adding Sentry tracing information (#2222) by @antonpirker + - By adding `{{ sentry_trace_meta }}` to your Django templates we will include Sentry trace information as a meta tag in the rendered HTML to allow your frontend to pick up and continue the trace started in the backend. + +- Update Flask HTML meta helper (#2203) by @antonpirker +- Take trace ID always from propagation context (#2209) by @antonpirker +- Fix trace context in event payload (#2205) by @antonpirker +- Use new top level API in `trace_propagation_meta` (#2202) by @antonpirker +- Do not overwrite existing baggage on outgoing requests (#2191, #2214) by @sentrivana +- Set the transaction/span status from an OTel span (#2115) by @daniil-konovalenko +- Fix propagation of OTel `NonRecordingSpan` (#2187) by @hartungstenio +- Fix `TaskLockedException` handling in Huey integration (#2206) by @Zhenay +- Add message format configuration arguments to Loguru integration (#2208) by @Gwill +- Profiling: Add client reports for profiles (#2207) by @Zylphrex +- CI: Fix CI (#2220) by @antonpirker +- Dependencies: Bump `checkouts/data-schemas` from `7fdde87` to `1b85152` (#2218) by @dependabot +- Dependencies: Bump `mypy` from 1.3.0 to 1.4.1 (#2194) by @dependabot +- Docs: Change API doc theme (#2210) by @sentrivana +- Docs: Allow (some) autocompletion for top-level API (#2213) by @sentrivana +- Docs: Revert autocomplete hack (#2224) by @sentrivana + +## 1.26.0 + +### Various fixes & improvements + +- Tracing without performance (#2136) by @antonpirker +- Load tracing information from environment (#2176) by @antonpirker +- Auto-enable HTTPX integration if HTTPX installed (#2177) by @sentrivana +- Support for SOCKS proxies (#1050) by @Roguelazer +- Wrap `parse_url` calls in `capture_internal_exceptions` (#2162) by @sentrivana +- Run 2.7 tests in CI again (#2181) by @sentrivana +- Crons: Do not support sub-minute cron intervals (#2172) by @antonpirker +- Profile: Add function name to profiler frame cache (#2164) by @Zylphrex +- Dependencies: bump checkouts/data-schemas from `0ed3357` to `7fdde87` (#2165) by @dependabot +- Update changelog (#2163) by @sentrivana + +## 1.25.1 + +### Django update (ongoing) + +Collections of improvements to our Django integration. + +By: @mgaligniana (#1773) + +### Various fixes & improvements + +- Fix `parse_url` (#2161) by @sentrivana and @antonpirker + + Our URL sanitization used in multiple integrations broke with the recent Python security update. If you started seeing `ValueError`s with `"'Filtered' does not appear to be an IPv4 or IPv6 address"`, this release fixes that. See [the original issue](https://github.com/getsentry/sentry-python/issues/2160) for more context. + +- Better version parsing in integrations (#2152) by @antonpirker + + We now properly support all integration versions that conform to [PEP 440](https://peps.python.org/pep-0440/). This replaces our naïve version parsing that wouldn't accept versions such as `2.0.0rc1` or `2.0.5.post1`. 
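As a rough illustration of the change, `parse_version` from `sentry_sdk.utils` (see the 1.33.1 entry above) now handles such version strings; the exact return shape is an SDK internal, so treat the values below as indicative only:

```python
from sentry_sdk.utils import parse_version

# PEP 440 pre-/post-release strings now parse instead of being rejected.
# (Indicative output only; the exact tuple shape is an SDK internal.)
print(parse_version("2.0.0rc1"))     # e.g. (2, 0, 0)
print(parse_version("2.0.5.post1"))  # e.g. (2, 0, 5)
```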
+ +- Align HTTP status code as span data field `http.response.status_code` (#2113) by @antonpirker +- Do not encode cached value to determine size (#2143) by @sentrivana +- Fix using `unittest.mock` whenever available (#1926) by @mgorny +- Fix 2.7 `common` tests (#2145) by @sentrivana +- Bump `actions/stale` from `6` to `8` (#1978) by @dependabot +- Bump `black` from `22.12.0` to `23.3.0` (#1984) by @dependabot +- Bump `mypy` from `1.2.0` to `1.3.0` (#2110) by @dependabot +- Bump `sphinx` from `5.3.0` to `7.0.1` (#2112) by @dependabot + +## 1.25.0 + +### Various fixes & improvements + +- Support urllib3>=2.0.0 (#2148) by @asottile-sentry + + We're now supporting urllib3's new major version, 2.0.0. If you encounter issues (e.g. some of your dependencies not supporting the new urllib3 version yet) you might consider pinning the urllib3 version to `<2.0.0` manually in your project. Check out [the urllib3 migration guide](https://urllib3.readthedocs.io/en/latest/v2-migration-guide.html#migrating-as-an-application-developer) for details. + +- Auto-retry tests on failure (#2134) by @sentrivana +- Correct `importlib.metadata` check in `test_modules` (#2149) by @asottile-sentry +- Fix distribution name normalization (PEP-0503) (#2144) by @rominf +- Fix `functions_to_trace` typing (#2141) by @rcmarron + +## 1.24.0 + +### Various fixes & improvements + +- **New:** Celery Beat exclude tasks option (#2130) by @antonpirker + + You can exclude Celery Beat tasks from being auto-instrumented. To do this, add a list of tasks you want to exclude as option `exclude_beat_tasks` when creating `CeleryIntegration`. The list can contain simple strings with the full task name, as specified in the Celery Beat schedule, or regular expressions to match multiple tasks. + + See the documentation for [Crons](https://docs.sentry.io/platforms/python/guides/celery/crons/) for more information. + + Usage: + + ```python + exclude_beat_tasks = [ + "some-task-a", + "payment-check-.*", + ] + sentry_sdk.init( + dsn='___PUBLIC_DSN___', + integrations=[ + CeleryIntegration( + monitor_beat_tasks=True, + exclude_beat_tasks=exclude_beat_tasks, + ), + ], + ) + ``` + + In this example the task `some-task-a` and all tasks with a name starting with `payment-check-` will be ignored. + +- **New:** Add support for **ExceptionGroups** (#2025) by @antonpirker + + _Note:_ If running Self-Hosted Sentry, you should wait to adopt this SDK update until after updating to the 23.6.0 (est. June 2023) release of Sentry. Updating early will not break anything, but you will not get the full benefit of the Exception Groups improvements to issue grouping that were added to the Sentry backend. + +- Prefer `importlib.metadata` over `pkg_resources` if available (#2081) by @sentrivana +- Work with a copy of request, vars in the event (#2125) by @sentrivana +- Pinned version of dependency that broke the build (#2133) by @antonpirker + +## 1.23.1 + +### Various fixes & improvements + +- Disable Django Cache spans by default. (#2120) by @antonpirker + +## 1.23.0 + +### Various fixes & improvements + +- **New:** Add `loguru` integration (#1994) by @PerchunPak + + Check [the documentation](https://docs.sentry.io/platforms/python/configuration/integrations/loguru/) for more information.
+ + Usage: + + ```python + from loguru import logger + import sentry_sdk + from sentry_sdk.integrations.loguru import LoguruIntegration + + sentry_sdk.init( + dsn="___PUBLIC_DSN___", + integrations=[ + LoguruIntegration(), + ], + ) + + logger.debug("I am ignored") + logger.info("I am a breadcrumb") + logger.error("I am an event", extra=dict(bar=43)) + logger.exception("An exception happened") + ``` + + - An error event with the message `"I am an event"` will be created. + - `"I am a breadcrumb"` will be attached as a breadcrumb to that event. + - `bar` will end up in the `extra` attributes of that event. + - `"An exception happened"` will send the current exception from `sys.exc_info()` with the stack trace to Sentry. If there's no exception, the current stack will be attached. + - The debug message `"I am ignored"` will not be captured by Sentry. To capture it, set `level` to `DEBUG` or lower in `LoguruIntegration`. + +- Do not truncate request body if `request_bodies` is `"always"` (#2092) by @sentrivana +- Fixed Celery headers for Beat auto-instrumentation (#2102) by @antonpirker +- Add `db.operation` to Redis and MongoDB spans (#2089) by @antonpirker +- Make sure we're importing `redis` the library (#2106) by @sentrivana +- Add `include_source_context` option (#2020) by @farhat-nawaz and @sentrivana +- Import `Markup` from `markupsafe` (#2047) by @rco-ableton +- Fix `__qualname__` missing attribute in asyncio integration (#2105) by @sl0thentr0py +- Remove relay extension from AWS Layer (#2068) by @sl0thentr0py +- Add a note about `pip freeze` to the bug template (#2103) by @sentrivana + +## 1.22.2 + +### Various fixes & improvements + +- Fix: Django caching spans when using keyword arguments (#2086) by @antonpirker +- Fix: Duration in Celery Beat tasks monitoring (#2087) by @antonpirker +- Fix: Docstrings of SPANDATA (#2084) by @antonpirker + +## 1.22.1 + +### Various fixes & improvements + +- Fix: Handle a list of keys (not just a single key) in Django cache spans (#2082) by @antonpirker + +## 1.22.0 + +### Various fixes & improvements + +- Add `cache.hit` and `cache.item_size` to Django (#2057) by @antonpirker + + _Note:_ This will add spans for all requests to the caches configured in Django. This will probably add some overhead to your server and also add multiple spans to your performance waterfall diagrams.
If you do not want this, you can disable this feature in the DjangoIntegration: + + ```python + sentry_sdk.init( + dsn="...", + integrations=[ + DjangoIntegration(cache_spans=False), + ] + ) + ``` + +- Use `http.method` instead of `method` (#2054) by @AbhiPrasad +- Handle non-int `exc.status_code` in Starlette (#2075) by @sentrivana +- Handle SQLAlchemy `engine.name` being bytes (#2074) by @sentrivana +- Fix `KeyError` in `capture_checkin` if SDK is not initialized (#2073) by @antonpirker +- Use `functools.wrap` for `ThreadingIntegration` patches to fix attributes (#2080) by @EpicWink +- Pin `urllib3` to <2.0.0 for now (#2069) by @sl0thentr0py + +## 1.21.1 + +### Various fixes & improvements + +- Do not send monitor_config when unset (#2058) by @evanpurkhiser +- Add `db.system` span data (#2040, #2042) by @antonpirker +- Fix memory leak in profiling (#2049) by @Zylphrex +- Fix crash loop when returning none in before_send (#2045) by @sentrivana + +## 1.21.0 + +### Various fixes & improvements + +- Better handling of redis span/breadcrumb data (#2033) by @antonpirker + + _Note:_ With this release we will limit the description of redis db spans and the data in breadcrumbs representing redis db operations to 1024 characters. + + This can lead to truncated data. If you do not want this, there is a new parameter `max_data_size` in `RedisIntegration`. You can set this to `None` to disable trimming. + + Example for **disabling** trimming of redis commands in spans or breadcrumbs: + + ```python + sentry_sdk.init( + integrations=[ + RedisIntegration(max_data_size=None), + ] + ) + ``` + + Example for custom trim size of redis commands in spans or breadcrumbs: + + ```python + sentry_sdk.init( + integrations=[ + RedisIntegration(max_data_size=50), + ] + ) + ``` + +- Add `db.system` to redis and SQLAlchemy db spans (#2037, #2038, #2039) (#2037) by @AbhiPrasad +- Upgraded linting tooling (#2026) by @antonpirker +- Made code more resilient. (#2031) by @antonpirker + +## 1.20.0 + +### Various fixes & improvements + +- Send all events to /envelope endpoint when tracing is enabled (#2009) by @antonpirker + + _Note:_ If you’re self-hosting Sentry 9, you need to stay on the previous version of the SDK or update your self-hosted Sentry to at least 20.6.0 + +- Profiling: Remove profile context from SDK (#2013) by @Zylphrex +- Profiling: Additional performance improvements to the profiler (#1991) by @Zylphrex +- Fix: Celery Beat monitoring without restarting the Beat process (#2001) by @antonpirker +- Fix: Using the Codecov uploader instead of deprecated python package (#2011) by @antonpirker +- Fix: Support for Quart (#2003) by @antonpirker + +## 1.19.1 + +### Various fixes & improvements + +- Make auto monitoring beat update support Celery 4 and 5 (#1989) by @antonpirker + +## 1.19.0 + +### Various fixes & improvements + +- **New:** [Celery Beat](https://docs.celeryq.dev/en/stable/userguide/periodic-tasks.html) auto monitoring (#1967) by @antonpirker + + The CeleryIntegration can now also monitor your Celery Beat scheduled tasks automatically using the new [Crons](https://blog.sentry.io/2023/01/04/cron-job-monitoring-beta-because-scheduled-jobs-fail-too/) feature of Sentry. + + To learn more see our [Celery Beat Auto Discovery](https://docs.sentry.io/platforms/python/guides/celery/crons/) documentation.
+ + Usage: + + ```python + from celery import Celery, signals + from celery.schedules import crontab + + import sentry_sdk + from sentry_sdk.integrations.celery import CeleryIntegration + + + app = Celery('tasks', broker='...') + app.conf.beat_schedule = { + 'set-in-beat-schedule': { + 'task': 'tasks.some_important_task', + 'schedule': crontab(...), + }, + } + + + @signals.celeryd_init.connect + def init_sentry(**kwargs): + sentry_sdk.init( + dsn='...', + integrations=[CeleryIntegration(monitor_beat_tasks=True)], # 👈 here + environment="local.dev.grace", + release="v1.0", + ) + ``` + + This will auto-detect all scheduled tasks in your `beat_schedule` and monitor them with Sentry [Crons](https://blog.sentry.io/2023/01/04/cron-job-monitoring-beta-because-scheduled-jobs-fail-too/). + +- **New:** [gRPC](https://grpc.io/) integration (#1911) by @hossein-raeisi + + The [gRPC](https://grpc.io/) integration instruments all incoming requests and outgoing unary-unary, unary-stream grpc requests using grpcio channels. + + To learn more see our [gRPC Integration](https://docs.sentry.io/platforms/python/configuration/integrations/grpc/) documentation. + + On the server: + + ```python + import grpc + from sentry_sdk.integrations.grpc.server import ServerInterceptor + + + server = grpc.server( + thread_pool=..., + interceptors=[ServerInterceptor()], + ) + ``` + + On the client: + + ```python + import grpc + from sentry_sdk.integrations.grpc.client import ClientInterceptor + + + with grpc.insecure_channel("example.com:12345") as channel: + channel = grpc.intercept_channel(channel, *[ClientInterceptor()]) + + ``` + +- **New:** socket integration (#1911) by @hossein-raeisi + + Use this integration to create spans for DNS resolves (`socket.getaddrinfo()`) and connection creations (`socket.create_connection()`). + + To learn more see our [Socket Integration](https://docs.sentry.io/platforms/python/configuration/integrations/socket/) documentation. + + Usage: + + ```python + import sentry_sdk + from sentry_sdk.integrations.socket import SocketIntegration + sentry_sdk.init( + dsn="___PUBLIC_DSN___", + integrations=[ + SocketIntegration(), + ], + ) + ``` + +- Fix: Do not trim span descriptions. (#1983) by @antonpirker + +## 1.18.0 + +### Various fixes & improvements + +- **New:** Implement `EventScrubber` (#1943) by @sl0thentr0py + + To learn more see our [Scrubbing Sensitive Data](https://docs.sentry.io/platforms/python/data-management/sensitive-data/#event-scrubber) documentation. + + Add a new `EventScrubber` class that scrubs certain potentially sensitive interfaces with a `DEFAULT_DENYLIST`. The default scrubber is automatically run if `send_default_pii = False`: + + ```python + import sentry_sdk + from sentry_sdk.scrubber import EventScrubber + sentry_sdk.init( + # ... + send_default_pii=False, + event_scrubber=EventScrubber(), # this is set by default + ) + ``` + + You can also pass in a custom `denylist` to the `EventScrubber` class and filter additional fields that you want. + + ```python + from sentry_sdk.scrubber import EventScrubber, DEFAULT_DENYLIST + # custom denylist + denylist = DEFAULT_DENYLIST + ["my_sensitive_var"] + sentry_sdk.init( + # ... + send_default_pii=False, + event_scrubber=EventScrubber(denylist=denylist), + ) + ``` + +- **New:** Added new `functions_to_trace` option for central way of performance instrumentation (#1960) by @antonpirker + + To learn more see our [Tracing Options](https://docs.sentry.io/platforms/python/configuration/options/#functions-to-trace) documentation.
+ + An optional list of functions that should be set up for performance monitoring. For each function in the list, a span will be created when the function is executed. + + ```python + functions_to_trace = [ + {"qualified_name": "tests.test_basics._hello_world_counter"}, + {"qualified_name": "time.sleep"}, + {"qualified_name": "collections.Counter.most_common"}, + ] + + sentry_sdk.init( + # ... + traces_sample_rate=1.0, + functions_to_trace=functions_to_trace, + ) + ``` + +- Updated denylist to include other widely used cookies/headers (#1972) by @antonpirker +- Forward all `sentry-` baggage items (#1970) by @cleptric +- Update OSS licensing (#1973) by @antonpirker +- Profiling: Handle non frame types in profiler (#1965) by @Zylphrex +- Tests: Bad arq dependency in tests (#1966) by @Zylphrex +- Better naming (#1962) by @antonpirker + +## 1.17.0 + +### Various fixes & improvements + +- **New:** Monitor Celery Beat tasks with Sentry [Cron Monitoring](https://docs.sentry.io/product/crons/). + + With this feature you can make sure that your Celery beat tasks run at the right time and see if they were successful or not. + + > **Warning** + > Cron Monitoring is currently in beta. Beta features are still in-progress and may have bugs. We recognize the irony. + > If you have any questions or feedback, please email us at crons-feedback@sentry.io, reach out via Discord (#cronjobs), or open an issue. + + Usage: + + ```python + # File: tasks.py + + from celery import Celery, signals + from celery.schedules import crontab + + import sentry_sdk + from sentry_sdk.crons import monitor + from sentry_sdk.integrations.celery import CeleryIntegration + + + # 1. Setup your Celery beat configuration + + app = Celery('mytasks', broker='redis://localhost:6379/0') + app.conf.beat_schedule = { + 'set-in-beat-schedule': { + 'task': 'tasks.tell_the_world', + 'schedule': crontab(hour='10', minute='15'), + 'args': ("in beat_schedule set", ), + }, + } + + + # 2. Initialize Sentry either in `celeryd_init` or `beat_init` signal. + + #@signals.celeryd_init.connect + @signals.beat_init.connect + def init_sentry(**kwargs): + sentry_sdk.init( + dsn='...', + integrations=[CeleryIntegration()], + environment="local.dev.grace", + release="v1.0.7-a1", + ) + + + # 3. Link your Celery task to a Sentry Cron Monitor + + @app.task + @monitor(monitor_slug='3b861d62-ff82-4aa0-9cd6-b2b6403bd0cf') + def tell_the_world(msg): + print(msg) + ``` + +- **New:** Add decorator for Sentry tracing (#1089) by @ynouri + + This allows you to use a decorator to set up custom performance instrumentation. + + To learn more see [Custom Instrumentation](https://docs.sentry.io/platforms/python/performance/instrumentation/custom-instrumentation/). + + Usage: Just add the new decorator to your function, and a span will be created for it: + + ```python + import sentry_sdk + + @sentry_sdk.trace + def my_complex_function(): + # do stuff + ... + ``` + +- Make Django signals tracing optional (#1929) by @antonpirker + + See the [Django Guide](https://docs.sentry.io/platforms/python/guides/django) to learn more. + +- Deprecated `with_locals` in favor of `include_local_variables` (#1924) by @antonpirker +- Added top level API to get current span (#1954) by @antonpirker +- Profiling: Add profiler options to init (#1947) by @Zylphrex +- Profiling: Set active thread id for quart (#1830) by @Zylphrex +- Fix: Update `get_json` function call for werkzeug 2.1.0+ (#1939) by @michielderoos +- Fix: Returning the task's result.
+- Fix: Rename MYPY to TYPE_CHECKING (#1934) by @untitaker
+- Fix: Fix type annotation for ignore_errors in sentry_sdk.init() (#1928) by @tiangolo
+- Tests: Start a real http server instead of mocking libs (#1938) by @antonpirker
+
+## 1.16.0
+
+### Various fixes & improvements
+
+- **New:** Add [arq](https://arq-docs.helpmanual.io/) Integration (#1872) by @Zhenay
+
+  This integration creates performance spans when arq jobs are enqueued and when they are run.
+  It also captures errors in jobs and links them to the performance spans.
+
+  Usage:
+
+  ```python
+  import asyncio
+
+  from httpx import AsyncClient
+  from arq import create_pool
+  from arq.connections import RedisSettings
+
+  import sentry_sdk
+  from sentry_sdk.integrations.arq import ArqIntegration
+  from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT
+
+  sentry_sdk.init(
+      dsn="...",
+      integrations=[ArqIntegration()],
+  )
+
+  async def download_content(ctx, url):
+      session: AsyncClient = ctx['session']
+      response = await session.get(url)
+      print(f'{url}: {response.text:.80}...')
+      return len(response.text)
+
+  async def startup(ctx):
+      ctx['session'] = AsyncClient()
+
+  async def shutdown(ctx):
+      await ctx['session'].aclose()
+
+  async def main():
+      with sentry_sdk.start_transaction(name="testing_arq_tasks", source=TRANSACTION_SOURCE_COMPONENT):
+          redis = await create_pool(RedisSettings())
+          for url in ('https://facebook.com', 'https://microsoft.com', 'https://github.com', 'asdf'):
+              await redis.enqueue_job('download_content', url)
+
+  class WorkerSettings:
+      functions = [download_content]
+      on_startup = startup
+      on_shutdown = shutdown
+
+  if __name__ == '__main__':
+      asyncio.run(main())
+  ```
+
+- Update of [Falcon](https://falconframework.org/) Integration (#1733) by @bartolootrit
+- Adding [Cloud Resource Context](https://docs.sentry.io/platforms/python/configuration/integrations/cloudresourcecontext/) integration (#1882) by @antonpirker
+- Profiling: Use the transaction timestamps to anchor the profile (#1898) by @Zylphrex
+- Profiling: Add debug logs to profiling (#1883) by @Zylphrex
+- Profiling: Start profiler thread lazily (#1903) by @Zylphrex
+- Fixed checks for structured http data (#1905) by @antonpirker
+- Make `set_measurement` public api and remove experimental status (#1909) by @sl0thentr0py
+- Add `trace_propagation_targets` option (#1916) by @antonpirker (see the sketch after this list)
+- Add `enable_tracing` to default traces_sample_rate to 1.0 (#1900) by @sl0thentr0py
+- Remove deprecated `tracestate` (#1907) by @sl0thentr0py
+- Sanitize URLs in Span description and breadcrumbs (#1876) by @antonpirker
+- Mechanism should default to true unless set explicitly (#1889) by @sl0thentr0py
+- Better setting of in-app in stack frames (#1894) by @antonpirker
+- Add workflow to test gevent (#1870) by @Zylphrex
+- Updated outdated HTTPX test matrix (#1917) by @antonpirker
+- Switch to MIT license (#1908) by @cleptric
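+
+  For the new `trace_propagation_targets` option, here is a minimal, hypothetical configuration sketch (the hostnames are placeholders, not part of this release):
+
+  ```python
+  import sentry_sdk
+
+  sentry_sdk.init(
+      dsn="...",
+      traces_sample_rate=1.0,
+      # Tracing headers (`sentry-trace`, `baggage`) are only attached to
+      # outgoing HTTP requests whose URL matches one of these entries.
+      trace_propagation_targets=[
+          "api.my-company.example.com",
+          r"^https://internal\.example\.com/",
+      ],
+  )
+  ```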
+
+## 1.15.0
+
+### Various fixes & improvements
+
+- New: Add [Huey](https://huey.readthedocs.io/en/latest/) Integration (#1555) by @Zhenay
+
+  This integration creates performance spans when Huey tasks are enqueued and when they are executed.
+
+  Usage:
+
+  Task definition in `demo.py`:
+
+  ```python
+  from huey import SqliteHuey, crontab
+
+  import sentry_sdk
+  from sentry_sdk.integrations.huey import HueyIntegration
+
+  sentry_sdk.init(
+      dsn="...",
+      integrations=[
+          HueyIntegration(),
+      ],
+      traces_sample_rate=1.0,
+  )
+
+  huey = SqliteHuey(filename='/tmp/demo.db')
+
+  @huey.task()
+  def add_numbers(a, b):
+      return a + b
+  ```
+
+  Running the tasks in `run.py`:
+
+  ```python
+  from demo import add_numbers
+
+  import sentry_sdk
+  from sentry_sdk.integrations.huey import HueyIntegration
+  from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT
+
+
+  def main():
+      sentry_sdk.init(
+          dsn="...",
+          integrations=[
+              HueyIntegration(),
+          ],
+          traces_sample_rate=1.0,
+      )
+
+      with sentry_sdk.start_transaction(name="testing_huey_tasks", source=TRANSACTION_SOURCE_COMPONENT):
+          r = add_numbers(1, 2)
+
+  if __name__ == "__main__":
+      main()
+  ```
+
+- Profiling: Do not send single sample profiles (#1879) by @Zylphrex
+- Profiling: Add additional test coverage for profiler (#1877) by @Zylphrex
+- Profiling: Always use builtin time.sleep (#1869) by @Zylphrex
+- Profiling: Default in_app decision to None (#1855) by @Zylphrex
+- Profiling: Remove use of threading.Event (#1864) by @Zylphrex
+- Profiling: Enable profiling on all transactions (#1797) by @Zylphrex
+- FastAPI: Fix check for Starlette in FastAPI integration (#1868) by @antonpirker
+- Flask: Do not overwrite default for username with email address in FlaskIntegration (#1873) by @homeworkprod
+- Tests: Add py3.11 to test-common (#1871) by @Zylphrex
+- Fix: Don't log whole event in before_send / event_processor drops (#1863) by @sl0thentr0py
+
+## 1.14.0
+
+### Various fixes & improvements
+
+- Add `before_send_transaction` (#1840) by @antonpirker
+
+  Adds a hook (similar to `before_send`) that is called for all transaction events (performance related data).
+
+  Usage:
+
+  ```python
+  import sentry_sdk
+
+  def strip_sensitive_data(event, hint):
+      # modify event here (or return `None` if you want to drop the event entirely)
+      return event
+
+  sentry_sdk.init(
+      # ...
+      before_send_transaction=strip_sensitive_data,
+  )
+  ```
+
+  See also: https://docs.sentry.io/platforms/python/configuration/filtering/#using-platformidentifier-namebefore-send-transaction-
+
+- Django: Always remove values of Django session related cookies. (#1842) by @antonpirker
+- Profiling: Enable profiling for ASGI frameworks (#1824) by @Zylphrex
+- Profiling: Better gevent support (#1822) by @Zylphrex
+- Profiling: Add profile context to transaction (#1860) by @Zylphrex
+- Profiling: Use co_qualname in python 3.11 (#1831) by @Zylphrex
+- OpenTelemetry: fix Use dict for sentry-trace context instead of tuple (#1847) by @AbhiPrasad
+- OpenTelemetry: fix extra dependency (#1825) by @bernardotorres
+- OpenTelemetry: fix NoOpSpan updates scope (#1834) by @Zylphrex
+- OpenTelemetry: Make sure to noop when there is no DSN (#1852) by @antonpirker
+- FastAPI: Fix middleware being patched multiple times (#1841) by @JohnnyDeuss
+- Starlette: Avoid import of pkg_resource with Starlette integration (#1836) by @mgu
+- Removed code coverage target (#1862) by @antonpirker
+
+## 1.13.0
+
+### Various fixes & improvements
+
+- Add Starlite integration (#1748) by @gazorby
+
+  Adding support for the [Starlite](https://starlite-api.github.io/starlite/1.48/) framework. Unhandled errors are captured. Performance spans for Starlite middleware are also captured.
Thanks @gazorby for the great work!
+
+  Usage:
+
+  ```python
+  from starlite import Starlite, get
+
+  import sentry_sdk
+  from sentry_sdk.integrations.starlite import StarliteIntegration
+
+  sentry_sdk.init(
+      dsn="...",
+      traces_sample_rate=1.0,
+      integrations=[
+          StarliteIntegration(),
+      ],
+  )
+
+  @get("/")
+  def hello_world() -> dict[str, str]:
+      """Keeping the tradition alive with hello world."""
+      bla = 1/0  # causing an error
+      return {"hello": "world"}
+
+  app = Starlite(route_handlers=[hello_world])
+  ```
+
+- Profiling: Remove sample buffer from profiler (#1791) by @Zylphrex
+- Profiling: Performance tweaks to profile sampler (#1789) by @Zylphrex
+- Add span for Django SimpleTemplateResponse rendering (#1818) by @chdsbd
+- Use @wraps for Django Signal receivers (#1815) by @meanmail
+- Add enqueued_at and started_at to rq job extra (#1024) by @kruvasyan
+- Remove sanic v22 pin (#1819) by @sl0thentr0py
+- Add support for `bytearray` and `memoryview` built-in types (#1833) by @Tarty
+- Handle `"rc"` in SQLAlchemy version. (#1812) by @peterschutt
+- Doc: Use .venv (not .env) as a virtual env location in CONTRIBUTING.md (#1790) by @tonyo
+- Auto publish to internal pypi on release (#1823) by @asottile-sentry
+- Added Python 3.11 to test suite (#1795) by @antonpirker
+- Update test/linting dependencies (#1801) by @antonpirker
+- Deps: bump sphinx from 5.2.3 to 5.3.0 (#1686) by @dependabot
+
+## 1.12.1
+
+### Various fixes & improvements
+
+- Link errors to OTel spans (#1787) by @antonpirker
+
+## 1.12.0
+
+### Basic OTel support
+
+This adds support to automatically integrate OpenTelemetry performance tracing with Sentry.
+
+See the documentation on how to set it up:
+https://docs.sentry.io/platforms/python/performance/instrumentation/opentelemetry/
+
+Give it a try and let us know if you have any feedback or problems with using it.
+
+By: @antonpirker (#1772, #1766, #1765)
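+
+A minimal setup sketch, following the documentation linked above (the DSN is a placeholder):
+
+```python
+import sentry_sdk
+from sentry_sdk.integrations.opentelemetry import SentryPropagator, SentrySpanProcessor
+
+from opentelemetry import trace
+from opentelemetry.propagate import set_global_textmap
+from opentelemetry.sdk.trace import TracerProvider
+
+sentry_sdk.init(
+    dsn="...",
+    traces_sample_rate=1.0,
+    # Let OpenTelemetry drive performance instrumentation.
+    instrumenter="otel",
+)
+
+provider = TracerProvider()
+# Forward finished OTel spans to Sentry.
+provider.add_span_processor(SentrySpanProcessor())
+trace.set_tracer_provider(provider)
+# Propagate trace information via Sentry's propagator.
+set_global_textmap(SentryPropagator())
+```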
+
+### Various fixes & improvements
+
+- Tox Cleanup (#1749) by @antonpirker
+- CI: Fix Github action checks (#1780) by @Zylphrex
+- Profiling: Introduce active thread id on scope (#1764) by @Zylphrex
+- Profiling: Eagerly hash stack for profiles (#1755) by @Zylphrex
+- Profiling: Resolve inherited method class names (#1756) by @Zylphrex
+
+## 1.11.1
+
+### Various fixes & improvements
+
+- Move set_transaction_name out of event processor in fastapi/starlette (#1751) by @sl0thentr0py
+- Expose proxy_headers as top level config and use in ProxyManager: https://docs.sentry.io/platforms/python/configuration/options/#proxy-headers (#1746) by @sl0thentr0py
+
+## 1.11.0
+
+### Various fixes & improvements
+
+- Fix signals problem on sentry.io (#1732) by @antonpirker
+- Fix reading FastAPI request body twice. (#1724) by @antonpirker
+- ref(profiling): Do not error if already setup (#1731) by @Zylphrex
+- ref(profiling): Use sleep scheduler by default (#1729) by @Zylphrex
+- feat(profiling): Extract more frame info (#1702) by @Zylphrex
+- Update actions/upload-artifact to v3.1.1 (#1718) by @mattgauntseo-sentry
+- Performance optimizations (#1725) by @antonpirker
+- feat(pymongo): add PyMongo integration (#1590) by @Agalin
+- Move relay to port 5333 to avoid collisions (#1716) by @sl0thentr0py
+- fix(utils): strip_string() checks text length counting bytes not chars (#1711) by @mgaligniana
+- chore: remove jira workflow (#1707) by @vladanpaunovic
+- build(deps): bump checkouts/data-schemas from `a214fbc` to `20ff3b9` (#1703) by @dependabot
+- perf(profiling): Tune the sample profile generation code for performance (#1694) by @Zylphrex
+
+## 1.10.1
+
+### Various fixes & improvements
+
+- Bug fixes for FastAPI and Sentry SDK 1.10.0 (#1699) by @antonpirker
+- The wrapped receive() did not return anything. (#1698) by @antonpirker
+
+## 1.10.0
+
+### Various fixes & improvements
+
+- Unified naming for span ops (#1661) by @antonpirker
+
+  We have unified the strings of our span operations. See https://develop.sentry.dev/sdk/performance/span-operations/
+
+  **WARNING**: If you have Sentry Dashboards or Sentry Discover queries that use `transaction.op` in their fields, conditions, aggregates or columns, this change could potentially break your Dashboards/Discover setup.
+  Here is a list of the changes we made to the `op`s. Please adjust your dashboards and Discover queries accordingly:
+
+  | Old operation (`op`)     | New Operation (`op`)   |
+  | ------------------------ | ---------------------- |
+  | `asgi.server`            | `http.server`          |
+  | `aws.request`            | `http.client`          |
+  | `aws.request.stream`     | `http.client.stream`   |
+  | `celery.submit`          | `queue.submit.celery`  |
+  | `celery.task`            | `queue.task.celery`    |
+  | `django.middleware`      | `middleware.django`    |
+  | `django.signals`         | `event.django`         |
+  | `django.template.render` | `template.render`      |
+  | `django.view`            | `view.render`          |
+  | `http`                   | `http.client`          |
+  | `redis`                  | `db.redis`             |
+  | `rq.task`                | `queue.task.rq`        |
+  | `serverless.function`    | `function.aws`         |
+  | `serverless.function`    | `function.gcp`         |
+  | `starlette.middleware`   | `middleware.starlette` |
+
+- Include framework in SDK name (#1662) by @antonpirker
+- Asyncio integration (#1671) by @antonpirker
+- Add exception handling to Asyncio Integration (#1695) by @antonpirker
+- Fix asyncio task factory (#1689) by @antonpirker
+- Have instrumentation for ASGI middleware receive/send callbacks. (#1673) by @antonpirker
+- Use Django internal ASGI handling from Channels version 4.0.0. (#1688) by @antonpirker
+- fix(integrations): Fix http putrequest when url is None (#1693) by @MattFlower
+- build(deps): bump checkouts/data-schemas from `f0a57f2` to `a214fbc` (#1627) by @dependabot
+- build(deps): bump flake8-bugbear from 22.9.11 to 22.9.23 (#1637) by @dependabot
+- build(deps): bump sphinx from 5.1.1 to 5.2.3 (#1653) by @dependabot
+- build(deps): bump actions/stale from 5 to 6 (#1638) by @dependabot
+- build(deps): bump black from 22.8.0 to 22.10.0 (#1670) by @dependabot
+- Remove unused node setup from ci. (#1681) by @antonpirker
+- Check for Decimal in is_valid_sample_rate (#1672) by @Arvind2222
+- Add session for aiohttp integration (#1605) by @denys-pidlisnyi
+- feat(profiling): Extract qualified name for each frame (#1669) by @Zylphrex
+- feat(profiling): Attach thread metadata to profiles (#1660) by @Zylphrex
+- ref(profiling): Rename profiling frame keys (#1680) by @Zylphrex
+- fix(profiling): get_frame_name only look at arguments (#1684) by @Zylphrex
+- fix(profiling): Need to sample profile correctly (#1679) by @Zylphrex
+- fix(profiling): Race condition spawning multiple profiling threads (#1676) by @Zylphrex
+- tests(profiling): Add basic profiling tests (#1677) by @Zylphrex
+- tests(profiling): Add tests for thread schedulers (#1683) by @Zylphrex
+
+## 1.9.10
+
+### Various fixes & improvements
+
+- Use content-length header in ASGI instead of reading request body (#1646, #1631, #1595, #1573) (#1649) by @antonpirker
+- Added newer Celery versions to test suite (#1655) by @antonpirker
+- Django 4.x support (#1632) by @antonpirker
+- Cancel old CI runs when a new one is started. (#1651) by @antonpirker
+- Increase max string size for desc (#1647) by @k-fish
+- Pin Sanic version for CI (#1650) by @antonpirker
+- Fix for partial signals in old Django and old Python versions. (#1641) by @antonpirker
+- Convert profile output to the sample format (#1611) by @phacops
+- Dynamically adjust profiler sleep time (#1634) by @Zylphrex
+
+## 1.9.9
+
+### Django update (ongoing)
+
+- Instrument Django Signals so they show up in the "Performance" view (#1526) by @BeryJu
+- Include other Django enhancements brought up by the community
+
+### Various fixes & improvements
+
+- fix(profiling): Profiler mode type hints (#1633) by @Zylphrex
+- New ASGIMiddleware tests (#1600) by @antonpirker
+- build(deps): bump mypy from 0.961 to 0.971 (#1517) by @dependabot
+- build(deps): bump black from 22.3.0 to 22.8.0 (#1596) by @dependabot
+- build(deps): bump sphinx from 5.0.2 to 5.1.1 (#1524) by @dependabot
+- ref: upgrade linters to flake8 5.x (#1610) by @asottile-sentry
+- feat(profiling): Introduce different profiler schedulers (#1616) by @Zylphrex
+- fix(profiling): Check transaction sampled status before profiling (#1624) by @Zylphrex
+- Wrap Baggage ser/deser in capture_internal_exceptions (#1630) by @sl0thentr0py
+- Faster Tests (DjangoCon) (#1602) by @antonpirker
+- feat(profiling): Add support for profiles_sample_rate (#1613) by @Zylphrex
+- feat(profiling): Support for multithreaded profiles (#1570) by @Zylphrex
+
+## 1.9.8
+
+### Various fixes & improvements
+
+- Baggage creation for head of trace (#1589) by @sl0thentr0py
+  - The SDK now also generates new baggage entries for dynamic sampling when it is the first (head) SDK in the pipeline.
+
+## 1.9.7
+
+### Various fixes & improvements
+
+- Let SentryAsgiMiddleware work with Starlette and FastAPI integrations (#1594) by @antonpirker
+
+**Note:** The last version, 1.9.6, introduced a breaking change: projects that used Starlette or FastAPI
+and had manually set up `SentryAsgiMiddleware` could not start. This version fixes that behaviour.
+With this version, if you have a manual `SentryAsgiMiddleware` setup and are using Starlette or FastAPI,
+everything just works out of the box.
+
+Sorry for any inconvenience the last version might have brought to you.
+
+We can do better, and in the future we will do our best to not break your code again.
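+
+A minimal sketch of the setup this release fixes (assuming a basic Starlette app; the DSN and routes are placeholders):
+
+```python
+from starlette.applications import Starlette
+
+import sentry_sdk
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+
+sentry_sdk.init(dsn="...")
+
+app = Starlette(routes=[...])
+
+# Manually wrapping the app like this broke in 1.9.6 when the Starlette or
+# FastAPI integration was active; as of 1.9.7 it works out of the box again.
+app = SentryAsgiMiddleware(app)
+```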
+
+## 1.9.6
+
+### Various fixes & improvements
+
+- Auto-enable Starlette and FastAPI (#1533) by @antonpirker
+- Add more version constraints (#1574) by @isra17
+- Fix typo in starlette attribute check (#1566) by @sl0thentr0py
+
+## 1.9.5
+
+### Various fixes & improvements
+
+- fix(redis): import redis pipeline using full path (#1565) by @olksdr
+- Fix side effects for parallel tests (#1554) by @sl0thentr0py
+
+## 1.9.4
+
+### Various fixes & improvements
+
+- Remove TRANSACTION_SOURCE_UNKNOWN and default to CUSTOM (#1558) by @sl0thentr0py
+- feat(redis): Add instrumentation for redis pipeline (#1543) by @jjbayer
+- Handle no release when uploading profiles (#1548) by @szokeasaurusrex
+
+## 1.9.3
+
+### Various fixes & improvements
+
+- Wrap StarletteRequestExtractor in capture_internal_exceptions (#1551) by @sl0thentr0py
+
+## 1.9.2
+
+### Various fixes & improvements
+
+- chore: remove quotes (#1545) by @vladanpaunovic
+
+## 1.9.1
+
+### Various fixes & improvements
+
+- Fix FastAPI issues (#1532) (#1514) by @antonpirker
+- Add deprecation warning for 3.4, 3.5 (#1541) by @sl0thentr0py
+- Fast tests (#1504) by @antonpirker
+- Replace Travis CI badge with GitHub Actions badge (#1538) by @153957
+- chore(deps): update urllib3 minimum version with environment markers (#1312) by @miketheman
+- Update Flask and Quart integrations (#1520) by @pgjones
+- chore: Remove ancient examples from tracing prototype (#1528) by @sl0thentr0py
+- fix(django): Send correct "url" transaction source if Django resolver fails to resolve (#1525) by @sl0thentr0py
+
+## 1.9.0
+
+### Various fixes & improvements
+
+- feat(profiler): Add experimental profiler under experiments.enable_profiling (#1481) by @szokeasaurusrex
+- Fixed problem with broken response and python-multipart (#1516) by @antonpirker
+
+## 1.8.0
+
+### Various fixes & improvements
+
+- feat(starlette): add Starlette integration (#1441) by @sl0thentr0py
+
+  **Important:** Remove manual usage of `SentryAsgiMiddleware`! This is now done by the Starlette integration.
+
+  Usage:
+
+  ```python
+  from starlette.applications import Starlette
+
+  import sentry_sdk
+  from sentry_sdk.integrations.starlette import StarletteIntegration
+
+  sentry_sdk.init(
+      dsn="...",
+      integrations=[StarletteIntegration()],
+  )
+
+  app = Starlette(debug=True, routes=[...])
+  ```
+
+- feat(fastapi): add FastAPI integration (#829) by @antonpirker
+
+  **Important:** Remove manual usage of `SentryAsgiMiddleware`! This is now done by the FastAPI integration.
+
+  Usage:
+
+  ```python
+  from fastapi import FastAPI
+
+  import sentry_sdk
+  from sentry_sdk.integrations.starlette import StarletteIntegration
+  from sentry_sdk.integrations.fastapi import FastApiIntegration
+
+  sentry_sdk.init(
+      dsn="...",
+      integrations=[StarletteIntegration(), FastApiIntegration()],
+  )
+
+  app = FastAPI()
+  ```
+
+  Yes, you have to add both the `StarletteIntegration` **AND** the `FastApiIntegration`!
+
+- fix: avoid sending empty Baggage header (#1507) by @intgr
+- fix: properly freeze Baggage object (#1508) by @intgr
+- docs: fix simple typo, collecter | collector (#1505) by @timgates42
+
+## 1.7.2
+
+### Various fixes & improvements
+
+- feat(transactions): Transaction Source (#1490) by @antonpirker
+- Removed (unused) sentry_timestamp header (#1494) by @antonpirker
+
+## 1.7.1
+
+### Various fixes & improvements
+
+- Skip malformed baggage items (#1491) by @robyoung
+
+## 1.7.0
+
+### Various fixes & improvements
+
+- feat(tracing): Dynamic Sampling Context / Baggage continuation (#1485) by @sl0thentr0py
+
+  The SDK now propagates the [W3C Baggage Header](https://www.w3.org/TR/baggage/) from
+  incoming transactions to outgoing requests.
+  It also extracts Sentry-specific [sampling information](https://develop.sentry.dev/sdk/performance/dynamic-sampling-context/)
+  and adds it to the transaction headers to enable Dynamic Sampling in the product.
+
+## 1.6.0
+
+### Various fixes & improvements
+
+- Fix Deployment (#1474) by @antonpirker
+- Serverless V2 (#1450) by @antonpirker
+- Use logging levelno instead of levelname. Levelnames can be overridden (#1449) by @rrauenza
+
 ## 1.5.12

 ### Various fixes & improvements
diff --git a/CONTRIBUTING-aws-lambda.md b/CONTRIBUTING-aws-lambda.md
new file mode 100644
index 0000000000..7a6a158b45
--- /dev/null
+++ b/CONTRIBUTING-aws-lambda.md
@@ -0,0 +1,21 @@
+# Contributing to Sentry AWS Lambda Layer
+
+All the general terms of the [CONTRIBUTING.md](CONTRIBUTING.md) apply.
+
+## Development environment
+
+You need to have an AWS account and the AWS CLI installed and set up.
+
+We put together two helper scripts that can help you with development:
+
+- `./scripts/aws-deploy-local-layer.sh`
+
+  This script [scripts/aws-deploy-local-layer.sh](scripts/aws-deploy-local-layer.sh) will take the code you have checked out locally, create a Lambda layer out of it and deploy it to the `eu-central-1` region of your configured AWS account using the `aws` CLI.
+
+  The Lambda layer will have the name `SentryPythonServerlessSDK-local-dev`.
+
+- `./scripts/aws-attach-layer-to-lambda-function.sh`
+
+  You can use this script [scripts/aws-attach-layer-to-lambda-function.sh](scripts/aws-attach-layer-to-lambda-function.sh) to attach the Lambda layer you just deployed (using the first script) onto one of your existing Lambda functions. You will have to give the name of the Lambda function to attach onto as an argument. (See the script for details.)
+
+With these two helper scripts it should be easy to rapidly iterate your development on the Lambda layer.
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 48e9aacce2..05b642c502 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,60 +1,69 @@
 # Contributing to Sentry SDK for Python

-We welcome contributions to python-sentry by the community. See the [Contributing to Docs](https://docs.sentry.io/contributing/) page if you want to fix or update the documentation on the website.
+We welcome contributions to `sentry-python` by the community.

-## How to report a problem
+This file outlines the process to contribute to the SDK itself. For contributing to the documentation, please see the [Contributing to Docs](https://docs.sentry.io/contributing/) page.

-Please search the [issue tracker](https://github.com/getsentry/sentry-python/issues) before creating a new issue (a problem or an improvement request). Please also ask in our [Sentry Community on Discord](https://discord.com/invite/Ww9hbqr) before submitting a new issue.
There is a ton of great people in our Discord community ready to help you!
+## How to Report a Problem

-If you feel that you can fix or implement it yourself, please read a few paragraphs below to learn how to submit your changes.
+Please search the [issue tracker](https://github.com/getsentry/sentry-python/issues) before creating a new issue (a problem or an improvement request). Please also ask in our [Sentry Community on Discord](https://discord.com/invite/Ww9hbqr) before submitting a new issue. There are a ton of great people in our Discord community ready to help you!

-## Submitting changes
+## Submitting Changes

-- Setup the development environment.
-- Clone sentry-python and prepare necessary changes.
+- Fork the `sentry-python` repo and prepare your changes.
 - Add tests for your changes to `tests/`.
 - Run tests and make sure all of them pass.
-- Submit a pull request, referencing any issues it addresses.
+- Submit a pull request, referencing any issues your changes address. Please follow our [commit message format](https://develop.sentry.dev/commit-messages/#commit-message-format) when naming your pull request.

-We will review your pull request as soon as possible.
-Thank you for contributing!
+We will review your pull request as soon as possible. Thank you for contributing!

-## Development environment
+## Development Environment

-### Clone the repo:
+### Set up Python

-```bash
-git clone git@github.com:getsentry/sentry-python.git
-```
+Make sure that you have Python 3 installed. Version 3.7 or higher is required to run style checkers on pre-commit.
+
+On macOS, we recommend using `brew` to install Python. For Windows, we recommend an official [python.org](https://www.python.org/downloads/) release.
+
+### Fork and Clone the Repo
+
+Before you can contribute, you will need to [fork the `sentry-python` repository](https://github.com/getsentry/sentry-python/fork). Then, clone the forked repository to your local development environment.

-Make sure that you have Python 3 installed. Version 3.7 or higher is required to run style checkers on pre-commit. On macOS, we recommend using brew to install Python. For Windows, we recommend an official python.org release.

-### Create a virtual environment:
+### Create a Virtual Environment
+
+To keep your Python development environment and packages separate from the ones
+used by your operating system, create a [virtual environment](https://docs.python.org/3/tutorial/venv.html):

 ```bash
 cd sentry-python

-python -m venv .env
+python -m venv .venv
+```
+
+Then, activate your virtual environment with the following command. You will need to repeat this step every time you wish to work on your changes for `sentry-python`.

-source .env/bin/activate
+```bash
+source .venv/bin/activate
 ```

-### Install `sentry-python` in editable mode
+### Install `sentry-python` in Editable Mode
+
+Install `sentry-python` in [editable mode](https://pip.pypa.io/en/latest/topics/local-project-installs/#editable-installs). This will make any changes you make to the SDK code locally immediately effective without you having to reinstall or copy anything.

 ```bash
 pip install -e .
 ```

-**Hint:** Sometimes you need a sample project to run your new changes to sentry-python. In this case install the sample project in the same virtualenv and you should be good to go because the ` pip install -e .` from above installed your local sentry-python in editable mode.
+**Hint:** Sometimes you need a sample project to run your new changes to `sentry-python`.
In this case install the sample project in the same virtualenv and you should be good to go.

-### Install coding style pre-commit hooks:
+### Install Coding Style Pre-commit Hooks

 This will make sure that your commits will have the correct coding style.

 ```bash
 cd sentry-python

-pip install -r linter-requirements.txt
+pip install -r devenv-requirements.txt

 pip install pre-commit
@@ -63,80 +72,84 @@ pre-commit install

 That's it. You should be ready to make changes, run tests, and make commits! If you experience any problems, please don't hesitate to ping us in our [Discord Community](https://discord.com/invite/Ww9hbqr).

-## Running tests
-
-We have a `Makefile` to help people get started with hacking on the SDK
-without having to know or understand the Python ecosystem.
-Run `make` or `make help` to list commands.
+## Running Tests

-So the simplest way to run tests is:
+You can run all tests with the following command:

 ```bash
-cd sentry-python
+pytest tests/
+```

-make test
+If you would like to run the tests for a specific integration, use a command similar to the one below:
+
+```bash
+pytest -rs tests/integrations/flask/  # Replace "flask" with the specific integration you wish to test
 ```

-This will use [Tox](https://tox.wiki/en/latest/) to run our whole test suite
-under Python 2.7 and Python 3.7.
+**Hint:** Tests of integrations need additional dependencies. The switch `-rs` will show you why tests were skipped and what dependencies you need to install for the tests to run. (You can also consult the [tox.ini](tox.ini) file to see what dependencies are installed for each integration.)

-Of course you can always run the underlying commands yourself, which is
-particularly useful when wanting to provide arguments to `pytest` to run
-specific tests:
+## Adding a New Integration

-```bash
-cd sentry-python
+1. Write the integration.
+
+   - Instrument all application instances by default. Prefer global signals/patches instead of configuring a specific instance. Don't make the user pass anything to your integration for anything to work. Aim for zero configuration.

-# create virtual environment
-python -m venv .env
+   - Everybody monkeypatches. That means:

-# activate virtual environment
-source .env/bin/activate
+     - Make sure to think about conflicts with other monkeypatches when monkeypatching.

-# install sentry-python
-pip install -e .
+     - You don't need to feel bad about it.

-# install requirements
-pip install -r test-requirements.txt
+   - Make sure your changes don't break end user contracts. The SDK should never alter the expected behavior of the underlying library or framework from the user's perspective and it shouldn't have any side effects.

-# run tests
-pytest tests/
-```
+   - Avoid modifying the hub, registering a new client or the like. The user drives the client, and the client owns integrations.

-If you want to run the tests for a specific integration you should do so by doing this:
+   - Allow the user to turn off the integration by changing the client. Check `Hub.current.get_integration(MyIntegration)` from within your signal handlers to see if your integration is still active before you do anything impactful (such as sending an event). (See the sketch after this checklist.)

-```bash
-pytest -rs tests/integrations/flask/
-```
+2. Write tests.
+
+   - Consider the minimum versions supported, and test each version in a separate env in `tox.ini`.
+
+   - Create a new folder in `tests/integrations/`, with an `__init__` file that skips the entire suite if the package is not installed.
+
+3. Update package metadata.
+
+   - We use `extras_require` in `setup.py` to communicate minimum version requirements for integrations. People can use this in combination with tools like Poetry or Pipenv to detect conflicts between our supported versions and their used versions programmatically.
+
+     Do not set upper bounds on version requirements as people are often faster in adopting new versions of a web framework than we are in adding them to the test matrix or our package metadata.
-**Hint:** Tests of integrations need additional dependencies. The switch `-rs` will show you why tests where skipped and what dependencies you need to install for the tests to run. (You can also consult the [tox.ini](tox.ini) file to see what dependencies are installed for each integration)
+4. Write the [docs](https://github.com/getsentry/sentry-docs). Follow the structure of [existing integration docs](https://docs.sentry.io/platforms/python/integrations/). And, please **make sure to add your integration to the table in `python/integrations/index.md`** (people often forget this step 🙂).
-## Releasing a new version
+5. Merge docs after new version has been released. The docs are built and deployed after each merge, so your changes should go live in a few minutes.
-(only relevant for Sentry employees)
+6. (optional, if possible) Update data in [`sdk_updates.py`](https://github.com/getsentry/sentry/blob/master/src/sentry/sdk_updates.py) to give users in-app suggestions to use your integration. This step will only apply to some integrations.
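+
+To illustrate the pattern from step 1, here is a minimal, hypothetical sketch of an integration (`MyIntegration` and the signal handler are made-up names, not part of the SDK):
+
+```python
+import sentry_sdk
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration
+
+
+class MyIntegration(Integration):
+    identifier = "my_integration"
+
+    @staticmethod
+    def setup_once():
+        # Patch/subscribe globally here; this runs at most once per process.
+        ...
+
+
+def my_signal_handler(payload):
+    # Bail out if the user did not enable (or has removed) the integration.
+    if Hub.current.get_integration(MyIntegration) is None:
+        return
+    sentry_sdk.capture_message("something interesting happened")
+```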
-Prerequisites:
+## Releasing a New Version

-- All the changes that should be release must be in `master` branch.
+_(only relevant for Sentry employees)_
+
+### Prerequisites
+
+- All the changes that should be released must be on the `master` branch.
 - Every commit should follow the [Commit Message Format](https://develop.sentry.dev/commit-messages/#commit-message-format) convention.
-- CHANGELOG.md is updated automatically. No human intervention necessary.
+- CHANGELOG.md is updated automatically. No human intervention is necessary, but you might want to consider polishing the changelog by hand to make it more user friendly by grouping related things together, adding small code snippets and links to docs, etc.

-Manual Process:
+### Manual Process

-- On GitHub in the `sentry-python` repository go to "Actions" select the "Release" workflow.
-- Click on "Run workflow" on the right side, make sure the `master` branch is selected.
-- Set "Version to release" input field. Here you decide if it is a major, minor or patch release. (See "Versioning Policy" below)
-- Click "Run Workflow"
+- On GitHub in the `sentry-python` repository, go to "Actions" and select the "Release" workflow.
+- Click on "Run workflow" on the right side, and make sure the `master` branch is selected.
+- Set the "Version to release" input field. Here you decide if it is a major, minor or patch release. (See "Versioning Policy" below)
+- Click "Run Workflow".

-This will trigger [Craft](https://github.com/getsentry/craft) to prepare everything needed for a release. (For more information see [craft prepare](https://github.com/getsentry/craft#craft-prepare-preparing-a-new-release)) At the end of this process a release issue is created in the [Publish](https://github.com/getsentry/publish) repository. (Example release issue: https://github.com/getsentry/publish/issues/815)
+This will trigger [Craft](https://github.com/getsentry/craft) to prepare everything needed for a release.
(For more information, see [craft prepare](https://github.com/getsentry/craft#craft-prepare-preparing-a-new-release).) At the end of this process a release issue is created in the [Publish](https://github.com/getsentry/publish) repository. (Example release issue: https://github.com/getsentry/publish/issues/815) -Now one of the persons with release privileges (most probably your engineering manager) will review this Issue and then add the `accepted` label to the issue. +Now one of the persons with release privileges (most probably your engineering manager) will review this issue and then add the `accepted` label to the issue. There are always two persons involved in a release. -If you are in a hurry and the release should be out immediatly there is a Slack channel called `#proj-release-approval` where you can see your release issue and where you can ping people to please have a look immediatly. +If you are in a hurry and the release should be out immediately, there is a Slack channel called `#proj-release-approval` where you can see your release issue and where you can ping people to please have a look immediately. -When the release issue is labeled `accepted` [Craft](https://github.com/getsentry/craft) is triggered again to publish the release to all the right platforms. (See [craft publish](https://github.com/getsentry/craft#craft-publish-publishing-the-release) for more information). At the end of this process the release issue on GitHub will be closed and the release is completed! Congratulations! +When the release issue is labeled `accepted`, [Craft](https://github.com/getsentry/craft) is triggered again to publish the release to all the right platforms. (See [craft publish](https://github.com/getsentry/craft#craft-publish-publishing-the-release) for more information.) At the end of this process the release issue on GitHub will be closed and the release is completed! Congratulations! There is a sequence diagram visualizing all this in the [README.md](https://github.com/getsentry/publish) of the `Publish` repository. @@ -159,53 +172,3 @@ sentry-sdk==1.5.0 ``` A major release `N` implies the previous release `N-1` will no longer receive updates. We generally do not backport bugfixes to older versions unless they are security relevant. However, feel free to ask for backports of specific commits on the bugtracker. - -## Adding a new integration (checklist) - -1. Write the integration. - - - Instrument all application instances by default. Prefer global signals/patches instead of configuring a specific instance. Don't make the user pass anything to your integration for anything to work. Aim for zero configuration. - - - Everybody monkeypatches. That means: - - - Make sure to think about conflicts with other monkeypatches when monkeypatching. - - - You don't need to feel bad about it. - - - Avoid modifying the hub, registering a new client or the like. The user drives the client, and the client owns integrations. - - - Allow the user to disable the integration by changing the client. Check `Hub.current.get_integration(MyIntegration)` from within your signal handlers to see if your integration is still active before you do anything impactful (such as sending an event). - -2. Write tests. - - - Think about the minimum versions supported, and test each version in a separate env in `tox.ini`. - - - Create a new folder in `tests/integrations/`, with an `__init__` file that skips the entire suite if the package is not installed. - -3. Update package metadata. 
- - - We use `extras_require` in `setup.py` to communicate minimum version requirements for integrations. People can use this in combination with tools like Poetry or Pipenv to detect conflicts between our supported versions and their used versions programmatically. - - Do not set upper-bounds on version requirements as people are often faster in adopting new versions of a web framework than we are in adding them to the test matrix or our package metadata. - -4. Write the [docs](https://github.com/getsentry/sentry-docs). Answer the following questions: - - - What does your integration do? Split in two sections: Executive summary at top and exact behavior further down. - - - Which version of the SDK supports which versions of the modules it hooks into? - - - One code example with basic setup. - - - Make sure to add integration page to `python/index.md` (people forget to do that all the time). - -Tip: Put most relevant parts wrapped in `..` tags for usage from within the Sentry UI. - -5. Merge docs after new version has been released (auto-deploys on merge). - -6. (optional) Update data in [`sdk_updates.py`](https://github.com/getsentry/sentry/blob/master/src/sentry/sdk_updates.py) to give users in-app suggestions to use your integration. May not be applicable or doable for all kinds of integrations. - -## Commit message format guidelines - -See the documentation on commit messages here: - -https://develop.sentry.dev/commit-messages/#commit-message-format diff --git a/LICENSE b/LICENSE index 61555f192e..016323bd8d 100644 --- a/LICENSE +++ b/LICENSE @@ -1,9 +1,21 @@ -Copyright (c) 2018 Sentry (https://sentry.io) and individual contributors. -All rights reserved. +MIT License -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: +Copyright (c) 2018 Functional Software, Inc. dba Sentry -* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. -* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Makefile b/Makefile index 577dd58740..ac0ef51f5f 100644 --- a/Makefile +++ b/Makefile @@ -9,7 +9,7 @@ help: @echo "make test: Run basic tests (not testing most integrations)" @echo "make test-all: Run ALL tests (slow, closest to CI)" @echo "make format: Run code formatters (destructive)" - @echo "make aws-lambda-layer-build: Build serverless ZIP dist package" + @echo "make aws-lambda-layer: Build AWS Lambda layer directory for serverless integration" @echo @echo "Also make sure to read ./CONTRIBUTING.md" @false @@ -19,9 +19,9 @@ help: $(VENV_PATH)/bin/pip install tox dist: .venv - rm -rf dist build + rm -rf dist dist-serverless build + $(VENV_PATH)/bin/pip install wheel setuptools $(VENV_PATH)/bin/python setup.py sdist bdist_wheel - .PHONY: dist format: .venv @@ -30,7 +30,7 @@ format: .venv .PHONY: format test: .venv - @$(VENV_PATH)/bin/tox -e py2.7,py3.7 + @$(VENV_PATH)/bin/tox -e py3.9 .PHONY: test test-all: .venv @@ -46,13 +46,13 @@ lint: .venv echo "Bad formatting? Run: make format"; \ echo "================================"; \ false) - .PHONY: lint apidocs: .venv @$(VENV_PATH)/bin/pip install --editable . @$(VENV_PATH)/bin/pip install -U -r ./docs-requirements.txt - @$(VENV_PATH)/bin/sphinx-build -W -b html docs/ docs/_build + rm -rf docs/_build + @$(VENV_PATH)/bin/sphinx-build -vv -W -b html docs/ docs/_build .PHONY: apidocs apidocs-hotfix: apidocs @@ -60,8 +60,7 @@ apidocs-hotfix: apidocs @$(VENV_PATH)/bin/ghp-import -pf docs/_build .PHONY: apidocs-hotfix -aws-lambda-layer-build: dist - $(VENV_PATH)/bin/pip install urllib3 - $(VENV_PATH)/bin/pip install certifi - $(VENV_PATH)/bin/python -m scripts.build_awslambda_layer -.PHONY: aws-lambda-layer-build +aws-lambda-layer: dist + $(VENV_PATH)/bin/pip install -r aws-lambda-layer-requirements.txt + $(VENV_PATH)/bin/python -m scripts.build_aws_lambda_layer +.PHONY: aws-lambda-layer diff --git a/README.md b/README.md index 4871fdb2f4..e9d661eee8 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ _Bad software is everywhere, and we're tired of it. Sentry is on a mission to he # Official Sentry SDK for Python -[![Build Status](https://travis-ci.com/getsentry/sentry-python.svg?branch=master)](https://travis-ci.com/getsentry/sentry-python) +[![Build Status](https://github.com/getsentry/sentry-python/actions/workflows/ci.yml/badge.svg)](https://github.com/getsentry/sentry-python/actions/workflows/ci.yml) [![PyPi page link -- version](https://img.shields.io/pypi/v/sentry-sdk.svg)](https://pypi.python.org/pypi/sentry-sdk) [![Discord](https://img.shields.io/discord/621778831602221064)](https://discord.gg/cWnMQeA) @@ -34,7 +34,6 @@ sentry_sdk.init( # Set traces_sample_rate to 1.0 to capture 100% # of transactions for performance monitoring. - # We recommend adjusting this value in production. traces_sample_rate=1.0, ) ``` @@ -48,37 +47,36 @@ capture_message("Hello World") # Will create an event in Sentry. 
raise ValueError() # Will also create an event in Sentry. ``` -- To learn more about how to use the SDK [refer to our docs](https://docs.sentry.io/platforms/python/) -- Are you coming from raven-python? [Use this migration guide](https://docs.sentry.io/platforms/python/migration/) -- To learn about internals use the [API Reference](https://getsentry.github.io/sentry-python/) +- To learn more about how to use the SDK [refer to our docs](https://docs.sentry.io/platforms/python/). +- Are you coming from `raven-python`? [Use this migration guide](https://docs.sentry.io/platforms/python/migration/). +- To learn about internals use the [API Reference](https://getsentry.github.io/sentry-python/). ## Integrations -(If you want to create a new integration have a look at the [Adding a new integration checklist](CONTRIBUTING.md#adding-a-new-integration-checklist).) - -- [Django](https://docs.sentry.io/platforms/python/guides/django/) -- [Flask](https://docs.sentry.io/platforms/python/guides/flask/) -- [Bottle](https://docs.sentry.io/platforms/python/guides/bottle/) -- [AWS Lambda](https://docs.sentry.io/platforms/python/guides/aws-lambda/) -- [Google Cloud Functions](https://docs.sentry.io/platforms/python/guides/gcp-functions/) -- [WSGI](https://docs.sentry.io/platforms/python/guides/wsgi/) -- [ASGI](https://docs.sentry.io/platforms/python/guides/asgi/) -- [AIOHTTP](https://docs.sentry.io/platforms/python/guides/aiohttp/) -- [RQ (Redis Queue)](https://docs.sentry.io/platforms/python/guides/rq/) -- [Celery](https://docs.sentry.io/platforms/python/guides/celery/) -- [Chalice](https://docs.sentry.io/platforms/python/guides/chalice/) -- [Falcon](https://docs.sentry.io/platforms/python/guides/falcon/) -- [Quart](https://docs.sentry.io/platforms/python/guides/quart/) -- [Sanic](https://docs.sentry.io/platforms/python/guides/sanic/) -- [Tornado](https://docs.sentry.io/platforms/python/guides/tornado/) -- [Tryton](https://docs.sentry.io/platforms/python/guides/tryton/) -- [Pyramid](https://docs.sentry.io/platforms/python/guides/pyramid/) -- [Logging](https://docs.sentry.io/platforms/python/guides/logging/) -- [Apache Airflow](https://docs.sentry.io/platforms/python/guides/airflow/) -- [Apache Beam](https://docs.sentry.io/platforms/python/guides/beam/) -- [Apache Spark](https://docs.sentry.io/platforms/python/guides/pyspark/) - -## Migrate From sentry-raven +(If you want to create a new integration, have a look at the [Adding a new integration checklist](https://github.com/getsentry/sentry-python/blob/master/CONTRIBUTING.md#adding-a-new-integration).) + +See [the documentation](https://docs.sentry.io/platforms/python/integrations/) for an up-to-date list of libraries and frameworks we support. 
Here are some examples: + +- [Django](https://docs.sentry.io/platforms/python/integrations/django/) +- [Flask](https://docs.sentry.io/platforms/python/integrations/flask/) +- [FastAPI](https://docs.sentry.io/platforms/python/integrations/fastapi/) +- [AIOHTTP](https://docs.sentry.io/platforms/python/integrations/aiohttp/) +- [SQLAlchemy](https://docs.sentry.io/platforms/python/integrations/sqlalchemy/) +- [asyncpg](https://docs.sentry.io/platforms/python/integrations/asyncpg/) +- [Redis](https://docs.sentry.io/platforms/python/integrations/redis/) +- [Celery](https://docs.sentry.io/platforms/python/integrations/celery/) +- [Apache Airflow](https://docs.sentry.io/platforms/python/integrations/airflow/) +- [Apache Spark](https://docs.sentry.io/platforms/python/integrations/pyspark/) +- [asyncio](https://docs.sentry.io/platforms/python/integrations/asyncio/) +- [Graphene](https://docs.sentry.io/platforms/python/integrations/graphene/) +- [Logging](https://docs.sentry.io/platforms/python/integrations/logging/) +- [Loguru](https://docs.sentry.io/platforms/python/integrations/loguru/) +- [HTTPX](https://docs.sentry.io/platforms/python/integrations/httpx/) +- [AWS Lambda](https://docs.sentry.io/platforms/python/integrations/aws-lambda/) +- [Google Cloud Functions](https://docs.sentry.io/platforms/python/integrations/gcp-functions/) + + +## Migrating From `raven-python` The old `raven-python` client has entered maintenance mode and was moved [here](https://github.com/getsentry/raven-python). @@ -88,7 +86,7 @@ If you're using `raven-python`, we recommend you to migrate to this new SDK. You Please refer to [CONTRIBUTING.md](CONTRIBUTING.md). -## Getting help/support +## Getting Help/Support If you need help setting up or configuring the Python SDK (or anything else in the Sentry universe) please head over to the [Sentry Community on Discord](https://discord.com/invite/Ww9hbqr). There is a ton of great people in our Discord community ready to help you! @@ -102,4 +100,4 @@ If you need help setting up or configuring the Python SDK (or anything else in t ## License -Licensed under the BSD license, see [`LICENSE`](LICENSE) +Licensed under the MIT license, see [`LICENSE`](LICENSE) diff --git a/aws-lambda-layer-requirements.txt b/aws-lambda-layer-requirements.txt new file mode 100644 index 0000000000..8986fdafc0 --- /dev/null +++ b/aws-lambda-layer-requirements.txt @@ -0,0 +1,7 @@ +certifi + +# In Lambda functions botocore is used, and botocore is not +# yet supporting urllib3 1.27.0 never mind 2+. 
+# So we pin this here to make our Lambda layer work with +# Lambda Function using Python 3.7+ +urllib3<1.27 diff --git a/checkouts/data-schemas b/checkouts/data-schemas index f0a57f23cf..1e17eb5472 160000 --- a/checkouts/data-schemas +++ b/checkouts/data-schemas @@ -1 +1 @@ -Subproject commit f0a57f23cf04d0b4b1e19e1398d9712b09759911 +Subproject commit 1e17eb54727a77681a1b9e845c9a5d55b52d35a1 diff --git a/codecov.yml b/codecov.yml index 1989f1cd03..6e4467b675 100644 --- a/codecov.yml +++ b/codecov.yml @@ -1,9 +1,13 @@ +comment: false coverage: status: project: - default: false - patch: - default: false - python: - target: 90% -comment: false + default: + target: auto # auto compares coverage to the previous base commit + threshold: 10% # this allows a 10% drop from the previous base commit coverage + informational: true +ignore: + - "tests" + - "sentry_sdk/_types.py" +github_checks: + annotations: false \ No newline at end of file diff --git a/devenv-requirements.txt b/devenv-requirements.txt new file mode 100644 index 0000000000..2b7abae3c2 --- /dev/null +++ b/devenv-requirements.txt @@ -0,0 +1,5 @@ +-r linter-requirements.txt +-r test-requirements.txt +mockupdb # required by `pymongo` tests that are enabled by `pymongo` from linter requirements +pytest<7.0.0 # https://github.com/pytest-dev/pytest/issues/9621; see tox.ini +pytest-asyncio<=0.21.1 # https://github.com/pytest-dev/pytest-asyncio/issues/706 diff --git a/docs-requirements.txt b/docs-requirements.txt index f80c689cbf..a4bb031506 100644 --- a/docs-requirements.txt +++ b/docs-requirements.txt @@ -1,4 +1,4 @@ -sphinx==4.5.0 -sphinx-rtd-theme +shibuya +sphinx==7.2.6 sphinx-autodoc-typehints[type_comments]>=1.8.0 typing-extensions diff --git a/docs/api.rst b/docs/api.rst index 01bef3ee12..f504bbb642 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -1,9 +1,53 @@ -======== -Main API -======== +============= +Top Level API +============= -.. inherited-members necessary because of hack for Client and init methods +This is the user facing API of the SDK. It's exposed as ``sentry_sdk``. +With this API you can implement a custom performance monitoring or error reporting solution. -.. automodule:: sentry_sdk - :members: - :inherited-members: + +Capturing Data +============== + +.. autofunction:: sentry_sdk.api.capture_event +.. autofunction:: sentry_sdk.api.capture_exception +.. autofunction:: sentry_sdk.api.capture_message + + +Enriching Events +================ + +.. autofunction:: sentry_sdk.api.add_breadcrumb +.. autofunction:: sentry_sdk.api.set_context +.. autofunction:: sentry_sdk.api.set_extra +.. autofunction:: sentry_sdk.api.set_level +.. autofunction:: sentry_sdk.api.set_tag +.. autofunction:: sentry_sdk.api.set_user + + +Performance Monitoring +====================== + +.. autofunction:: sentry_sdk.api.continue_trace +.. autofunction:: sentry_sdk.api.get_current_span +.. autofunction:: sentry_sdk.api.start_span +.. autofunction:: sentry_sdk.api.start_transaction + + +Distributed Tracing +=================== + +.. autofunction:: sentry_sdk.api.get_baggage +.. autofunction:: sentry_sdk.api.get_traceparent + + +Managing Scope (advanced) +========================= + +.. autofunction:: sentry_sdk.api.configure_scope +.. autofunction:: sentry_sdk.api.push_scope + + +.. Not documented (On purpose. Not sure if anyone should use those) +.. last_event_id() +.. 
flush() diff --git a/docs/apidocs.rst b/docs/apidocs.rst new file mode 100644 index 0000000000..855778484d --- /dev/null +++ b/docs/apidocs.rst @@ -0,0 +1,48 @@ +======== +API Docs +======== + +.. autoclass:: sentry_sdk.Hub + :members: + +.. autoclass:: sentry_sdk.Scope + :members: + +.. autoclass:: sentry_sdk.Client + :members: + +.. autoclass:: sentry_sdk.client._Client + :members: + +.. autoclass:: sentry_sdk.Transport + :members: + +.. autoclass:: sentry_sdk.HttpTransport + :members: + +.. autoclass:: sentry_sdk.tracing.Transaction + :members: + +.. autoclass:: sentry_sdk.tracing.Span + :members: + +.. autoclass:: sentry_sdk.profiler.Profile + :members: + +.. autoclass:: sentry_sdk.session.Session + :members: + +.. autoclass:: sentry_sdk.attachments.Attachment + :members: + +.. autoclass:: sentry_sdk.scrubber.EventScrubber + :members: + +.. autoclass:: sentry_sdk.monitor.Monitor + :members: + +.. autoclass:: sentry_sdk.envelope.Envelope + :members: + +.. autoclass:: sentry_sdk.envelope.Item + :members: diff --git a/docs/conf.py b/docs/conf.py index e6ceb8d4c9..3d54f9f8af 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -2,15 +2,16 @@ import os import sys - import typing +from datetime import datetime # prevent circular imports import sphinx.builders.html import sphinx.builders.latex import sphinx.builders.texinfo import sphinx.builders.text -import sphinx.ext.autodoc +import sphinx.ext.autodoc # noqa: F401 +import urllib3.exceptions # noqa: F401 typing.TYPE_CHECKING = True @@ -25,11 +26,11 @@ # -- Project information ----------------------------------------------------- -project = u"sentry-python" -copyright = u"2019, Sentry Team and Contributors" -author = u"Sentry Team and Contributors" +project = "sentry-python" +copyright = "2019-{}, Sentry Team and Contributors".format(datetime.now().year) +author = "Sentry Team and Contributors" -release = "1.5.12" +release = "1.45.1" version = ".".join(release.split(".")[:2]) # The short X.Y version. @@ -67,12 +68,12 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [u"_build", "Thumbs.db", ".DS_Store"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # The name of the Pygments (syntax highlighting) style to use. pygments_style = None @@ -86,13 +87,15 @@ on_rtd = os.environ.get("READTHEDOCS", None) == "True" -html_theme = "alabaster" +html_theme = "shibuya" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # -# html_theme_options = {} +html_theme_options = { + "github_url": "https://github.com/getsentry/sentry-python", +} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, @@ -140,8 +143,8 @@ ( master_doc, "sentry-python.tex", - u"sentry-python Documentation", - u"Sentry Team and Contributors", + "sentry-python Documentation", + "Sentry Team and Contributors", "manual", ) ] @@ -151,7 +154,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
-man_pages = [(master_doc, "sentry-python", u"sentry-python Documentation", [author], 1)] +man_pages = [(master_doc, "sentry-python", "sentry-python Documentation", [author], 1)] # -- Options for Texinfo output ---------------------------------------------- @@ -163,10 +166,10 @@ ( master_doc, "sentry-python", - u"sentry-python Documentation", + "sentry-python Documentation", author, "sentry-python", - "One line description of project.", + "The official Sentry SDK for Python.", "Miscellaneous", ) ] diff --git a/docs/index.rst b/docs/index.rst index ade1dc0da8..12668a2825 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -9,3 +9,4 @@ visit the `GitHub repository `_. .. toctree:: api integrations + apidocs diff --git a/docs/integrations.rst b/docs/integrations.rst index a04d99d660..fddf7d038a 100644 --- a/docs/integrations.rst +++ b/docs/integrations.rst @@ -2,6 +2,8 @@ Integrations ============ +TBD + Logging ======= diff --git a/examples/basic.py b/examples/basic.py deleted file mode 100644 index e6d928bbed..0000000000 --- a/examples/basic.py +++ /dev/null @@ -1,35 +0,0 @@ -import sentry_sdk -from sentry_sdk.integrations.excepthook import ExcepthookIntegration -from sentry_sdk.integrations.atexit import AtexitIntegration -from sentry_sdk.integrations.dedupe import DedupeIntegration -from sentry_sdk.integrations.stdlib import StdlibIntegration - - -sentry_sdk.init( - dsn="https://@sentry.io/", - default_integrations=False, - integrations=[ - ExcepthookIntegration(), - AtexitIntegration(), - DedupeIntegration(), - StdlibIntegration(), - ], - environment="Production", - release="1.0.0", - send_default_pii=False, - max_breadcrumbs=5, -) - -with sentry_sdk.push_scope() as scope: - scope.user = {"email": "john.doe@example.com"} - scope.set_tag("page_locale", "de-at") - scope.set_extra("request", {"id": "d5cf8a0fd85c494b9c6453c4fba8ab17"}) - scope.level = "warning" - sentry_sdk.capture_message("Something went wrong!") - -sentry_sdk.add_breadcrumb(category="auth", message="Authenticated user", level="info") - -try: - 1 / 0 -except Exception as e: - sentry_sdk.capture_exception(e) diff --git a/examples/tracing/README.md b/examples/tracing/README.md deleted file mode 100644 index ae7b79724a..0000000000 --- a/examples/tracing/README.md +++ /dev/null @@ -1,14 +0,0 @@ -To run this app: - -1. Have a Redis on the Redis default port (if you have Sentry running locally, - you probably already have this) -2. `pip install sentry-sdk flask rq` -3. `FLASK_APP=tracing flask run` -4. `FLASK_APP=tracing flask worker` -5. Go to `http://localhost:5000/` and enter a base64-encoded string (one is prefilled) -6. Hit submit, wait for heavy computation to end -7. `cat events | python traceviewer.py | dot -T svg > events.svg` -8. `open events.svg` - -The last two steps are for viewing the traces. Nothing gets sent to Sentry -right now because Sentry does not deal with this data yet. 
diff --git a/examples/tracing/events b/examples/tracing/events deleted file mode 100644 index 4e486f79a4..0000000000 --- a/examples/tracing/events +++ /dev/null @@ -1,10 +0,0 @@ -{"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "index", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "968cff94913ebb07"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "Upgrade-Insecure-Requests": "1", "Connection": "keep-alive", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "f9f4b21dd9da4c389426c1ffd2b62410", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "static", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "8eb30d5ae5f3403ba3a036e696111ec3", "span_id": "97e894108ff7a8cd"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": 
"3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/static/tracing.js", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache"}}, "event_id": "1c71c7cb32934550bb49f05b6c2d4052", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "index", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "b7627895a90b41718be82d3ad21ab2f4", "span_id": "9fa95b4ffdcbe177"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": 
"2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0", "Connection": "keep-alive", "Pragma": "no-cache", "Cache-Control": "no-cache"}}, "event_id": "1430ad5b0a0d45dca3f02c10271628f9", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:38Z", "transaction": "static", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"trace_id": "1636fdb33db84e7c9a4e606c1b176971", "span_id": "b682a29ead55075f"}}, "timestamp": "2019-06-14T14:01:38Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/static/tracing.js.map", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0", "Connection": "keep-alive"}}, "event_id": "72b1224307294e0fb6d6b1958076c4cc", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "compute", "server_name": "apfeltasche.local", "extra": 
{"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "946edde6ee421874"}}, "timestamp": "2019-06-14T14:01:40Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/compute/aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "c72fd945c1174140a00bdbf6f6ed8fc5", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "bf5be759039ede9a"}}, "timestamp": "2019-06-14T14:01:40Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", 
"pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "e8c17b0cbe2045758aaffc2f11672fab", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "b2d56249f7fdf327"}}, "timestamp": "2019-06-14T14:01:40Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": 
"40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "6577f8056383427d85df5b33bf9ccc2c", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:41Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "ac62ff8ae1b2eda6"}}, "timestamp": "2019-06-14T14:01:41Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "c03dfbab8a8145eeaa0d1a1adfcfcaa5", "platform": "python", "spans": [], 
"breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:40Z", "transaction": "tracing.decode_base64", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "worker"], "rq-job": {"kwargs": {"redis_key": "sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "encoded": "aGVsbG8gd29ybGQK"}, "args": [], "description": "tracing.decode_base64(encoded=u'aGVsbG8gd29ybGQK', redis_key='sentry-python-tracing-example-result:aGVsbG8gd29ybGQK')", "func": "tracing.decode_base64", "job_id": "fabff810-3dbb-45d3-987e-86395790dfa9"}}, "contexts": {"trace": {"parent_span_id": "946edde6ee421874", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "9c2a6db8c79068a2"}}, "timestamp": "2019-06-14T14:01:41Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "event_id": "2975518984734ef49d2f75db4e928ddc", "platform": "python", "spans": [{"start_timestamp": "2019-06-14T14:01:41Z", "same_process_as_parent": true, "description": "http://httpbin.org/base64/aGVsbG8gd29ybGQK GET", "tags": {"http.status_code": 200, "error": false}, "timestamp": "2019-06-14T14:01:41Z", "parent_span_id": "9c2a6db8c79068a2", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "op": "http", "data": {"url": "http://httpbin.org/base64/aGVsbG8gd29ybGQK", "status_code": 200, "reason": "OK", "method": "GET"}, "span_id": "8c931f4740435fb8"}], "breadcrumbs": [{"category": "httplib", "data": {"url": "http://httpbin.org/base64/aGVsbG8gd29ybGQK", "status_code": 200, "reason": "OK", "method": "GET"}, "type": "http", "timestamp": "2019-06-14T12:01:41Z"}, {"category": "rq.worker", "type": "log", "timestamp": "2019-06-14T14:01:41Z", "level": "info", "data": {"asctime": "14:01:41"}, "message": "\u001b[32mdefault\u001b[39;49;00m: \u001b[34mJob OK\u001b[39;49;00m (fabff810-3dbb-45d3-987e-86395790dfa9)", "type": "default"}, {"category": "rq.worker", "type": "log", "timestamp": "2019-06-14T14:01:41Z", "level": 
"info", "data": {"asctime": "14:01:41"}, "message": "Result is kept for 500 seconds", "type": "default"}], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} -{"start_timestamp": "2019-06-14T14:01:41Z", "transaction": "wait", "server_name": "apfeltasche.local", "extra": {"sys.argv": ["/Users/untitaker/projects/sentry-python/.venv/bin/flask", "run", "--reload"]}, "contexts": {"trace": {"parent_span_id": "bce14471e0e9654d", "trace_id": "a0fa8803753e40fd8124b21eeb2986b5", "span_id": "9d91c6558b2e4c06"}}, "timestamp": "2019-06-14T14:01:41Z", "modules": {"more-itertools": "5.0.0", "six": "1.12.0", "funcsigs": "1.0.2", "vine": "1.2.0", "tqdm": "4.31.1", "configparser": "3.7.4", "py-cpuinfo": "5.0.0", "pygments": "2.3.1", "attrs": "19.1.0", "pip": "19.0.3", "blinker": "1.4", "parso": "0.4.0", "django": "1.11.20", "click": "7.0", "requests-toolbelt": "0.9.1", "virtualenv": "16.4.3", "autoflake": "1.3", "tox": "3.7.0", "statistics": "1.0.3.5", "rq": "1.0", "flask": "1.0.2", "pkginfo": "1.5.0.1", "py": "1.8.0", "redis": "3.2.1", "celery": "4.2.1", "docutils": "0.14", "jedi": "0.13.3", "pytest": "4.4.1", "kombu": "4.4.0", "werkzeug": "0.14.1", "webencodings": "0.5.1", "toml": "0.10.0", "itsdangerous": "1.1.0", "certifi": "2019.3.9", "readme-renderer": "24.0", "wheel": "0.33.1", "pathlib2": "2.3.3", "python": "2.7.15", "urllib3": "1.24.1", "sentry-sdk": "0.9.0", "twine": "1.13.0", "pytest-benchmark": "3.2.2", "markupsafe": "1.1.1", "billiard": "3.5.0.5", "jinja2": "2.10", "coverage": "4.5.3", "bleach": "3.1.0", "pluggy": "0.9.0", "atomicwrites": "1.3.0", "filelock": "3.0.10", "pyflakes": "2.1.1", "pytz": "2018.9", "futures": "3.2.0", "pytest-cov": "2.7.1", "backports.functools-lru-cache": "1.5", "wsgiref": "0.1.2", "python-jsonrpc-server": "0.1.2", "python-language-server": "0.26.1", "future": "0.17.1", "chardet": "3.0.4", "amqp": "2.4.2", "setuptools": "40.8.0", "requests": "2.21.0", "idna": "2.8", "scandir": "1.10.0"}, "request": {"url": "http://127.0.0.1:5000/wait/sentry-python-tracing-example-result:aGVsbG8gd29ybGQK", "query_string": "", "method": "GET", "env": {"SERVER_NAME": "127.0.0.1", "SERVER_PORT": "5000"}, "headers": {"Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Host": "127.0.0.1:5000", "Accept": "*/*", "Sentry-Trace": "00-a0fa8803753e40fd8124b21eeb2986b5-bce14471e0e9654d-00", "Connection": "keep-alive", "Referer": "http://127.0.0.1:5000/", "Pragma": "no-cache", "Cache-Control": "no-cache", "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0"}}, "event_id": "339cfc84adf0405986514c808afb0f68", "platform": "python", "spans": [], "breadcrumbs": [], "type": "transaction", "sdk": {"version": "0.9.0", "name": "sentry.python", "packages": [{"version": "0.9.0", "name": "pypi:sentry-sdk"}], "integrations": ["argv", "atexit", "dedupe", "excepthook", "flask", "logging", "modules", "rq", "stdlib", "threading"]}} diff --git a/examples/tracing/events.svg b/examples/tracing/events.svg deleted file mode 100644 index 33f9c98f00..0000000000 --- a/examples/tracing/events.svg +++ /dev/null @@ -1,439 +0,0 @@ - - - - - - -mytrace - - - -213977312221895837199412816265326724789 - -trace:index (a0fa8803753e40fd8124b21eeb2986b5) - - - -10848326615985732359 - -span:index (968cff94913ebb07) - - - 
-213977312221895837199412816265326724789->10848326615985732359 - - - - - -10695730148961032308 - -span:compute (946edde6ee421874) - - - -213977312221895837199412816265326724789->10695730148961032308 - - - - - -13788869053623754394 - -span:wait (bf5be759039ede9a) - - - -213977312221895837199412816265326724789->13788869053623754394 - - - - - -12886313978623292199 - -span:wait (b2d56249f7fdf327) - - - -213977312221895837199412816265326724789->12886313978623292199 - - - - - -12421771694198418854 - -span:wait (ac62ff8ae1b2eda6) - - - -213977312221895837199412816265326724789->12421771694198418854 - - - - - -10129474377767673784 - -span:http://httpbin.org/base64/aGVsbG8gd29ybGQK GET (8c931f4740435fb8) - - - -213977312221895837199412816265326724789->10129474377767673784 - - - - - -11252927259328145570 - -span:tracing.decode_base64 (9c2a6db8c79068a2) - - - -213977312221895837199412816265326724789->11252927259328145570 - - - - - -11354074206287318022 - -span:wait (9d91c6558b2e4c06) - - - -213977312221895837199412816265326724789->11354074206287318022 - - - - - -189680067412161401408211119957991300803 - -trace:static (8eb30d5ae5f3403ba3a036e696111ec3) - - - -10946161693179750605 - -span:static (97e894108ff7a8cd) - - - -189680067412161401408211119957991300803->10946161693179750605 - - - - - -243760014067241244567037757667822711540 - -trace:index (b7627895a90b41718be82d3ad21ab2f4) - - - -11504827122213183863 - -span:index (9fa95b4ffdcbe177) - - - -243760014067241244567037757667822711540->11504827122213183863 - - - - - -29528545588201242414770090507008174449 - -trace:static (1636fdb33db84e7c9a4e606c1b176971) - - - -13151252664271832927 - -span:static (b682a29ead55075f) - - - -29528545588201242414770090507008174449->13151252664271832927 - - - - - -10695730148961032308->10848326615985732359 - - - - - -10695730148961032308->10946161693179750605 - - - - - -10695730148961032308->11504827122213183863 - - - - - -10695730148961032308->13151252664271832927 - - - - - -10695730148961032308->11252927259328145570 - - - - - -13610234804785734989 - -13610234804785734989 - - - -13610234804785734989->10695730148961032308 - - - - - -13610234804785734989->13788869053623754394 - - - - - -13610234804785734989->12886313978623292199 - - - - - -13610234804785734989->12421771694198418854 - - - - - -13610234804785734989->11354074206287318022 - - - - - -13788869053623754394->10848326615985732359 - - - - - -13788869053623754394->10946161693179750605 - - - - - -13788869053623754394->11504827122213183863 - - - - - -13788869053623754394->13151252664271832927 - - - - - -12886313978623292199->10848326615985732359 - - - - - -12886313978623292199->10946161693179750605 - - - - - -12886313978623292199->11504827122213183863 - - - - - -12886313978623292199->13151252664271832927 - - - - - -12421771694198418854->10848326615985732359 - - - - - -12421771694198418854->10946161693179750605 - - - - - -12421771694198418854->11504827122213183863 - - - - - -12421771694198418854->13151252664271832927 - - - - - -12421771694198418854->10695730148961032308 - - - - - -12421771694198418854->13788869053623754394 - - - - - -12421771694198418854->12886313978623292199 - - - - - -10129474377767673784->10848326615985732359 - - - - - -10129474377767673784->10946161693179750605 - - - - - -10129474377767673784->11504827122213183863 - - - - - -10129474377767673784->13151252664271832927 - - - - - -10129474377767673784->10695730148961032308 - - - - - -10129474377767673784->13788869053623754394 - - - - - -10129474377767673784->12886313978623292199 - - - - - 
-11252927259328145570->10848326615985732359 - - - - - -11252927259328145570->10946161693179750605 - - - - - -11252927259328145570->11504827122213183863 - - - - - -11252927259328145570->13151252664271832927 - - - - - -11252927259328145570->10129474377767673784 - - - - - -11354074206287318022->10848326615985732359 - - - - - -11354074206287318022->10946161693179750605 - - - - - -11354074206287318022->11504827122213183863 - - - - - -11354074206287318022->13151252664271832927 - - - - - -11354074206287318022->10695730148961032308 - - - - - -11354074206287318022->13788869053623754394 - - - - - -11354074206287318022->12886313978623292199 - - - - - diff --git a/examples/tracing/static/tracing.js b/examples/tracing/static/tracing.js deleted file mode 100644 index ad4dc9a822..0000000000 --- a/examples/tracing/static/tracing.js +++ /dev/null @@ -1,519 +0,0 @@ -(function (__window) { -var exports = {}; -Object.defineProperty(exports, '__esModule', { value: true }); - -/*! ***************************************************************************** -Copyright (c) Microsoft Corporation. All rights reserved. -Licensed under the Apache License, Version 2.0 (the "License"); you may not use -this file except in compliance with the License. You may obtain a copy of the -License at http://www.apache.org/licenses/LICENSE-2.0 - -THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED -WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, -MERCHANTABLITY OR NON-INFRINGEMENT. - -See the Apache Version 2.0 License for specific language governing permissions -and limitations under the License. -***************************************************************************** */ -/* global Reflect, Promise */ - -var extendStatics = function(d, b) { - extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; - return extendStatics(d, b); -}; - -function __extends(d, b) { - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); -} - -var __assign = function() { - __assign = Object.assign || function __assign(t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; - } - return t; - }; - return __assign.apply(this, arguments); -}; - -function __read(o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; -} - -function __spread() { - for (var ar = [], i = 0; i < arguments.length; i++) - ar = ar.concat(__read(arguments[i])); - return ar; -} - -/** An error emitted by Sentry SDKs and related utilities. 
*/ -var SentryError = /** @class */ (function (_super) { - __extends(SentryError, _super); - function SentryError(message) { - var _newTarget = this.constructor; - var _this = _super.call(this, message) || this; - _this.message = message; - // tslint:disable:no-unsafe-any - _this.name = _newTarget.prototype.constructor.name; - Object.setPrototypeOf(_this, _newTarget.prototype); - return _this; - } - return SentryError; -}(Error)); - -/** - * Checks whether given value's type is one of a few Error or Error-like - * {@link isError}. - * - * @param wat A value to be checked. - * @returns A boolean representing the result. - */ -/** - * Checks whether given value's type is an regexp - * {@link isRegExp}. - * - * @param wat A value to be checked. - * @returns A boolean representing the result. - */ -function isRegExp(wat) { - return Object.prototype.toString.call(wat) === '[object RegExp]'; -} - -/** - * Requires a module which is protected _against bundler minification. - * - * @param request The module path to resolve - */ -/** - * Checks whether we're in the Node.js or Browser environment - * - * @returns Answer to given question - */ -function isNodeEnv() { - // tslint:disable:strict-type-predicates - return Object.prototype.toString.call(typeof process !== 'undefined' ? process : 0) === '[object process]'; -} -var fallbackGlobalObject = {}; -/** - * Safely get global scope object - * - * @returns Global scope object - */ -function getGlobalObject() { - return (isNodeEnv() - ? global - : typeof window !== 'undefined' - ? window - : typeof self !== 'undefined' - ? self - : fallbackGlobalObject); -} -/** JSDoc */ -function consoleSandbox(callback) { - var global = getGlobalObject(); - var levels = ['debug', 'info', 'warn', 'error', 'log', 'assert']; - if (!('console' in global)) { - return callback(); - } - var originalConsole = global.console; - var wrappedLevels = {}; - // Restore all wrapped console methods - levels.forEach(function (level) { - if (level in global.console && originalConsole[level].__sentry__) { - wrappedLevels[level] = originalConsole[level].__sentry_wrapped__; - originalConsole[level] = originalConsole[level].__sentry_original__; - } - }); - // Perform callback manipulations - var result = callback(); - // Revert restoration to wrapped state - Object.keys(wrappedLevels).forEach(function (level) { - originalConsole[level] = wrappedLevels[level]; - }); - return result; -} - -// TODO: Implement different loggers for different environments -var global$1 = getGlobalObject(); -/** Prefix for logging strings */ -var PREFIX = 'Sentry Logger '; -/** JSDoc */ -var Logger = /** @class */ (function () { - /** JSDoc */ - function Logger() { - this._enabled = false; - } - /** JSDoc */ - Logger.prototype.disable = function () { - this._enabled = false; - }; - /** JSDoc */ - Logger.prototype.enable = function () { - this._enabled = true; - }; - /** JSDoc */ - Logger.prototype.log = function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - if (!this._enabled) { - return; - } - consoleSandbox(function () { - global$1.console.log(PREFIX + "[Log]: " + args.join(' ')); // tslint:disable-line:no-console - }); - }; - /** JSDoc */ - Logger.prototype.warn = function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - if (!this._enabled) { - return; - } - consoleSandbox(function () { - global$1.console.warn(PREFIX + "[Warn]: " + args.join(' ')); // tslint:disable-line:no-console - }); - 
}; - /** JSDoc */ - Logger.prototype.error = function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - if (!this._enabled) { - return; - } - consoleSandbox(function () { - global$1.console.error(PREFIX + "[Error]: " + args.join(' ')); // tslint:disable-line:no-console - }); - }; - return Logger; -}()); -// Ensure we only have a single logger instance, even if multiple versions of @sentry/utils are being used -global$1.__SENTRY__ = global$1.__SENTRY__ || {}; -var logger = global$1.__SENTRY__.logger || (global$1.__SENTRY__.logger = new Logger()); - -// tslint:disable:no-unsafe-any - -/** - * Wrap a given object method with a higher-order function - * - * @param source An object that contains a method to be wrapped. - * @param name A name of method to be wrapped. - * @param replacement A function that should be used to wrap a given method. - * @returns void - */ -function fill(source, name, replacement) { - if (!(name in source)) { - return; - } - var original = source[name]; - var wrapped = replacement(original); - // Make sure it's a function first, as we need to attach an empty prototype for `defineProperties` to work - // otherwise it'll throw "TypeError: Object.defineProperties called on non-object" - // tslint:disable-next-line:strict-type-predicates - if (typeof wrapped === 'function') { - try { - wrapped.prototype = wrapped.prototype || {}; - Object.defineProperties(wrapped, { - __sentry__: { - enumerable: false, - value: true, - }, - __sentry_original__: { - enumerable: false, - value: original, - }, - __sentry_wrapped__: { - enumerable: false, - value: wrapped, - }, - }); - } - catch (_Oo) { - // This can throw if multiple fill happens on a global object like XMLHttpRequest - // Fixes https://github.com/getsentry/sentry-javascript/issues/2043 - } - } - source[name] = wrapped; -} - -// Slightly modified (no IE8 support, ES6) and transcribed to TypeScript - -/** - * Checks if the value matches a regex or includes the string - * @param value The string value to be checked against - * @param pattern Either a regex or a string that must be contained in value - */ -function isMatchingPattern(value, pattern) { - if (isRegExp(pattern)) { - return pattern.test(value); - } - if (typeof pattern === 'string') { - return value.includes(pattern); - } - return false; -} - -/** - * Tells whether current environment supports Fetch API - * {@link supportsFetch}. - * - * @returns Answer to the given question. - */ -function supportsFetch() { - if (!('fetch' in getGlobalObject())) { - return false; - } - try { - // tslint:disable-next-line:no-unused-expression - new Headers(); - // tslint:disable-next-line:no-unused-expression - new Request(''); - // tslint:disable-next-line:no-unused-expression - new Response(); - return true; - } - catch (e) { - return false; - } -} -/** - * Tells whether current environment supports Fetch API natively - * {@link supportsNativeFetch}. - * - * @returns Answer to the given question. 
- */ -function supportsNativeFetch() { - if (!supportsFetch()) { - return false; - } - var global = getGlobalObject(); - return global.fetch.toString().indexOf('native') !== -1; -} - -/** SyncPromise internal states */ -var States; -(function (States) { - /** Pending */ - States["PENDING"] = "PENDING"; - /** Resolved / OK */ - States["RESOLVED"] = "RESOLVED"; - /** Rejected / Error */ - States["REJECTED"] = "REJECTED"; -})(States || (States = {})); - -/** - * Tracing Integration - */ -var Tracing = /** @class */ (function () { - /** - * Constructor for Tracing - * - * @param _options TracingOptions - */ - function Tracing(_options) { - if (_options === void 0) { _options = {}; } - this._options = _options; - /** - * @inheritDoc - */ - this.name = Tracing.id; - if (!Array.isArray(_options.tracingOrigins) || _options.tracingOrigins.length === 0) { - consoleSandbox(function () { - var defaultTracingOrigins = ['localhost', /^\//]; - // @ts-ignore - console.warn('Sentry: You need to define `tracingOrigins` in the options. Set an array of urls or patterns to trace.'); - // @ts-ignore - console.warn("Sentry: We added a reasonable default for you: " + defaultTracingOrigins); - _options.tracingOrigins = defaultTracingOrigins; - }); - } - } - /** - * @inheritDoc - */ - Tracing.prototype.setupOnce = function (_, getCurrentHub) { - if (this._options.traceXHR !== false) { - this._traceXHR(getCurrentHub); - } - if (this._options.traceFetch !== false) { - this._traceFetch(getCurrentHub); - } - if (this._options.autoStartOnDomReady !== false) { - getGlobalObject().addEventListener('DOMContentLoaded', function () { - Tracing.startTrace(getCurrentHub(), getGlobalObject().location.href); - }); - getGlobalObject().document.onreadystatechange = function () { - if (document.readyState === 'complete') { - Tracing.startTrace(getCurrentHub(), getGlobalObject().location.href); - } - }; - } - }; - /** - * Starts a new trace - * @param hub The hub to start the trace on - * @param transaction Optional transaction - */ - Tracing.startTrace = function (hub, transaction) { - hub.configureScope(function (scope) { - scope.startSpan(); - scope.setTransaction(transaction); - }); - }; - /** - * JSDoc - */ - Tracing.prototype._traceXHR = function (getCurrentHub) { - if (!('XMLHttpRequest' in getGlobalObject())) { - return; - } - var xhrproto = XMLHttpRequest.prototype; - fill(xhrproto, 'open', function (originalOpen) { - return function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - // @ts-ignore - var self = getCurrentHub().getIntegration(Tracing); - if (self) { - self._xhrUrl = args[1]; - } - // tslint:disable-next-line: no-unsafe-any - return originalOpen.apply(this, args); - }; - }); - fill(xhrproto, 'send', function (originalSend) { - return function () { - var _this = this; - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - // @ts-ignore - var self = getCurrentHub().getIntegration(Tracing); - if (self && self._xhrUrl && self._options.tracingOrigins) { - var url_1 = self._xhrUrl; - var headers_1 = getCurrentHub().traceHeaders(); - // tslint:disable-next-line: prefer-for-of - var isWhitelisted = self._options.tracingOrigins.some(function (origin) { - return isMatchingPattern(url_1, origin); - }); - if (isWhitelisted && this.setRequestHeader) { - Object.keys(headers_1).forEach(function (key) { - _this.setRequestHeader(key, headers_1[key]); - }); - } - } - // tslint:disable-next-line: no-unsafe-any - return 
originalSend.apply(this, args); - }; - }); - }; - /** - * JSDoc - */ - Tracing.prototype._traceFetch = function (getCurrentHub) { - if (!supportsNativeFetch()) { - return; - } - - console.log("PATCHING FETCH"); - - // tslint:disable: only-arrow-functions - fill(getGlobalObject(), 'fetch', function (originalFetch) { - return function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - // @ts-ignore - var self = getCurrentHub().getIntegration(Tracing); - if (self && self._options.tracingOrigins) { - console.log("blafalseq"); - var url_2 = args[0]; - var options = args[1] = args[1] || {}; - var whiteListed_1 = false; - self._options.tracingOrigins.forEach(function (whiteListUrl) { - if (!whiteListed_1) { - whiteListed_1 = isMatchingPattern(url_2, whiteListUrl); - console.log('a', url_2, whiteListUrl); - } - }); - if (whiteListed_1) { - console.log('aaaaaa', options, whiteListed_1); - if (options.headers) { - - if (Array.isArray(options.headers)) { - options.headers = __spread(options.headers, Object.entries(getCurrentHub().traceHeaders())); - } - else { - options.headers = __assign({}, options.headers, getCurrentHub().traceHeaders()); - } - } - else { - options.headers = getCurrentHub().traceHeaders(); - } - - console.log(options.headers); - } - } - - args[1] = options; - // tslint:disable-next-line: no-unsafe-any - return originalFetch.apply(getGlobalObject(), args); - }; - }); - // tslint:enable: only-arrow-functions - }; - /** - * @inheritDoc - */ - Tracing.id = 'Tracing'; - return Tracing; -}()); - -exports.Tracing = Tracing; - - - __window.Sentry = __window.Sentry || {}; - __window.Sentry.Integrations = __window.Sentry.Integrations || {}; - Object.assign(__window.Sentry.Integrations, exports); - - - - - - - - - - - - -}(window)); -//# sourceMappingURL=tracing.js.map diff --git a/examples/tracing/templates/index.html b/examples/tracing/templates/index.html deleted file mode 100644 index 5e930a720c..0000000000 --- a/examples/tracing/templates/index.html +++ /dev/null @@ -1,47 +0,0 @@ -{{ sentry_trace }} - - - - - - -

Decode your base64 string as a service (that calls another service)

- - A base64 string
- - -

Output:

-
diff --git a/examples/tracing/traceviewer.py b/examples/tracing/traceviewer.py
deleted file mode 100644
index 9c1435ff88..0000000000
--- a/examples/tracing/traceviewer.py
+++ /dev/null
@@ -1,61 +0,0 @@
-import json
-import sys
-
-print("digraph mytrace {")
-print("rankdir=LR")
-
-all_spans = []
-
-for line in sys.stdin:
-    event = json.loads(line)
-    if event.get("type") != "transaction":
-        continue
-
-    trace_ctx = event["contexts"]["trace"]
-    trace_span = dict(trace_ctx)  # fake a span entry from transaction event
-    trace_span["description"] = event["transaction"]
-    trace_span["start_timestamp"] = event["start_timestamp"]
-    trace_span["timestamp"] = event["timestamp"]
-
-    if "parent_span_id" not in trace_ctx:
-        print(
-            '{} [label="trace:{} ({})"];'.format(
-                int(trace_ctx["trace_id"], 16),
-                event["transaction"],
-                trace_ctx["trace_id"],
-            )
-        )
-
-    for span in event["spans"] + [trace_span]:
-        print(
-            '{} [label="span:{} ({})"];'.format(
-                int(span["span_id"], 16), span["description"], span["span_id"]
-            )
-        )
-        if "parent_span_id" in span:
-            print(
-                "{} -> {};".format(
-                    int(span["parent_span_id"], 16), int(span["span_id"], 16)
-                )
-            )
-
-        print(
-            "{} -> {} [style=dotted];".format(
-                int(span["trace_id"], 16), int(span["span_id"], 16)
-            )
-        )
-
-        all_spans.append(span)
-
-
-for s1 in all_spans:
-    for s2 in all_spans:
-        if s1["start_timestamp"] > s2["timestamp"]:
-            print(
-                '{} -> {} [color="#efefef"];'.format(
-                    int(s1["span_id"], 16), int(s2["span_id"], 16)
-                )
-            )
-
-
-print("}")
diff --git a/examples/tracing/tracing.py b/examples/tracing/tracing.py
deleted file mode 100644
index b5ed98044d..0000000000
--- a/examples/tracing/tracing.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import json
-import flask
-import os
-import redis
-import rq
-import sentry_sdk
-import time
-import urllib3
-
-from sentry_sdk.integrations.flask import FlaskIntegration
-from sentry_sdk.integrations.rq import RqIntegration
-
-
-app = flask.Flask(__name__)
-redis_conn = redis.Redis()
-http = urllib3.PoolManager()
-queue = rq.Queue(connection=redis_conn)
-
-
-def write_event(event):
-    with open("events", "a") as f:
-        f.write(json.dumps(event))
-        f.write("\n")
-
-
-sentry_sdk.init(
-    integrations=[FlaskIntegration(), RqIntegration()],
-    traces_sample_rate=1.0,
-    debug=True,
-    transport=write_event,
-)
-
-
-def decode_base64(encoded, redis_key):
-    time.sleep(1)
-    r = http.request("GET", "http://httpbin.org/base64/{}".format(encoded))
-    redis_conn.set(redis_key, r.data)
-
-
-@app.route("/")
-def index():
-    return flask.render_template(
-        "index.html",
-        sentry_dsn=os.environ["SENTRY_DSN"],
-        traceparent=dict(sentry_sdk.Hub.current.iter_trace_propagation_headers()),
-    )
-
-
-@app.route("/compute/")
-def compute(input):
-    redis_key = "sentry-python-tracing-example-result:{}".format(input)
-    redis_conn.delete(redis_key)
-    queue.enqueue(decode_base64, encoded=input, redis_key=redis_key)
-
-    return redis_key
-
-
-@app.route("/wait/")
-def wait(redis_key):
-    result = redis_conn.get(redis_key)
-    if result is None:
-        return "NONE"
-    else:
-        redis_conn.delete(redis_key)
-        return "RESULT: {}".format(result)
-
-
-@app.cli.command("worker")
-def run_worker():
-    print("WORKING")
-    worker = rq.Worker([queue], connection=queue.connection)
-    worker.work()
diff --git a/linter-requirements.txt b/linter-requirements.txt
index ec736a59c5..e86ffd506b 100644
--- a/linter-requirements.txt
+++ b/linter-requirements.txt
@@ -1,10 +1,12 @@
-black==22.3.0
-flake8==3.9.2
-flake8-import-order==0.18.1
-mypy==0.950
+mypy
+black
+flake8==5.0.4  # newer flake8 versions depend on pyflakes>=3.0.0, which dropped support for Python 2 "# type:" comments
 types-certifi
+types-protobuf==4.24.0.20240408  # newer raises an error on mypy sentry_sdk
 types-redis
 types-setuptools
-flake8-bugbear==21.4.3
-pep8-naming==0.11.1
+pymongo # There is no separate types module.
+loguru # There is no separate types module.
+flake8-bugbear
+pep8-naming
 pre-commit # local linting
diff --git a/mypy.ini b/mypy.ini
index 2a15e45e49..c1444d61e5 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -1,5 +1,5 @@
 [mypy]
-python_version = 3.7
+python_version = 3.11
 allow_redefinition = True
 check_untyped_defs = True
 ; disallow_any_decorated = True
@@ -59,7 +59,17 @@ ignore_missing_imports = True
 [mypy-sentry_sdk._queue]
 ignore_missing_imports = True
 disallow_untyped_defs = False
+[mypy-sentry_sdk._lru_cache]
+disallow_untyped_defs = False
 [mypy-celery.app.trace]
 ignore_missing_imports = True
 [mypy-flask.signals]
 ignore_missing_imports = True
+[mypy-huey.*]
+ignore_missing_imports = True
+[mypy-openai.*]
+ignore_missing_imports = True
+[mypy-arq.*]
+ignore_missing_imports = True
+[mypy-grpc.*]
+ignore_missing_imports = True
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000000..20ee9680f7
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,10 @@
+[tool.black]
+# 'extend-exclude' excludes files or directories in addition to the defaults
+extend-exclude = '''
+# A regex preceded with ^/ will apply only to files and directories
+# in the root of the project.
+(
+    .*_pb2.py  # exclude autogenerated Protocol Buffer files anywhere in the project
+    | .*_pb2_grpc.py  # exclude autogenerated Protocol Buffer files anywhere in the project
+)
+'''
diff --git a/pytest.ini b/pytest.ini
index 4e987c1a90..f736c30496 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -3,7 +3,8 @@ DJANGO_SETTINGS_MODULE = tests.integrations.django.myapp.settings
 addopts = --tb=short
 markers =
     tests_internal_exceptions: Handle internal exceptions just as the SDK does, to test it. (Otherwise internal exceptions are recorded and reraised.)
-    only: A temporary marker, to make pytest only run the tests with the mark, similar to jest's `it.only`. To use, run `pytest -v -m only`.
+    only: A temporary marker, to make pytest only run the tests with the mark, similar to jests `it.only`. To use, run `pytest -v -m only`.
+asyncio_mode = strict
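+# "strict" means pytest-asyncio only runs coroutine tests that are explicitly marked (e.g. with @pytest.mark.asyncio)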
 
 [pytest-watch]
 ; Enable this to drop into pdb on errors
diff --git a/scripts/aws-attach-layer-to-lambda-function.sh b/scripts/aws-attach-layer-to-lambda-function.sh
new file mode 100755
index 0000000000..71e08c6318
--- /dev/null
+++ b/scripts/aws-attach-layer-to-lambda-function.sh
@@ -0,0 +1,33 @@
+#!/usr/bin/env bash
+#
+# Attaches the layer `SentryPythonServerlessSDK-local-dev` to a given lambda function.
+#
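+# Example invocation (the function name is illustrative):
+#   ./scripts/aws-attach-layer-to-lambda-function.sh my-test-function
+#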
+
+set -euo pipefail
+
+# Check for argument
+if [ $# -eq 0 ]
+  then
+    SCRIPT_NAME=$(basename "$0")
+    echo "ERROR: No argument supplied. Please give the name of a Lambda function!"
+    echo ""
+    echo "Usage: $SCRIPT_NAME "
+    echo ""
+    exit 1
+fi
+
+FUNCTION_NAME=$1
+
+echo "Getting ARN of newest Sentry lambda layer..."
+LAYER_ARN=$(aws lambda list-layer-versions --layer-name SentryPythonServerlessSDK-local-dev --query "LayerVersions[0].LayerVersionArn" | tr -d '"')
+echo "Done getting ARN of newest Sentry lambda layer $LAYER_ARN."
+
+echo "Attaching Lamba layer to function $FUNCTION_NAME..."
+echo "Warning: This remove all other layers!"
+aws lambda update-function-configuration \
+    --function-name "$FUNCTION_NAME" \
+    --layers "$LAYER_ARN" \
+    --no-cli-pager
+echo "Done attaching Lamba layer to function '$FUNCTION_NAME'."
+
+echo "All done. Have a nice day!"
diff --git a/scripts/aws-cleanup.sh b/scripts/aws-cleanup.sh
old mode 100644
new mode 100755
index 1219668855..982835c283
--- a/scripts/aws-cleanup.sh
+++ b/scripts/aws-cleanup.sh
@@ -1,11 +1,18 @@
 #!/bin/sh
-# Delete all AWS Lambda functions
+#
+# Helper script to clean up AWS Lambda functions created
+# by the test suite (tests/integrations/aws_lambda/test_aws.py).
+#
+# This will delete all Lambda functions whose names start with `test_`.
+#
 
+export AWS_DEFAULT_REGION="us-east-1"
 export AWS_ACCESS_KEY_ID="$SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID"
 export AWS_SECRET_ACCESS_KEY="$SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY"
-export AWS_IAM_ROLE="$SENTRY_PYTHON_TEST_AWS_IAM_ROLE"
 
-for func in $(aws lambda list-functions | jq -r .Functions[].FunctionName); do
+for func in $(aws lambda list-functions --output text --query 'Functions[?starts_with(FunctionName, `test_`) == `true`].FunctionName'); do
     echo "Deleting $func"
-    aws lambda delete-function --function-name $func
+    aws lambda delete-function --function-name "$func"
 done
+
+echo "All done! Have a nice day!"
diff --git a/scripts/aws-delete-lamba-layer-versions.sh b/scripts/aws-delete-lamba-layer-versions.sh
new file mode 100755
index 0000000000..f467f9398b
--- /dev/null
+++ b/scripts/aws-delete-lamba-layer-versions.sh
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+#
+# Deletes all versions of the layer specified in LAYER_NAME in one region.
+#
+
+set -euo pipefail
+
+# override default AWS region
+export AWS_REGION=eu-central-1
+
+LAYER_NAME=SentryPythonServerlessSDK-local-dev
+VERSION="0"
+
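+# Repeatedly delete the newest remaining version; the loop ends once version 1 has been deleted.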
+while [[ $VERSION != "1" ]]
+do
+  VERSION=$(aws lambda list-layer-versions --layer-name $LAYER_NAME | jq '.LayerVersions[0].Version')
+  aws lambda delete-layer-version --layer-name $LAYER_NAME --version-number $VERSION
+done
diff --git a/scripts/aws-deploy-local-layer.sh b/scripts/aws-deploy-local-layer.sh
new file mode 100755
index 0000000000..56f2087596
--- /dev/null
+++ b/scripts/aws-deploy-local-layer.sh
@@ -0,0 +1,30 @@
+#!/usr/bin/env bash
+#
+# Builds and deploys the Sentry AWS Lambda layer (including the Sentry SDK and the Sentry Lambda Extension)
+#
+# The currently checked out version of the SDK in your local directory is used.
+# The latest version of the Lambda Extension is fetched from the Sentry Release Registry.
+#
+
+set -euo pipefail
+
+# Creating Lambda layer
+echo "Creating Lambda layer in ./dist ..."
+make aws-lambda-layer
+echo "Done creating Lambda layer in ./dist"
+
+# Deploying zipped Lambda layer to AWS
+ZIP=$(ls dist | grep serverless | head -n 1)
+echo "Deploying zipped Lambda layer $ZIP to AWS..."
+
+aws lambda publish-layer-version \
+    --layer-name "SentryPythonServerlessSDK-local-dev" \
+    --region "eu-central-1" \
+    --zip-file "fileb://dist/$ZIP" \
+    --description "Local test build of SentryPythonServerlessSDK (can be deleted)" \
+    --compatible-runtimes python3.7 python3.8 python3.9 python3.10 python3.11 \
+    --no-cli-pager
+
+echo "Done deploying zipped Lambda layer to AWS as 'SentryPythonServerlessSDK-local-dev'."
+
+echo "All done. Have a nice day!"
diff --git a/scripts/build_aws_lambda_layer.py b/scripts/build_aws_lambda_layer.py
new file mode 100644
index 0000000000..8704e4de01
--- /dev/null
+++ b/scripts/build_aws_lambda_layer.py
@@ -0,0 +1,140 @@
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+from typing import TYPE_CHECKING
+
+from sentry_sdk.consts import VERSION as SDK_VERSION
+
+if TYPE_CHECKING:
+    from typing import Optional
+
+DIST_PATH = "dist"  # created by "make dist" that is called by "make aws-lambda-layer"
+PYTHON_SITE_PACKAGES = "python"  # see https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html#configuration-layers-path
+
+
+class LayerBuilder:
+    def __init__(
+        self,
+        base_dir,  # type: str
+        out_zip_filename=None,  # type: Optional[str]
+    ):
+        # type: (...) -> None
+        self.base_dir = base_dir
+        self.python_site_packages = os.path.join(self.base_dir, PYTHON_SITE_PACKAGES)
+        self.out_zip_filename = (
+            f"sentry-python-serverless-{SDK_VERSION}.zip"
+            if out_zip_filename is None
+            else out_zip_filename
+        )
+
+    def make_directories(self):
+        # type: (...) -> None
+        os.makedirs(self.python_site_packages)
+
+    def install_python_packages(self):
+        # type: (...) -> None
+        # Install requirements for Lambda Layer (these are more limited than the SDK requirements,
+        # because Lambda does not support the newest versions of some packages)
+        subprocess.check_call(
+            [
+                sys.executable,
+                "-m",
+                "pip",
+                "install",
+                "-r",
+                "aws-lambda-layer-requirements.txt",
+                "--target",
+                self.python_site_packages,
+            ],
+        )
+
+        sentry_python_sdk = os.path.join(
+            DIST_PATH,
+            f"sentry_sdk-{SDK_VERSION}-py2.py3-none-any.whl",  # this is generated by "make dist" that is called by "make aws-lamber-layer"
+        )
+        subprocess.run(
+            [
+                "pip",
+                "install",
+                "--no-cache-dir",  # always access PyPI
+                "--no-deps",  # the right depencencies have been installed in the call above
+                "--quiet",
+                sentry_python_sdk,
+                "--target",
+                self.python_site_packages,
+            ],
+            check=True,
+        )
+
+    def create_init_serverless_sdk_package(self):
+        # type: (...) -> None
+        """
+        Create the init_serverless_sdk package inside the
+        sentry-python-serverless zip.
+        """
+        serverless_sdk_path = (
+            f"{self.python_site_packages}/sentry_sdk/"
+            f"integrations/init_serverless_sdk"
+        )
+        if not os.path.exists(serverless_sdk_path):
+            os.makedirs(serverless_sdk_path)
+        shutil.copy(
+            "scripts/init_serverless_sdk.py", f"{serverless_sdk_path}/__init__.py"
+        )
+
+    def zip(self):
+        # type: (...) -> None
+        subprocess.run(
+            [
+                "zip",
+                "-q",  # Quiet
+                "-x",  # Exclude files
+                "**/__pycache__/*",  # Files to be excluded
+                "-r",  # Recurse paths
+                self.out_zip_filename,  # Output filename
+                PYTHON_SITE_PACKAGES,  # Files to be zipped
+            ],
+            cwd=self.base_dir,
+            check=True,  # Raises CalledProcessError if exit status is non-zero
+        )
+
+        shutil.copy(
+            os.path.join(self.base_dir, self.out_zip_filename),
+            os.path.abspath(DIST_PATH),
+        )
+
+
+def build_packaged_zip(base_dir=None, make_dist=False, out_zip_filename=None):
+    if base_dir is None:
+        base_dir = tempfile.mkdtemp()
+
+    if make_dist:
+        # Same thing that is done by "make dist"
+        # (which is a dependency of "make aws-lambda-layer")
+        subprocess.check_call(
+            [sys.executable, "setup.py", "sdist", "bdist_wheel", "-d", DIST_PATH],
+        )
+
+    layer_builder = LayerBuilder(base_dir, out_zip_filename=out_zip_filename)
+    layer_builder.make_directories()
+    layer_builder.install_python_packages()
+    layer_builder.create_init_serverless_sdk_package()
+    layer_builder.zip()
+
+    # Just for debugging
+    dist_path = os.path.abspath(DIST_PATH)
+    print("Created Lambda Layer package with this information:")
+    print(" - Base directory for generating package: {}".format(layer_builder.base_dir))
+    print(
+        " - Created Python SDK distribution (in `{}`): {}".format(dist_path, make_dist)
+    )
+    if not make_dist:
+        print("    If 'False' we assume it was already created (by 'make dist')")
+    print(" - Package zip filename: {}".format(layer_builder.out_zip_filename))
+    print(" - Copied package zip to: {}".format(dist_path))
+
+
+if __name__ == "__main__":
+    build_packaged_zip()
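A minimal sketch of invoking the builder directly, assuming it is run from the repository root with `scripts` importable (normally `make aws-lambda-layer` drives this):

```python
import tempfile

from scripts.build_aws_lambda_layer import build_packaged_zip

# make_dist=True also builds the sdist/wheel (what "make dist" would do);
# the layer zip ends up in ./dist next to the wheel.
build_packaged_zip(
    base_dir=tempfile.mkdtemp(),
    make_dist=True,
)
```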
diff --git a/scripts/build_awslambda_layer.py b/scripts/build_awslambda_layer.py
deleted file mode 100644
index 1fda06e79f..0000000000
--- a/scripts/build_awslambda_layer.py
+++ /dev/null
@@ -1,117 +0,0 @@
-import os
-import subprocess
-import tempfile
-import shutil
-
-from sentry_sdk.consts import VERSION as SDK_VERSION
-from sentry_sdk._types import MYPY
-
-if MYPY:
-    from typing import Union
-
-
-class PackageBuilder:
-    def __init__(
-        self,
-        base_dir,  # type: str
-        pkg_parent_dir,  # type: str
-        dist_rel_path,  # type: str
-    ):
-        # type: (...) -> None
-        self.base_dir = base_dir
-        self.pkg_parent_dir = pkg_parent_dir
-        self.dist_rel_path = dist_rel_path
-        self.packages_dir = self.get_relative_path_of(pkg_parent_dir)
-
-    def make_directories(self):
-        # type: (...) -> None
-        os.makedirs(self.packages_dir)
-
-    def install_python_binaries(self):
-        # type: (...) -> None
-        wheels_filepath = os.path.join(
-            self.dist_rel_path, f"sentry_sdk-{SDK_VERSION}-py2.py3-none-any.whl"
-        )
-        subprocess.run(
-            [
-                "pip",
-                "install",
-                "--no-cache-dir",  # Disables the cache -> always accesses PyPI
-                "-q",  # Quiet
-                wheels_filepath,  # Copied to the target directory before installation
-                "-t",  # Target directory flag
-                self.packages_dir,
-            ],
-            check=True,
-        )
-
-    def create_init_serverless_sdk_package(self):
-        # type: (...) -> None
-        """
-        Method that creates the init_serverless_sdk pkg in the
-        sentry-python-serverless zip
-        """
-        serverless_sdk_path = (
-            f"{self.packages_dir}/sentry_sdk/" f"integrations/init_serverless_sdk"
-        )
-        if not os.path.exists(serverless_sdk_path):
-            os.makedirs(serverless_sdk_path)
-        shutil.copy(
-            "scripts/init_serverless_sdk.py", f"{serverless_sdk_path}/__init__.py"
-        )
-
-    def zip(
-        self, filename  # type: str
-    ):
-        # type: (...) -> None
-        subprocess.run(
-            [
-                "zip",
-                "-q",  # Quiet
-                "-x",  # Exclude files
-                "**/__pycache__/*",  # Files to be excluded
-                "-r",  # Recurse paths
-                filename,  # Output filename
-                self.pkg_parent_dir,  # Files to be zipped
-            ],
-            cwd=self.base_dir,
-            check=True,  # Raises CalledProcessError if exit status is non-zero
-        )
-
-    def get_relative_path_of(
-        self, subfile  # type: str
-    ):
-        # type: (...) -> str
-        return os.path.join(self.base_dir, subfile)
-
-
-# Ref to `pkg_parent_dir` Top directory in the ZIP file.
-# Placing the Sentry package in `/python` avoids
-# creating a directory for a specific version. For more information, see
-#  https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html#configuration-layers-path
-def build_packaged_zip(
-    dist_rel_path="dist",  # type: str
-    dest_zip_filename=f"sentry-python-serverless-{SDK_VERSION}.zip",  # type: str
-    pkg_parent_dir="python",  # type: str
-    dest_abs_path=None,  # type: Union[str, None]
-):
-    # type: (...) -> None
-    if dest_abs_path is None:
-        dest_abs_path = os.path.abspath(
-            os.path.join(os.path.dirname(__file__), "..", dist_rel_path)
-        )
-    with tempfile.TemporaryDirectory() as tmp_dir:
-        package_builder = PackageBuilder(tmp_dir, pkg_parent_dir, dist_rel_path)
-        package_builder.make_directories()
-        package_builder.install_python_binaries()
-        package_builder.create_init_serverless_sdk_package()
-        package_builder.zip(dest_zip_filename)
-        if not os.path.exists(dist_rel_path):
-            os.makedirs(dist_rel_path)
-        shutil.copy(
-            package_builder.get_relative_path_of(dest_zip_filename), dest_abs_path
-        )
-
-
-if __name__ == "__main__":
-    build_packaged_zip()
diff --git a/scripts/init_serverless_sdk.py b/scripts/init_serverless_sdk.py
index 7a414ff406..be545b680b 100644
--- a/scripts/init_serverless_sdk.py
+++ b/scripts/init_serverless_sdk.py
@@ -5,15 +5,16 @@
 Then the Handler function string should be replaced with
 'sentry_sdk.integrations.init_serverless_sdk.sentry_lambda_handler'
 """
+
 import os
 import sys
 import re
 
 import sentry_sdk
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
 
 
diff --git a/scripts/runtox.sh b/scripts/runtox.sh
index 01f29c7dd1..dbbb4f2e10 100755
--- a/scripts/runtox.sh
+++ b/scripts/runtox.sh
@@ -1,4 +1,8 @@
 #!/bin/bash
+
+# Usage: sh scripts/runtox.sh py3.12 <pytest-args>
+# Runs all environments with substring py3.12 and the given arguments for pytest
+
 set -ex
 
 if [ -n "$TOXPATH" ]; then
@@ -9,21 +13,31 @@ else
     TOXPATH=./.venv/bin/tox
 fi
 
-# Usage: sh scripts/runtox.sh py3.7 <pytest-args>
-# Runs all environments with substring py3.7 and the given arguments for pytest
-
-if [ -n "$1" ]; then
-    searchstring="$1"
-elif [ -n "$CI_PYTHON_VERSION" ]; then
-    searchstring="$(echo py$CI_PYTHON_VERSION | sed -e 's/pypypy/pypy/g' -e 's/-dev//g')"
-    if [ "$searchstring" = "pypy-2.7" ]; then
-        searchstring=pypy
-    fi
-elif [ -n "$AZURE_PYTHON_VERSION" ]; then
-    searchstring="$(echo py$AZURE_PYTHON_VERSION | sed -e 's/pypypy/pypy/g' -e 's/-dev//g')"
-    if [ "$searchstring" = pypy2 ]; then
-        searchstring=pypy
+excludelatest=false
+for arg in "$@"
+do
+    if [ "$arg" = "--exclude-latest" ]; then
+        excludelatest=true
+        shift
+        break
     fi
+done
+
+searchstring="$1"
+
+export TOX_PARALLEL_NO_SPINNER=1
+
+if $excludelatest; then
+    echo "Excluding latest"
+    ENV="$($TOXPATH -l | grep -- "$searchstring" | grep -v -- '-latest' | tr $'\n' ',')"
+else
+    echo "Including latest"
+    ENV="$($TOXPATH -l | grep -- "$searchstring" | tr $'\n' ',')"
+fi
+
+if [ -z "${ENV}" ]; then
+    echo "No targets found. Skipping."
+    exit 0
 fi
 
-exec $TOXPATH -e $($TOXPATH -l | grep "$searchstring" | tr $'\n' ',') -- "${@:2}"
+exec $TOXPATH -vv -e "$ENV" -- "${@:2}"
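The selection logic above is plain substring matching over `tox -l` output; a rough Python rendering of the same filtering, for illustration only:

```python
def select_envs(all_envs, searchstring, exclude_latest=False):
    """Mimic runtox.sh: keep envs containing the search string,
    optionally dropping the "-latest" variants."""
    selected = [env for env in all_envs if searchstring in env]
    if exclude_latest:
        selected = [env for env in selected if "-latest" not in env]
    return ",".join(selected)


envs = ["py3.12-django-v5.0", "py3.12-django-latest", "py3.11-flask-v2"]
assert select_envs(envs, "py3.12", exclude_latest=True) == "py3.12-django-v5.0"
```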
diff --git a/scripts/split-tox-gh-actions/split-tox-gh-actions.py b/scripts/split-tox-gh-actions/split-tox-gh-actions.py
new file mode 100755
index 0000000000..13b81283ca
--- /dev/null
+++ b/scripts/split-tox-gh-actions/split-tox-gh-actions.py
@@ -0,0 +1,308 @@
+"""Split Tox to GitHub Actions
+
+This is a small script to split a tox.ini config file into multiple GitHub actions configuration files.
+This way each group of frameworks defined in tox.ini will get its own GitHub actions configuration file
+which allows them to be run in parallel in GitHub actions.
+
+This will generate/update several configuration files that need to be committed to Git afterwards.
+Whenever tox.ini is changed, this script needs to be run.
+
+Usage:
+    python split-tox-gh-actions.py [--fail-on-changes]
+
+If the parameter `--fail-on-changes` is set, the script will raise a RuntimeError in case the yaml
+files have been changed by the script's execution. This is used in CI to check whether the yaml files
+represent the current tox.ini file. (If they do not, the CI run fails.)
+"""
+
+import configparser
+import hashlib
+import sys
+from collections import defaultdict
+from functools import reduce
+from glob import glob
+from pathlib import Path
+
+from jinja2 import Environment, FileSystemLoader
+
+
+OUT_DIR = Path(__file__).resolve().parent.parent.parent / ".github" / "workflows"
+TOX_FILE = Path(__file__).resolve().parent.parent.parent / "tox.ini"
+TEMPLATE_DIR = Path(__file__).resolve().parent / "templates"
+
+FRAMEWORKS_NEEDING_POSTGRES = {
+    "django",
+    "asyncpg",
+}
+
+FRAMEWORKS_NEEDING_CLICKHOUSE = {
+    "clickhouse_driver",
+}
+
+FRAMEWORKS_NEEDING_AWS = {
+    "aws_lambda",
+}
+
+FRAMEWORKS_NEEDING_GITHUB_SECRETS = {
+    "aws_lambda",
+}
+
+# Frameworks grouped here will be tested together to not hog all GitHub runners.
+# If you add or remove a group, make sure to git rm the generated YAML file as
+# well.
+GROUPS = {
+    "Common": [
+        "common",
+    ],
+    "AWS Lambda": [
+        # this is separate from Cloud Computing because only this one test suite
+        # needs to run with access to GitHub secrets
+        "aws_lambda",
+    ],
+    "Cloud Computing": [
+        "boto3",
+        "chalice",
+        "cloud_resource_context",
+        "gcp",
+    ],
+    "Data Processing": [
+        "arq",
+        "beam",
+        "celery",
+        "huey",
+        "openai",
+        "rq",
+    ],
+    "Databases": [
+        "asyncpg",
+        "clickhouse_driver",
+        "pymongo",
+        "redis",
+        "rediscluster",
+        "sqlalchemy",
+    ],
+    "GraphQL": [
+        "ariadne",
+        "gql",
+        "graphene",
+        "strawberry",
+    ],
+    "Networking": [
+        "gevent",
+        "grpc",
+        "httpx",
+        "requests",
+    ],
+    "Web Frameworks 1": [
+        "django",
+        "fastapi",
+        "flask",
+        "starlette",
+    ],
+    "Web Frameworks 2": [
+        "aiohttp",
+        "asgi",
+        "bottle",
+        "falcon",
+        "pyramid",
+        "quart",
+        "sanic",
+        "starlite",
+        "tornado",
+    ],
+    "Miscellaneous": [
+        "loguru",
+        "opentelemetry",
+        "pure_eval",
+        "trytond",
+    ],
+}
+
+
+ENV = Environment(
+    loader=FileSystemLoader(TEMPLATE_DIR),
+)
+
+
+def main(fail_on_changes):
+    """Create one CI workflow for each framework defined in tox.ini."""
+    if fail_on_changes:
+        old_hash = get_files_hash()
+
+    print("Parsing tox.ini...")
+    py_versions_pinned, py_versions_latest = parse_tox()
+
+    if fail_on_changes:
+        print("Checking if all frameworks belong in a group...")
+        missing_frameworks = find_frameworks_missing_from_groups(
+            py_versions_pinned, py_versions_latest
+        )
+        if missing_frameworks:
+            raise RuntimeError(
+                "Please add the following frameworks to the corresponding group "
+                "in `GROUPS` in `scripts/split-tox-gh-actions/split-tox-gh-actions.py: "
+                + ", ".join(missing_frameworks)
+            )
+
+    print("Rendering templates...")
+    for group, frameworks in GROUPS.items():
+        contents = render_template(
+            group, frameworks, py_versions_pinned, py_versions_latest
+        )
+        filename = write_file(contents, group)
+        print(f"Created {filename}")
+
+    if fail_on_changes:
+        new_hash = get_files_hash()
+
+        if old_hash != new_hash:
+            raise RuntimeError(
+                "The yaml configuration files have changed. This means that either `tox.ini` "
+                "or one of the constants in `split-tox-gh-actions.py` has changed "
+                "but the changes have not been propagated to the GitHub actions config files. "
+                "Please run `python scripts/split-tox-gh-actions/split-tox-gh-actions.py` "
+                "locally and commit the changes of the yaml configuration files to continue. "
+            )
+
+    print("All done. Have a nice day!")
+
+
+def parse_tox():
+    config = configparser.ConfigParser()
+    config.read(TOX_FILE)
+    lines = [
+        line
+        for line in config["tox"]["envlist"].split("\n")
+        if line.strip() and not line.strip().startswith("#")
+    ]
+
+    py_versions_pinned = defaultdict(set)
+    py_versions_latest = defaultdict(set)
+
+    for line in lines:
+        # normalize lines
+        line = line.strip().lower()
+
+        try:
+            # parse tox environment definition
+            try:
+                (raw_python_versions, framework, framework_versions) = line.split("-")
+            except ValueError:
+                (raw_python_versions, framework) = line.split("-")
+                framework_versions = []
+
+            # collect python versions to test the framework in
+            raw_python_versions = set(
+                raw_python_versions.replace("{", "").replace("}", "").split(",")
+            )
+            if "latest" in framework_versions:
+                py_versions_latest[framework] |= raw_python_versions
+            else:
+                py_versions_pinned[framework] |= raw_python_versions
+
+        except ValueError:
+            print(f"ERROR reading line {line}")
+
+    py_versions_pinned = _normalize_py_versions(py_versions_pinned)
+    py_versions_latest = _normalize_py_versions(py_versions_latest)
+
+    return py_versions_pinned, py_versions_latest
+
+
+def find_frameworks_missing_from_groups(py_versions_pinned, py_versions_latest):
+    frameworks_in_a_group = _union(GROUPS.values())
+    all_frameworks = set(py_versions_pinned.keys()) | set(py_versions_latest.keys())
+    return all_frameworks - frameworks_in_a_group
+
+
+def _normalize_py_versions(py_versions):
+    def replace_and_sort(versions):
+        return sorted(
+            [py.replace("py", "") for py in versions],
+            key=lambda v: tuple(map(int, v.split("."))),
+        )
+
+    if isinstance(py_versions, dict):
+        normalized = defaultdict(set)
+        normalized |= {
+            framework: replace_and_sort(versions)
+            for framework, versions in py_versions.items()
+        }
+
+    elif isinstance(py_versions, set):
+        normalized = replace_and_sort(py_versions)
+
+    return normalized
+
+
+def get_files_hash():
+    """Calculate a hash of all the yaml configuration files"""
+    hasher = hashlib.md5()
+    path_pattern = (OUT_DIR / "test-integrations-*.yml").as_posix()
+    for file in glob(path_pattern):
+        with open(file, "rb") as f:
+            buf = f.read()
+            hasher.update(buf)
+
+    return hasher.hexdigest()
+
+
+def _union(seq):
+    return reduce(lambda x, y: set(x) | set(y), seq)
+
+
+def render_template(group, frameworks, py_versions_pinned, py_versions_latest):
+    template = ENV.get_template("base.jinja")
+
+    categories = set()
+    py_versions = defaultdict(set)
+    for framework in frameworks:
+        if py_versions_pinned[framework]:
+            categories.add("pinned")
+            py_versions["pinned"] |= set(py_versions_pinned[framework])
+        if py_versions_latest[framework]:
+            categories.add("latest")
+            py_versions["latest"] |= set(py_versions_latest[framework])
+        if "2.7" in py_versions_pinned[framework]:
+            categories.add("py27")
+
+    py_versions["pinned"].discard("2.7")
+    py_versions["latest"].discard("2.7")
+
+    context = {
+        "group": group,
+        "frameworks": frameworks,
+        "categories": sorted(categories),
+        "needs_aws_credentials": bool(set(frameworks) & FRAMEWORKS_NEEDING_AWS),
+        "needs_clickhouse": bool(set(frameworks) & FRAMEWORKS_NEEDING_CLICKHOUSE),
+        "needs_postgres": bool(set(frameworks) & FRAMEWORKS_NEEDING_POSTGRES),
+        "needs_github_secrets": bool(
+            set(frameworks) & FRAMEWORKS_NEEDING_GITHUB_SECRETS
+        ),
+        "py_versions": {
+            category: [f'"{version}"' for version in _normalize_py_versions(versions)]
+            for category, versions in py_versions.items()
+        },
+    }
+    rendered = template.render(context)
+    rendered = postprocess_template(rendered)
+    return rendered
+
+
+def postprocess_template(rendered):
+    return "\n".join([line for line in rendered.split("\n") if line.strip()]) + "\n"
+
+
+def write_file(contents, group):
+    group = group.lower().replace(" ", "-")
+    outfile = OUT_DIR / f"test-integrations-{group}.yml"
+
+    with open(outfile, "w") as file:
+        file.write(contents)
+
+    return outfile
+
+
+if __name__ == "__main__":
+    fail_on_changes = len(sys.argv) == 2 and sys.argv[1] == "--fail-on-changes"
+    main(fail_on_changes)
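To make the parsing concrete, a hedged sketch of how a single envlist line is decomposed (the framework and versions here are made up):

```python
# An envlist line such as "{py3.8,py3.11}-django-v3.2" splits on "-" into
# raw python versions, framework, and framework versions.
line = "{py3.8,py3.11}-django-v3.2"
raw_python_versions, framework, framework_versions = line.split("-")
raw_python_versions = set(
    raw_python_versions.replace("{", "").replace("}", "").split(",")
)

assert framework == "django"
assert raw_python_versions == {"py3.8", "py3.11"}
```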
diff --git a/scripts/split-tox-gh-actions/templates/base.jinja b/scripts/split-tox-gh-actions/templates/base.jinja
new file mode 100644
index 0000000000..cb1bffc616
--- /dev/null
+++ b/scripts/split-tox-gh-actions/templates/base.jinja
@@ -0,0 +1,55 @@
+{% with lowercase_group=group | replace(" ", "_") | lower %}
+name: Test {{ group }}
+
+on:
+  push:
+    branches:
+      - master
+      - release/**
+      - 1.x
+
+  {% if needs_github_secrets %}
+  # XXX: We are using `pull_request_target` instead of `pull_request` because we want
+  # this to run on forks with access to the secrets necessary to run the test suite.
+  # Prefer to use `pull_request` when possible.
+  pull_request_target:
+    types: [labeled, opened, reopened, synchronize]
+  {% else %}
+  pull_request:
+  {% endif %}
+
+# Cancel in progress workflows on pull_requests.
+# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
+concurrency:
+  group: {% raw %}${{ github.workflow }}-${{ github.head_ref || github.run_id }}{% endraw %}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+  {% if needs_github_secrets %}
+  # `write` is needed to remove the `Trigger: tests using secrets` label
+  pull-requests: write
+  {% endif %}
+
+env:
+{% if needs_aws_credentials %}
+{% raw %}
+  SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID: ${{ secrets.SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID }}
+  SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY: ${{ secrets.SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY }}
+{% endraw %}
+{% endif %}
+  BUILD_CACHE_KEY: {% raw %}${{ github.sha }}{% endraw %}
+  CACHED_BUILD_PATHS: |
+    {% raw %}${{ github.workspace }}/dist-serverless{% endraw %}
+
+jobs:
+{% if needs_github_secrets %}
+{% include "check_permissions.jinja" %}
+{% endif %}
+
+{% for category in categories %}
+{% include "test_group.jinja" %}
+{% endfor %}
+
+{% include "check_required.jinja" %}
+{% endwith %}
diff --git a/scripts/split-tox-gh-actions/templates/check_permissions.jinja b/scripts/split-tox-gh-actions/templates/check_permissions.jinja
new file mode 100644
index 0000000000..2b9eaa83f9
--- /dev/null
+++ b/scripts/split-tox-gh-actions/templates/check_permissions.jinja
@@ -0,0 +1,30 @@
+  check-permissions:
+    name: permissions check
+    runs-on: ubuntu-20.04
+    steps:
+      - uses: actions/checkout@v4.1.1
+        with:
+          persist-credentials: false
+
+      - name: Check permissions on PR
+        if: github.event_name == 'pull_request_target'
+        run: |
+          {% raw %}
+          python3 -uS .github/workflows/scripts/trigger_tests_on_label.py \
+              --repo-id ${{ github.event.repository.id }} \
+              --pr ${{ github.event.number }} \
+              --event ${{ github.event.action }} \
+              --username "$ARG_USERNAME" \
+              --label-names "$ARG_LABEL_NAMES"
+          {% endraw %}
+        env:
+          {% raw %}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          # these can contain special characters
+          ARG_USERNAME: ${{ github.event.pull_request.user.login }}
+          ARG_LABEL_NAMES: ${{ toJSON(github.event.pull_request.labels.*.name) }}
+          {% endraw %}
+
+      - name: Check permissions on repo branch
+        if: github.event_name == 'push'
+        run: true
diff --git a/scripts/split-tox-gh-actions/templates/check_required.jinja b/scripts/split-tox-gh-actions/templates/check_required.jinja
new file mode 100644
index 0000000000..f5aa11212f
--- /dev/null
+++ b/scripts/split-tox-gh-actions/templates/check_required.jinja
@@ -0,0 +1,21 @@
+  check_required_tests:
+    name: All {{ group }} tests passed
+    {% if "pinned" in categories and "py27" in categories %}
+    needs: [test-{{ group | replace(" ", "_") | lower }}-pinned, test-{{ group | replace(" ", "_") | lower }}-py27]
+    {% elif "pinned" in categories %}
+    needs: test-{{ group | replace(" ", "_") | lower }}-pinned
+    {% endif %}
+    # Always run this, even if a dependent job failed
+    if: always()
+    runs-on: ubuntu-20.04
+    steps:
+      - name: Check for failures
+        if: contains(needs.test-{{ lowercase_group }}-pinned.result, 'failure') || contains(needs.test-{{ lowercase_group }}-pinned.result, 'skipped')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      {% if "py27" in categories %}
+      - name: Check for 2.7 failures
+        if: contains(needs.test-{{ lowercase_group }}-py27.result, 'failure') || contains(needs.test-{{ lowercase_group }}-py27.result, 'skipped')
+        run: |
+          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
+      {% endif %}
diff --git a/scripts/split-tox-gh-actions/templates/test_group.jinja b/scripts/split-tox-gh-actions/templates/test_group.jinja
new file mode 100644
index 0000000000..20ef8e6fb5
--- /dev/null
+++ b/scripts/split-tox-gh-actions/templates/test_group.jinja
@@ -0,0 +1,105 @@
+  test-{{ lowercase_group }}-{{ category }}:
+    name: {{ group }} ({{ category }})
+    timeout-minutes: 30
+
+    {% if needs_github_secrets %}
+    needs: check-permissions
+    {% endif %}
+
+    {% if category == "py27" %}
+    runs-on: ubuntu-20.04
+    container: python:2.7
+    {% else %}
+    runs-on: {% raw %}${{ matrix.os }}{% endraw %}
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: [{{ py_versions.get(category)|join(",") }}]
+        # python3.6 reached EOL and is no longer supported on
+        # new versions of the hosted runners on GitHub Actions.
+        # ubuntu-20.04 is the last version that supported python3.6
+        # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
+        os: [ubuntu-20.04]
+    {% endif %}
+
+    {% if needs_postgres %}
+    services:
+      postgres:
+        image: postgres
+        env:
+          POSTGRES_PASSWORD: sentry
+        # Set health checks to wait until postgres has started
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        # Maps tcp port 5432 on service container to the host
+        ports:
+          - 5432:5432
+    env:
+      SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
+      SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
+      SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
+      SENTRY_PYTHON_TEST_POSTGRES_HOST: {% if category == "py27" %}postgres{% else %}localhost{% endif %}
+    {% endif %}
+
+    steps:
+      - uses: actions/checkout@v4.1.1
+      {% if needs_github_secrets %}
+      {% raw %}
+        with:
+          ref: ${{ github.event.pull_request.head.sha || github.ref }}
+      {% endraw %}
+      {% endif %}
+      {% if category != "py27" %}
+      - uses: actions/setup-python@v5
+        with:
+          python-version: {% raw %}${{ matrix.python-version }}{% endraw %}
+        env:
+          PIP_TRUSTED_HOST: "pypi.python.org pypi.org files.pythonhosted.org"
+      {% endif %}
+      {% if needs_clickhouse %}
+      - uses: getsentry/action-clickhouse-in-ci@v1
+      {% endif %}
+
+      - name: Setup Test Env
+        run: |
+          pip install coverage "tox>=3,<4"
+          {% if needs_postgres %}
+          {% if category == "py27" %}
+          psql postgresql://postgres:sentry@postgres:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
+          psql postgresql://postgres:sentry@postgres:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
+          {% else %}
+          psql postgresql://postgres:sentry@localhost:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
+          psql postgresql://postgres:sentry@localhost:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
+          {% endif %}
+          {% endif %}
+
+      - name: Erase coverage
+        run: |
+          coverage erase
+
+      {% for framework in frameworks %}
+      - name: Test {{ framework }} {{ category }}
+        run: |
+          set -x # print commands that are executed
+
+          {% if category == "py27" %}
+          ./scripts/runtox.sh --exclude-latest "py2.7-{{ framework }}" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          {% elif category == "pinned" %}
+          ./scripts/runtox.sh --exclude-latest "{% raw %}py${{ matrix.python-version }}{% endraw %}-{{ framework }}" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          {% elif category == "latest" %}
+          ./scripts/runtox.sh "{% raw %}py${{ matrix.python-version }}{% endraw %}-{{ framework }}-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+          {% endif %}
+      {% endfor %}
+
+      - name: Generate coverage XML
+        run: |
+          coverage combine .coverage*
+          coverage xml -i
+
+      - uses: codecov/codecov-action@v4
+        with:
+          token: {% raw %}${{ secrets.CODECOV_TOKEN }}{% endraw %}
+          files: coverage.xml
diff --git a/sentry_sdk/__init__.py b/sentry_sdk/__init__.py
index ab5123ec64..562da90739 100644
--- a/sentry_sdk/__init__.py
+++ b/sentry_sdk/__init__.py
@@ -7,6 +7,9 @@
 
 from sentry_sdk.consts import VERSION  # noqa
 
+from sentry_sdk.crons import monitor  # noqa
+from sentry_sdk.tracing import trace  # noqa
+
 __all__ = [  # noqa
     "Hub",
     "Scope",
@@ -31,6 +34,12 @@
     "set_extra",
     "set_user",
     "set_level",
+    "set_measurement",
+    "get_current_span",
+    "get_traceparent",
+    "get_baggage",
+    "continue_trace",
+    "trace",
 ]
 
 # Initialize the debug support after everything is loaded
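Both new exports are decorators; a minimal usage sketch (the DSN and the monitor slug are placeholders):

```python
import sentry_sdk

sentry_sdk.init(dsn="https://examplePublicKey@o0.ingest.sentry.io/0")


@sentry_sdk.trace
def expensive_step():
    # Creates a child span on the currently active transaction, if any.
    ...


@sentry_sdk.monitor(monitor_slug="nightly-job")  # sends cron check-ins
def nightly_job():
    expensive_step()
```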
diff --git a/sentry_sdk/_compat.py b/sentry_sdk/_compat.py
index 49a55392a7..38872051ff 100644
--- a/sentry_sdk/_compat.py
+++ b/sentry_sdk/_compat.py
@@ -1,21 +1,29 @@
 import sys
+import contextlib
+from datetime import datetime, timedelta
+from functools import wraps
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Optional
     from typing import Tuple
     from typing import Any
     from typing import Type
     from typing import TypeVar
+    from typing import Callable
 
     T = TypeVar("T")
 
 
 PY2 = sys.version_info[0] == 2
+PY33 = sys.version_info[0] == 3 and sys.version_info[1] >= 3
+PY37 = sys.version_info[0] == 3 and sys.version_info[1] >= 7
+PY310 = sys.version_info[0] == 3 and sys.version_info[1] >= 10
+PY311 = sys.version_info[0] == 3 and sys.version_info[1] >= 11
 
 if PY2:
-    import urlparse  # noqa
+    import urlparse
 
     text_type = unicode  # noqa
 
@@ -23,6 +31,21 @@
     number_types = (int, long, float)  # noqa
     int_types = (int, long)  # noqa
     iteritems = lambda x: x.iteritems()  # noqa: B301
+    binary_sequence_types = (bytearray, memoryview)
+
+    def datetime_utcnow():
+        # type: () -> datetime
+        return datetime.utcnow()
+
+    def utc_from_timestamp(timestamp):
+        # type: (float) -> datetime
+        return datetime.utcfromtimestamp(timestamp)
+
+    def duration_in_milliseconds(delta):
+        # type: (timedelta) -> float
+        seconds = delta.days * 24 * 60 * 60 + delta.seconds
+        milliseconds = seconds * 1000 + float(delta.microseconds) / 1000
+        return milliseconds
 
     def implements_str(cls):
         # type: (T) -> T
@@ -30,17 +53,67 @@ def implements_str(cls):
         cls.__str__ = lambda x: unicode(x).encode("utf-8")  # noqa
         return cls
 
+    # The line below is written as an "exec" because it triggers a syntax error in Python 3
     exec("def reraise(tp, value, tb=None):\n raise tp, value, tb")
 
+    def contextmanager(func):
+        # type: (Callable) -> Callable
+        """
+        Decorator which creates a contextmanager that can also be used as a
+        decorator, similar to how the built-in contextlib.contextmanager
+        function works in Python 3.2+.
+        """
+        contextmanager_func = contextlib.contextmanager(func)
+
+        @wraps(func)
+        class DecoratorContextManager:
+            def __init__(self, *args, **kwargs):
+                # type: (...) -> None
+                self.the_contextmanager = contextmanager_func(*args, **kwargs)
+
+            def __enter__(self):
+                # type: () -> None
+                self.the_contextmanager.__enter__()
+
+            def __exit__(self, *args, **kwargs):
+                # type: (...) -> None
+                self.the_contextmanager.__exit__(*args, **kwargs)
+
+            def __call__(self, decorated_func):
+                # type: (Callable) -> Callable
+                @wraps(decorated_func)
+                def when_called(*args, **kwargs):
+                    # type: (...) -> Any
+                    with self.the_contextmanager:
+                        return_val = decorated_func(*args, **kwargs)
+                    return return_val
+
+                return when_called
+
+        return DecoratorContextManager
 
 else:
+    from datetime import timezone
     import urllib.parse as urlparse  # noqa
 
     text_type = str
     string_types = (text_type,)  # type: Tuple[type]
     number_types = (int, float)  # type: Tuple[type, type]
-    int_types = (int,)  # noqa
+    int_types = (int,)
     iteritems = lambda x: x.items()
+    binary_sequence_types = (bytes, bytearray, memoryview)
+
+    def datetime_utcnow():
+        # type: () -> datetime
+        return datetime.now(timezone.utc)
+
+    def utc_from_timestamp(timestamp):
+        # type: (float) -> datetime
+        return datetime.fromtimestamp(timestamp, timezone.utc)
+
+    def duration_in_milliseconds(delta):
+        # type: (timedelta) -> float
+        return delta / timedelta(milliseconds=1)
 
     def implements_str(x):
         # type: (T) -> T
@@ -53,6 +126,9 @@ def reraise(tp, value, tb=None):
             raise value.with_traceback(tb)
         raise value
 
+    # contextlib.contextmanager already can be used as decorator in Python 3.2+
+    contextmanager = contextlib.contextmanager
+
 
 def with_metaclass(meta, *bases):
     # type: (Any, *Any) -> Any
@@ -64,26 +140,74 @@ def __new__(metacls, name, this_bases, d):
     return type.__new__(MetaClass, "temporary_class", (), {})
 
 
-def check_thread_support():
-    # type: () -> None
+def check_uwsgi_thread_support():
+    # type: () -> bool
+    # We check two things here:
+    #
+    # 1. uWSGI doesn't run in threaded mode by default -- issue a warning if
+    #    that's the case.
+    #
+    # 2. Additionally, if uWSGI is running in preforking mode (default), it needs
+    #    the --py-call-uwsgi-fork-hooks option for the SDK to work properly. This
+    #    is because any background threads spawned before the main process is
+    #    forked are NOT CLEANED UP IN THE CHILDREN BY DEFAULT even if
+    #    --enable-threads is on. One has to explicitly provide
+    #    --py-call-uwsgi-fork-hooks to force uWSGI to run regular cpython
+    #    after-fork hooks that take care of cleaning up stale thread data.
     try:
         from uwsgi import opt  # type: ignore
     except ImportError:
-        return
+        return True
+
+    from sentry_sdk.consts import FALSE_VALUES
+
+    def enabled(option):
+        # type: (str) -> bool
+        value = opt.get(option, False)
+        if isinstance(value, bool):
+            return value
+
+        if isinstance(value, bytes):
+            try:
+                value = value.decode()
+            except Exception:
+                pass
+
+        return value and str(value).lower() not in FALSE_VALUES
 
     # When `threads` is passed in as a uwsgi option,
     # `enable-threads` is implied on.
-    if "threads" in opt:
-        return
+    threads_enabled = "threads" in opt or enabled("enable-threads")
+    fork_hooks_on = enabled("py-call-uwsgi-fork-hooks")
+    lazy_mode = enabled("lazy-apps") or enabled("lazy")
 
-    if str(opt.get("enable-threads", "0")).lower() in ("false", "off", "no", "0"):
+    if lazy_mode and not threads_enabled:
         from warnings import warn
 
         warn(
             Warning(
-                "We detected the use of uwsgi with disabled threads.  "
-                "This will cause issues with the transport you are "
-                "trying to use.  Please enable threading for uwsgi.  "
-                '(Add the "enable-threads" flag).'
+                "IMPORTANT: "
+                "We detected the use of uWSGI without thread support. "
+                "This might lead to unexpected issues. "
+                'Please run uWSGI with "--enable-threads" for full support.'
             )
         )
+
+        return False
+
+    elif not lazy_mode and (not threads_enabled or not fork_hooks_on):
+        from warnings import warn
+
+        warn(
+            Warning(
+                "IMPORTANT: "
+                "We detected the use of uWSGI in preforking mode without "
+                "thread support. This might lead to crashing workers. "
+                'Please run uWSGI with both "--enable-threads" and '
+                '"--py-call-uwsgi-fork-hooks" for full support.'
+            )
+        )
+
+        return False
+
+    return True
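A quick illustration of the new timezone-aware helpers on Python 3 (a sketch; the values are examples):

```python
from datetime import timedelta

from sentry_sdk._compat import (
    datetime_utcnow,
    duration_in_milliseconds,
    utc_from_timestamp,
)

start = utc_from_timestamp(0)  # 1970-01-01 00:00:00+00:00, tz-aware
now = datetime_utcnow()        # timezone-aware "now" in UTC

assert duration_in_milliseconds(timedelta(seconds=1, microseconds=500)) == 1000.5
```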
diff --git a/sentry_sdk/_functools.py b/sentry_sdk/_functools.py
index 8dcf79caaa..6bcc85f3b4 100644
--- a/sentry_sdk/_functools.py
+++ b/sentry_sdk/_functools.py
@@ -1,13 +1,68 @@
 """
 A backport of Python 3 functools to Python 2/3. The only important change
 we rely upon is that `update_wrapper` handles AttributeError gracefully.
+
+Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation;
+
+All Rights Reserved
+
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation;
+All Rights Reserved" are retained in Python alone or in any derivative version
+prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee.  This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
 """
 
 from functools import partial
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Callable
 
diff --git a/sentry_sdk/_lru_cache.py b/sentry_sdk/_lru_cache.py
new file mode 100644
index 0000000000..91cf55d09a
--- /dev/null
+++ b/sentry_sdk/_lru_cache.py
@@ -0,0 +1,156 @@
+"""
+A fork of Python 3.6's stdlib lru_cache (found in Python's 'cpython/Lib/functools.py')
+adapted into a data structure for single threaded uses.
+
+https://github.com/python/cpython/blob/v3.6.12/Lib/functools.py
+
+
+Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation;
+
+All Rights Reserved
+
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation;
+All Rights Reserved" are retained in Python alone or in any derivative version
+prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee.  This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+"""
+
+SENTINEL = object()
+
+
+# aliases to the entries in a node
+PREV = 0
+NEXT = 1
+KEY = 2
+VALUE = 3
+
+
+class LRUCache(object):
+    def __init__(self, max_size):
+        assert max_size > 0
+
+        self.max_size = max_size
+        self.full = False
+
+        self.cache = {}
+
+        # root of the circularly linked list to keep track of
+        # the least recently used key
+        self.root = []  # type: ignore
+        # the node looks like [PREV, NEXT, KEY, VALUE]
+        self.root[:] = [self.root, self.root, None, None]
+
+        self.hits = self.misses = 0
+
+    def set(self, key, value):
+        link = self.cache.get(key, SENTINEL)
+
+        if link is not SENTINEL:
+            # have to move the node to the front of the linked list
+            link_prev, link_next, _key, _value = link
+
+            # first remove the node from the linked list
+            link_prev[NEXT] = link_next
+            link_next[PREV] = link_prev
+
+            # insert the node between the root and the last
+            last = self.root[PREV]
+            last[NEXT] = self.root[PREV] = link
+            link[PREV] = last
+            link[NEXT] = self.root
+
+            # update the value
+            link[VALUE] = value
+
+        elif self.full:
+            # reuse the root node, so update its key/value
+            old_root = self.root
+            old_root[KEY] = key
+            old_root[VALUE] = value
+
+            self.root = old_root[NEXT]
+            old_key = self.root[KEY]
+
+            self.root[KEY] = self.root[VALUE] = None
+
+            del self.cache[old_key]
+
+            self.cache[key] = old_root
+
+        else:
+            # insert new node after last
+            last = self.root[PREV]
+            link = [last, self.root, key, value]
+            last[NEXT] = self.root[PREV] = self.cache[key] = link
+            self.full = len(self.cache) >= self.max_size
+
+    def get(self, key, default=None):
+        link = self.cache.get(key, SENTINEL)
+
+        if link is SENTINEL:
+            self.misses += 1
+            return default
+
+        # have to move the node to the front of the linked list
+        link_prev, link_next, _key, _value = link
+
+        # first remove the node from the linked list
+        link_prev[NEXT] = link_next
+        link_next[PREV] = link_prev
+
+        # insert the node between the root and the last
+        last = self.root[PREV]
+        last[NEXT] = self.root[PREV] = link
+        link[PREV] = last
+        link[NEXT] = self.root
+
+        self.hits += 1
+
+        return link[VALUE]
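Behaviorally, the cache evicts the least recently used entry once `max_size` is reached; a small sketch:

```python
from sentry_sdk._lru_cache import LRUCache

cache = LRUCache(max_size=2)
cache.set("a", 1)
cache.set("b", 2)

cache.get("a")     # touching "a" makes "b" the least recently used entry
cache.set("c", 3)  # evicts "b"

assert cache.get("a") == 1
assert cache.get("b") is None  # evicted
assert cache.get("c") == 3
```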
diff --git a/sentry_sdk/_queue.py b/sentry_sdk/_queue.py
index e368da2229..129b6e58a6 100644
--- a/sentry_sdk/_queue.py
+++ b/sentry_sdk/_queue.py
@@ -1,14 +1,74 @@
 """
-A fork of Python 3.6's stdlib queue with Lock swapped out for RLock to avoid a
-deadlock while garbage collecting.
+A fork of Python 3.6's stdlib queue (found in Pythons 'cpython/Lib/queue.py')
+with Lock swapped out for RLock to avoid a deadlock while garbage collecting.
 
-See
+https://github.com/python/cpython/blob/v3.6.12/Lib/queue.py
+
+
+See also
 https://codewithoutrules.com/2017/08/16/concurrency-python/
 https://bugs.python.org/issue14976
 https://github.com/sqlalchemy/sqlalchemy/blob/4eb747b61f0c1b1c25bdee3856d7195d10a0c227/lib/sqlalchemy/queue.py#L1
 
 We also vendor the code to evade eventlet's broken monkeypatching, see
 https://github.com/getsentry/sentry-python/pull/484
+
+
+Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation;
+
+All Rights Reserved
+
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation;
+All Rights Reserved" are retained in Python alone or in any derivative version
+prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis.  PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED.  BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee.  This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
 """
 
 import threading
@@ -16,20 +76,20 @@
 from collections import deque
 from time import time
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
 
-__all__ = ["Empty", "Full", "Queue"]
+__all__ = ["EmptyError", "FullError", "Queue"]
 
 
-class Empty(Exception):
+class EmptyError(Exception):
     "Exception raised by Queue.get(block=0)/get_nowait()."
     pass
 
 
-class Full(Exception):
+class FullError(Exception):
     "Exception raised by Queue.put(block=0)/put_nowait()."
     pass
 
@@ -134,16 +194,16 @@ def put(self, item, block=True, timeout=None):
         If optional args 'block' is true and 'timeout' is None (the default),
         block if necessary until a free slot is available. If 'timeout' is
         a non-negative number, it blocks at most 'timeout' seconds and raises
-        the Full exception if no free slot was available within that time.
+        the FullError exception if no free slot was available within that time.
         Otherwise ('block' is false), put an item on the queue if a free slot
-        is immediately available, else raise the Full exception ('timeout'
+        is immediately available, else raise the FullError exception ('timeout'
         is ignored in that case).
         """
         with self.not_full:
             if self.maxsize > 0:
                 if not block:
                     if self._qsize() >= self.maxsize:
-                        raise Full()
+                        raise FullError()
                 elif timeout is None:
                     while self._qsize() >= self.maxsize:
                         self.not_full.wait()
@@ -154,7 +214,7 @@ def put(self, item, block=True, timeout=None):
                     while self._qsize() >= self.maxsize:
                         remaining = endtime - time()
                         if remaining <= 0.0:
-                            raise Full
+                            raise FullError()
                         self.not_full.wait(remaining)
             self._put(item)
             self.unfinished_tasks += 1
@@ -166,15 +226,15 @@ def get(self, block=True, timeout=None):
         If optional args 'block' is true and 'timeout' is None (the default),
         block if necessary until an item is available. If 'timeout' is
         a non-negative number, it blocks at most 'timeout' seconds and raises
-        the Empty exception if no item was available within that time.
+        the EmptyError exception if no item was available within that time.
         Otherwise ('block' is false), return an item if one is immediately
-        available, else raise the Empty exception ('timeout' is ignored
+        available, else raise the EmptyError exception ('timeout' is ignored
         in that case).
         """
         with self.not_empty:
             if not block:
                 if not self._qsize():
-                    raise Empty()
+                    raise EmptyError()
             elif timeout is None:
                 while not self._qsize():
                     self.not_empty.wait()
@@ -185,7 +245,7 @@ def get(self, block=True, timeout=None):
                 while not self._qsize():
                     remaining = endtime - time()
                     if remaining <= 0.0:
-                        raise Empty()
+                        raise EmptyError()
                     self.not_empty.wait(remaining)
             item = self._get()
             self.not_full.notify()
@@ -195,7 +255,7 @@ def put_nowait(self, item):
         """Put an item into the queue without blocking.
 
         Only enqueue the item if a free slot is immediately available.
-        Otherwise raise the Full exception.
+        Otherwise raise the FullError exception.
         """
         return self.put(item, block=False)
 
@@ -203,7 +263,7 @@ def get_nowait(self):
         """Remove and return an item from the queue without blocking.
 
         Only get an item if one is immediately available. Otherwise
-        raise the Empty exception.
+        raise the EmptyError exception.
         """
         return self.get(block=False)
 
diff --git a/sentry_sdk/_types.py b/sentry_sdk/_types.py
index 59970ad60a..368db17138 100644
--- a/sentry_sdk/_types.py
+++ b/sentry_sdk/_types.py
@@ -1,25 +1,91 @@
 try:
-    from typing import TYPE_CHECKING as MYPY
+    from typing import TYPE_CHECKING
 except ImportError:
-    MYPY = False
+    TYPE_CHECKING = False
 
 
-if MYPY:
+# Re-exported for compat, since code out there in the wild might use this variable.
+MYPY = TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from collections.abc import MutableMapping
+
+    from datetime import datetime
+
     from types import TracebackType
     from typing import Any
     from typing import Callable
     from typing import Dict
+    from typing import List
+    from typing import Mapping
     from typing import Optional
     from typing import Tuple
     from typing import Type
     from typing import Union
-    from typing_extensions import Literal
+    from typing_extensions import Literal, TypedDict
+
+    # "critical" is an alias of "fatal" recognized by Relay
+    LogLevelStr = Literal["fatal", "critical", "error", "warning", "info", "debug"]
+
+    Event = TypedDict(
+        "Event",
+        {
+            "breadcrumbs": dict[
+                Literal["values"], list[dict[str, Any]]
+            ],  # TODO: We can expand on this type
+            "check_in_id": str,
+            "contexts": dict[str, dict[str, object]],
+            "dist": str,
+            "duration": Optional[float],
+            "environment": str,
+            "errors": list[dict[str, Any]],  # TODO: We can expand on this type
+            "event_id": str,
+            "exception": dict[
+                Literal["values"], list[dict[str, Any]]
+            ],  # TODO: We can expand on this type
+            "extra": MutableMapping[str, object],
+            "fingerprint": list[str],
+            "level": LogLevelStr,
+            "logentry": Mapping[str, object],
+            "logger": str,
+            "measurements": dict[str, object],
+            "message": str,
+            "modules": dict[str, str],
+            "monitor_config": Mapping[str, object],
+            "monitor_slug": Optional[str],
+            "platform": Literal["python"],
+            "profile": object,  # Should be sentry_sdk.profiler.Profile, but we can't import that here due to circular imports
+            "release": str,
+            "request": dict[str, object],
+            "sdk": Mapping[str, object],
+            "server_name": str,
+            "spans": list[dict[str, object]],
+            "stacktrace": dict[
+                str, object
+            ],  # We access this key in the code, but it is unclear whether we ever set it
+            "start_timestamp": datetime,
+            "status": Optional[str],
+            "tags": MutableMapping[
+                str, str
+            ],  # Tags must be less than 200 characters each
+            "threads": dict[
+                Literal["values"], list[dict[str, Any]]
+            ],  # TODO: We can expand on this type
+            "timestamp": Optional[datetime],  # Must be set before sending the event
+            "transaction": str,
+            "transaction_info": Mapping[str, Any],  # TODO: We can expand on this type
+            "type": Literal["check_in", "transaction"],
+            "user": dict[str, object],
+            "_metrics_summary": dict[str, object],
+        },
+        total=False,
+    )
 
     ExcInfo = Tuple[
         Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]
     ]
 
-    Event = Dict[str, Any]
     Hint = Dict[str, Any]
 
     Breadcrumb = Dict[str, Any]
@@ -30,6 +96,7 @@
     EventProcessor = Callable[[Event, Hint], Optional[Event]]
     ErrorProcessor = Callable[[Event, ExcInfo], Optional[Event]]
     BreadcrumbProcessor = Callable[[Breadcrumb, BreadcrumbHint], Optional[Breadcrumb]]
+    TransactionProcessor = Callable[[Event, Hint], Optional[Event]]
 
     TracesSampler = Callable[[SamplingContext], Union[float, int, bool]]
 
@@ -45,6 +112,9 @@
         "attachment",
         "session",
         "internal",
+        "profile",
+        "metric_bucket",
+        "monitor",
     ]
     SessionStatus = Literal["ok", "exited", "crashed", "abnormal"]
     EndpointType = Literal["store", "envelope"]
@@ -79,3 +149,66 @@
 
     FractionUnit = Literal["ratio", "percent"]
     MeasurementUnit = Union[DurationUnit, InformationUnit, FractionUnit, str]
+
+    ProfilerMode = Literal["sleep", "thread", "gevent", "unknown"]
+
+    # Type of the metric.
+    MetricType = Literal["d", "s", "g", "c"]
+
+    # Value of the metric.
+    MetricValue = Union[int, float, str]
+
+    # Internal representation of tags as a tuple of tuples, which allows the
+    # same key to occur multiple times.
+    MetricTagsInternal = Tuple[Tuple[str, str], ...]
+
+    # External representation of tags as a dictionary.
+    MetricTagValue = Union[
+        str,
+        int,
+        float,
+        None,
+        List[Union[int, str, float, None]],
+        Tuple[Union[int, str, float, None], ...],
+    ]
+    MetricTags = Mapping[str, MetricTagValue]
+
+    # Value inside the generator for the metric value.
+    FlushedMetricValue = Union[int, float]
+
+    BucketKey = Tuple[MetricType, str, MeasurementUnit, MetricTagsInternal]
+    MetricMetaKey = Tuple[MetricType, str, MeasurementUnit]
+
+    MonitorConfigScheduleType = Literal["crontab", "interval"]
+    MonitorConfigScheduleUnit = Literal[
+        "year",
+        "month",
+        "week",
+        "day",
+        "hour",
+        "minute",
+        "second",  # not supported in Sentry and will result in a warning
+    ]
+
+    MonitorConfigSchedule = TypedDict(
+        "MonitorConfigSchedule",
+        {
+            "type": MonitorConfigScheduleType,
+            "value": Union[int, str],
+            "unit": MonitorConfigScheduleUnit,
+        },
+        total=False,
+    )
+
+    MonitorConfig = TypedDict(
+        "MonitorConfig",
+        {
+            "schedule": MonitorConfigSchedule,
+            "timezone": str,
+            "checkin_margin": int,
+            "max_runtime": int,
+            "failure_issue_threshold": int,
+            "recovery_threshold": int,
+        },
+        total=False,
+    )
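
Since `Event` is now a `total=False` TypedDict rather than `Dict[str, Any]`, type checkers can flag misspelled or wrongly-typed keys while nothing changes at runtime: the definition only exists under `TYPE_CHECKING`. A small sketch of the payoff:

```python
from sentry_sdk._types import TYPE_CHECKING

if TYPE_CHECKING:
    from sentry_sdk._types import Event


def make_event():
    # type: () -> Event
    event = {
        "event_id": "00000000000000000000000000000000",
        "level": "warning",  # must be a LogLevelStr literal
        "platform": "python",
    }  # type: Event
    # event["lvl"] = "error"  # mypy: TypedDict "Event" has no key "lvl"
    return event
```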
diff --git a/sentry_sdk/_werkzeug.py b/sentry_sdk/_werkzeug.py
new file mode 100644
index 0000000000..197c5c19b1
--- /dev/null
+++ b/sentry_sdk/_werkzeug.py
@@ -0,0 +1,100 @@
+"""
+Copyright (c) 2007 by the Pallets team.
+
+Some rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+* Redistributions of source code must retain the above copyright notice,
+  this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright
+  notice, this list of conditions and the following disclaimer in the
+  documentation and/or other materials provided with the distribution.
+
+* Neither the name of the copyright holder nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND
+CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
+BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
+NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
+USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
+"""
+
+from sentry_sdk._compat import iteritems
+
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Dict
+    from typing import Iterator
+    from typing import Tuple
+
+
+#
+# `get_headers` comes from `werkzeug.datastructures.EnvironHeaders`
+# https://github.com/pallets/werkzeug/blob/0.14.1/werkzeug/datastructures.py#L1361
+#
+# We need this function because Django does not give us a "pure" HTTP header
+# dict, so we might as well use it for all WSGI integrations.
+#
+def _get_headers(environ):
+    # type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
+    """
+    Returns only proper HTTP headers.
+    """
+    for key, value in iteritems(environ):
+        key = str(key)
+        if key.startswith("HTTP_") and key not in (
+            "HTTP_CONTENT_TYPE",
+            "HTTP_CONTENT_LENGTH",
+        ):
+            yield key[5:].replace("_", "-").title(), value
+        elif key in ("CONTENT_TYPE", "CONTENT_LENGTH"):
+            yield key.replace("_", "-").title(), value
+
+
+#
+# `get_host` comes from `werkzeug.wsgi.get_host`
+# https://github.com/pallets/werkzeug/blob/1.0.1/src/werkzeug/wsgi.py#L145
+#
+def get_host(environ, use_x_forwarded_for=False):
+    # type: (Dict[str, str], bool) -> str
+    """
+    Return the host for the given WSGI environment.
+    """
+    if use_x_forwarded_for and "HTTP_X_FORWARDED_HOST" in environ:
+        rv = environ["HTTP_X_FORWARDED_HOST"]
+        if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"):
+            rv = rv[:-3]
+        elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"):
+            rv = rv[:-4]
+    elif environ.get("HTTP_HOST"):
+        rv = environ["HTTP_HOST"]
+        if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"):
+            rv = rv[:-3]
+        elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"):
+            rv = rv[:-4]
+    elif environ.get("SERVER_NAME"):
+        rv = environ["SERVER_NAME"]
+        if (environ["wsgi.url_scheme"], environ["SERVER_PORT"]) not in (
+            ("https", "443"),
+            ("http", "80"),
+        ):
+            rv += ":" + environ["SERVER_PORT"]
+    else:
+        # In spite of the WSGI spec, SERVER_NAME might not be present.
+        rv = "unknown"
+
+    return rv
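
`get_host` prefers `HTTP_HOST`, strips the default port for the scheme, and only consults the forwarded host when explicitly asked. A quick sketch against a hand-built WSGI environ:

```python
from sentry_sdk._werkzeug import get_host

environ = {
    "wsgi.url_scheme": "https",
    "HTTP_HOST": "example.com:443",
    "HTTP_X_FORWARDED_HOST": "proxy.internal:8443",
    "SERVER_NAME": "10.0.0.5",
    "SERVER_PORT": "443",
}

# HTTP_HOST wins by default; the default HTTPS port is stripped.
assert get_host(environ) == "example.com"

# The forwarded host is only used when explicitly opted in.
assert get_host(environ, use_x_forwarded_for=True) == "proxy.internal:8443"
```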
diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py
index f4a44e4500..3148c43f1a 100644
--- a/sentry_sdk/api.py
+++ b/sentry_sdk/api.py
@@ -1,11 +1,11 @@
 import inspect
 
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.hub import Hub
 from sentry_sdk.scope import Scope
+from sentry_sdk.tracing import NoOpSpan, Transaction
 
-from sentry_sdk._types import MYPY
-
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Dict
     from typing import Optional
@@ -15,8 +15,16 @@
     from typing import ContextManager
     from typing import Union
 
-    from sentry_sdk._types import Event, Hint, Breadcrumb, BreadcrumbHint, ExcInfo
-    from sentry_sdk.tracing import Span, Transaction
+    from sentry_sdk._types import (
+        Event,
+        Hint,
+        Breadcrumb,
+        BreadcrumbHint,
+        ExcInfo,
+        MeasurementUnit,
+        LogLevelStr,
+    )
+    from sentry_sdk.tracing import Span
 
     T = TypeVar("T")
     F = TypeVar("F", bound=Callable[..., Any])
@@ -44,6 +52,11 @@ def overload(x):
     "set_extra",
     "set_user",
     "set_level",
+    "set_measurement",
+    "get_current_span",
+    "get_traceparent",
+    "get_baggage",
+    "continue_trace",
 ]
 
 
@@ -70,31 +83,31 @@ def capture_event(
     event,  # type: Event
     hint=None,  # type: Optional[Hint]
     scope=None,  # type: Optional[Any]
-    **scope_args  # type: Any
+    **scope_kwargs  # type: Any
 ):
     # type: (...) -> Optional[str]
-    return Hub.current.capture_event(event, hint, scope=scope, **scope_args)
+    return Hub.current.capture_event(event, hint, scope=scope, **scope_kwargs)
 
 
 @hubmethod
 def capture_message(
     message,  # type: str
-    level=None,  # type: Optional[str]
+    level=None,  # type: Optional[LogLevelStr]
     scope=None,  # type: Optional[Any]
-    **scope_args  # type: Any
+    **scope_kwargs  # type: Any
 ):
     # type: (...) -> Optional[str]
-    return Hub.current.capture_message(message, level, scope=scope, **scope_args)
+    return Hub.current.capture_message(message, level, scope=scope, **scope_kwargs)
 
 
 @hubmethod
 def capture_exception(
     error=None,  # type: Optional[Union[BaseException, ExcInfo]]
     scope=None,  # type: Optional[Any]
-    **scope_args  # type: Any
+    **scope_kwargs  # type: Any
 ):
     # type: (...) -> Optional[str]
-    return Hub.current.capture_exception(error, scope=scope, **scope_args)
+    return Hub.current.capture_exception(error, scope=scope, **scope_kwargs)
 
 
 @hubmethod
@@ -108,7 +121,7 @@ def add_breadcrumb(
 
 
 @overload
-def configure_scope():  # noqa: F811
+def configure_scope():
     # type: () -> ContextManager[Scope]
     pass
 
@@ -130,7 +143,7 @@ def configure_scope(  # noqa: F811
 
 
 @overload
-def push_scope():  # noqa: F811
+def push_scope():
     # type: () -> ContextManager[Scope]
     pass
 
@@ -151,33 +164,33 @@ def push_scope(  # noqa: F811
     return Hub.current.push_scope(callback)
 
 
-@scopemethod  # noqa
+@scopemethod
 def set_tag(key, value):
     # type: (str, Any) -> None
     return Hub.current.scope.set_tag(key, value)
 
 
-@scopemethod  # noqa
+@scopemethod
 def set_context(key, value):
     # type: (str, Dict[str, Any]) -> None
     return Hub.current.scope.set_context(key, value)
 
 
-@scopemethod  # noqa
+@scopemethod
 def set_extra(key, value):
     # type: (str, Any) -> None
     return Hub.current.scope.set_extra(key, value)
 
 
-@scopemethod  # noqa
+@scopemethod
 def set_user(value):
     # type: (Optional[Dict[str, Any]]) -> None
     return Hub.current.scope.set_user(value)
 
 
-@scopemethod  # noqa
+@scopemethod
 def set_level(value):
-    # type: (str) -> None
+    # type: (LogLevelStr) -> None
     return Hub.current.scope.set_level(value)
 
 
@@ -193,6 +206,7 @@ def flush(
 @hubmethod
 def last_event_id():
     # type: () -> Optional[str]
+
     return Hub.current.last_event_id()
 
 
@@ -210,5 +224,48 @@ def start_transaction(
     transaction=None,  # type: Optional[Transaction]
     **kwargs  # type: Any
 ):
-    # type: (...) -> Transaction
+    # type: (...) -> Union[Transaction, NoOpSpan]
     return Hub.current.start_transaction(transaction, **kwargs)
+
+
+def set_measurement(name, value, unit=""):
+    # type: (str, float, MeasurementUnit) -> None
+    transaction = Hub.current.scope.transaction
+    if transaction is not None:
+        transaction.set_measurement(name, value, unit)
+
+
+def get_current_span(hub=None):
+    # type: (Optional[Hub]) -> Optional[Span]
+    """
+    Returns the currently active span if one is running, otherwise `None`.
+    """
+    if hub is None:
+        hub = Hub.current
+
+    current_span = hub.scope.span
+    return current_span
+
+
+def get_traceparent():
+    # type: () -> Optional[str]
+    """
+    Returns the traceparent either from the active span or from the scope.
+    """
+    return Hub.current.get_traceparent()
+
+
+def get_baggage():
+    # type: () -> Optional[str]
+    """
+    Returns Baggage either from the active span or from the scope.
+    """
+    return Hub.current.get_baggage()
+
+
+def continue_trace(environ_or_headers, op=None, name=None, source=None):
+    # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction
+    """
+    Sets the propagation context from environment or headers and returns a transaction.
+    """
+    return Hub.current.continue_trace(environ_or_headers, op, name, source)
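
Together, the new top-level helpers cover trace propagation end to end: `continue_trace` adopts an incoming trace, while `get_traceparent` and `get_baggage` produce the headers for outgoing requests. A sketch, assuming `sentry_sdk.init` was called elsewhere (the trace IDs below are made up):

```python
import sentry_sdk

incoming_headers = {
    "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1",
}

transaction = sentry_sdk.continue_trace(
    incoming_headers, op="task", name="process-job"
)
with sentry_sdk.start_transaction(transaction):
    # Hand the same trace on to downstream services.
    outgoing_headers = {
        "sentry-trace": sentry_sdk.get_traceparent(),
        "baggage": sentry_sdk.get_baggage(),
    }
```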
diff --git a/sentry_sdk/attachments.py b/sentry_sdk/attachments.py
index b7b6b0b45b..c15afd447b 100644
--- a/sentry_sdk/attachments.py
+++ b/sentry_sdk/attachments.py
@@ -1,10 +1,10 @@
 import os
 import mimetypes
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.envelope import Item, PayloadRef
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Optional, Union, Callable
 
 
diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
index 63a1205f57..296de71804 100644
--- a/sentry_sdk/client.py
+++ b/sentry_sdk/client.py
@@ -1,37 +1,65 @@
+try:
+    from collections.abc import Mapping
+except ImportError:
+    from collections import Mapping  # type: ignore[attr-defined]
+
+from importlib import import_module
 import os
 import uuid
 import random
-from datetime import datetime
 import socket
 
-from sentry_sdk._compat import string_types, text_type, iteritems
+from sentry_sdk._compat import (
+    PY37,
+    datetime_utcnow,
+    string_types,
+    text_type,
+    iteritems,
+    check_uwsgi_thread_support,
+)
 from sentry_sdk.utils import (
     capture_internal_exceptions,
     current_stacktrace,
     disable_capture_event,
     format_timestamp,
+    get_sdk_name,
     get_type_name,
     get_default_release,
     handle_in_app,
+    is_gevent,
     logger,
 )
 from sentry_sdk.serializer import serialize
-from sentry_sdk.transport import make_transport
-from sentry_sdk.consts import DEFAULT_OPTIONS, SDK_INFO, ClientConstructor
-from sentry_sdk.integrations import setup_integrations
+from sentry_sdk.tracing import trace, has_tracing_enabled
+from sentry_sdk.transport import HttpTransport, make_transport
+from sentry_sdk.consts import (
+    DEFAULT_MAX_VALUE_LENGTH,
+    DEFAULT_OPTIONS,
+    INSTRUMENTER,
+    VERSION,
+    ClientConstructor,
+)
+from sentry_sdk.integrations import _DEFAULT_INTEGRATIONS, setup_integrations
 from sentry_sdk.utils import ContextVar
 from sentry_sdk.sessions import SessionFlusher
 from sentry_sdk.envelope import Envelope
-from sentry_sdk.tracing_utils import has_tracestate_enabled, reinflate_tracestate
+from sentry_sdk.profiler import has_profiling_enabled, Profile, setup_profiler
+from sentry_sdk.scrubber import EventScrubber
+from sentry_sdk.monitor import Monitor
+from sentry_sdk.spotlight import setup_spotlight
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Callable
     from typing import Dict
     from typing import Optional
+    from typing import Sequence
+    from typing import Type
+    from typing import Union
 
+    from sentry_sdk.integrations import Integration
     from sentry_sdk.scope import Scope
     from sentry_sdk._types import Event, Hint
     from sentry_sdk.session import Session
@@ -40,6 +68,13 @@
 _client_init_debug = ContextVar("client_init_debug")
 
 
+SDK_INFO = {
+    "name": "sentry.python",  # SDK name will be overridden after integrations have been loaded with sentry_sdk.integrations.setup_integrations()
+    "version": VERSION,
+    "packages": [{"name": "pypi:sentry-sdk", "version": VERSION}],
+}
+
+
 def _get_options(*args, **kwargs):
     # type: (*Optional[str], **Any) -> Dict[str, Any]
     if args and (isinstance(args[0], (text_type, bytes, str)) or args[0] is None):
@@ -58,7 +93,28 @@ def _get_options(*args, **kwargs):
 
     for key, value in iteritems(options):
         if key not in rv:
+            # Option "with_locals" was renamed to "include_local_variables"
+            if key == "with_locals":
+                msg = (
+                    "Deprecated: The option 'with_locals' was renamed to 'include_local_variables'. "
+                    "Please use 'include_local_variables'. The option 'with_locals' will be removed in the future."
+                )
+                logger.warning(msg)
+                rv["include_local_variables"] = value
+                continue
+
+            # Option "request_bodies" was renamed to "max_request_body_size"
+            if key == "request_bodies":
+                msg = (
+                    "Deprecated: The option 'request_bodies' was renamed to 'max_request_body_size'. "
+                    "Please use 'max_request_body_size'. The option 'request_bodies' will be removed in the future."
+                )
+                logger.warning(msg)
+                rv["max_request_body_size"] = value
+                continue
+
             raise TypeError("Unknown option %r" % (key,))
+
         rv[key] = value
 
     if rv["dsn"] is None:
@@ -70,22 +126,63 @@ def _get_options(*args, **kwargs):
     if rv["environment"] is None:
         rv["environment"] = os.environ.get("SENTRY_ENVIRONMENT") or "production"
 
+    if rv["debug"] is None:
+        rv["debug"] = os.environ.get("SENTRY_DEBUG", "False").lower() in (
+            "true",
+            "1",
+            "t",
+        )
+
     if rv["server_name"] is None and hasattr(socket, "gethostname"):
         rv["server_name"] = socket.gethostname()
 
+    if rv["instrumenter"] is None:
+        rv["instrumenter"] = INSTRUMENTER.SENTRY
+
+    if rv["project_root"] is None:
+        try:
+            project_root = os.getcwd()
+        except Exception:
+            project_root = None
+
+        rv["project_root"] = project_root
+
+    if rv["enable_tracing"] is True and rv["traces_sample_rate"] is None:
+        rv["traces_sample_rate"] = 1.0
+
+    if rv["event_scrubber"] is None:
+        rv["event_scrubber"] = EventScrubber()
+
+    if rv["socket_options"] and not isinstance(rv["socket_options"], list):
+        logger.warning(
+            "Ignoring socket_options because of unexpected format. See urllib3.HTTPConnection.socket_options for the expected format."
+        )
+        rv["socket_options"] = None
+
     return rv
 
 
+try:
+    # Python 3.6+
+    module_not_found_error = ModuleNotFoundError
+except Exception:
+    # Older Python versions
+    module_not_found_error = ImportError  # type: ignore
+
+
 class _Client(object):
     """The client is internally responsible for capturing the events and
     forwarding them to sentry through the configured transport.  It takes
     the client options as keyword arguments and optionally the DSN as first
     argument.
+
+    Alias of :py:class:`Client`. (Created for better IntelliSense support.)
     """
 
     def __init__(self, *args, **kwargs):
         # type: (*Any, **Any) -> None
         self.options = get_options(*args, **kwargs)  # type: Dict[str, Any]
+
         self._init_impl()
 
     def __getstate__(self):
@@ -97,6 +194,58 @@ def __setstate__(self, state):
         self.options = state["options"]
         self._init_impl()
 
+    def _setup_instrumentation(self, functions_to_trace):
+        # type: (Sequence[Dict[str, str]]) -> None
+        """
+        Instruments the functions given in the list `functions_to_trace` with the `@sentry_sdk.tracing.trace` decorator.
+        """
+        for function in functions_to_trace:
+            class_name = None
+            function_qualname = function["qualified_name"]
+            module_name, function_name = function_qualname.rsplit(".", 1)
+
+            try:
+                # Try to import module and function
+                # ex: "mymodule.submodule.funcname"
+
+                module_obj = import_module(module_name)
+                function_obj = getattr(module_obj, function_name)
+                setattr(module_obj, function_name, trace(function_obj))
+                logger.debug("Enabled tracing for %s", function_qualname)
+
+            except module_not_found_error:
+                try:
+                    # Try to import a class
+                    # ex: "mymodule.submodule.MyClassName.member_function"
+
+                    module_name, class_name = module_name.rsplit(".", 1)
+                    module_obj = import_module(module_name)
+                    class_obj = getattr(module_obj, class_name)
+                    function_obj = getattr(class_obj, function_name)
+                    function_type = type(class_obj.__dict__[function_name])
+                    traced_function = trace(function_obj)
+
+                    if function_type in (staticmethod, classmethod):
+                        traced_function = staticmethod(traced_function)
+
+                    setattr(class_obj, function_name, traced_function)
+                    setattr(module_obj, class_name, class_obj)
+                    logger.debug("Enabled tracing for %s", function_qualname)
+
+                except Exception as e:
+                    logger.warning(
+                        "Can not enable tracing for '%s'. (%s) Please check your `functions_to_trace` parameter.",
+                        function_qualname,
+                        e,
+                    )
+
+            except Exception as e:
+                logger.warning(
+                    "Can not enable tracing for '%s'. (%s) Please check your `functions_to_trace` parameter.",
+                    function_qualname,
+                    e,
+                )
+
     def _init_impl(self):
         # type: () -> None
         old_debug = _client_init_debug.get(False)
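
`_setup_instrumentation` above is driven by the new `functions_to_trace` option: each entry names a module-level function or a class method by its qualified name, and the client wraps it with the `trace` decorator during init. A sketch of the expected shape (the `myapp.*` names are hypothetical):

```python
import sentry_sdk

sentry_sdk.init(
    # dsn=..., traces_sample_rate=1.0,
    functions_to_trace=[
        # Module-level function: resolved via import_module + getattr.
        {"qualified_name": "myapp.tasks.send_invoice"},
        # Class method: the ModuleNotFoundError branch imports the class first.
        {"qualified_name": "myapp.services.Mailer.deliver"},
    ],
)
```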
@@ -110,16 +259,50 @@ def _capture_envelope(envelope):
             _client_init_debug.set(self.options["debug"])
             self.transport = make_transport(self.options)
 
+            self.monitor = None
+            if self.transport:
+                if self.options["enable_backpressure_handling"]:
+                    self.monitor = Monitor(self.transport)
+
             self.session_flusher = SessionFlusher(capture_func=_capture_envelope)
 
-            request_bodies = ("always", "never", "small", "medium")
-            if self.options["request_bodies"] not in request_bodies:
+            self.metrics_aggregator = None  # type: Optional[MetricsAggregator]
+            experiments = self.options.get("_experiments", {})
+            if experiments.get("enable_metrics", True):
+                # Context vars are not working correctly on Python <=3.6
+                # with gevent.
+                metrics_supported = not is_gevent() or PY37
+                if metrics_supported:
+                    from sentry_sdk.metrics import MetricsAggregator
+
+                    self.metrics_aggregator = MetricsAggregator(
+                        capture_func=_capture_envelope,
+                        enable_code_locations=bool(
+                            experiments.get("metric_code_locations", True)
+                        ),
+                    )
+                else:
+                    logger.info(
+                        "Metrics not supported on Python 3.6 and lower with gevent."
+                    )
+
+            max_request_body_size = ("always", "never", "small", "medium")
+            if self.options["max_request_body_size"] not in max_request_body_size:
                 raise ValueError(
-                    "Invalid value for request_bodies. Must be one of {}".format(
-                        request_bodies
+                    "Invalid value for max_request_body_size. Must be one of {}".format(
+                        max_request_body_size
                     )
                 )
 
+            if self.options["_experiments"].get("otel_powered_performance", False):
+                logger.debug(
+                    "[OTel] Enabling experimental OTel-powered performance monitoring."
+                )
+                self.options["instrumenter"] = INSTRUMENTER.OTEL
+                _DEFAULT_INTEGRATIONS.append(
+                    "sentry_sdk.integrations.opentelemetry.integration.OpenTelemetryIntegration",
+                )
+
             self.integrations = setup_integrations(
                 self.options["integrations"],
                 with_defaults=self.options["default_integrations"],
@@ -127,9 +310,36 @@ def _capture_envelope(envelope):
                     "auto_enabling_integrations"
                 ],
             )
+
+            self.spotlight = None
+            if self.options.get("spotlight"):
+                self.spotlight = setup_spotlight(self.options)
+
+            sdk_name = get_sdk_name(list(self.integrations.keys()))
+            SDK_INFO["name"] = sdk_name
+            logger.debug("Setting SDK name to '%s'", sdk_name)
+
+            if has_profiling_enabled(self.options):
+                try:
+                    setup_profiler(self.options)
+                except Exception as e:
+                    logger.debug("Can not set up profiler. (%s)", e)
+
         finally:
             _client_init_debug.set(old_debug)
 
+        self._setup_instrumentation(self.options.get("functions_to_trace", []))
+
+        if (
+            self.monitor
+            or self.metrics_aggregator
+            or has_profiling_enabled(self.options)
+            or isinstance(self.transport, HttpTransport)
+        ):
+            # If we have anything that could spawn a background thread, we
+            # need to check whether it's safe to do so.
+            check_uwsgi_thread_support()
+
     @property
     def dsn(self):
         # type: () -> Optional[str]
@@ -145,11 +355,11 @@ def _prepare_event(
         # type: (...) -> Optional[Event]
 
         if event.get("timestamp") is None:
-            event["timestamp"] = datetime.utcnow()
+            event["timestamp"] = datetime_utcnow()
 
         if scope is not None:
             is_transaction = event.get("type") == "transaction"
-            event_ = scope.apply_to_event(event, hint)
+            event_ = scope.apply_to_event(event, hint, self.options)
 
             # one of the event/error processors returned None
             if event_ is None:
@@ -173,7 +383,12 @@ def _prepare_event(
                     "values": [
                         {
                             "stacktrace": current_stacktrace(
-                                self.options["with_locals"]
+                                include_local_variables=self.options.get(
+                                    "include_local_variables", True
+                                ),
+                                max_value_length=self.options.get(
+                                    "max_value_length", DEFAULT_MAX_VALUE_LENGTH
+                                ),
                             ),
                             "crashed": False,
                             "current": True,
@@ -183,7 +398,7 @@ def _prepare_event(
 
         for key in "release", "environment", "server_name", "dist":
             if event.get(key) is None and self.options[key] is not None:
-                event[key] = text_type(self.options[key]).strip()
+                event[key] = text_type(self.options[key]).strip()  # type: ignore[literal-required]
         if event.get("sdk") is None:
             sdk_info = dict(SDK_INFO)
             sdk_info["integrations"] = sorted(self.integrations.keys())
@@ -193,32 +408,60 @@ def _prepare_event(
             event["platform"] = "python"
 
         event = handle_in_app(
-            event, self.options["in_app_exclude"], self.options["in_app_include"]
+            event,
+            self.options["in_app_exclude"],
+            self.options["in_app_include"],
+            self.options["project_root"],
         )
 
+        if event is not None:
+            event_scrubber = self.options["event_scrubber"]
+            if event_scrubber and not self.options["send_default_pii"]:
+                event_scrubber.scrub_event(event)
+
         # Postprocess the event here so that annotated types do
         # generally not surface in before_send
         if event is not None:
             event = serialize(
                 event,
-                smart_transaction_trimming=self.options["_experiments"].get(
-                    "smart_transaction_trimming"
-                ),
+                max_request_body_size=self.options.get("max_request_body_size"),
+                max_value_length=self.options.get("max_value_length"),
             )
 
         before_send = self.options["before_send"]
-        if before_send is not None and event.get("type") != "transaction":
+        if (
+            before_send is not None
+            and event is not None
+            and event.get("type") != "transaction"
+        ):
             new_event = None
             with capture_internal_exceptions():
                 new_event = before_send(event, hint or {})
             if new_event is None:
-                logger.info("before send dropped event (%s)", event)
+                logger.info("before send dropped event")
                 if self.transport:
                     self.transport.record_lost_event(
                         "before_send", data_category="error"
                     )
             event = new_event  # type: ignore
 
+        before_send_transaction = self.options["before_send_transaction"]
+        if (
+            before_send_transaction is not None
+            and event is not None
+            and event.get("type") == "transaction"
+        ):
+            new_event = None
+            with capture_internal_exceptions():
+                new_event = before_send_transaction(event, hint or {})
+            if new_event is None:
+                logger.info("before send transaction dropped event")
+                if self.transport:
+                    self.transport.record_lost_event(
+                        "before_send", data_category="transaction"
+                    )
+            event = new_event  # type: ignore
+
         return event
 
     def _is_ignored_error(self, event, hint):
@@ -268,12 +511,42 @@ def _should_capture(
     def _should_sample_error(
         self,
         event,  # type: Event
+        hint,  # type: Hint
     ):
         # type: (...) -> bool
-        not_in_sample_rate = (
-            self.options["sample_rate"] < 1.0
-            and random.random() >= self.options["sample_rate"]
-        )
+        error_sampler = self.options.get("error_sampler", None)
+
+        if callable(error_sampler):
+            with capture_internal_exceptions():
+                sample_rate = error_sampler(event, hint)
+        else:
+            sample_rate = self.options["sample_rate"]
+
+        try:
+            not_in_sample_rate = sample_rate < 1.0 and random.random() >= sample_rate
+        except NameError:
+            logger.warning(
+                "The provided error_sampler raised an error. Defaulting to sampling the event."
+            )
+
+            # If the error_sampler raised an error, we should sample the event, since the default behavior
+            # (when no sample_rate or error_sampler is provided) is to sample all events.
+            not_in_sample_rate = False
+        except TypeError:
+            parameter, verb = (
+                ("error_sampler", "returned")
+                if callable(error_sampler)
+                else ("sample_rate", "contains")
+            )
+            logger.warning(
+                "The provided %s %s an invalid value of %s. The value should be a float or a bool. Defaulting to sampling the event."
+                % (parameter, verb, repr(sample_rate))
+            )
+
+            # If the sample_rate has an invalid value, we should sample the event, since the default behavior
+            # (when no sample_rate or error_sampler is provided) is to sample all events.
+            not_in_sample_rate = False
+
         if not_in_sample_rate:
             # because we will not sample this event, record a "lost event".
             if self.transport:
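
`error_sampler` takes precedence over the static `sample_rate` and receives both the event and the hint, so the sampling decision can depend on the exception itself; as the fallback branches above show, invalid return values default to keeping the event. A sketch:

```python
import sentry_sdk


def my_error_sampler(event, hint):
    # Keep 10% of timeouts, everything else in full.
    exc = hint.get("exc_info", (None, None, None))[1]
    if isinstance(exc, TimeoutError):
        return 0.1
    return 1.0


sentry_sdk.init(
    # dsn=...,
    error_sampler=my_error_sampler,
)
```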
@@ -299,7 +572,7 @@ def _update_session_from_event(
             errored = True
             for error in exceptions:
                 mechanism = error.get("mechanism")
-                if mechanism and mechanism.get("handled") is False:
+                if isinstance(mechanism, Mapping) and mechanism.get("handled") is False:
                     crashed = True
                     break
 
@@ -307,7 +580,7 @@ def _update_session_from_event(
 
         if session.user_agent is None:
             headers = (event.get("request") or {}).get("headers")
-            for (k, v) in iteritems(headers or {}):
+            for k, v in iteritems(headers or {}):
                 if k.lower() == "user-agent":
                     user_agent = v
                     break
@@ -332,13 +605,14 @@ def capture_event(
 
         :param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or a HTTP request object.
 
+        :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
+            The `scope` and `scope_kwargs` parameters are mutually exclusive.
+
         :returns: An event ID. May be `None` if there is no DSN set or if the SDK decided to discard the event for other reasons. In such situations, setting `debug=True` on `init()` may help.
         """
         if disable_capture_event.get(False):
             return None
 
-        if self.transport is None:
-            return None
         if hint is None:
             hint = {}
         event_id = event.get("event_id")
@@ -349,6 +623,8 @@ def capture_event(
         if not self._should_capture(event, hint, scope):
             return None
 
+        profile = event.pop("profile", None)
+
         event_opt = self._prepare_event(event, hint, scope)
         if event_opt is None:
             return None
@@ -360,47 +636,68 @@ def capture_event(
             self._update_session_from_event(session, event)
 
         is_transaction = event_opt.get("type") == "transaction"
+        is_checkin = event_opt.get("type") == "check_in"
 
-        if not is_transaction and not self._should_sample_error(event):
+        if (
+            not is_transaction
+            and not is_checkin
+            and not self._should_sample_error(event, hint)
+        ):
             return None
 
+        tracing_enabled = has_tracing_enabled(self.options)
         attachments = hint.get("attachments")
 
-        # this is outside of the `if` immediately below because even if we don't
-        # use the value, we want to make sure we remove it before the event is
-        # sent
-        raw_tracestate = (
-            event_opt.get("contexts", {}).get("trace", {}).pop("tracestate", "")
+        trace_context = event_opt.get("contexts", {}).get("trace") or {}
+        dynamic_sampling_context = trace_context.pop("dynamic_sampling_context", {})
+
+        # If tracing is enabled, all events go to the /envelope endpoint.
+        # If tracing is disabled, only transactions, events with attachments, and check-ins go there.
+        should_use_envelope_endpoint = (
+            tracing_enabled
+            or is_transaction
+            or is_checkin
+            or bool(attachments)
+            or bool(self.spotlight)
         )
-
-        # Transactions or events with attachments should go to the /envelope/
-        # endpoint.
-        if is_transaction or attachments:
-
+        if should_use_envelope_endpoint:
             headers = {
                 "event_id": event_opt["event_id"],
-                "sent_at": format_timestamp(datetime.utcnow()),
-            }
+                "sent_at": format_timestamp(datetime_utcnow()),
+            }  # type: dict[str, object]
 
-            tracestate_data = raw_tracestate and reinflate_tracestate(
-                raw_tracestate.replace("sentry=", "")
-            )
-            if tracestate_data and has_tracestate_enabled():
-                headers["trace"] = tracestate_data
+            if dynamic_sampling_context:
+                headers["trace"] = dynamic_sampling_context
 
             envelope = Envelope(headers=headers)
 
             if is_transaction:
+                if isinstance(profile, Profile):
+                    envelope.add_profile(profile.to_json(event_opt, self.options))
                 envelope.add_transaction(event_opt)
+            elif is_checkin:
+                envelope.add_checkin(event_opt)
             else:
                 envelope.add_event(event_opt)
 
             for attachment in attachments or ():
                 envelope.add_item(attachment.to_envelope_item())
+
+            if self.spotlight:
+                self.spotlight.capture_envelope(envelope)
+
+            if self.transport is None:
+                return None
+
             self.transport.capture_envelope(envelope)
+
         else:
-            # All other events go to the /store/ endpoint.
+            if self.transport is None:
+                return None
+
+            # All other events go to the legacy /store/ endpoint (will be removed in the future).
             self.transport.capture_event(event_opt)
+
         return event_id
 
     def capture_session(
@@ -412,6 +709,22 @@ def capture_session(
         else:
             self.session_flusher.add_session(session)
 
+    def get_integration(
+        self, name_or_class  # type: Union[str, Type[Integration]]
+    ):
+        # type: (...) -> Any
+        """Returns the integration for this client by name or class.
+        If the client does not have that integration then `None` is returned.
+        """
+        if isinstance(name_or_class, str):
+            integration_name = name_or_class
+        elif name_or_class.identifier is not None:
+            integration_name = name_or_class.identifier
+        else:
+            raise ValueError("Integration has no name")
+
+        return self.integrations.get(integration_name)
+
     def close(
         self,
         timeout=None,  # type: Optional[float]
@@ -425,6 +738,10 @@ def close(
         if self.transport is not None:
             self.flush(timeout=timeout, callback=callback)
             self.session_flusher.kill()
+            if self.metrics_aggregator is not None:
+                self.metrics_aggregator.kill()
+            if self.monitor:
+                self.monitor.kill()
             self.transport.kill()
             self.transport = None
 
@@ -445,6 +762,8 @@ def flush(
             if timeout is None:
                 timeout = self.options["shutdown_timeout"]
             self.session_flusher.flush()
+            if self.metrics_aggregator is not None:
+                self.metrics_aggregator.flush()
             self.transport.flush(timeout=timeout, callback=callback)
 
     def __enter__(self):
@@ -456,9 +775,9 @@ def __exit__(self, exc_type, exc_value, tb):
         self.close()
 
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     # Make mypy, PyCharm and other static analyzers think `get_options` is a
     # type to have nicer autocompletion for params.
     #
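
The new `get_integration` accepts either an integration's string identifier or its class; both lookups below resolve to the same installed instance, or `None` when it is not enabled. A sketch (`LoggingIntegration` is one of the default integrations):

```python
import sentry_sdk
from sentry_sdk.integrations.logging import LoggingIntegration

client = sentry_sdk.Hub.current.client
if client is not None:
    by_name = client.get_integration("logging")
    by_class = client.get_integration(LoggingIntegration)
    assert by_name is by_class
```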
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index 34faec3c12..f078aed2fc 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -1,6 +1,9 @@
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+# up top to prevent circular import due to integration import
+DEFAULT_MAX_VALUE_LENGTH = 1024
+
+if TYPE_CHECKING:
     import sentry_sdk
 
     from typing import Optional
@@ -11,6 +14,7 @@
     from typing import Dict
     from typing import Any
     from typing import Sequence
+    from typing import Tuple
     from typing_extensions import TypedDict
 
     from sentry_sdk.integrations import Integration
@@ -19,7 +23,13 @@
         BreadcrumbProcessor,
         Event,
         EventProcessor,
+        Hint,
+        MeasurementUnit,
+        ProfilerMode,
         TracesSampler,
+        TransactionProcessor,
+        MetricTags,
+        MetricValue,
     )
 
     # Experiments are feature flags to enable and disable certain unstable SDK
@@ -29,17 +39,218 @@
     Experiments = TypedDict(
         "Experiments",
         {
+            "attach_explain_plans": dict[str, Any],
             "max_spans": Optional[int],
             "record_sql_params": Optional[bool],
-            "smart_transaction_trimming": Optional[bool],
-            "propagate_tracestate": Optional[bool],
-            "custom_measurements": Optional[bool],
+            # TODO: Remove these 2 profiling related experiments
+            "profiles_sample_rate": Optional[float],
+            "profiler_mode": Optional[ProfilerMode],
+            "otel_powered_performance": Optional[bool],
+            "transport_zlib_compression_level": Optional[int],
+            "transport_num_pools": Optional[int],
+            "enable_metrics": Optional[bool],
+            "before_emit_metric": Optional[
+                Callable[[str, MetricValue, MeasurementUnit, MetricTags], bool]
+            ],
+            "metric_code_locations": Optional[bool],
         },
         total=False,
     )
 
 DEFAULT_QUEUE_SIZE = 100
 DEFAULT_MAX_BREADCRUMBS = 100
+MATCH_ALL = r".*"
+
+FALSE_VALUES = [
+    "false",
+    "no",
+    "off",
+    "n",
+    "0",
+]
+
+
+class INSTRUMENTER:
+    SENTRY = "sentry"
+    OTEL = "otel"
+
+
+class SPANDATA:
+    """
+    Additional information describing the type of the span.
+    See: https://develop.sentry.dev/sdk/performance/span-data-conventions/
+    """
+
+    DB_NAME = "db.name"
+    """
+    The name of the database being accessed. For commands that switch the database, this should be set to the target database (even if the command fails).
+    Example: myDatabase
+    """
+
+    DB_USER = "db.user"
+    """
+    The name of the database user used for connecting to the database.
+    See: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/database.md
+    Example: my_user
+    """
+
+    DB_OPERATION = "db.operation"
+    """
+    The name of the operation being executed, e.g. the MongoDB command name such as findAndModify, or the SQL keyword.
+    See: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/database.md
+    Example: findAndModify, HMSET, SELECT
+    """
+
+    DB_SYSTEM = "db.system"
+    """
+    An identifier for the database management system (DBMS) product being used.
+    See: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/database.md
+    Example: postgresql
+    """
+
+    CACHE_HIT = "cache.hit"
+    """
+    A boolean indicating whether the requested data was found in the cache.
+    Example: true
+    """
+
+    CACHE_ITEM_SIZE = "cache.item_size"
+    """
+    The size of the requested data in bytes.
+    Example: 58
+    """
+
+    HTTP_QUERY = "http.query"
+    """
+    The query string present in the URL.
+    Example: ?foo=bar&bar=baz
+    """
+
+    HTTP_FRAGMENT = "http.fragment"
+    """
+    The fragment present in the URL.
+    Example: #foo=bar
+    """
+
+    HTTP_METHOD = "http.method"
+    """
+    The HTTP method used.
+    Example: GET
+    """
+
+    HTTP_STATUS_CODE = "http.response.status_code"
+    """
+    The HTTP status code as an integer.
+    Example: 418
+    """
+
+    SERVER_ADDRESS = "server.address"
+    """
+    Name of the database host.
+    Example: example.com
+    """
+
+    SERVER_PORT = "server.port"
+    """
+    Logical server port number.
+    Example: 80; 8080; 443
+    """
+
+    SERVER_SOCKET_ADDRESS = "server.socket.address"
+    """
+    Physical server IP address or Unix socket address.
+    Example: 10.5.3.2
+    """
+
+    SERVER_SOCKET_PORT = "server.socket.port"
+    """
+    Physical server port.
+    Recommended: If different than server.port.
+    Example: 16456
+    """
+
+    CODE_FILEPATH = "code.filepath"
+    """
+    The source code file name that identifies the code unit as uniquely as possible (preferably an absolute file path).
+    Example: "/app/myapplication/http/handler/server.py"
+    """
+
+    CODE_LINENO = "code.lineno"
+    """
+    The line number in `code.filepath` best representing the operation. It SHOULD point within the code unit named in `code.function`.
+    Example: 42
+    """
+
+    CODE_FUNCTION = "code.function"
+    """
+    The method or function name, or equivalent (usually rightmost part of the code unit's name).
+    Example: "server_request"
+    """
+
+    CODE_NAMESPACE = "code.namespace"
+    """
+    The "namespace" within which `code.function` is defined. Usually the qualified class or module name, such that `code.namespace` + some separator + `code.function` form a unique identifier for the code unit.
+    Example: "http.handler"
+    """
+
+    THREAD_ID = "thread.id"
+    """
+    Identifier of the thread from which the span originated. This should be a string.
+    Example: "7972576320"
+    """
+
+    THREAD_NAME = "thread.name"
+    """
+    Label identifying the thread from which the span originated. This should be a string.
+    Example: "MainThread"
+    """
+
+
+class OP:
+    CACHE_GET_ITEM = "cache.get_item"
+    DB = "db"
+    DB_REDIS = "db.redis"
+    EVENT_DJANGO = "event.django"
+    FUNCTION = "function"
+    FUNCTION_AWS = "function.aws"
+    FUNCTION_GCP = "function.gcp"
+    GRAPHQL_EXECUTE = "graphql.execute"
+    GRAPHQL_MUTATION = "graphql.mutation"
+    GRAPHQL_PARSE = "graphql.parse"
+    GRAPHQL_RESOLVE = "graphql.resolve"
+    GRAPHQL_SUBSCRIPTION = "graphql.subscription"
+    GRAPHQL_QUERY = "graphql.query"
+    GRAPHQL_VALIDATE = "graphql.validate"
+    GRPC_CLIENT = "grpc.client"
+    GRPC_SERVER = "grpc.server"
+    HTTP_CLIENT = "http.client"
+    HTTP_CLIENT_STREAM = "http.client.stream"
+    HTTP_SERVER = "http.server"
+    MIDDLEWARE_DJANGO = "middleware.django"
+    MIDDLEWARE_STARLETTE = "middleware.starlette"
+    MIDDLEWARE_STARLETTE_RECEIVE = "middleware.starlette.receive"
+    MIDDLEWARE_STARLETTE_SEND = "middleware.starlette.send"
+    MIDDLEWARE_STARLITE = "middleware.starlite"
+    MIDDLEWARE_STARLITE_RECEIVE = "middleware.starlite.receive"
+    MIDDLEWARE_STARLITE_SEND = "middleware.starlite.send"
+    OPENAI_CHAT_COMPLETIONS_CREATE = "ai.chat_completions.create.openai"
+    OPENAI_EMBEDDINGS_CREATE = "ai.embeddings.create.openai"
+    QUEUE_SUBMIT_ARQ = "queue.submit.arq"
+    QUEUE_TASK_ARQ = "queue.task.arq"
+    QUEUE_SUBMIT_CELERY = "queue.submit.celery"
+    QUEUE_TASK_CELERY = "queue.task.celery"
+    QUEUE_TASK_RQ = "queue.task.rq"
+    QUEUE_SUBMIT_HUEY = "queue.submit.huey"
+    QUEUE_TASK_HUEY = "queue.task.huey"
+    SUBPROCESS = "subprocess"
+    SUBPROCESS_WAIT = "subprocess.wait"
+    SUBPROCESS_COMMUNICATE = "subprocess.communicate"
+    TEMPLATE_RENDER = "template.render"
+    VIEW_RENDER = "view.render"
+    VIEW_RESPONSE_RENDER = "view.response.render"
+    WEBSOCKET_SERVER = "websocket.server"
+    SOCKET_CONNECTION = "socket.connection"
+    SOCKET_DNS = "socket.dns"
 
 
 # This type exists to trick mypy and PyCharm into thinking `init` and `Client`
@@ -48,7 +259,6 @@ class ClientConstructor(object):
     def __init__(
         self,
         dsn=None,  # type: Optional[str]
-        with_locals=True,  # type: bool
         max_breadcrumbs=DEFAULT_MAX_BREADCRUMBS,  # type: int
         release=None,  # type: Optional[str]
         environment=None,  # type: Optional[str]
@@ -65,20 +275,43 @@ def __init__(
         send_default_pii=False,  # type: bool
         http_proxy=None,  # type: Optional[str]
         https_proxy=None,  # type: Optional[str]
-        ignore_errors=[],  # type: List[Union[type, str]]  # noqa: B006
-        request_bodies="medium",  # type: str
+        ignore_errors=[],  # type: Sequence[Union[type, str]]  # noqa: B006
+        max_request_body_size="medium",  # type: str
+        socket_options=None,  # type: Optional[List[Tuple[int, int, int | bytes]]]
+        keep_alive=False,  # type: bool
         before_send=None,  # type: Optional[EventProcessor]
         before_breadcrumb=None,  # type: Optional[BreadcrumbProcessor]
-        debug=False,  # type: bool
+        debug=None,  # type: Optional[bool]
         attach_stacktrace=False,  # type: bool
         ca_certs=None,  # type: Optional[str]
         propagate_traces=True,  # type: bool
         traces_sample_rate=None,  # type: Optional[float]
         traces_sampler=None,  # type: Optional[TracesSampler]
+        profiles_sample_rate=None,  # type: Optional[float]
+        profiles_sampler=None,  # type: Optional[TracesSampler]
+        profiler_mode=None,  # type: Optional[ProfilerMode]
         auto_enabling_integrations=True,  # type: bool
         auto_session_tracking=True,  # type: bool
         send_client_reports=True,  # type: bool
         _experiments={},  # type: Experiments  # noqa: B006
+        proxy_headers=None,  # type: Optional[Dict[str, str]]
+        instrumenter=INSTRUMENTER.SENTRY,  # type: Optional[str]
+        before_send_transaction=None,  # type: Optional[TransactionProcessor]
+        project_root=None,  # type: Optional[str]
+        enable_tracing=None,  # type: Optional[bool]
+        include_local_variables=True,  # type: Optional[bool]
+        include_source_context=True,  # type: Optional[bool]
+        trace_propagation_targets=[  # noqa: B006
+            MATCH_ALL
+        ],  # type: Optional[Sequence[str]]
+        functions_to_trace=[],  # type: Sequence[Dict[str, str]]  # noqa: B006
+        event_scrubber=None,  # type: Optional[sentry_sdk.scrubber.EventScrubber]
+        max_value_length=DEFAULT_MAX_VALUE_LENGTH,  # type: int
+        enable_backpressure_handling=True,  # type: bool
+        error_sampler=None,  # type: Optional[Callable[[Event, Hint], Union[float, bool]]]
+        enable_db_query_source=True,  # type: bool
+        db_query_source_threshold_ms=100,  # type: int
+        spotlight=None,  # type: Optional[Union[bool, str]]
     ):
         # type: (...) -> None
         pass
@@ -102,9 +335,4 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "1.5.12"
-SDK_INFO = {
-    "name": "sentry.python",
-    "version": VERSION,
-    "packages": [{"name": "pypi:sentry-sdk", "version": VERSION}],
-}
+VERSION = "1.45.1"
diff --git a/sentry_sdk/crons/__init__.py b/sentry_sdk/crons/__init__.py
new file mode 100644
index 0000000000..5d1fe357d2
--- /dev/null
+++ b/sentry_sdk/crons/__init__.py
@@ -0,0 +1,3 @@
+from sentry_sdk.crons.api import capture_checkin  # noqa
+from sentry_sdk.crons.consts import MonitorStatus  # noqa
+from sentry_sdk.crons.decorator import monitor  # noqa
diff --git a/sentry_sdk/crons/_decorator.py b/sentry_sdk/crons/_decorator.py
new file mode 100644
index 0000000000..2d0612f681
--- /dev/null
+++ b/sentry_sdk/crons/_decorator.py
@@ -0,0 +1,61 @@
+from functools import wraps
+from inspect import iscoroutinefunction
+
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Awaitable, Callable
+    from typing import Any, cast, overload, ParamSpec, TypeVar, Union
+
+    P = ParamSpec("P")
+    R = TypeVar("R")
+
+
+class MonitorMixin:
+    if TYPE_CHECKING:
+
+        @overload
+        def __call__(self, fn):
+            # type: (Callable[P, Awaitable[Any]]) -> Callable[P, Awaitable[Any]]
+            # Unfortunately, mypy does not give us any reliable way to type check the
+            # return value of an Awaitable (i.e. async function) for this overload,
+        # since calling iscoroutinefunction narrows the type to Callable[P, Awaitable[Any]].
+            ...
+
+        @overload
+        def __call__(self, fn):
+            # type: (Callable[P, R]) -> Callable[P, R]
+            ...
+
+    def __call__(
+        self,
+        fn,  # type: Union[Callable[P, R], Callable[P, Awaitable[Any]]]
+    ):
+        # type: (...) -> Union[Callable[P, R], Callable[P, Awaitable[Any]]]
+        if iscoroutinefunction(fn):
+            return self._async_wrapper(fn)
+
+        else:
+            if TYPE_CHECKING:
+                fn = cast("Callable[P, R]", fn)
+            return self._sync_wrapper(fn)
+
+    def _async_wrapper(self, fn):
+        # type: (Callable[P, Awaitable[Any]]) -> Callable[P, Awaitable[Any]]
+        @wraps(fn)
+        async def inner(*args: "P.args", **kwargs: "P.kwargs"):
+            # type: (...) -> R
+            with self:  # type: ignore[attr-defined]
+                return await fn(*args, **kwargs)
+
+        return inner
+
+    def _sync_wrapper(self, fn):
+        # type: (Callable[P, R]) -> Callable[P, R]
+        @wraps(fn)
+        def inner(*args: "P.args", **kwargs: "P.kwargs"):
+            # type: (...) -> R
+            with self:  # type: ignore[attr-defined]
+                return fn(*args, **kwargs)
+
+        return inner
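
With this mixin, `monitor` transparently supports coroutine functions: `__call__` dispatches on `iscoroutinefunction`, so both of the following get a check-in around each run (a sketch; the slugs are made up):

```python
import sentry_sdk


@sentry_sdk.monitor(monitor_slug="nightly-sync")
async def nightly_sync():
    ...  # routed through _async_wrapper; the check-in brackets the await


@sentry_sdk.monitor(monitor_slug="hourly-sync")
def hourly_sync():
    ...  # plain functions take the _sync_wrapper path
```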
diff --git a/sentry_sdk/crons/_decorator_py2.py b/sentry_sdk/crons/_decorator_py2.py
new file mode 100644
index 0000000000..9e1da797e2
--- /dev/null
+++ b/sentry_sdk/crons/_decorator_py2.py
@@ -0,0 +1,21 @@
+from functools import wraps
+
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Callable, ParamSpec, TypeVar
+
+    P = ParamSpec("P")
+    R = TypeVar("R")
+
+
+class MonitorMixin:
+    def __call__(self, fn):
+        # type: (Callable[P, R]) -> Callable[P, R]
+        @wraps(fn)
+        def inner(*args, **kwargs):
+            # type: (Any, Any) -> Any
+            with self:  # type: ignore[attr-defined]
+                return fn(*args, **kwargs)
+
+        return inner
diff --git a/sentry_sdk/crons/api.py b/sentry_sdk/crons/api.py
new file mode 100644
index 0000000000..1a95583301
--- /dev/null
+++ b/sentry_sdk/crons/api.py
@@ -0,0 +1,58 @@
+import uuid
+
+from sentry_sdk import Hub
+from sentry_sdk._types import TYPE_CHECKING
+
+
+if TYPE_CHECKING:
+    from typing import Optional
+    from sentry_sdk._types import Event, MonitorConfig
+
+
+def _create_check_in_event(
+    monitor_slug=None,  # type: Optional[str]
+    check_in_id=None,  # type: Optional[str]
+    status=None,  # type: Optional[str]
+    duration_s=None,  # type: Optional[float]
+    monitor_config=None,  # type: Optional[MonitorConfig]
+):
+    # type: (...) -> Event
+    options = Hub.current.client.options if Hub.current.client else {}
+    check_in_id = check_in_id or uuid.uuid4().hex  # type: str
+
+    check_in = {
+        "type": "check_in",
+        "monitor_slug": monitor_slug,
+        "check_in_id": check_in_id,
+        "status": status,
+        "duration": duration_s,
+        "environment": options.get("environment", None),
+        "release": options.get("release", None),
+    }  # type: Event
+
+    if monitor_config:
+        check_in["monitor_config"] = monitor_config
+
+    return check_in
+
+
+def capture_checkin(
+    monitor_slug=None,  # type: Optional[str]
+    check_in_id=None,  # type: Optional[str]
+    status=None,  # type: Optional[str]
+    duration=None,  # type: Optional[float]
+    monitor_config=None,  # type: Optional[MonitorConfig]
+):
+    # type: (...) -> str
+    check_in_event = _create_check_in_event(
+        monitor_slug=monitor_slug,
+        check_in_id=check_in_id,
+        status=status,
+        duration_s=duration,
+        monitor_config=monitor_config,
+    )
+
+    hub = Hub.current
+    hub.capture_event(check_in_event)
+
+    return check_in_event["check_in_id"]
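
The decorator added below in `decorator.py` automates the two-step lifecycle this API exposes: open a check-in, run the job, close the check-in with the same ID. A hand-rolled sketch, assuming an initialized SDK; the slug and job function are placeholders:

```
import time

from sentry_sdk.crons import capture_checkin
from sentry_sdk.crons.consts import MonitorStatus


def run_nightly_report():
    pass  # placeholder job


start = time.time()
check_in_id = capture_checkin(
    monitor_slug="nightly-report",  # placeholder slug
    status=MonitorStatus.IN_PROGRESS,
)
try:
    run_nightly_report()
except Exception:
    status = MonitorStatus.ERROR
    raise
else:
    status = MonitorStatus.OK
finally:
    # Close the open check-in with the ID returned above.
    capture_checkin(
        monitor_slug="nightly-report",
        check_in_id=check_in_id,
        status=status,
        duration=time.time() - start,
    )
```
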
diff --git a/sentry_sdk/crons/consts.py b/sentry_sdk/crons/consts.py
new file mode 100644
index 0000000000..be686b4539
--- /dev/null
+++ b/sentry_sdk/crons/consts.py
@@ -0,0 +1,4 @@
+class MonitorStatus:
+    IN_PROGRESS = "in_progress"
+    OK = "ok"
+    ERROR = "error"
diff --git a/sentry_sdk/crons/decorator.py b/sentry_sdk/crons/decorator.py
new file mode 100644
index 0000000000..6c5f747b97
--- /dev/null
+++ b/sentry_sdk/crons/decorator.py
@@ -0,0 +1,80 @@
+from sentry_sdk._compat import PY2
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.crons import capture_checkin
+from sentry_sdk.crons.consts import MonitorStatus
+from sentry_sdk.utils import now
+
+if TYPE_CHECKING:
+    from typing import Optional, Type
+    from types import TracebackType
+    from sentry_sdk._types import MonitorConfig
+
+if PY2:
+    from sentry_sdk.crons._decorator_py2 import MonitorMixin
+else:
+    # This is in its own module so that we don't make Python 2
+    # angry over `async def`s.
+    # Once we drop Python 2, remove the mixin and merge it
+    # into the main monitor class.
+    from sentry_sdk.crons._decorator import MonitorMixin
+
+
+class monitor(MonitorMixin):  # noqa: N801
+    """
+    Decorator/context manager to capture check-in events for a monitor.
+
+    Usage (as decorator):
+    ```
+    import sentry_sdk
+    from celery import Celery
+
+    app = Celery()
+
+    @app.task
+    @sentry_sdk.monitor(monitor_slug='my-fancy-slug')
+    def test(arg):
+        print(arg)
+    ```
+
+    This does not have to be used with Celery, but if you do use it with Celery,
+    put the `@sentry_sdk.monitor` decorator below Celery's `@app.task` decorator.
+
+    Usage (as context manager):
+    ```
+    import sentry_sdk
+
+    def test(arg):
+        with sentry_sdk.monitor(monitor_slug='my-fancy-slug'):
+            print(arg)
+    ```
+    """
+
+    def __init__(self, monitor_slug=None, monitor_config=None):
+        # type: (Optional[str], Optional[MonitorConfig]) -> None
+        self.monitor_slug = monitor_slug
+        self.monitor_config = monitor_config
+
+    def __enter__(self):
+        # type: () -> None
+        self.start_timestamp = now()
+        self.check_in_id = capture_checkin(
+            monitor_slug=self.monitor_slug,
+            status=MonitorStatus.IN_PROGRESS,
+            monitor_config=self.monitor_config,
+        )
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]) -> None
+        duration_s = now() - self.start_timestamp
+
+        if exc_type is None and exc_value is None and traceback is None:
+            status = MonitorStatus.OK
+        else:
+            status = MonitorStatus.ERROR
+
+        capture_checkin(
+            monitor_slug=self.monitor_slug,
+            check_in_id=self.check_in_id,
+            status=status,
+            duration=duration_s,
+            monitor_config=self.monitor_config,
+        )
diff --git a/sentry_sdk/db/__init__.py b/sentry_sdk/db/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/sentry_sdk/db/explain_plan/__init__.py b/sentry_sdk/db/explain_plan/__init__.py
new file mode 100644
index 0000000000..2699b6f49e
--- /dev/null
+++ b/sentry_sdk/db/explain_plan/__init__.py
@@ -0,0 +1,61 @@
+import datetime
+
+from sentry_sdk._compat import datetime_utcnow
+from sentry_sdk.consts import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+
+
+EXPLAIN_CACHE = {}
+EXPLAIN_CACHE_SIZE = 50
+EXPLAIN_CACHE_TIMEOUT_SECONDS = 60 * 60 * 24
+
+
+def cache_statement(statement, options):
+    # type: (str, dict[str, Any]) -> None
+    global EXPLAIN_CACHE
+
+    now = datetime_utcnow()
+    explain_cache_timeout_seconds = options.get(
+        "explain_cache_timeout_seconds", EXPLAIN_CACHE_TIMEOUT_SECONDS
+    )
+    expiration_time = now + datetime.timedelta(seconds=explain_cache_timeout_seconds)
+
+    EXPLAIN_CACHE[hash(statement)] = expiration_time
+
+
+def remove_expired_cache_items():
+    # type: () -> None
+    """
+    Remove expired items from the cache.
+    """
+    global EXPLAIN_CACHE
+
+    now = datetime_utcnow()
+
+    # Iterate over a copy so expired keys can be deleted while looping.
+    for key, expiration_time in list(EXPLAIN_CACHE.items()):
+        expiration_in_the_past = expiration_time < now
+        if expiration_in_the_past:
+            del EXPLAIN_CACHE[key]
+
+
+def should_run_explain_plan(statement, options):
+    # type: (str, dict[str, Any]) -> bool
+    """
+    Check the cache to decide whether the explain plan for the given statement should be run.
+    """
+    global EXPLAIN_CACHE
+
+    remove_expired_cache_items()
+
+    key = hash(statement)
+    if key in EXPLAIN_CACHE:
+        return False
+
+    explain_cache_size = options.get("explain_cache_size", EXPLAIN_CACHE_SIZE)
+    cache_is_full = len(EXPLAIN_CACHE.keys()) >= explain_cache_size
+    if cache_is_full:
+        return False
+
+    return True
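
Taken together, the gate works like this sketch; the options dict mirrors the `attach_explain_plans` settings consumed by the callers added below:

```
from sentry_sdk.db.explain_plan import cache_statement, should_run_explain_plan

options = {"explain_cache_size": 2, "explain_cache_timeout_seconds": 60}
statement = "SELECT * FROM users WHERE id = %s"

assert should_run_explain_plan(statement, options)  # not cached yet
cache_statement(statement, options)
assert not should_run_explain_plan(statement, options)  # cached for 60 seconds
```
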
diff --git a/sentry_sdk/db/explain_plan/django.py b/sentry_sdk/db/explain_plan/django.py
new file mode 100644
index 0000000000..b395f1c82b
--- /dev/null
+++ b/sentry_sdk/db/explain_plan/django.py
@@ -0,0 +1,47 @@
+from sentry_sdk.consts import TYPE_CHECKING
+from sentry_sdk.db.explain_plan import cache_statement, should_run_explain_plan
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Callable
+
+    from sentry_sdk.tracing import Span
+
+
+def attach_explain_plan_to_span(
+    span, connection, statement, parameters, mogrify, options
+):
+    # type: (Span, Any, str, Any, Callable[[str, Any], bytes], dict[str, Any]) -> None
+    """
+    Run EXPLAIN or EXPLAIN ANALYZE on the given statement and attach the explain plan to the span data.
+
+    Usage:
+    ```
+    sentry_sdk.init(
+        dsn="...",
+        _experiments={
+            "attach_explain_plans": {
+                "explain_cache_size": 1000,  # Run explain plan for the 1000 most run queries
+                "explain_cache_timeout_seconds": 60 * 60 * 24,  # Run the explain plan for each statement only every 24 hours
+                "use_explain_analyze": True,  # Run "explain analyze" instead of only "explain"
+            }
+        }
+    ```
+    """
+    if not statement.strip().upper().startswith("SELECT"):
+        return
+
+    if not should_run_explain_plan(statement, options):
+        return
+
+    analyze = "ANALYZE" if options.get("use_explain_analyze", False) else ""
+    explain_statement = ("EXPLAIN %s " % analyze) + mogrify(
+        statement, parameters
+    ).decode("utf-8")
+
+    with connection.cursor() as cursor:
+        cursor.execute(explain_statement)
+        explain_plan = [row for row in cursor.fetchall()]
+
+        span.set_data("db.explain_plan", explain_plan)
+        cache_statement(statement, options)
diff --git a/sentry_sdk/db/explain_plan/sqlalchemy.py b/sentry_sdk/db/explain_plan/sqlalchemy.py
new file mode 100644
index 0000000000..fac0729f70
--- /dev/null
+++ b/sentry_sdk/db/explain_plan/sqlalchemy.py
@@ -0,0 +1,49 @@
+from __future__ import absolute_import
+
+from sentry_sdk.consts import TYPE_CHECKING
+from sentry_sdk.db.explain_plan import cache_statement, should_run_explain_plan
+from sentry_sdk.integrations import DidNotEnable
+
+try:
+    from sqlalchemy.sql import text  # type: ignore
+except ImportError:
+    raise DidNotEnable("SQLAlchemy not installed.")
+
+if TYPE_CHECKING:
+    from typing import Any
+
+    from sentry_sdk.tracing import Span
+
+
+def attach_explain_plan_to_span(span, connection, statement, parameters, options):
+    # type: (Span, Any, str, Any, dict[str, Any]) -> None
+    """
+    Run EXPLAIN or EXPLAIN ANALYZE on the given statement and attach the explain plan to the span data.
+
+    Usage:
+    ```
+    sentry_sdk.init(
+        dsn="...",
+        _experiments={
+            "attach_explain_plans": {
+                "explain_cache_size": 1000,  # Run explain plan for the 1000 most run queries
+                "explain_cache_timeout_seconds": 60 * 60 * 24,  # Run the explain plan for each statement only every 24 hours
+                "use_explain_analyze": True,  # Run "explain analyze" instead of only "explain"
+            }
+        }
+    ```
+    """
+    if not statement.strip().upper().startswith("SELECT"):
+        return
+
+    if not should_run_explain_plan(statement, options):
+        return
+
+    analyze = "ANALYZE" if options.get("use_explain_analyze", False) else ""
+    explain_statement = (("EXPLAIN %s " % analyze) + statement) % parameters
+
+    result = connection.execute(text(explain_statement))
+    explain_plan = [row for row in result]
+
+    span.set_data("db.explain_plan", explain_plan)
+    cache_statement(statement, options)
diff --git a/sentry_sdk/envelope.py b/sentry_sdk/envelope.py
index 928c691cdd..fb214a45f4 100644
--- a/sentry_sdk/envelope.py
+++ b/sentry_sdk/envelope.py
@@ -3,11 +3,11 @@
 import mimetypes
 
 from sentry_sdk._compat import text_type, PY2
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.session import Session
 from sentry_sdk.utils import json_dumps, capture_internal_exceptions
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Optional
     from typing import Union
@@ -62,6 +62,18 @@ def add_transaction(
         # type: (...) -> None
         self.add_item(Item(payload=PayloadRef(json=transaction), type="transaction"))
 
+    def add_profile(
+        self, profile  # type: Any
+    ):
+        # type: (...) -> None
+        self.add_item(Item(payload=PayloadRef(json=profile), type="profile"))
+
+    def add_checkin(
+        self, checkin  # type: Any
+    ):
+        # type: (...) -> None
+        self.add_item(Item(payload=PayloadRef(json=checkin), type="check_in"))
+
     def add_session(
         self, session  # type: Union[Session, Any]
     ):
@@ -246,6 +258,12 @@ def data_category(self):
             return "error"
         elif ty == "client_report":
             return "internal"
+        elif ty == "profile":
+            return "profile"
+        elif ty == "statsd":
+            return "metric_bucket"
+        elif ty == "check_in":
+            return "monitor"
         else:
             return "default"
 
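
With the new branches in `data_category`, a check-in travels as an envelope item of type `check_in` and is accounted under the `monitor` category. A small sketch with a toy payload:

```
from sentry_sdk.envelope import Envelope

envelope = Envelope()
envelope.add_checkin({"check_in_id": "abc123", "status": "ok"})  # toy payload

item = envelope.items[0]
assert item.type == "check_in"
assert item.data_category == "monitor"
```
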
diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py
index d2b57a2e45..a716d33433 100644
--- a/sentry_sdk/hub.py
+++ b/sentry_sdk/hub.py
@@ -1,36 +1,37 @@
 import copy
 import sys
-
-from datetime import datetime
 from contextlib import contextmanager
 
 from sentry_sdk._compat import with_metaclass
+from sentry_sdk.consts import INSTRUMENTER
 from sentry_sdk.scope import Scope
 from sentry_sdk.client import Client
-from sentry_sdk.tracing import Span, Transaction
-from sentry_sdk.session import Session
+from sentry_sdk.tracing import (
+    NoOpSpan,
+    Span,
+    Transaction,
+)
+
 from sentry_sdk.utils import (
-    exc_info_from_error,
-    event_from_exception,
     logger,
     ContextVar,
 )
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
-    from typing import Union
+if TYPE_CHECKING:
     from typing import Any
-    from typing import Optional
-    from typing import Tuple
-    from typing import Dict
-    from typing import List
     from typing import Callable
+    from typing import ContextManager
+    from typing import Dict
     from typing import Generator
+    from typing import List
+    from typing import Optional
+    from typing import overload
+    from typing import Tuple
     from typing import Type
     from typing import TypeVar
-    from typing import overload
-    from typing import ContextManager
+    from typing import Union
 
     from sentry_sdk.integrations import Integration
     from sentry_sdk._types import (
@@ -39,6 +40,7 @@
         Breadcrumb,
         BreadcrumbHint,
         ExcInfo,
+        LogLevelStr,
     )
     from sentry_sdk.consts import ClientConstructor
 
@@ -54,24 +56,6 @@ def overload(x):
 _local = ContextVar("sentry_current_hub")
 
 
-def _update_scope(base, scope_change, scope_kwargs):
-    # type: (Scope, Optional[Any], Dict[str, Any]) -> Scope
-    if scope_change and scope_kwargs:
-        raise TypeError("cannot provide scope and kwargs")
-    if scope_change is not None:
-        final_scope = copy.copy(base)
-        if callable(scope_change):
-            scope_change(final_scope)
-        else:
-            final_scope.update_from_scope(scope_change)
-    elif scope_kwargs:
-        final_scope = copy.copy(base)
-        final_scope.update_from_kwargs(**scope_kwargs)
-    else:
-        final_scope = base
-    return final_scope
-
-
 def _should_send_default_pii():
     # type: () -> bool
     client = Hub.current.client
@@ -96,6 +80,20 @@ def __exit__(self, exc_type, exc_value, tb):
             c.close()
 
 
+def _check_python_deprecations():
+    # type: () -> None
+    version = sys.version_info[:2]
+
+    if version == (3, 4) or version == (3, 5):
+        logger.warning(
+            "sentry-sdk 2.0.0 will drop support for Python %s.",
+            "{}.{}".format(*version),
+        )
+        logger.warning(
+            "Please upgrade to the latest version to continue receiving upgrades and bugfixes."
+        )
+
+
 def _init(*args, **kwargs):
     # type: (*Optional[str], **Any) -> ContextManager[Any]
     """Initializes the SDK and optionally integrations.
@@ -104,13 +102,14 @@ def _init(*args, **kwargs):
     """
     client = Client(*args, **kwargs)  # type: ignore
     Hub.current.bind_client(client)
+    _check_python_deprecations()
     rv = _InitGuard(client)
     return rv
 
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     # Make mypy, PyCharm and other static analyzers think `init` is a type to
     # have nicer autocompletion for params.
     #
@@ -206,7 +205,7 @@ class Hub(with_metaclass(HubMeta)):  # type: ignore
 
     # Mypy doesn't pick up on the metaclass.
 
-    if MYPY:
+    if TYPE_CHECKING:
         current = None  # type: Hub
         main = None  # type: Hub
 
@@ -267,18 +266,9 @@ def get_integration(
         If the return value is not `None` the hub is guaranteed to have a
         client attached.
         """
-        if isinstance(name_or_class, str):
-            integration_name = name_or_class
-        elif name_or_class.identifier is not None:
-            integration_name = name_or_class.identifier
-        else:
-            raise ValueError("Integration has no name")
-
         client = self.client
         if client is not None:
-            rv = client.integrations.get(integration_name)
-            if rv is not None:
-                return rv
+            return client.get_integration(name_or_class)
 
     @property
     def client(self):
@@ -294,7 +284,15 @@ def scope(self):
 
     def last_event_id(self):
         # type: () -> Optional[str]
-        """Returns the last event ID."""
+        """
+        Returns the last event ID.
+
+        .. deprecated:: 1.40.5
+            This function is deprecated and will be removed in a future release. The functions `capture_event`, `capture_message`, and `capture_exception` return the event ID directly.
+        """
+        logger.warning(
+            "Deprecated: last_event_id is deprecated. This will be removed in the future. The functions `capture_event`, `capture_message`, and `capture_exception` return the event ID directly."
+        )
         return self._last_event_id
 
     def bind_client(
@@ -305,74 +303,100 @@ def bind_client(
         top = self._stack[-1]
         self._stack[-1] = (new, top[1])
 
-    def capture_event(
-        self,
-        event,  # type: Event
-        hint=None,  # type: Optional[Hint]
-        scope=None,  # type: Optional[Any]
-        **scope_args  # type: Any
-    ):
-        # type: (...) -> Optional[str]
-        """Captures an event. Alias of :py:meth:`sentry_sdk.Client.capture_event`."""
+    def capture_event(self, event, hint=None, scope=None, **scope_kwargs):
+        # type: (Event, Optional[Hint], Optional[Scope], Any) -> Optional[str]
+        """
+        Captures an event.
+
+        Alias of :py:meth:`sentry_sdk.Scope.capture_event`.
+
+        :param event: A ready-made event that can be directly sent to Sentry.
+
+        :param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or a HTTP request object.
+
+        :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
+            The `scope` and `scope_kwargs` parameters are mutually exclusive.
+
+        :param scope_kwargs: Optional data to apply to event.
+            For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
+            The `scope` and `scope_kwargs` parameters are mutually exclusive.
+        """
         client, top_scope = self._stack[-1]
-        scope = _update_scope(top_scope, scope, scope_args)
-        if client is not None:
-            is_transaction = event.get("type") == "transaction"
-            rv = client.capture_event(event, hint, scope)
-            if rv is not None and not is_transaction:
-                self._last_event_id = rv
-            return rv
-        return None
+        if client is None:
+            return None
 
-    def capture_message(
-        self,
-        message,  # type: str
-        level=None,  # type: Optional[str]
-        scope=None,  # type: Optional[Any]
-        **scope_args  # type: Any
-    ):
-        # type: (...) -> Optional[str]
-        """Captures a message.  The message is just a string.  If no level
-        is provided the default level is `info`.
+        last_event_id = top_scope.capture_event(
+            event, hint, client=client, scope=scope, **scope_kwargs
+        )
+
+        is_transaction = event.get("type") == "transaction"
+        if last_event_id is not None and not is_transaction:
+            self._last_event_id = last_event_id
+
+        return last_event_id
+
+    def capture_message(self, message, level=None, scope=None, **scope_kwargs):
+        # type: (str, Optional[LogLevelStr], Optional[Scope], Any) -> Optional[str]
+        """
+        Captures a message.
+
+        Alias of :py:meth:`sentry_sdk.Scope.capture_message`.
+
+        :param message: The string to send as the message to Sentry.
+
+        :param level: If no level is provided, the default level is `info`.
+
+        :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
+            The `scope` and `scope_kwargs` parameters are mutually exclusive.
+
+        :param scope_kwargs: Optional data to apply to event.
+            For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
+            The `scope` and `scope_kwargs` parameters are mutually exclusive.
 
         :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
         """
-        if self.client is None:
+        client, top_scope = self._stack[-1]
+        if client is None:
             return None
-        if level is None:
-            level = "info"
-        return self.capture_event(
-            {"message": message, "level": level}, scope=scope, **scope_args
+
+        last_event_id = top_scope.capture_message(
+            message, level=level, client=client, scope=scope, **scope_kwargs
         )
 
-    def capture_exception(
-        self,
-        error=None,  # type: Optional[Union[BaseException, ExcInfo]]
-        scope=None,  # type: Optional[Any]
-        **scope_args  # type: Any
-    ):
-        # type: (...) -> Optional[str]
+        if last_event_id is not None:
+            self._last_event_id = last_event_id
+
+        return last_event_id
+
+    def capture_exception(self, error=None, scope=None, **scope_kwargs):
+        # type: (Optional[Union[BaseException, ExcInfo]], Optional[Scope], Any) -> Optional[str]
         """Captures an exception.
 
-        :param error: An exception to catch. If `None`, `sys.exc_info()` will be used.
+        Alias of :py:meth:`sentry_sdk.Scope.capture_exception`.
+
+        :param error: An exception to capture. If `None`, `sys.exc_info()` will be used.
+
+        :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
+            The `scope` and `scope_kwargs` parameters are mutually exclusive.
+
+        :param scope_kwargs: Optional data to apply to event.
+            For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
+            The `scope` and `scope_kwargs` parameters are mutually exclusive.
 
         :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
         """
-        client = self.client
+        client, top_scope = self._stack[-1]
         if client is None:
             return None
-        if error is not None:
-            exc_info = exc_info_from_error(error)
-        else:
-            exc_info = sys.exc_info()
 
-        event, hint = event_from_exception(exc_info, client_options=client.options)
-        try:
-            return self.capture_event(event, hint=hint, scope=scope, **scope_args)
-        except Exception:
-            self._capture_internal_exception(sys.exc_info())
+        last_event_id = top_scope.capture_exception(
+            error, client=client, scope=scope, **scope_kwargs
+        )
 
-        return None
+        if last_event_id is not None:
+            self._last_event_id = last_event_id
+
+        return last_event_id
 
     def _capture_internal_exception(
         self, exc_info  # type: Any
@@ -382,17 +406,14 @@ def _capture_internal_exception(
         Capture an exception that is likely caused by a bug in the SDK
         itself.
 
+        Duplicated in :py:meth:`sentry_sdk.Client._capture_internal_exception`.
+
         These exceptions do not end up in Sentry and are just logged instead.
         """
         logger.error("Internal error in sentry_sdk", exc_info=exc_info)
 
-    def add_breadcrumb(
-        self,
-        crumb=None,  # type: Optional[Breadcrumb]
-        hint=None,  # type: Optional[BreadcrumbHint]
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> None
+    def add_breadcrumb(self, crumb=None, hint=None, **kwargs):
+        # type: (Optional[Breadcrumb], Optional[BreadcrumbHint], Any) -> None
         """
         Adds a breadcrumb.
 
@@ -406,82 +427,38 @@ def add_breadcrumb(
             logger.info("Dropped breadcrumb because no client bound")
             return
 
-        crumb = dict(crumb or ())  # type: Breadcrumb
-        crumb.update(kwargs)
-        if not crumb:
-            return
-
-        hint = dict(hint or ())  # type: Hint
-
-        if crumb.get("timestamp") is None:
-            crumb["timestamp"] = datetime.utcnow()
-        if crumb.get("type") is None:
-            crumb["type"] = "default"
-
-        if client.options["before_breadcrumb"] is not None:
-            new_crumb = client.options["before_breadcrumb"](crumb, hint)
-        else:
-            new_crumb = crumb
-
-        if new_crumb is not None:
-            scope._breadcrumbs.append(new_crumb)
-        else:
-            logger.info("before breadcrumb dropped breadcrumb (%s)", crumb)
+        kwargs["client"] = client
 
-        max_breadcrumbs = client.options["max_breadcrumbs"]  # type: int
-        while len(scope._breadcrumbs) > max_breadcrumbs:
-            scope._breadcrumbs.popleft()
+        scope.add_breadcrumb(crumb, hint, **kwargs)
 
-    def start_span(
-        self,
-        span=None,  # type: Optional[Span]
-        **kwargs  # type: Any
-    ):
-        # type: (...) -> Span
+    def start_span(self, span=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
+        # type: (Optional[Span], str, Any) -> Span
         """
-        Create and start timing a new span whose parent is the currently active
-        span or transaction, if any. The return value is a span instance,
+        Start a span whose parent is the currently active span or transaction, if any.
+
+        The return value is a :py:class:`sentry_sdk.tracing.Span` instance,
         typically used as a context manager to start and stop timing in a `with`
         block.
 
         Only spans contained in a transaction are sent to Sentry. Most
         integrations start a transaction at the appropriate time, for example
-        for every incoming HTTP request. Use `start_transaction` to start a new
-        transaction when one is not already in progress.
-        """
-        # TODO: consider removing this in a future release.
-        # This is for backwards compatibility with releases before
-        # start_transaction existed, to allow for a smoother transition.
-        if isinstance(span, Transaction) or "transaction" in kwargs:
-            deprecation_msg = (
-                "Deprecated: use start_transaction to start transactions and "
-                "Transaction.start_child to start spans."
-            )
-            if isinstance(span, Transaction):
-                logger.warning(deprecation_msg)
-                return self.start_transaction(span)
-            if "transaction" in kwargs:
-                logger.warning(deprecation_msg)
-                name = kwargs.pop("transaction")
-                return self.start_transaction(name=name, **kwargs)
+        for every incoming HTTP request. Use
+        :py:meth:`sentry_sdk.start_transaction` to start a new transaction when
+        one is not already in progress.
 
-        if span is not None:
-            return span
-
-        kwargs.setdefault("hub", self)
+        For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`.
+        """
+        client, scope = self._stack[-1]
 
-        span = self.scope.span
-        if span is not None:
-            return span.start_child(**kwargs)
+        kwargs["hub"] = self
+        kwargs["client"] = client
 
-        return Span(**kwargs)
+        return scope.start_span(span=span, instrumenter=instrumenter, **kwargs)
 
     def start_transaction(
-        self,
-        transaction=None,  # type: Optional[Transaction]
-        **kwargs  # type: Any
+        self, transaction=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs
     ):
-        # type: (...) -> Transaction
+        # type: (Optional[Transaction], str, Any) -> Union[Transaction, NoOpSpan]
         """
         Start and return a transaction.
 
@@ -503,35 +480,31 @@ def start_transaction(
 
         When the transaction is finished, it will be sent to Sentry with all its
         finished child spans.
+
+        For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Transaction`.
         """
-        custom_sampling_context = kwargs.pop("custom_sampling_context", {})
+        client, scope = self._stack[-1]
 
-        # if we haven't been given a transaction, make one
-        if transaction is None:
-            kwargs.setdefault("hub", self)
-            transaction = Transaction(**kwargs)
+        kwargs["hub"] = self
+        kwargs["client"] = client
 
-        # use traces_sample_rate, traces_sampler, and/or inheritance to make a
-        # sampling decision
-        sampling_context = {
-            "transaction_context": transaction.to_json(),
-            "parent_sampled": transaction.parent_sampled,
-        }
-        sampling_context.update(custom_sampling_context)
-        transaction._set_initial_sampling_decision(sampling_context=sampling_context)
+        return scope.start_transaction(
+            transaction=transaction, instrumenter=instrumenter, **kwargs
+        )
 
-        # we don't bother to keep spans if we already know we're not going to
-        # send the transaction
-        if transaction.sampled:
-            max_spans = (
-                self.client and self.client.options["_experiments"].get("max_spans")
-            ) or 1000
-            transaction.init_span_recorder(maxlen=max_spans)
+    def continue_trace(self, environ_or_headers, op=None, name=None, source=None):
+        # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction
+        """
+        Sets the propagation context from environment or headers and returns a transaction.
+        """
+        scope = self._stack[-1][1]
 
-        return transaction
+        return scope.continue_trace(
+            environ_or_headers=environ_or_headers, op=op, name=name, source=source
+        )
 
     @overload
-    def push_scope(  # noqa: F811
+    def push_scope(
         self, callback=None  # type: Optional[None]
     ):
         # type: (...) -> ContextManager[Scope]
@@ -545,7 +518,9 @@ def push_scope(  # noqa: F811
         pass
 
     def push_scope(  # noqa
-        self, callback=None  # type: Optional[Callable[[Scope], None]]
+        self,
+        callback=None,  # type: Optional[Callable[[Scope], None]]
+        continue_trace=True,  # type: bool
     ):
         # type: (...) -> Optional[ContextManager[Scope]]
         """
@@ -563,7 +538,13 @@ def push_scope(  # noqa
             return None
 
         client, scope = self._stack[-1]
-        new_layer = (client, copy.copy(scope))
+
+        new_scope = copy.copy(scope)
+
+        if continue_trace:
+            new_scope.generate_propagation_context()
+
+        new_layer = (client, new_scope)
         self._stack.append(new_layer)
 
         return _ScopeManager(self)
@@ -580,7 +561,7 @@ def pop_scope_unsafe(self):
         return rv
 
     @overload
-    def configure_scope(  # noqa: F811
+    def configure_scope(
         self, callback=None  # type: Optional[None]
     ):
         # type: (...) -> ContextManager[Scope]
@@ -594,8 +575,10 @@ def configure_scope(  # noqa: F811
         pass
 
     def configure_scope(  # noqa
-        self, callback=None  # type: Optional[Callable[[Scope], None]]
-    ):  # noqa
+        self,
+        callback=None,  # type: Optional[Callable[[Scope], None]]
+        continue_trace=True,  # type: bool
+    ):
         # type: (...) -> Optional[ContextManager[Scope]]
 
         """
@@ -607,6 +590,10 @@ def configure_scope(  # noqa
         """
 
         client, scope = self._stack[-1]
+
+        if continue_trace:
+            scope.generate_propagation_context()
+
         if callback is not None:
             if client is not None:
                 callback(scope)
@@ -628,12 +615,9 @@ def start_session(
     ):
         # type: (...) -> None
         """Starts a new session."""
-        self.end_session()
         client, scope = self._stack[-1]
-        scope._session = Session(
-            release=client.options["release"] if client else None,
-            environment=client.options["environment"] if client else None,
-            user=scope._user,
+        scope.start_session(
+            client=client,
             session_mode=session_mode,
         )
 
@@ -641,13 +625,7 @@ def end_session(self):
         # type: (...) -> None
         """Ends the current session if there is one."""
         client, scope = self._stack[-1]
-        session = scope._session
-        self.scope._session = None
-
-        if session is not None:
-            session.close()
-            if client is not None:
-                client.capture_session(session)
+        scope.end_session(client=client)
 
     def stop_auto_session_tracking(self):
         # type: (...) -> None
@@ -656,9 +634,8 @@ def stop_auto_session_tracking(self):
         This temporarily disables session tracking for the current scope when called.
         To resume session tracking call `resume_auto_session_tracking`.
         """
-        self.end_session()
         client, scope = self._stack[-1]
-        scope._force_auto_session_tracking = False
+        scope.stop_auto_session_tracking(client=client)
 
     def resume_auto_session_tracking(self):
         # type: (...) -> None
@@ -666,8 +643,8 @@ def resume_auto_session_tracking(self):
         disabled earlier. This requires that automatic session tracking
         is enabled in general.
         """
-        client, scope = self._stack[-1]
-        scope._force_auto_session_tracking = None
+        scope = self._stack[-1][1]
+        scope.resume_auto_session_tracking()
 
     def flush(
         self,
@@ -682,6 +659,27 @@ def flush(
         if client is not None:
             return client.flush(timeout=timeout, callback=callback)
 
+    def get_traceparent(self):
+        # type: () -> Optional[str]
+        """
+        Returns the traceparent either from the active span or from the scope.
+        """
+        client, scope = self._stack[-1]
+        return scope.get_traceparent(client=client)
+
+    def get_baggage(self):
+        # type: () -> Optional[str]
+        """
+        Returns Baggage either from the active span or from the scope.
+        """
+        client, scope = self._stack[-1]
+        baggage = scope.get_baggage(client=client)
+
+        if baggage is not None:
+            return baggage.serialize()
+
+        return None
+
     def iter_trace_propagation_headers(self, span=None):
         # type: (Optional[Span]) -> Generator[Tuple[str, str], None, None]
         """
@@ -689,18 +687,23 @@ def iter_trace_propagation_headers(self, span=None):
         from the span representing the request, if available, or the current
         span on the scope if not.
         """
-        span = span or self.scope.span
-        if not span:
-            return
+        client, scope = self._stack[-1]
 
-        client = self._stack[-1][0]
+        return scope.iter_trace_propagation_headers(span=span, client=client)
 
-        propagate_traces = client and client.options["propagate_traces"]
-        if not propagate_traces:
-            return
+    def trace_propagation_meta(self, span=None):
+        # type: (Optional[Span]) -> str
+        """
+        Return meta tags which should be injected into HTML templates
+        to allow propagation of trace information.
+        """
+        if span is not None:
+            logger.warning(
+                "The parameter `span` in trace_propagation_meta() is deprecated and will be removed in the future."
+            )
 
-        for header in span.iter_headers():
-            yield header
+        client, scope = self._stack[-1]
+        return scope.trace_propagation_meta(span=span, client=client)
 
 
 GLOBAL_HUB = Hub()
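
After this refactor all three `capture_*` methods delegate to the scope and return the event ID directly, which is what deprecates `last_event_id()`. A usage sketch; with an empty DSN the client is a no-op and `None` comes back, but the API shape is the same:

```
import sentry_sdk
from sentry_sdk import Hub

sentry_sdk.init(dsn="")  # no-op client: events are discarded

hub = Hub.current
event_id = hub.capture_message("deploy finished", level="info")

# scope_kwargs apply to this one event only; `scope` and scope_kwargs are
# mutually exclusive (see Scope.update_from_kwargs for supported keys).
hub.capture_message("tagged message", tags={"deploy": "2024-01"})
```
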
diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py
index 114a3a1f41..c9737ae589 100644
--- a/sentry_sdk/integrations/__init__.py
+++ b/sentry_sdk/integrations/__init__.py
@@ -1,29 +1,34 @@
-"""This package"""
 from __future__ import absolute_import
-
 from threading import Lock
 
 from sentry_sdk._compat import iteritems
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.utils import logger
 
-from sentry_sdk._types import MYPY
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Callable
     from typing import Dict
     from typing import Iterator
     from typing import List
     from typing import Set
-    from typing import Tuple
     from typing import Type
 
 
 _installer_lock = Lock()
+
+# Set of all integration identifiers we have attempted to install
+_processed_integrations = set()  # type: Set[str]
+
+# Set of all integration identifiers we have actually installed
 _installed_integrations = set()  # type: Set[str]
 
 
-def _generate_default_integrations_iterator(integrations, auto_enabling_integrations):
-    # type: (Tuple[str, ...], Tuple[str, ...]) -> Callable[[bool], Iterator[Type[Integration]]]
+def _generate_default_integrations_iterator(
+    integrations,  # type: List[str]
+    auto_enabling_integrations,  # type: List[str]
+):
+    # type: (...) -> Callable[[bool], Iterator[Type[Integration]]]
 
     def iter_default_integrations(with_auto_enabling_integrations):
         # type: (bool) -> Iterator[Type[Integration]]
@@ -51,35 +56,41 @@ def iter_default_integrations(with_auto_enabling_integrations):
     return iter_default_integrations
 
 
-_AUTO_ENABLING_INTEGRATIONS = (
-    "sentry_sdk.integrations.django.DjangoIntegration",
-    "sentry_sdk.integrations.flask.FlaskIntegration",
+_DEFAULT_INTEGRATIONS = [
+    # stdlib/base runtime integrations
+    "sentry_sdk.integrations.argv.ArgvIntegration",
+    "sentry_sdk.integrations.atexit.AtexitIntegration",
+    "sentry_sdk.integrations.dedupe.DedupeIntegration",
+    "sentry_sdk.integrations.excepthook.ExcepthookIntegration",
+    "sentry_sdk.integrations.logging.LoggingIntegration",
+    "sentry_sdk.integrations.modules.ModulesIntegration",
+    "sentry_sdk.integrations.stdlib.StdlibIntegration",
+    "sentry_sdk.integrations.threading.ThreadingIntegration",
+]
+
+_AUTO_ENABLING_INTEGRATIONS = [
+    "sentry_sdk.integrations.aiohttp.AioHttpIntegration",
+    "sentry_sdk.integrations.boto3.Boto3Integration",
     "sentry_sdk.integrations.bottle.BottleIntegration",
-    "sentry_sdk.integrations.falcon.FalconIntegration",
-    "sentry_sdk.integrations.sanic.SanicIntegration",
     "sentry_sdk.integrations.celery.CeleryIntegration",
+    "sentry_sdk.integrations.django.DjangoIntegration",
+    "sentry_sdk.integrations.falcon.FalconIntegration",
+    "sentry_sdk.integrations.fastapi.FastApiIntegration",
+    "sentry_sdk.integrations.flask.FlaskIntegration",
+    "sentry_sdk.integrations.httpx.HttpxIntegration",
+    "sentry_sdk.integrations.openai.OpenAIIntegration",
+    "sentry_sdk.integrations.pyramid.PyramidIntegration",
+    "sentry_sdk.integrations.redis.RedisIntegration",
     "sentry_sdk.integrations.rq.RqIntegration",
-    "sentry_sdk.integrations.aiohttp.AioHttpIntegration",
-    "sentry_sdk.integrations.tornado.TornadoIntegration",
+    "sentry_sdk.integrations.sanic.SanicIntegration",
     "sentry_sdk.integrations.sqlalchemy.SqlalchemyIntegration",
-    "sentry_sdk.integrations.redis.RedisIntegration",
-    "sentry_sdk.integrations.pyramid.PyramidIntegration",
-    "sentry_sdk.integrations.boto3.Boto3Integration",
-)
+    "sentry_sdk.integrations.starlette.StarletteIntegration",
+    "sentry_sdk.integrations.tornado.TornadoIntegration",
+]
 
 
 iter_default_integrations = _generate_default_integrations_iterator(
-    integrations=(
-        # stdlib/base runtime integrations
-        "sentry_sdk.integrations.logging.LoggingIntegration",
-        "sentry_sdk.integrations.stdlib.StdlibIntegration",
-        "sentry_sdk.integrations.excepthook.ExcepthookIntegration",
-        "sentry_sdk.integrations.dedupe.DedupeIntegration",
-        "sentry_sdk.integrations.atexit.AtexitIntegration",
-        "sentry_sdk.integrations.modules.ModulesIntegration",
-        "sentry_sdk.integrations.argv.ArgvIntegration",
-        "sentry_sdk.integrations.threading.ThreadingIntegration",
-    ),
+    integrations=_DEFAULT_INTEGRATIONS,
     auto_enabling_integrations=_AUTO_ENABLING_INTEGRATIONS,
 )
 
@@ -90,8 +101,10 @@ def setup_integrations(
     integrations, with_defaults=True, with_auto_enabling_integrations=False
 ):
     # type: (List[Integration], bool, bool) -> Dict[str, Integration]
-    """Given a list of integration instances this installs them all.  When
-    `with_defaults` is set to `True` then all default integrations are added
+    """
+    Given a list of integration instances, this installs them all.
+
+    When `with_defaults` is set to `True` all default integrations are added
     unless they were already provided before.
     """
     integrations = dict(
@@ -114,7 +127,7 @@ def setup_integrations(
 
     for identifier, integration in iteritems(integrations):
         with _installer_lock:
-            if identifier not in _installed_integrations:
+            if identifier not in _processed_integrations:
                 logger.debug(
                     "Setting up previously not enabled integration %s", identifier
                 )
@@ -137,8 +150,16 @@ def setup_integrations(
                     logger.debug(
                         "Did not enable default integration %s: %s", identifier, e
                     )
+                else:
+                    _installed_integrations.add(identifier)
+
+                _processed_integrations.add(identifier)
 
-                _installed_integrations.add(identifier)
+    integrations = {
+        identifier: integration
+        for identifier, integration in iteritems(integrations)
+        if identifier in _installed_integrations
+    }
 
     for identifier in integrations:
         logger.debug("Enabling integration %s", identifier)
@@ -146,7 +167,7 @@ def setup_integrations(
     return integrations
 
 
-class DidNotEnable(Exception):
+class DidNotEnable(Exception):  # noqa: N818
     """
     The integration could not be enabled due to a trivial user error like
     `flask` not being installed for the `FlaskIntegration`.
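
The split into `_processed_integrations` and `_installed_integrations` means an integration whose `setup_once` raises `DidNotEnable` is remembered (and not retried) but never reported as installed. A sketch of that contract for a custom integration; `my_lib` is a hypothetical dependency:

```
from sentry_sdk.integrations import DidNotEnable, Integration


class MyLibIntegration(Integration):  # hypothetical custom integration
    identifier = "my_lib"

    @staticmethod
    def setup_once():
        try:
            import my_lib  # noqa: F401  (hypothetical dependency)
        except ImportError:
            raise DidNotEnable("my_lib not installed")
        # ... patch my_lib entry points here ...
```
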
diff --git a/sentry_sdk/integrations/_asgi_common.py b/sentry_sdk/integrations/_asgi_common.py
new file mode 100644
index 0000000000..41946cc7c2
--- /dev/null
+++ b/sentry_sdk/integrations/_asgi_common.py
@@ -0,0 +1,104 @@
+import urllib.parse
+
+from sentry_sdk.hub import _should_send_default_pii
+from sentry_sdk.integrations._wsgi_common import _filter_headers
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Dict
+    from typing import Optional
+    from typing_extensions import Literal
+
+
+def _get_headers(asgi_scope):
+    # type: (Any) -> Dict[str, str]
+    """
+    Extract headers from the ASGI scope, in the format that the Sentry protocol expects.
+    """
+    headers = {}  # type: Dict[str, str]
+    for raw_key, raw_value in asgi_scope["headers"]:
+        key = raw_key.decode("latin-1")
+        value = raw_value.decode("latin-1")
+        if key in headers:
+            headers[key] = headers[key] + ", " + value
+        else:
+            headers[key] = value
+
+    return headers
+
+
+def _get_url(asgi_scope, default_scheme, host):
+    # type: (Dict[str, Any], Literal["ws", "http"], Optional[str]) -> str
+    """
+    Extract the URL from the ASGI scope, not including the querystring.
+    """
+    scheme = asgi_scope.get("scheme", default_scheme)
+
+    server = asgi_scope.get("server", None)
+    path = asgi_scope.get("root_path", "") + asgi_scope.get("path", "")
+
+    if host:
+        return "%s://%s%s" % (scheme, host, path)
+
+    if server is not None:
+        host, port = server
+        default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}.get(scheme)
+        if port != default_port:
+            return "%s://%s:%s%s" % (scheme, host, port, path)
+        return "%s://%s%s" % (scheme, host, path)
+    return path
+
+
+def _get_query(asgi_scope):
+    # type: (Any) -> Any
+    """
+    Extract querystring from the ASGI scope, in the format that the Sentry protocol expects.
+    """
+    qs = asgi_scope.get("query_string")
+    if not qs:
+        return None
+    return urllib.parse.unquote(qs.decode("latin-1"))
+
+
+def _get_ip(asgi_scope):
+    # type: (Any) -> str
+    """
+    Extract IP Address from the ASGI scope based on request headers with fallback to scope client.
+    """
+    headers = _get_headers(asgi_scope)
+    try:
+        return headers["x-forwarded-for"].split(",")[0].strip()
+    except (KeyError, IndexError):
+        pass
+
+    try:
+        return headers["x-real-ip"]
+    except KeyError:
+        pass
+
+    return asgi_scope.get("client")[0]
+
+
+def _get_request_data(asgi_scope):
+    # type: (Any) -> Dict[str, Any]
+    """
+    Returns data related to the HTTP request from the ASGI scope.
+    """
+    request_data = {}  # type: Dict[str, Any]
+    ty = asgi_scope["type"]
+    if ty in ("http", "websocket"):
+        request_data["method"] = asgi_scope.get("method")
+
+        request_data["headers"] = headers = _filter_headers(_get_headers(asgi_scope))
+        request_data["query_string"] = _get_query(asgi_scope)
+
+        request_data["url"] = _get_url(
+            asgi_scope, "http" if ty == "http" else "ws", headers.get("host")
+        )
+
+    client = asgi_scope.get("client")
+    if client and _should_send_default_pii():
+        request_data["env"] = {"REMOTE_ADDR": _get_ip(asgi_scope)}
+
+    return request_data
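
A sketch of what these helpers produce for a minimal ASGI scope (the host comes from the `Host` header, so no port is appended):

```
from sentry_sdk.integrations._asgi_common import _get_request_data

asgi_scope = {
    "type": "http",
    "method": "GET",
    "scheme": "https",
    "server": ("example.com", 443),
    "path": "/users",
    "query_string": b"page=2",
    "headers": [(b"host", b"example.com")],
    "client": ("10.0.0.1", 54321),
}

data = _get_request_data(asgi_scope)
assert data["url"] == "https://example.com/users"
assert data["query_string"] == "page=2"
assert data["method"] == "GET"
```
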
diff --git a/sentry_sdk/integrations/_wsgi_common.py b/sentry_sdk/integrations/_wsgi_common.py
index 4f253acc35..b72ebde126 100644
--- a/sentry_sdk/integrations/_wsgi_common.py
+++ b/sentry_sdk/integrations/_wsgi_common.py
@@ -1,18 +1,28 @@
+from __future__ import absolute_import
+
 import json
+from copy import deepcopy
 
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.utils import AnnotatedValue
 from sentry_sdk._compat import text_type, iteritems
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
+
+try:
+    from django.http.request import RawPostDataException
+except ImportError:
+    # Stand-in so the `except (RawPostDataException, ValueError)` clause
+    # below stays valid when Django is not installed.
+    class RawPostDataException(Exception):
+        pass
 
-if MYPY:
+
+if TYPE_CHECKING:
     import sentry_sdk
 
     from typing import Any
     from typing import Dict
     from typing import Optional
     from typing import Union
+    from sentry_sdk._types import Event
 
 
 SENSITIVE_ENV_KEYS = (
@@ -36,7 +46,7 @@ def request_body_within_bounds(client, content_length):
     if client is None:
         return False
 
-    bodies = client.options["request_bodies"]
+    bodies = client.options["max_request_body_size"]
     return not (
         bodies == "never"
         or (bodies == "small" and content_length > 10**3)
@@ -50,7 +60,7 @@ def __init__(self, request):
         self.request = request
 
     def extract_into_event(self, event):
-        # type: (Dict[str, Any]) -> None
+        # type: (Event) -> None
         client = Hub.current.client
         if client is None:
             return
@@ -64,26 +74,32 @@ def extract_into_event(self, event):
             request_info["cookies"] = dict(self.cookies())
 
         if not request_body_within_bounds(client, content_length):
-            data = AnnotatedValue(
-                "",
-                {"rem": [["!config", "x", 0, content_length]], "len": content_length},
-            )
+            data = AnnotatedValue.removed_because_over_size_limit()
         else:
+            # First read the raw body data
+            # It is important to read this first because on Django reading
+            # the raw body caches it, so the cached version can be read
+            # again later in parsed_body() (or json(), etc.).
+            raw_data = None
+            try:
+                raw_data = self.raw_data()
+            except (RawPostDataException, ValueError):
+                # If Django REST framework is used, it has already read the
+                # body for us, so reading it here will fail. We can ignore this.
+                pass
+
             parsed_body = self.parsed_body()
             if parsed_body is not None:
                 data = parsed_body
-            elif self.raw_data():
-                data = AnnotatedValue(
-                    "",
-                    {"rem": [["!raw", "x", 0, content_length]], "len": content_length},
-                )
+            elif raw_data:
+                data = AnnotatedValue.removed_because_raw_data()
             else:
                 data = None
 
         if data is not None:
             request_info["data"] = data
 
-        event["request"] = request_info
+        event["request"] = deepcopy(request_info)
 
     def content_length(self):
         # type: () -> int
@@ -110,11 +126,8 @@ def parsed_body(self):
         files = self.files()
         if form or files:
             data = dict(iteritems(form))
-            for k, v in iteritems(files):
-                size = self.size_of_file(v)
-                data[k] = AnnotatedValue(
-                    "", {"len": size, "rem": [["!raw", "x", 0, size]]}
-                )
+            for key, _ in iteritems(files):
+                data[key] = AnnotatedValue.removed_because_raw_data()
 
             return data
 
@@ -175,7 +188,7 @@ def _filter_headers(headers):
         k: (
             v
             if k.upper().replace("-", "_") not in SENSITIVE_HEADERS
-            else AnnotatedValue("", {"rem": [["!config", "x", 0, len(v)]]})
+            else AnnotatedValue.removed_because_over_size_limit()
         )
         for k, v in iteritems(headers)
     }
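
The option read here was renamed from `request_bodies` to `max_request_body_size`; it accepts `"never"`, `"small"`, `"medium"`, or `"always"`. A configuration sketch with a placeholder DSN:

```
import sentry_sdk

sentry_sdk.init(
    dsn="",  # placeholder DSN
    max_request_body_size="always",  # attach request bodies regardless of size
)
```
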
diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py
index 8a828b2fe3..19974030ed 100644
--- a/sentry_sdk/integrations/aiohttp.py
+++ b/sentry_sdk/integrations/aiohttp.py
@@ -1,21 +1,33 @@
 import sys
 import weakref
 
+from sentry_sdk.api import continue_trace
 from sentry_sdk._compat import reraise
+from sentry_sdk.consts import OP, SPANDATA
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.sessions import auto_session_tracking
 from sentry_sdk.integrations._wsgi_common import (
     _filter_headers,
     request_body_within_bounds,
 )
-from sentry_sdk.tracing import Transaction
+from sentry_sdk.tracing import (
+    BAGGAGE_HEADER_NAME,
+    SOURCE_FOR_STYLE,
+    TRANSACTION_SOURCE_ROUTE,
+)
+from sentry_sdk.tracing_utils import should_propagate_trace
 from sentry_sdk.utils import (
     capture_internal_exceptions,
     event_from_exception,
+    logger,
+    parse_url,
+    parse_version,
     transaction_from_function,
     HAS_REAL_CONTEXTVARS,
     CONTEXTVARS_ERROR_MESSAGE,
+    SENSITIVE_DATA_SUBSTITUTE,
     AnnotatedValue,
 )
 
@@ -23,24 +35,25 @@
     import asyncio
 
     from aiohttp import __version__ as AIOHTTP_VERSION
+    from aiohttp import ClientSession, TraceConfig
     from aiohttp.web import Application, HTTPException, UrlDispatcher
 except ImportError:
     raise DidNotEnable("AIOHTTP not installed")
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from aiohttp.web_request import Request
-    from aiohttp.abc import AbstractMatchInfo
+    from aiohttp.web_urldispatcher import UrlMappingMatchInfo
+    from aiohttp import TraceRequestStartParams, TraceRequestEndParams
+    from types import SimpleNamespace
     from typing import Any
-    from typing import Dict
     from typing import Optional
     from typing import Tuple
-    from typing import Callable
     from typing import Union
 
     from sentry_sdk.utils import ExcInfo
-    from sentry_sdk._types import EventProcessor
+    from sentry_sdk._types import Event, EventProcessor
 
 
 TRANSACTION_STYLE_VALUES = ("handler_name", "method_and_path_pattern")
@@ -62,10 +75,10 @@ def __init__(self, transaction_style="handler_name"):
     def setup_once():
         # type: () -> None
 
-        try:
-            version = tuple(map(int, AIOHTTP_VERSION.split(".")[:2]))
-        except (TypeError, ValueError):
-            raise DidNotEnable("AIOHTTP version unparsable: {}".format(AIOHTTP_VERSION))
+        version = parse_version(AIOHTTP_VERSION)
+
+        if version is None:
+            raise DidNotEnable("Unparsable AIOHTTP version: {}".format(AIOHTTP_VERSION))
 
         if version < (3, 4):
             raise DidNotEnable("AIOHTTP 3.4 or newer required.")
@@ -91,44 +104,48 @@ async def sentry_app_handle(self, request, *args, **kwargs):
             weak_request = weakref.ref(request)
 
             with Hub(hub) as hub:
-                # Scope data will not leak between requests because aiohttp
-                # create a task to wrap each request.
-                with hub.configure_scope() as scope:
-                    scope.clear_breadcrumbs()
-                    scope.add_event_processor(_make_request_processor(weak_request))
-
-                transaction = Transaction.continue_from_headers(
-                    request.headers,
-                    op="http.server",
-                    # If this transaction name makes it to the UI, AIOHTTP's
-                    # URL resolver did not find a route or died trying.
-                    name="generic AIOHTTP request",
-                )
-                with hub.start_transaction(
-                    transaction, custom_sampling_context={"aiohttp_request": request}
-                ):
-                    try:
-                        response = await old_handle(self, request)
-                    except HTTPException as e:
-                        transaction.set_http_status(e.status_code)
-                        raise
-                    except (asyncio.CancelledError, ConnectionResetError):
-                        transaction.set_status("cancelled")
-                        raise
-                    except Exception:
-                        # This will probably map to a 500 but seems like we
-                        # have no way to tell. Do not set span status.
-                        reraise(*_capture_exception(hub))
-
-                    transaction.set_http_status(response.status)
-                    return response
+                with auto_session_tracking(hub, session_mode="request"):
+                    # Scope data will not leak between requests because aiohttp
+                    # creates a task to wrap each request.
+                    with hub.configure_scope() as scope:
+                        scope.clear_breadcrumbs()
+                        scope.add_event_processor(_make_request_processor(weak_request))
+
+                    headers = dict(request.headers)
+                    transaction = continue_trace(
+                        headers,
+                        op=OP.HTTP_SERVER,
+                        # If this transaction name makes it to the UI, AIOHTTP's
+                        # URL resolver did not find a route or died trying.
+                        name="generic AIOHTTP request",
+                        source=TRANSACTION_SOURCE_ROUTE,
+                    )
+                    with hub.start_transaction(
+                        transaction,
+                        custom_sampling_context={"aiohttp_request": request},
+                    ):
+                        try:
+                            response = await old_handle(self, request)
+                        except HTTPException as e:
+                            transaction.set_http_status(e.status_code)
+                            raise
+                        except (asyncio.CancelledError, ConnectionResetError):
+                            transaction.set_status("cancelled")
+                            raise
+                        except Exception:
+                            # This will probably map to a 500 but seems like we
+                            # have no way to tell. Do not set span status.
+                            reraise(*_capture_exception(hub))
+
+                        transaction.set_http_status(response.status)
+                        return response
 
         Application._handle = sentry_app_handle
 
         old_urldispatcher_resolve = UrlDispatcher.resolve
 
         async def sentry_urldispatcher_resolve(self, request):
-            # type: (UrlDispatcher, Request) -> AbstractMatchInfo
+            # type: (UrlDispatcher, Request) -> UrlMappingMatchInfo
             rv = await old_urldispatcher_resolve(self, request)
 
             hub = Hub.current
@@ -148,20 +165,100 @@ async def sentry_urldispatcher_resolve(self, request):
 
             if name is not None:
                 with Hub.current.configure_scope() as scope:
-                    scope.transaction = name
+                    scope.set_transaction_name(
+                        name,
+                        source=SOURCE_FOR_STYLE[integration.transaction_style],
+                    )
 
             return rv
 
         UrlDispatcher.resolve = sentry_urldispatcher_resolve
 
+        old_client_session_init = ClientSession.__init__
+
+        def init(*args, **kwargs):
+            # type: (Any, Any) -> None
+            hub = Hub.current
+            if hub.get_integration(AioHttpIntegration) is None:
+                return old_client_session_init(*args, **kwargs)
+
+            client_trace_configs = list(kwargs.get("trace_configs") or ())
+            trace_config = create_trace_config()
+            client_trace_configs.append(trace_config)
+
+            kwargs["trace_configs"] = client_trace_configs
+            return old_client_session_init(*args, **kwargs)
+
+        ClientSession.__init__ = init
+
+
+def create_trace_config():
+    # type: () -> TraceConfig
+    async def on_request_start(session, trace_config_ctx, params):
+        # type: (ClientSession, SimpleNamespace, TraceRequestStartParams) -> None
+        hub = Hub.current
+        if hub.get_integration(AioHttpIntegration) is None:
+            return
+
+        method = params.method.upper()
+
+        parsed_url = None
+        with capture_internal_exceptions():
+            parsed_url = parse_url(str(params.url), sanitize=False)
+
+        span = hub.start_span(
+            op=OP.HTTP_CLIENT,
+            description="%s %s"
+            % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE),
+        )
+        span.set_data(SPANDATA.HTTP_METHOD, method)
+        if parsed_url is not None:
+            span.set_data("url", parsed_url.url)
+            span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
+            span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)
+
+        if should_propagate_trace(hub, str(params.url)):
+            for key, value in hub.iter_trace_propagation_headers(span):
+                logger.debug(
+                    "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
+                        key=key, value=value, url=params.url
+                    )
+                )
+                if key == BAGGAGE_HEADER_NAME and params.headers.get(
+                    BAGGAGE_HEADER_NAME
+                ):
+                    # do not overwrite any existing baggage, just append to it
+                    params.headers[key] += "," + value
+                else:
+                    params.headers[key] = value
+
+        trace_config_ctx.span = span
+
+    async def on_request_end(session, trace_config_ctx, params):
+        # type: (ClientSession, SimpleNamespace, TraceRequestEndParams) -> None
+        if trace_config_ctx.span is None:
+            return
+
+        span = trace_config_ctx.span
+        span.set_http_status(int(params.response.status))
+        span.set_data("reason", params.response.reason)
+        span.finish()
+
+    trace_config = TraceConfig()
+
+    trace_config.on_request_start.append(on_request_start)
+    trace_config.on_request_end.append(on_request_end)
+
+    return trace_config
+
 
 def _make_request_processor(weak_request):
-    # type: (Callable[[], Request]) -> EventProcessor
+    # type: (weakref.ReferenceType[Request]) -> EventProcessor
     def aiohttp_processor(
-        event,  # type: Dict[str, Any]
-        hint,  # type: Dict[str, Tuple[type, BaseException, Any]]
+        event,  # type: Event
+        hint,  # type: dict[str, Tuple[type, BaseException, Any]]
     ):
-        # type: (...) -> Dict[str, Any]
+        # type: (...) -> Event
         request = weak_request()
         if request is None:
             return event
@@ -214,11 +311,8 @@ def get_aiohttp_request_data(hub, request):
     if bytes_body is not None:
         # we have body to show
         if not request_body_within_bounds(hub.client, len(bytes_body)):
+            return AnnotatedValue.removed_because_over_size_limit()
 
-            return AnnotatedValue(
-                "",
-                {"rem": [["!config", "x", 0, len(bytes_body)]], "len": len(bytes_body)},
-            )
         encoding = request.charset or "utf-8"
         return bytes_body.decode(encoding, "replace")
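
The client-side instrumentation above wraps every outgoing `ClientSession` request in an `http.client` span and appends trace-propagation headers, adding to an existing `baggage` header rather than overwriting it. A minimal usage sketch (the DSN and URL are placeholders):

```python
import aiohttp
import sentry_sdk
from sentry_sdk.integrations.aiohttp import AioHttpIntegration

sentry_sdk.init(dsn="...", traces_sample_rate=1.0,
                integrations=[AioHttpIntegration()])

async def fetch(url):
    # The patched ClientSession.__init__ appends Sentry's TraceConfig,
    # so any user-supplied trace_configs keep working alongside it.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            return await resp.text()
```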
 
diff --git a/sentry_sdk/integrations/argv.py b/sentry_sdk/integrations/argv.py
index f005521d32..fea08619d5 100644
--- a/sentry_sdk/integrations/argv.py
+++ b/sentry_sdk/integrations/argv.py
@@ -6,9 +6,9 @@
 from sentry_sdk.integrations import Integration
 from sentry_sdk.scope import add_global_event_processor
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Optional
 
     from sentry_sdk._types import Event, Hint
diff --git a/sentry_sdk/integrations/ariadne.py b/sentry_sdk/integrations/ariadne.py
new file mode 100644
index 0000000000..5b98a88443
--- /dev/null
+++ b/sentry_sdk/integrations/ariadne.py
@@ -0,0 +1,176 @@
+from importlib import import_module
+
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.integrations._wsgi_common import request_body_within_bounds
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    package_version,
+)
+from sentry_sdk._types import TYPE_CHECKING
+
+try:
+    # importing like this is necessary due to name shadowing in ariadne
+    # (ariadne.graphql is also a function)
+    ariadne_graphql = import_module("ariadne.graphql")
+except ImportError:
+    raise DidNotEnable("ariadne is not installed")
+
+
+if TYPE_CHECKING:
+    from typing import Any, Dict, List, Optional
+    from ariadne.types import GraphQLError, GraphQLResult, GraphQLSchema, QueryParser  # type: ignore
+    from graphql.language.ast import DocumentNode  # type: ignore
+    from sentry_sdk._types import Event, EventProcessor
+
+
+class AriadneIntegration(Integration):
+    identifier = "ariadne"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        version = package_version("ariadne")
+
+        if version is None:
+            raise DidNotEnable("Unparsable ariadne version.")
+
+        if version < (0, 20):
+            raise DidNotEnable("ariadne 0.20 or newer required.")
+
+        ignore_logger("ariadne")
+
+        _patch_graphql()
+
+
+def _patch_graphql():
+    # type: () -> None
+    old_parse_query = ariadne_graphql.parse_query
+    old_handle_errors = ariadne_graphql.handle_graphql_errors
+    old_handle_query_result = ariadne_graphql.handle_query_result
+
+    def _sentry_patched_parse_query(context_value, query_parser, data):
+        # type: (Optional[Any], Optional[QueryParser], Any) -> DocumentNode
+        hub = Hub.current
+        integration = hub.get_integration(AriadneIntegration)
+        if integration is None:
+            return old_parse_query(context_value, query_parser, data)
+
+        with hub.configure_scope() as scope:
+            event_processor = _make_request_event_processor(data)
+            scope.add_event_processor(event_processor)
+
+        result = old_parse_query(context_value, query_parser, data)
+        return result
+
+    def _sentry_patched_handle_graphql_errors(errors, *args, **kwargs):
+        # type: (List[GraphQLError], Any, Any) -> GraphQLResult
+        hub = Hub.current
+        integration = hub.get_integration(AriadneIntegration)
+        if integration is None:
+            return old_handle_errors(errors, *args, **kwargs)
+
+        result = old_handle_errors(errors, *args, **kwargs)
+
+        with hub.configure_scope() as scope:
+            event_processor = _make_response_event_processor(result[1])
+            scope.add_event_processor(event_processor)
+
+        if hub.client:
+            with capture_internal_exceptions():
+                for error in errors:
+                    event, hint = event_from_exception(
+                        error,
+                        client_options=hub.client.options,
+                        mechanism={
+                            "type": integration.identifier,
+                            "handled": False,
+                        },
+                    )
+                    hub.capture_event(event, hint=hint)
+
+        return result
+
+    def _sentry_patched_handle_query_result(result, *args, **kwargs):
+        # type: (Any, Any, Any) -> GraphQLResult
+        hub = Hub.current
+        integration = hub.get_integration(AriadneIntegration)
+        if integration is None:
+            return old_handle_query_result(result, *args, **kwargs)
+
+        query_result = old_handle_query_result(result, *args, **kwargs)
+
+        with hub.configure_scope() as scope:
+            event_processor = _make_response_event_processor(query_result[1])
+            scope.add_event_processor(event_processor)
+
+        if hub.client:
+            with capture_internal_exceptions():
+                for error in result.errors or []:
+                    event, hint = event_from_exception(
+                        error,
+                        client_options=hub.client.options,
+                        mechanism={
+                            "type": integration.identifier,
+                            "handled": False,
+                        },
+                    )
+                    hub.capture_event(event, hint=hint)
+
+        return query_result
+
+    ariadne_graphql.parse_query = _sentry_patched_parse_query  # type: ignore
+    ariadne_graphql.handle_graphql_errors = _sentry_patched_handle_graphql_errors  # type: ignore
+    ariadne_graphql.handle_query_result = _sentry_patched_handle_query_result  # type: ignore
+
+
+def _make_request_event_processor(data):
+    # type: (GraphQLSchema) -> EventProcessor
+    """Add request data and api_target to events."""
+
+    def inner(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        if not isinstance(data, dict):
+            return event
+
+        with capture_internal_exceptions():
+            try:
+                content_length = int(
+                    (data.get("headers") or {}).get("Content-Length", 0)
+                )
+            except (TypeError, ValueError):
+                return event
+
+            if _should_send_default_pii() and request_body_within_bounds(
+                Hub.current.client, content_length
+            ):
+                request_info = event.setdefault("request", {})
+                request_info["api_target"] = "graphql"
+                request_info["data"] = data
+
+            elif event.get("request", {}).get("data"):
+                del event["request"]["data"]
+
+        return event
+
+    return inner
+
+
+def _make_response_event_processor(response):
+    # type: (Dict[str, Any]) -> EventProcessor
+    """Add response data to the event's response context."""
+
+    def inner(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        with capture_internal_exceptions():
+            if _should_send_default_pii() and response.get("errors"):
+                contexts = event.setdefault("contexts", {})
+                contexts["response"] = {
+                    "data": response,
+                }
+
+        return event
+
+    return inner
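
The integration only attaches GraphQL request/response payloads when `send_default_pii` is enabled and the body is within size bounds. A minimal sketch of enabling it (DSN is a placeholder):

```python
import sentry_sdk
from sentry_sdk.integrations.ariadne import AriadneIntegration

sentry_sdk.init(
    dsn="...",
    send_default_pii=True,  # required for request/response data capture
    integrations=[AriadneIntegration()],
)
```
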
diff --git a/sentry_sdk/integrations/arq.py b/sentry_sdk/integrations/arq.py
new file mode 100644
index 0000000000..ed045b854a
--- /dev/null
+++ b/sentry_sdk/integrations/arq.py
@@ -0,0 +1,238 @@
+from __future__ import absolute_import
+
+import sys
+
+from sentry_sdk._compat import reraise
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk import Hub
+from sentry_sdk.consts import OP
+from sentry_sdk.hub import _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_TASK
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    SENSITIVE_DATA_SUBSTITUTE,
+    parse_version,
+)
+
+try:
+    import arq.worker
+    from arq.version import VERSION as ARQ_VERSION
+    from arq.connections import ArqRedis
+    from arq.worker import JobExecutionFailed, Retry, RetryJob, Worker
+except ImportError:
+    raise DidNotEnable("Arq is not installed")
+
+if TYPE_CHECKING:
+    from typing import Any, Dict, Optional, Union
+
+    from sentry_sdk._types import EventProcessor, Event, ExcInfo, Hint
+
+    from arq.cron import CronJob
+    from arq.jobs import Job
+    from arq.typing import WorkerCoroutine
+    from arq.worker import Function
+
+ARQ_CONTROL_FLOW_EXCEPTIONS = (JobExecutionFailed, Retry, RetryJob)
+
+
+class ArqIntegration(Integration):
+    identifier = "arq"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+
+        try:
+            if isinstance(ARQ_VERSION, str):
+                version = parse_version(ARQ_VERSION)
+            else:
+                version = ARQ_VERSION.version[:2]
+
+        except (TypeError, ValueError):
+            version = None
+
+        if version is None:
+            raise DidNotEnable("Unparsable arq version: {}".format(ARQ_VERSION))
+
+        if version < (0, 23):
+            raise DidNotEnable("arq 0.23 or newer required.")
+
+        patch_enqueue_job()
+        patch_run_job()
+        patch_create_worker()
+
+        ignore_logger("arq.worker")
+
+
+def patch_enqueue_job():
+    # type: () -> None
+    old_enqueue_job = ArqRedis.enqueue_job
+
+    async def _sentry_enqueue_job(self, function, *args, **kwargs):
+        # type: (ArqRedis, str, *Any, **Any) -> Optional[Job]
+        hub = Hub.current
+
+        if hub.get_integration(ArqIntegration) is None:
+            return await old_enqueue_job(self, function, *args, **kwargs)
+
+        with hub.start_span(op=OP.QUEUE_SUBMIT_ARQ, description=function):
+            return await old_enqueue_job(self, function, *args, **kwargs)
+
+    ArqRedis.enqueue_job = _sentry_enqueue_job
+
+
+def patch_run_job():
+    # type: () -> None
+    old_run_job = Worker.run_job
+
+    async def _sentry_run_job(self, job_id, score):
+        # type: (Worker, str, int) -> None
+        hub = Hub(Hub.current)
+
+        if hub.get_integration(ArqIntegration) is None:
+            return await old_run_job(self, job_id, score)
+
+        with hub.push_scope() as scope:
+            scope._name = "arq"
+            scope.clear_breadcrumbs()
+
+            transaction = Transaction(
+                name="unknown arq task",
+                status="ok",
+                op=OP.QUEUE_TASK_ARQ,
+                source=TRANSACTION_SOURCE_TASK,
+            )
+
+            with hub.start_transaction(transaction):
+                return await old_run_job(self, job_id, score)
+
+    Worker.run_job = _sentry_run_job
+
+
+def _capture_exception(exc_info):
+    # type: (ExcInfo) -> None
+    hub = Hub.current
+
+    if hub.scope.transaction is not None:
+        if exc_info[0] in ARQ_CONTROL_FLOW_EXCEPTIONS:
+            hub.scope.transaction.set_status("aborted")
+            return
+
+        hub.scope.transaction.set_status("internal_error")
+
+    event, hint = event_from_exception(
+        exc_info,
+        client_options=hub.client.options if hub.client else None,
+        mechanism={"type": ArqIntegration.identifier, "handled": False},
+    )
+    hub.capture_event(event, hint=hint)
+
+
+def _make_event_processor(ctx, *args, **kwargs):
+    # type: (Dict[Any, Any], *Any, **Any) -> EventProcessor
+    def event_processor(event, hint):
+        # type: (Event, Hint) -> Optional[Event]
+
+        hub = Hub.current
+
+        with capture_internal_exceptions():
+            if hub.scope.transaction is not None:
+                hub.scope.transaction.name = ctx["job_name"]
+                event["transaction"] = ctx["job_name"]
+
+            tags = event.setdefault("tags", {})
+            tags["arq_task_id"] = ctx["job_id"]
+            tags["arq_task_retry"] = ctx["job_try"] > 1
+            extra = event.setdefault("extra", {})
+            extra["arq-job"] = {
+                "task": ctx["job_name"],
+                "args": (
+                    args if _should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE
+                ),
+                "kwargs": (
+                    kwargs if _should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE
+                ),
+                "retry": ctx["job_try"],
+            }
+
+        return event
+
+    return event_processor
+
+
+def _wrap_coroutine(name, coroutine):
+    # type: (str, WorkerCoroutine) -> WorkerCoroutine
+    async def _sentry_coroutine(ctx, *args, **kwargs):
+        # type: (Dict[Any, Any], *Any, **Any) -> Any
+        hub = Hub.current
+        if hub.get_integration(ArqIntegration) is None:
+            return await coroutine(ctx, *args, **kwargs)
+
+        hub.scope.add_event_processor(
+            _make_event_processor({**ctx, "job_name": name}, *args, **kwargs)
+        )
+
+        try:
+            result = await coroutine(ctx, *args, **kwargs)
+        except Exception:
+            exc_info = sys.exc_info()
+            _capture_exception(exc_info)
+            reraise(*exc_info)
+
+        return result
+
+    return _sentry_coroutine
+
+
+def patch_create_worker():
+    # type: () -> None
+    old_create_worker = arq.worker.create_worker
+
+    def _sentry_create_worker(*args, **kwargs):
+        # type: (*Any, **Any) -> Worker
+        hub = Hub.current
+
+        if hub.get_integration(ArqIntegration) is None:
+            return old_create_worker(*args, **kwargs)
+
+        settings_cls = args[0]
+
+        if hasattr(settings_cls, "functions"):
+            settings_cls.functions = [
+                _get_arq_function(func) for func in settings_cls.functions
+            ]
+        if hasattr(settings_cls, "cron_jobs"):
+            settings_cls.cron_jobs = [
+                _get_arq_cron_job(cron_job) for cron_job in settings_cls.cron_jobs
+            ]
+
+        if "functions" in kwargs:
+            kwargs["functions"] = [
+                _get_arq_function(func) for func in kwargs["functions"]
+            ]
+        if "cron_jobs" in kwargs:
+            kwargs["cron_jobs"] = [
+                _get_arq_cron_job(cron_job) for cron_job in kwargs["cron_jobs"]
+            ]
+
+        return old_create_worker(*args, **kwargs)
+
+    arq.worker.create_worker = _sentry_create_worker
+
+
+def _get_arq_function(func):
+    # type: (Union[str, Function, WorkerCoroutine]) -> Function
+    arq_func = arq.worker.func(func)
+    arq_func.coroutine = _wrap_coroutine(arq_func.name, arq_func.coroutine)
+
+    return arq_func
+
+
+def _get_arq_cron_job(cron_job):
+    # type: (CronJob) -> CronJob
+    cron_job.coroutine = _wrap_coroutine(cron_job.name, cron_job.coroutine)
+
+    return cron_job
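
`patch_create_worker()` wraps every function and cron-job coroutine, so each job runs inside an "unknown arq task" transaction that the event processor renames to the job name. A hypothetical worker definition (the DSN and task are placeholders):

```python
import sentry_sdk
from sentry_sdk.integrations.arq import ArqIntegration

sentry_sdk.init(dsn="...", traces_sample_rate=1.0,
                integrations=[ArqIntegration()])

async def download_content(ctx, url):
    # Job args/kwargs are only attached to events when
    # send_default_pii is enabled.
    ...

class WorkerSettings:
    functions = [download_content]
```
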
diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py
index 5f7810732b..901c6f5d23 100644
--- a/sentry_sdk/integrations/asgi.py
+++ b/sentry_sdk/integrations/asgi.py
@@ -1,34 +1,48 @@
 """
 An ASGI middleware.
 
-Based on Tom Christie's `sentry-asgi <https://github.com/encode/sentry-asgi>`_.
+Based on Tom Christie's `sentry-asgi <https://github.com/encode/sentry-asgi>`.
 """
 
 import asyncio
 import inspect
-import urllib
+from copy import deepcopy
 
 from sentry_sdk._functools import partial
-from sentry_sdk._types import MYPY
-from sentry_sdk.hub import Hub, _should_send_default_pii
-from sentry_sdk.integrations._wsgi_common import _filter_headers
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP
+from sentry_sdk.hub import Hub
+
+from sentry_sdk.integrations._asgi_common import (
+    _get_headers,
+    _get_request_data,
+    _get_url,
+)
 from sentry_sdk.sessions import auto_session_tracking
+from sentry_sdk.tracing import (
+    SOURCE_FOR_STYLE,
+    TRANSACTION_SOURCE_ROUTE,
+    TRANSACTION_SOURCE_URL,
+    TRANSACTION_SOURCE_COMPONENT,
+)
 from sentry_sdk.utils import (
     ContextVar,
     event_from_exception,
-    transaction_from_function,
     HAS_REAL_CONTEXTVARS,
     CONTEXTVARS_ERROR_MESSAGE,
+    logger,
+    transaction_from_function,
+    _get_installed_modules,
 )
 from sentry_sdk.tracing import Transaction
 
-if MYPY:
-    from typing import Dict
+if TYPE_CHECKING:
     from typing import Any
-    from typing import Optional
     from typing import Callable
-
-    from typing_extensions import Literal
+    from typing import Dict
+    from typing import Optional
+    from typing import Tuple
 
     from sentry_sdk._types import Event, Hint
 
@@ -40,15 +54,15 @@
 TRANSACTION_STYLE_VALUES = ("endpoint", "url")
 
 
-def _capture_exception(hub, exc):
-    # type: (Hub, Any) -> None
+def _capture_exception(hub, exc, mechanism_type="asgi"):
+    # type: (Hub, Any, str) -> None
 
     # Check client here as it might have been unset while streaming response
     if hub.client is not None:
         event, hint = event_from_exception(
             exc,
             client_options=hub.client.options,
-            mechanism={"type": "asgi", "handled": False},
+            mechanism={"type": mechanism_type, "handled": False},
         )
         hub.capture_event(event, hint=hint)
 
@@ -70,10 +84,16 @@ def _looks_like_asgi3(app):
 
 
 class SentryAsgiMiddleware:
-    __slots__ = ("app", "__call__", "transaction_style")
-
-    def __init__(self, app, unsafe_context_data=False, transaction_style="endpoint"):
-        # type: (Any, bool, str) -> None
+    __slots__ = ("app", "__call__", "transaction_style", "mechanism_type")
+
+    def __init__(
+        self,
+        app,
+        unsafe_context_data=False,
+        transaction_style="endpoint",
+        mechanism_type="asgi",
+    ):
+        # type: (Any, bool, str, str) -> None
         """
         Instrument an ASGI application with Sentry. Provides HTTP/websocket
         data to sent events and basic handling for exceptions bubbling up
@@ -81,7 +101,6 @@ def __init__(self, app, unsafe_context_data=False, transaction_style="endpoint")
 
         :param unsafe_context_data: Disable errors when a proper contextvars installation could not be found. We do not recommend changing this from the default.
         """
-
         if not unsafe_context_data and not HAS_REAL_CONTEXTVARS:
             # We better have contextvars or we're going to leak state between
             # requests.
@@ -94,7 +113,19 @@ def __init__(self, app, unsafe_context_data=False, transaction_style="endpoint")
                 "Invalid value for transaction_style: %s (must be in %s)"
                 % (transaction_style, TRANSACTION_STYLE_VALUES)
             )
+
+        asgi_middleware_while_using_starlette_or_fastapi = (
+            mechanism_type == "asgi" and "starlette" in _get_installed_modules()
+        )
+        if asgi_middleware_while_using_starlette_or_fastapi:
+            logger.warning(
+                "The Sentry Python SDK can now automatically support ASGI frameworks like Starlette and FastAPI. "
+                "Please remove 'SentryAsgiMiddleware' from your project. "
+                "See https://docs.sentry.io/platforms/python/guides/asgi/ for more information."
+            )
+
         self.transaction_style = transaction_style
+        self.mechanism_type = mechanism_type
         self.app = app
 
         if _looks_like_asgi3(app):
@@ -106,23 +137,27 @@ def _run_asgi2(self, scope):
         # type: (Any) -> Any
         async def inner(receive, send):
             # type: (Any, Any) -> Any
-            return await self._run_app(scope, lambda: self.app(scope)(receive, send))
+            return await self._run_app(scope, receive, send, asgi_version=2)
 
         return inner
 
     async def _run_asgi3(self, scope, receive, send):
         # type: (Any, Any, Any) -> Any
-        return await self._run_app(scope, lambda: self.app(scope, receive, send))
+        return await self._run_app(scope, receive, send, asgi_version=3)
 
-    async def _run_app(self, scope, callback):
-        # type: (Any, Any) -> Any
+    async def _run_app(self, scope, receive, send, asgi_version):
+        # type: (Any, Any, Any, Any, int) -> Any
         is_recursive_asgi_middleware = _asgi_middleware_applied.get(False)
-
-        if is_recursive_asgi_middleware:
+        is_lifespan = scope["type"] == "lifespan"
+        if is_recursive_asgi_middleware or is_lifespan:
             try:
-                return await callback()
+                if asgi_version == 2:
+                    return await self.app(scope)(receive, send)
+                else:
+                    return await self.app(scope, receive, send)
+
             except Exception as exc:
-                _capture_exception(Hub.current, exc)
+                _capture_exception(Hub.current, exc, mechanism_type=self.mechanism_type)
                 raise exc from None
 
         _asgi_middleware_applied.set(True)
@@ -137,142 +172,142 @@ async def _run_app(self, scope, callback):
                         sentry_scope.add_event_processor(processor)
 
                     ty = scope["type"]
+                    (
+                        transaction_name,
+                        transaction_source,
+                    ) = self._get_transaction_name_and_source(
+                        self.transaction_style,
+                        scope,
+                    )
 
                     if ty in ("http", "websocket"):
-                        transaction = Transaction.continue_from_headers(
-                            self._get_headers(scope),
+                        transaction = continue_trace(
+                            _get_headers(scope),
                             op="{}.server".format(ty),
+                            name=transaction_name,
+                            source=transaction_source,
+                        )
+                        logger.debug(
+                            "[ASGI] Created transaction (continuing trace): %s",
+                            transaction,
                         )
                     else:
-                        transaction = Transaction(op="asgi.server")
+                        transaction = Transaction(
+                            op=OP.HTTP_SERVER,
+                            name=transaction_name,
+                            source=transaction_source,
+                        )
+                        logger.debug(
+                            "[ASGI] Created transaction (new): %s", transaction
+                        )
 
-                    transaction.name = _DEFAULT_TRANSACTION_NAME
                     transaction.set_tag("asgi.type", ty)
+                    logger.debug(
+                        "[ASGI] Set transaction name and source on transaction: '%s' / '%s'",
+                        transaction.name,
+                        transaction.source,
+                    )
 
                     with hub.start_transaction(
                         transaction, custom_sampling_context={"asgi_scope": scope}
                     ):
-                        # XXX: Would be cool to have correct span status, but we
-                        # would have to wrap send(). That is a bit hard to do with
-                        # the current abstraction over ASGI 2/3.
+                        logger.debug("[ASGI] Started transaction: %s", transaction)
                         try:
-                            return await callback()
+
+                            async def _sentry_wrapped_send(event):
+                                # type: (Dict[str, Any]) -> Any
+                                is_http_response = (
+                                    event.get("type") == "http.response.start"
+                                    and transaction is not None
+                                    and "status" in event
+                                )
+                                if is_http_response:
+                                    transaction.set_http_status(event["status"])
+
+                                return await send(event)
+
+                            if asgi_version == 2:
+                                return await self.app(scope)(
+                                    receive, _sentry_wrapped_send
+                                )
+                            else:
+                                return await self.app(
+                                    scope, receive, _sentry_wrapped_send
+                                )
                         except Exception as exc:
-                            _capture_exception(hub, exc)
+                            _capture_exception(
+                                hub, exc, mechanism_type=self.mechanism_type
+                            )
                             raise exc from None
         finally:
             _asgi_middleware_applied.set(False)
 
     def event_processor(self, event, hint, asgi_scope):
         # type: (Event, Hint, Any) -> Optional[Event]
-        request_info = event.get("request", {})
-
-        ty = asgi_scope["type"]
-        if ty in ("http", "websocket"):
-            request_info["method"] = asgi_scope.get("method")
-            request_info["headers"] = headers = _filter_headers(
-                self._get_headers(asgi_scope)
+        request_data = event.get("request", {})
+        request_data.update(_get_request_data(asgi_scope))
+        event["request"] = deepcopy(request_data)
+
+        # Only set transaction name if not already set by Starlette or FastAPI (or other frameworks)
+        already_set = event["transaction"] != _DEFAULT_TRANSACTION_NAME and event[
+            "transaction_info"
+        ].get("source") in [
+            TRANSACTION_SOURCE_COMPONENT,
+            TRANSACTION_SOURCE_ROUTE,
+        ]
+        if not already_set:
+            name, source = self._get_transaction_name_and_source(
+                self.transaction_style, asgi_scope
             )
-            request_info["query_string"] = self._get_query(asgi_scope)
+            event["transaction"] = name
+            event["transaction_info"] = {"source": source}
 
-            request_info["url"] = self._get_url(
-                asgi_scope, "http" if ty == "http" else "ws", headers.get("host")
+            logger.debug(
+                "[ASGI] Set transaction name and source in event_processor: '%s' / '%s'",
+                event["transaction"],
+                event["transaction_info"]["source"],
             )
 
-        client = asgi_scope.get("client")
-        if client and _should_send_default_pii():
-            request_info["env"] = {"REMOTE_ADDR": self._get_ip(asgi_scope)}
-
-        if (
-            event.get("transaction", _DEFAULT_TRANSACTION_NAME)
-            == _DEFAULT_TRANSACTION_NAME
-        ):
-            if self.transaction_style == "endpoint":
-                endpoint = asgi_scope.get("endpoint")
-                # Webframeworks like Starlette mutate the ASGI env once routing is
-                # done, which is sometime after the request has started. If we have
-                # an endpoint, overwrite our generic transaction name.
-                if endpoint:
-                    event["transaction"] = transaction_from_function(endpoint)
-            elif self.transaction_style == "url":
-                # FastAPI includes the route object in the scope to let Sentry extract the
-                # path from it for the transaction name
-                route = asgi_scope.get("route")
-                if route:
-                    path = getattr(route, "path", None)
-                    if path is not None:
-                        event["transaction"] = path
-
-        event["request"] = request_info
-
         return event
 
-    # Helper functions for extracting request data.
+    # Helper functions.
     #
     # Note: Those functions are not public API. If you want to mutate request
     # data to your liking it's recommended to use the `before_send` callback
     # for that.
 
-    def _get_url(self, scope, default_scheme, host):
-        # type: (Dict[str, Any], Literal["ws", "http"], Optional[str]) -> str
-        """
-        Extract URL from the ASGI scope, without also including the querystring.
-        """
-        scheme = scope.get("scheme", default_scheme)
-
-        server = scope.get("server", None)
-        path = scope.get("root_path", "") + scope.get("path", "")
-
-        if host:
-            return "%s://%s%s" % (scheme, host, path)
-
-        if server is not None:
-            host, port = server
-            default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}[scheme]
-            if port != default_port:
-                return "%s://%s:%s%s" % (scheme, host, port, path)
-            return "%s://%s%s" % (scheme, host, path)
-        return path
-
-    def _get_query(self, scope):
-        # type: (Any) -> Any
-        """
-        Extract querystring from the ASGI scope, in the format that the Sentry protocol expects.
-        """
-        qs = scope.get("query_string")
-        if not qs:
-            return None
-        return urllib.parse.unquote(qs.decode("latin-1"))
-
-    def _get_ip(self, scope):
-        # type: (Any) -> str
-        """
-        Extract IP Address from the ASGI scope based on request headers with fallback to scope client.
-        """
-        headers = self._get_headers(scope)
-        try:
-            return headers["x-forwarded-for"].split(",")[0].strip()
-        except (KeyError, IndexError):
-            pass
-
-        try:
-            return headers["x-real-ip"]
-        except KeyError:
-            pass
+    def _get_transaction_name_and_source(self, transaction_style, asgi_scope):
+        # type: (SentryAsgiMiddleware, str, Any) -> Tuple[str, str]
+        name = None
+        source = SOURCE_FOR_STYLE[transaction_style]
+        ty = asgi_scope.get("type")
+
+        if transaction_style == "endpoint":
+            endpoint = asgi_scope.get("endpoint")
+            # Webframeworks like Starlette mutate the ASGI env once routing is
+            # done, which is sometime after the request has started. If we have
+            # an endpoint, overwrite our generic transaction name.
+            if endpoint:
+                name = transaction_from_function(endpoint) or ""
+            else:
+                name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None)
+                source = TRANSACTION_SOURCE_URL
+
+        elif transaction_style == "url":
+            # FastAPI includes the route object in the scope to let Sentry extract the
+            # path from it for the transaction name
+            route = asgi_scope.get("route")
+            if route:
+                path = getattr(route, "path", None)
+                if path is not None:
+                    name = path
+            else:
+                name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None)
+                source = TRANSACTION_SOURCE_URL
 
-        return scope.get("client")[0]
+        if name is None:
+            name = _DEFAULT_TRANSACTION_NAME
+            source = TRANSACTION_SOURCE_ROUTE
 
-    def _get_headers(self, scope):
-        # type: (Any) -> Dict[str, str]
-        """
-        Extract headers from the ASGI scope, in the format that the Sentry protocol expects.
-        """
-        headers = {}  # type: Dict[str, str]
-        for raw_key, raw_value in scope["headers"]:
-            key = raw_key.decode("latin-1")
-            value = raw_value.decode("latin-1")
-            if key in headers:
-                headers[key] = headers[key] + ", " + value
-            else:
-                headers[key] = value
-        return headers
+        return name, source
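
Since the middleware now wraps `send()` to record the HTTP status, even a bare ASGI app gets status codes on its transactions without framework support. A minimal sketch (the app is a stub that only handles `http` scopes):

```python
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware

async def app(scope, receive, send):
    await send({"type": "http.response.start", "status": 200, "headers": []})
    await send({"type": "http.response.body", "body": b"ok"})

# "endpoint" names transactions after the handler callable;
# "url" prefers the route path when a framework provides one.
app = SentryAsgiMiddleware(app, transaction_style="endpoint")
```
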
diff --git a/sentry_sdk/integrations/asyncio.py b/sentry_sdk/integrations/asyncio.py
new file mode 100644
index 0000000000..7f9b5b0c6d
--- /dev/null
+++ b/sentry_sdk/integrations/asyncio.py
@@ -0,0 +1,106 @@
+from __future__ import absolute_import
+import sys
+
+from sentry_sdk._compat import reraise
+from sentry_sdk.consts import OP
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.utils import event_from_exception
+
+try:
+    import asyncio
+    from asyncio.tasks import Task
+except ImportError:
+    raise DidNotEnable("asyncio not available")
+
+
+if TYPE_CHECKING:
+    from typing import Any
+    from collections.abc import Coroutine
+
+    from sentry_sdk._types import ExcInfo
+
+
+def get_name(coro):
+    # type: (Any) -> str
+    return (
+        getattr(coro, "__qualname__", None)
+        or getattr(coro, "__name__", None)
+        or "coroutine without __name__"
+    )
+
+
+def patch_asyncio():
+    # type: () -> None
+    orig_task_factory = None
+    try:
+        loop = asyncio.get_running_loop()
+        orig_task_factory = loop.get_task_factory()
+
+        def _sentry_task_factory(loop, coro, **kwargs):
+            # type: (asyncio.AbstractEventLoop, Coroutine[Any, Any, Any], Any) -> asyncio.Future[Any]
+
+            async def _coro_creating_hub_and_span():
+                # type: () -> Any
+                hub = Hub(Hub.current)
+                result = None
+
+                with hub:
+                    with hub.start_span(op=OP.FUNCTION, description=get_name(coro)):
+                        try:
+                            result = await coro
+                        except Exception:
+                            reraise(*_capture_exception(hub))
+
+                return result
+
+            # Try to use the user-set task factory (if there is one)
+            if orig_task_factory:
+                return orig_task_factory(loop, _coro_creating_hub_and_span(), **kwargs)
+
+            # The default task factory in `asyncio` does not have its own function,
+            # but is just a couple of lines in `asyncio.base_events.create_task()`.
+            # Those lines are copied here.
+
+            # WARNING:
+            # If the default behavior of the task creation in asyncio changes,
+            # this will break!
+            task = Task(_coro_creating_hub_and_span(), loop=loop, **kwargs)
+            if task._source_traceback:  # type: ignore
+                del task._source_traceback[-1]  # type: ignore
+
+            return task
+
+        loop.set_task_factory(_sentry_task_factory)  # type: ignore
+    except RuntimeError:
+        # When there is no running loop, we have nothing to patch.
+        pass
+
+
+def _capture_exception(hub):
+    # type: (Hub) -> ExcInfo
+    exc_info = sys.exc_info()
+
+    integration = hub.get_integration(AsyncioIntegration)
+    if integration is not None:
+        # If an integration is there, a client has to be there.
+        client = hub.client  # type: Any
+
+        event, hint = event_from_exception(
+            exc_info,
+            client_options=client.options,
+            mechanism={"type": "asyncio", "handled": False},
+        )
+        hub.capture_event(event, hint=hint)
+
+    return exc_info
+
+
+class AsyncioIntegration(Integration):
+    identifier = "asyncio"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        patch_asyncio()
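
`patch_asyncio()` can only install its task factory on a running loop, so `sentry_sdk.init()` has to be called from inside the loop for this integration to take effect. A sketch (the DSN and task body are placeholders):

```python
import asyncio
import sentry_sdk
from sentry_sdk.integrations.asyncio import AsyncioIntegration

async def my_task():
    ...

async def main():
    # init() inside the running loop so patch_asyncio() finds it
    sentry_sdk.init(dsn="...", traces_sample_rate=1.0,
                    integrations=[AsyncioIntegration()])
    # each gathered task gets its own Hub and a function span
    await asyncio.gather(my_task(), my_task())

asyncio.run(main())
```
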
diff --git a/sentry_sdk/integrations/asyncpg.py b/sentry_sdk/integrations/asyncpg.py
new file mode 100644
index 0000000000..19aa9c3a69
--- /dev/null
+++ b/sentry_sdk/integrations/asyncpg.py
@@ -0,0 +1,209 @@
+from __future__ import annotations
+import contextlib
+from typing import Any, TypeVar, Callable, Awaitable, Iterator
+
+from asyncpg.cursor import BaseCursor  # type: ignore
+
+from sentry_sdk import Hub
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.tracing import Span
+from sentry_sdk.tracing_utils import add_query_source, record_sql_queries
+from sentry_sdk.utils import parse_version, capture_internal_exceptions
+
+try:
+    import asyncpg  # type: ignore[import-not-found]
+
+except ImportError:
+    raise DidNotEnable("asyncpg not installed.")
+
+# asyncpg.__version__ is a string containing the semantic version in the form of "<major>.<minor>.<patchlevel>"
+asyncpg_version = parse_version(asyncpg.__version__)
+
+if asyncpg_version is not None and asyncpg_version < (0, 23, 0):
+    raise DidNotEnable("asyncpg >= 0.23.0 required")
+
+
+class AsyncPGIntegration(Integration):
+    identifier = "asyncpg"
+    _record_params = False
+
+    def __init__(self, *, record_params: bool = False):
+        AsyncPGIntegration._record_params = record_params
+
+    @staticmethod
+    def setup_once() -> None:
+        asyncpg.Connection.execute = _wrap_execute(
+            asyncpg.Connection.execute,
+        )
+
+        asyncpg.Connection._execute = _wrap_connection_method(
+            asyncpg.Connection._execute
+        )
+        asyncpg.Connection._executemany = _wrap_connection_method(
+            asyncpg.Connection._executemany, executemany=True
+        )
+        asyncpg.Connection.cursor = _wrap_cursor_creation(asyncpg.Connection.cursor)
+        asyncpg.Connection.prepare = _wrap_connection_method(asyncpg.Connection.prepare)
+        asyncpg.connect_utils._connect_addr = _wrap_connect_addr(
+            asyncpg.connect_utils._connect_addr
+        )
+
+
+T = TypeVar("T")
+
+
+def _wrap_execute(f: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]:
+    async def _inner(*args: Any, **kwargs: Any) -> T:
+        hub = Hub.current
+        integration = hub.get_integration(AsyncPGIntegration)
+
+        # Avoid recording calls to _execute twice.
+        # Calls to Connection.execute with args also call
+        # Connection._execute, which is recorded separately.
+        # args[0] is the connection object, args[1] is the query.
+        if integration is None or len(args) > 2:
+            return await f(*args, **kwargs)
+
+        query = args[1]
+        with record_sql_queries(
+            hub, None, query, None, None, executemany=False
+        ) as span:
+            res = await f(*args, **kwargs)
+
+        with capture_internal_exceptions():
+            add_query_source(hub, span)
+
+        return res
+
+    return _inner
+
+
+SubCursor = TypeVar("SubCursor", bound=BaseCursor)
+
+
+@contextlib.contextmanager
+def _record(
+    hub: Hub,
+    cursor: SubCursor | None,
+    query: str,
+    params_list: tuple[Any, ...] | None,
+    *,
+    executemany: bool = False,
+) -> Iterator[Span]:
+    integration = hub.get_integration(AsyncPGIntegration)
+    if not integration._record_params:
+        params_list = None
+
+    param_style = "pyformat" if params_list else None
+
+    with record_sql_queries(
+        hub,
+        cursor,
+        query,
+        params_list,
+        param_style,
+        executemany=executemany,
+        record_cursor_repr=cursor is not None,
+    ) as span:
+        yield span
+
+
+def _wrap_connection_method(
+    f: Callable[..., Awaitable[T]], *, executemany: bool = False
+) -> Callable[..., Awaitable[T]]:
+    async def _inner(*args: Any, **kwargs: Any) -> T:
+        hub = Hub.current
+        integration = hub.get_integration(AsyncPGIntegration)
+
+        if integration is None:
+            return await f(*args, **kwargs)
+
+        query = args[1]
+        params_list = args[2] if len(args) > 2 else None
+        with _record(hub, None, query, params_list, executemany=executemany) as span:
+            _set_db_data(span, args[0])
+            res = await f(*args, **kwargs)
+
+        return res
+
+    return _inner
+
+
+def _wrap_cursor_creation(f: Callable[..., T]) -> Callable[..., T]:
+    def _inner(*args: Any, **kwargs: Any) -> T:  # noqa: N807
+        hub = Hub.current
+        integration = hub.get_integration(AsyncPGIntegration)
+
+        if integration is None:
+            return f(*args, **kwargs)
+
+        query = args[1]
+        params_list = args[2] if len(args) > 2 else None
+
+        with _record(
+            hub,
+            None,
+            query,
+            params_list,
+            executemany=False,
+        ) as span:
+            _set_db_data(span, args[0])
+            res = f(*args, **kwargs)
+            span.set_data("db.cursor", res)
+
+        return res
+
+    return _inner
+
+
+def _wrap_connect_addr(f: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]:
+    async def _inner(*args: Any, **kwargs: Any) -> T:
+        hub = Hub.current
+        integration = hub.get_integration(AsyncPGIntegration)
+
+        if integration is None:
+            return await f(*args, **kwargs)
+
+        user = kwargs["params"].user
+        database = kwargs["params"].database
+
+        with hub.start_span(op=OP.DB, description="connect") as span:
+            span.set_data(SPANDATA.DB_SYSTEM, "postgresql")
+            addr = kwargs.get("addr")
+            if addr:
+                try:
+                    span.set_data(SPANDATA.SERVER_ADDRESS, addr[0])
+                    span.set_data(SPANDATA.SERVER_PORT, addr[1])
+                except IndexError:
+                    pass
+            span.set_data(SPANDATA.DB_NAME, database)
+            span.set_data(SPANDATA.DB_USER, user)
+
+            with capture_internal_exceptions():
+                hub.add_breadcrumb(message="connect", category="query", data=span._data)
+            res = await f(*args, **kwargs)
+
+        return res
+
+    return _inner
+
+
+def _set_db_data(span: Span, conn: Any) -> None:
+    span.set_data(SPANDATA.DB_SYSTEM, "postgresql")
+
+    addr = conn._addr
+    if addr:
+        try:
+            span.set_data(SPANDATA.SERVER_ADDRESS, addr[0])
+            span.set_data(SPANDATA.SERVER_PORT, addr[1])
+        except IndexError:
+            pass
+
+    database = conn._params.database
+    if database:
+        span.set_data(SPANDATA.DB_NAME, database)
+
+    user = conn._params.user
+    if user:
+        span.set_data(SPANDATA.DB_USER, user)
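
Query parameters are excluded from spans by default because they may contain PII; `record_params=True` opts in. A usage sketch (DSN is a placeholder):

```python
import sentry_sdk
from sentry_sdk.integrations.asyncpg import AsyncPGIntegration

sentry_sdk.init(
    dsn="...",
    traces_sample_rate=1.0,
    # record_params defaults to False; enable only if parameters are safe
    integrations=[AsyncPGIntegration(record_params=False)],
)
```
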
diff --git a/sentry_sdk/integrations/atexit.py b/sentry_sdk/integrations/atexit.py
index 18fe657bff..af70dd9fc9 100644
--- a/sentry_sdk/integrations/atexit.py
+++ b/sentry_sdk/integrations/atexit.py
@@ -8,10 +8,9 @@
 from sentry_sdk.utils import logger
 from sentry_sdk.integrations import Integration
 
-from sentry_sdk._types import MYPY
-
-if MYPY:
+from sentry_sdk._types import TYPE_CHECKING
 
+if TYPE_CHECKING:
     from typing import Any
     from typing import Optional
 
@@ -27,7 +26,7 @@ def echo(msg):
         # type: (str) -> None
         sys.stderr.write(msg + "\n")
 
-    echo("Sentry is attempting to send %i pending error messages" % pending)
+    echo("Sentry is attempting to send %i pending events" % pending)
     echo("Waiting up to %s seconds" % timeout)
     echo("Press Ctrl-%s to quit" % (os.name == "nt" and "Break" or "C"))
     sys.stderr.flush()
diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py
index 10b5025abe..3cefc90cfb 100644
--- a/sentry_sdk/integrations/aws_lambda.py
+++ b/sentry_sdk/integrations/aws_lambda.py
@@ -1,10 +1,12 @@
-from datetime import datetime, timedelta
-from os import environ
 import sys
+from copy import deepcopy
+from datetime import timedelta
+from os import environ
 
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub, _should_send_default_pii
-from sentry_sdk.tracing import Transaction
-from sentry_sdk._compat import reraise
+from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT
 from sentry_sdk.utils import (
     AnnotatedValue,
     capture_internal_exceptions,
@@ -14,10 +16,11 @@
 )
 from sentry_sdk.integrations import Integration
 from sentry_sdk.integrations._wsgi_common import _filter_headers
+from sentry_sdk._compat import datetime_utcnow, reraise
+from sentry_sdk._types import TYPE_CHECKING
 
-from sentry_sdk._types import MYPY
-
-if MYPY:
+if TYPE_CHECKING:
+    from datetime import datetime
     from typing import Any
     from typing import TypeVar
     from typing import Callable
@@ -78,7 +81,7 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs):
         # will be the same for all events in the list, since they're all hitting
         # the lambda in the same request.)
 
-        if isinstance(aws_event, list):
+        if isinstance(aws_event, list) and len(aws_event) >= 1:
             request_data = aws_event[0]
             batch_size = len(aws_event)
         else:
@@ -134,12 +137,17 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs):
                     # Starting the thread to raise timeout warning exception
                     timeout_thread.start()
 
-            headers = request_data.get("headers")
-            # AWS Service may set an explicit `{headers: None}`, we can't rely on `.get()`'s default.
-            if headers is None:
+            headers = request_data.get("headers", {})
+            # Some AWS services (e.g. EventBridge) set headers as a list
+            # or None, so we must ensure it is a dict.
+            if not isinstance(headers, dict):
                 headers = {}
-            transaction = Transaction.continue_from_headers(
-                headers, op="serverless.function", name=aws_context.function_name
+
+            transaction = continue_trace(
+                headers,
+                op=OP.FUNCTION_AWS,
+                name=aws_context.function_name,
+                source=TRANSACTION_SOURCE_COMPONENT,
             )
             with hub.start_transaction(
                 transaction,
@@ -317,7 +325,7 @@ def get_lambda_bootstrap():
 
 def _make_request_event_processor(aws_event, aws_context, configured_timeout):
     # type: (Any, Any, Any) -> EventProcessor
-    start_time = datetime.utcnow()
+    start_time = datetime_utcnow()
 
     def event_processor(sentry_event, hint, start_time=start_time):
         # type: (Event, Hint, datetime) -> Optional[Event]
@@ -374,9 +382,9 @@ def event_processor(sentry_event, hint, start_time=start_time):
             if aws_event.get("body", None):
                 # Unfortunately couldn't find a way to get structured body from AWS
                 # event. Meaning every body is unstructured to us.
-                request["data"] = AnnotatedValue("", {"rem": [["!raw", "x", 0, 0]]})
+                request["data"] = AnnotatedValue.removed_because_raw_data()
 
-        sentry_event["request"] = request
+        sentry_event["request"] = deepcopy(request)
 
         return sentry_event
 
@@ -422,7 +430,7 @@ def _get_cloudwatch_logs_url(aws_context, start_time):
         log_group=aws_context.log_group_name,
         log_stream=aws_context.log_stream_name,
         start_time=(start_time - timedelta(seconds=1)).strftime(formatstring),
-        end_time=(datetime.utcnow() + timedelta(seconds=2)).strftime(formatstring),
+        end_time=(datetime_utcnow() + timedelta(seconds=2)).strftime(formatstring),
     )
 
     return url
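
The new header handling replaces the old `None` check because `.get()`'s default only applies when the key is missing entirely, while some event sources set the key to `None` or to a list. A small illustration of the pitfall:

```python
event = {"headers": None}

headers = event.get("headers", {})  # -> None; the default is not used
if not isinstance(headers, dict):   # catches None as well as list-shaped headers
    headers = {}
```
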
diff --git a/sentry_sdk/integrations/beam.py b/sentry_sdk/integrations/beam.py
index 30faa3814f..ea45087d05 100644
--- a/sentry_sdk/integrations/beam.py
+++ b/sentry_sdk/integrations/beam.py
@@ -9,9 +9,9 @@
 from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
 from sentry_sdk.integrations import Integration
 from sentry_sdk.integrations.logging import ignore_logger
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Iterator
     from typing import TypeVar
diff --git a/sentry_sdk/integrations/boto3.py b/sentry_sdk/integrations/boto3.py
index e65f5a754b..a21772fc1a 100644
--- a/sentry_sdk/integrations/boto3.py
+++ b/sentry_sdk/integrations/boto3.py
@@ -1,13 +1,15 @@
 from __future__ import absolute_import
 
 from sentry_sdk import Hub
+from sentry_sdk.consts import OP, SPANDATA
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.tracing import Span
 
 from sentry_sdk._functools import partial
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.utils import capture_internal_exceptions, parse_url, parse_version
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Dict
     from typing import Optional
@@ -28,14 +30,17 @@ class Boto3Integration(Integration):
     @staticmethod
     def setup_once():
         # type: () -> None
-        try:
-            version = tuple(map(int, BOTOCORE_VERSION.split(".")[:3]))
-        except (ValueError, TypeError):
+
+        version = parse_version(BOTOCORE_VERSION)
+
+        if version is None:
             raise DidNotEnable(
                 "Unparsable botocore version: {}".format(BOTOCORE_VERSION)
             )
+
         if version < (1, 12):
             raise DidNotEnable("Botocore 1.12 or newer is required.")
+
         orig_init = BaseClient.__init__
 
         def sentry_patched_init(self, *args, **kwargs):
@@ -62,12 +67,19 @@ def _sentry_request_created(service_id, request, operation_name, **kwargs):
     description = "aws.%s.%s" % (service_id, operation_name)
     span = hub.start_span(
         hub=hub,
-        op="aws.request",
+        op=OP.HTTP_CLIENT,
         description=description,
     )
+
+    with capture_internal_exceptions():
+        parsed_url = parse_url(request.url, sanitize=False)
+        span.set_data("aws.request.url", parsed_url.url)
+        span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
+        span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)
+
     span.set_tag("aws.service_id", service_id)
     span.set_tag("aws.operation_name", operation_name)
-    span.set_data("aws.request.url", request.url)
+    span.set_data(SPANDATA.HTTP_METHOD, request.method)
 
     # We do this so that subsequent http calls/retries are
     # attached to this span.
@@ -92,7 +104,7 @@ def _sentry_after_call(context, parsed, **kwargs):
         return
 
     streaming_span = span.start_child(
-        op="aws.request.stream",
+        op=OP.HTTP_CLIENT_STREAM,
         description=span.description,
     )
 
diff --git a/sentry_sdk/integrations/bottle.py b/sentry_sdk/integrations/bottle.py
index 4fa077e8f6..6f3678466e 100644
--- a/sentry_sdk/integrations/bottle.py
+++ b/sentry_sdk/integrations/bottle.py
@@ -1,18 +1,20 @@
 from __future__ import absolute_import
 
 from sentry_sdk.hub import Hub
+from sentry_sdk.tracing import SOURCE_FOR_STYLE
 from sentry_sdk.utils import (
     capture_internal_exceptions,
     event_from_exception,
+    parse_version,
     transaction_from_function,
 )
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
 from sentry_sdk.integrations._wsgi_common import RequestExtractor
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from sentry_sdk.integrations.wsgi import _ScopedResponse
     from typing import Any
     from typing import Dict
@@ -20,7 +22,7 @@
     from typing import Optional
     from bottle import FileUpload, FormsDict, LocalRequest  # type: ignore
 
-    from sentry_sdk._types import EventProcessor
+    from sentry_sdk._types import EventProcessor, Event
 
 try:
     from bottle import (
@@ -40,7 +42,7 @@
 class BottleIntegration(Integration):
     identifier = "bottle"
 
-    transaction_style = None
+    transaction_style = ""
 
     def __init__(self, transaction_style="endpoint"):
         # type: (str) -> None
@@ -56,10 +58,10 @@ def __init__(self, transaction_style="endpoint"):
     def setup_once():
         # type: () -> None
 
-        try:
-            version = tuple(map(int, BOTTLE_VERSION.replace("-dev", "").split(".")))
-        except (TypeError, ValueError):
-            raise DidNotEnable("Unparsable Bottle version: {}".format(version))
+        version = parse_version(BOTTLE_VERSION)
+
+        if version is None:
+            raise DidNotEnable("Unparsable Bottle version: {}".format(BOTTLE_VERSION))
 
         if version < (0, 12):
             raise DidNotEnable("Bottle 0.12 or newer required.")
@@ -176,24 +178,34 @@ def size_of_file(self, file):
         return file.content_length
 
 
+def _set_transaction_name_and_source(event, transaction_style, request):
+    # type: (Event, str, Any) -> None
+    name = ""
+
+    if transaction_style == "url":
+        name = request.route.rule or ""
+
+    elif transaction_style == "endpoint":
+        name = (
+            request.route.name
+            or transaction_from_function(request.route.callback)
+            or ""
+        )
+
+    event["transaction"] = name
+    event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]}
+
+
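# A self-contained sketch of what the helper above produces for each style;
# SimpleNamespace stands in for a real bottle request/route.
from types import SimpleNamespace

def hello(name):  # hypothetical view callback
    return "Hello %s" % name

request = SimpleNamespace(
    route=SimpleNamespace(rule="/hello/<name>", name=None, callback=hello)
)

# transaction_style == "url" names the transaction after the route rule:
assert request.route.rule == "/hello/<name>"
# transaction_style == "endpoint" prefers route.name and falls back to the
# callback's dotted path (via transaction_from_function), e.g. "views.hello".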
 def _make_request_event_processor(app, request, integration):
     # type: (Bottle, LocalRequest, BottleIntegration) -> EventProcessor
-    def inner(event, hint):
-        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
-
-        try:
-            if integration.transaction_style == "endpoint":
-                event["transaction"] = request.route.name or transaction_from_function(
-                    request.route.callback
-                )
-            elif integration.transaction_style == "url":
-                event["transaction"] = request.route.rule
-        except Exception:
-            pass
+
+    def event_processor(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        _set_transaction_name_and_source(event, integration.transaction_style, request)
 
         with capture_internal_exceptions():
             BottleRequestExtractor(request).extract_into_event(event)
 
         return event
 
-    return inner
+    return event_processor
diff --git a/sentry_sdk/integrations/celery.py b/sentry_sdk/integrations/celery.py
index 743e2cfb50..984197316f 100644
--- a/sentry_sdk/integrations/celery.py
+++ b/sentry_sdk/integrations/celery.py
@@ -1,39 +1,79 @@
 from __future__ import absolute_import
 
 import sys
+import time
 
-from sentry_sdk.hub import Hub
-from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
-from sentry_sdk.tracing import Transaction
+try:
+    from typing import cast
+except ImportError:
+    cast = lambda _, o: o
+
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP
 from sentry_sdk._compat import reraise
+from sentry_sdk._functools import wraps
+from sentry_sdk.crons import capture_checkin, MonitorStatus
+from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.integrations.logging import ignore_logger
-from sentry_sdk._types import MYPY
-from sentry_sdk._functools import wraps
-
-if MYPY:
+from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, TRANSACTION_SOURCE_TASK
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    logger,
+    match_regex_list,
+)
+
+if TYPE_CHECKING:
     from typing import Any
-    from typing import TypeVar
     from typing import Callable
+    from typing import Dict
+    from typing import List
     from typing import Optional
-
-    from sentry_sdk._types import EventProcessor, Event, Hint, ExcInfo
+    from typing import Tuple
+    from typing import TypeVar
+    from typing import Union
+
+    from sentry_sdk.tracing import Span
+    from sentry_sdk._types import (
+        EventProcessor,
+        Event,
+        Hint,
+        ExcInfo,
+        MonitorConfig,
+        MonitorConfigScheduleType,
+        MonitorConfigScheduleUnit,
+    )
 
     F = TypeVar("F", bound=Callable[..., Any])
 
 
 try:
-    from celery import VERSION as CELERY_VERSION
+    from celery import VERSION as CELERY_VERSION  # type: ignore
+    from celery import Task, Celery
+    from celery.app.trace import task_has_custom
+    from celery.beat import Scheduler  # type: ignore
     from celery.exceptions import (  # type: ignore
-        SoftTimeLimitExceeded,
-        Retry,
         Ignore,
         Reject,
+        Retry,
+        SoftTimeLimitExceeded,
+    )
+    from celery.schedules import crontab, schedule  # type: ignore
+    from celery.signals import (  # type: ignore
+        task_failure,
+        task_success,
+        task_retry,
     )
-    from celery.app.trace import task_has_custom
 except ImportError:
     raise DidNotEnable("Celery not installed")
 
+try:
+    from redbeat.schedulers import RedBeatScheduler  # type: ignore
+except ImportError:
+    RedBeatScheduler = None
+
 
 CELERY_CONTROL_FLOW_EXCEPTIONS = (Retry, Ignore, Reject)
 
@@ -41,9 +81,21 @@
 class CeleryIntegration(Integration):
     identifier = "celery"
 
-    def __init__(self, propagate_traces=True):
-        # type: (bool) -> None
+    def __init__(
+        self,
+        propagate_traces=True,
+        monitor_beat_tasks=False,
+        exclude_beat_tasks=None,
+    ):
+        # type: (bool, bool, Optional[List[str]]) -> None
         self.propagate_traces = propagate_traces
+        self.monitor_beat_tasks = monitor_beat_tasks
+        self.exclude_beat_tasks = exclude_beat_tasks
+
+        if monitor_beat_tasks:
+            _patch_beat_apply_entry()
+            _patch_redbeat_maybe_due()
+            _setup_celery_beat_signals()
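# Typical opt-in for the Beat monitoring wired up above (the DSN and the
# exclude pattern are placeholders):
import sentry_sdk
from sentry_sdk.integrations.celery import CeleryIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
    integrations=[
        CeleryIntegration(
            monitor_beat_tasks=True,
            # schedule entries matching these regexes are not monitored
            exclude_beat_tasks=["cleanup-.*"],
        )
    ],
)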
 
     @staticmethod
     def setup_once():
@@ -91,6 +143,26 @@ def sentry_build_tracer(name, task, *args, **kwargs):
         ignore_logger("celery.redirected")
 
 
+def _now_seconds_since_epoch():
+    # type: () -> float
+    # We cannot use `time.perf_counter()` to measure the duration of a
+    # Celery task, because the start and the end of the task are recorded
+    # in different processes: the start in the Celery Beat process, the
+    # end in a Celery Worker process.
+    return time.time()
+
+
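# Minimal illustration of the cross-process arithmetic this enables: the
# start timestamp travels in the task headers from Beat to a worker, where
# only epoch (wall-clock) time stays comparable.
import time

start = time.time()             # recorded in the Celery Beat process
# ... task is queued and executed in another process ...
duration = time.time() - start  # computed in the Celery Worker process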
+class NoOpMgr:
+    def __enter__(self):
+        # type: () -> None
+        return None
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        # type: (Any, Any, Any) -> None
+        return None
+
+
 def _wrap_apply_async(f):
     # type: (F) -> F
     @wraps(f)
@@ -98,27 +170,80 @@ def apply_async(*args, **kwargs):
         # type: (*Any, **Any) -> Any
         hub = Hub.current
         integration = hub.get_integration(CeleryIntegration)
-        if integration is not None and integration.propagate_traces:
-            with hub.start_span(op="celery.submit", description=args[0].name) as span:
-                with capture_internal_exceptions():
-                    headers = dict(hub.iter_trace_propagation_headers(span))
-
-                    if headers:
-                        # Note: kwargs can contain headers=None, so no setdefault!
-                        # Unsure which backend though.
-                        kwarg_headers = kwargs.get("headers") or {}
-                        kwarg_headers.update(headers)
-
-                        # https://github.com/celery/celery/issues/4875
-                        #
-                        # Need to setdefault the inner headers too since other
-                        # tracing tools (dd-trace-py) also employ this exact
-                        # workaround and we don't want to break them.
-                        kwarg_headers.setdefault("headers", {}).update(headers)
-                        kwargs["headers"] = kwarg_headers
 
-                return f(*args, **kwargs)
-        else:
+        if integration is None:
+            return f(*args, **kwargs)
+
+        # Note: kwargs can contain headers=None, so no setdefault!
+        # It is unclear which backend passes headers=None.
+        kwarg_headers = kwargs.get("headers") or {}
+        propagate_traces = kwarg_headers.pop(
+            "sentry-propagate-traces", integration.propagate_traces
+        )
+
+        if not propagate_traces:
+            return f(*args, **kwargs)
+
+        try:
+            task_started_from_beat = args[1][0] == "BEAT"
+        except (IndexError, TypeError):
+            task_started_from_beat = False
+
+        task = args[0]
+
+        span_mgr = (
+            hub.start_span(op=OP.QUEUE_SUBMIT_CELERY, description=task.name)
+            if not task_started_from_beat
+            else NoOpMgr()
+        )  # type: Union[Span, NoOpMgr]
+
+        with span_mgr as span:
+            with capture_internal_exceptions():
+                headers = (
+                    dict(hub.iter_trace_propagation_headers(span))
+                    if span is not None
+                    else {}
+                )
+                if integration.monitor_beat_tasks:
+                    headers.update(
+                        {
+                            "sentry-monitor-start-timestamp-s": "%.9f"
+                            % _now_seconds_since_epoch(),
+                        }
+                    )
+
+                if headers:
+                    existing_baggage = kwarg_headers.get(BAGGAGE_HEADER_NAME)
+                    sentry_baggage = headers.get(BAGGAGE_HEADER_NAME)
+
+                    combined_baggage = sentry_baggage or existing_baggage
+                    if sentry_baggage and existing_baggage:
+                        combined_baggage = "{},{}".format(
+                            existing_baggage,
+                            sentry_baggage,
+                        )
+
+                    kwarg_headers.update(headers)
+                    if combined_baggage:
+                        kwarg_headers[BAGGAGE_HEADER_NAME] = combined_baggage
+
+                    # https://github.com/celery/celery/issues/4875
+                    #
+                    # Need to setdefault the inner headers too since other
+                    # tracing tools (dd-trace-py) also employ this exact
+                    # workaround and we don't want to break them.
+                    kwarg_headers.setdefault("headers", {}).update(headers)
+                    if combined_baggage:
+                        kwarg_headers["headers"][BAGGAGE_HEADER_NAME] = combined_baggage
+
+                    # Add the Sentry options potentially added in `sentry_apply_entry`
+                    # to the headers (done when auto-instrumenting Celery Beat tasks)
+                    for key, value in kwarg_headers.items():
+                        if key.startswith("sentry-"):
+                            kwarg_headers["headers"][key] = value
+
+                    kwargs["headers"] = kwarg_headers
+
             return f(*args, **kwargs)
 
     return apply_async  # type: ignore
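# Sketch of the baggage merge performed above: an existing `baggage` header
# and Sentry's own baggage are joined with a comma, matching how W3C Baggage
# lists concatenate (values are illustrative).
existing_baggage = "other-vendor=1"
sentry_baggage = "sentry-trace_id=abc123,sentry-environment=production"

combined_baggage = sentry_baggage or existing_baggage
if sentry_baggage and existing_baggage:
    combined_baggage = "{},{}".format(existing_baggage, sentry_baggage)
# -> "other-vendor=1,sentry-trace_id=abc123,sentry-environment=production"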
@@ -150,12 +275,12 @@ def _inner(*args, **kwargs):
             # Celery task objects are not a thing to be trusted. Even
             # something such as attribute access can fail.
             with capture_internal_exceptions():
-                transaction = Transaction.continue_from_headers(
+                transaction = continue_trace(
                     args[3].get("headers") or {},
-                    op="celery.task",
+                    op=OP.QUEUE_TASK_CELERY,
                     name="unknown celery task",
+                    source=TRANSACTION_SOURCE_TASK,
                 )
-
                 transaction.name = task.name
                 transaction.set_status("ok")
 
@@ -287,3 +412,269 @@ def sentry_workloop(*args, **kwargs):
                     hub.flush()
 
     Worker.workloop = sentry_workloop
+
+
+def _get_headers(task):
+    # type: (Task) -> Dict[str, Any]
+    headers = task.request.get("headers") or {}
+
+    # flatten nested headers
+    if "headers" in headers:
+        headers.update(headers["headers"])
+        del headers["headers"]
+
+    headers.update(task.request.get("properties") or {})
+
+    return headers
+
+
+def _get_humanized_interval(seconds):
+    # type: (float) -> Tuple[int, MonitorConfigScheduleUnit]
+    TIME_UNITS = (  # noqa: N806
+        ("day", 60 * 60 * 24.0),
+        ("hour", 60 * 60.0),
+        ("minute", 60.0),
+    )
+
+    seconds = float(seconds)
+    for unit, divider in TIME_UNITS:
+        if seconds >= divider:
+            interval = int(seconds / divider)
+            return (interval, cast("MonitorConfigScheduleUnit", unit))
+
+    return (int(seconds), "second")
+
+
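# Expected behaviour of _get_humanized_interval for representative inputs
# (the helper floors to the largest unit that fits):
#     86400.0 -> (1, "day")
#      7200.0 -> (2, "hour")
#        90.0 -> (1, "minute")   # 1.5 minutes, floored
#        30.0 -> (30, "second")  # falls through; rejected later as too short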
+def _get_monitor_config(celery_schedule, app, monitor_name):
+    # type: (Any, Celery, str) -> MonitorConfig
+    monitor_config = {}  # type: MonitorConfig
+    schedule_type = None  # type: Optional[MonitorConfigScheduleType]
+    schedule_value = None  # type: Optional[Union[str, int]]
+    schedule_unit = None  # type: Optional[MonitorConfigScheduleUnit]
+
+    if isinstance(celery_schedule, crontab):
+        schedule_type = "crontab"
+        schedule_value = (
+            "{0._orig_minute} "
+            "{0._orig_hour} "
+            "{0._orig_day_of_month} "
+            "{0._orig_month_of_year} "
+            "{0._orig_day_of_week}".format(celery_schedule)
+        )
+    elif isinstance(celery_schedule, schedule):
+        schedule_type = "interval"
+        (schedule_value, schedule_unit) = _get_humanized_interval(
+            celery_schedule.seconds
+        )
+
+        if schedule_unit == "second":
+            logger.warning(
+                "Intervals shorter than one minute are not supported by Sentry Crons. Monitor '%s' has an interval of %s seconds. Use the `exclude_beat_tasks` option in the celery integration to exclude it.",
+                monitor_name,
+                schedule_value,
+            )
+            return {}
+
+    else:
+        logger.warning(
+            "Celery schedule type '%s' not supported by Sentry Crons.",
+            type(celery_schedule),
+        )
+        return {}
+
+    monitor_config["schedule"] = {}
+    monitor_config["schedule"]["type"] = schedule_type
+    monitor_config["schedule"]["value"] = schedule_value
+
+    if schedule_unit is not None:
+        monitor_config["schedule"]["unit"] = schedule_unit
+
+    monitor_config["timezone"] = (
+        (
+            hasattr(celery_schedule, "tz")
+            and celery_schedule.tz is not None
+            and str(celery_schedule.tz)
+        )
+        or app.timezone
+        or "UTC"
+    )
+
+    return monitor_config
+
+
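# Illustrative return values of _get_monitor_config, assuming the schedule
# carries no timezone and app.timezone is unset (so "UTC" applies):
crontab_monitor_config = {  # e.g. crontab(minute="0", hour="*/4")
    "schedule": {"type": "crontab", "value": "0 */4 * * *"},
    "timezone": "UTC",
}
interval_monitor_config = {  # e.g. schedule(run_every=7200)
    "schedule": {"type": "interval", "value": 2, "unit": "hour"},
    "timezone": "UTC",
}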
+def _patch_beat_apply_entry():
+    # type: () -> None
+    original_apply_entry = Scheduler.apply_entry
+
+    def sentry_apply_entry(*args, **kwargs):
+        # type: (*Any, **Any) -> None
+        scheduler, schedule_entry = args
+        app = scheduler.app
+
+        celery_schedule = schedule_entry.schedule
+        monitor_name = schedule_entry.name
+
+        hub = Hub.current
+        integration = hub.get_integration(CeleryIntegration)
+        if integration is None:
+            return original_apply_entry(*args, **kwargs)
+
+        if match_regex_list(monitor_name, integration.exclude_beat_tasks):
+            return original_apply_entry(*args, **kwargs)
+
+        with hub.configure_scope() as scope:
+            # When tasks are started from Celery Beat, make sure each task has its own trace.
+            scope.set_new_propagation_context()
+
+            monitor_config = _get_monitor_config(celery_schedule, app, monitor_name)
+
+            is_supported_schedule = bool(monitor_config)
+            if is_supported_schedule:
+                headers = schedule_entry.options.pop("headers", {})
+                headers.update(
+                    {
+                        "sentry-monitor-slug": monitor_name,
+                        "sentry-monitor-config": monitor_config,
+                    }
+                )
+
+                check_in_id = capture_checkin(
+                    monitor_slug=monitor_name,
+                    monitor_config=monitor_config,
+                    status=MonitorStatus.IN_PROGRESS,
+                )
+                headers.update({"sentry-monitor-check-in-id": check_in_id})
+
+                # Set the Sentry configuration in the options of the ScheduleEntry.
+                # Those will be picked up in `apply_async` and added to the headers.
+                schedule_entry.options["headers"] = headers
+
+            return original_apply_entry(*args, **kwargs)
+
+    Scheduler.apply_entry = sentry_apply_entry
+
+
+def _patch_redbeat_maybe_due():
+    # type: () -> None
+
+    if RedBeatScheduler is None:
+        return
+
+    original_maybe_due = RedBeatScheduler.maybe_due
+
+    def sentry_maybe_due(*args, **kwargs):
+        # type: (*Any, **Any) -> None
+        scheduler, schedule_entry = args
+        app = scheduler.app
+
+        celery_schedule = schedule_entry.schedule
+        monitor_name = schedule_entry.name
+
+        hub = Hub.current
+        integration = hub.get_integration(CeleryIntegration)
+        if integration is None:
+            return original_maybe_due(*args, **kwargs)
+
+        if match_regex_list(monitor_name, integration.exclude_beat_tasks):
+            return original_maybe_due(*args, **kwargs)
+
+        with hub.configure_scope() as scope:
+            # When tasks are started from Celery Beat, make sure each task has its own trace.
+            scope.set_new_propagation_context()
+
+            monitor_config = _get_monitor_config(celery_schedule, app, monitor_name)
+
+            is_supported_schedule = bool(monitor_config)
+            if is_supported_schedule:
+                headers = schedule_entry.options.pop("headers", {})
+                headers.update(
+                    {
+                        "sentry-monitor-slug": monitor_name,
+                        "sentry-monitor-config": monitor_config,
+                    }
+                )
+
+                check_in_id = capture_checkin(
+                    monitor_slug=monitor_name,
+                    monitor_config=monitor_config,
+                    status=MonitorStatus.IN_PROGRESS,
+                )
+                headers.update({"sentry-monitor-check-in-id": check_in_id})
+
+                # Set the Sentry configuration in the options of the ScheduleEntry.
+                # Those will be picked up in `apply_async` and added to the headers.
+                schedule_entry.options["headers"] = headers
+
+            return original_maybe_due(*args, **kwargs)
+
+    RedBeatScheduler.maybe_due = sentry_maybe_due
+
+
+def _setup_celery_beat_signals():
+    # type: () -> None
+    task_success.connect(crons_task_success)
+    task_failure.connect(crons_task_failure)
+    task_retry.connect(crons_task_retry)
+
+
+def crons_task_success(sender, **kwargs):
+    # type: (Task, Dict[Any, Any]) -> None
+    logger.debug("celery_task_success %s", sender)
+    headers = _get_headers(sender)
+
+    if "sentry-monitor-slug" not in headers:
+        return
+
+    monitor_config = headers.get("sentry-monitor-config", {})
+
+    start_timestamp_s = float(headers["sentry-monitor-start-timestamp-s"])
+
+    capture_checkin(
+        monitor_slug=headers["sentry-monitor-slug"],
+        monitor_config=monitor_config,
+        check_in_id=headers["sentry-monitor-check-in-id"],
+        duration=_now_seconds_since_epoch() - start_timestamp_s,
+        status=MonitorStatus.OK,
+    )
+
+
+def crons_task_failure(sender, **kwargs):
+    # type: (Task, Dict[Any, Any]) -> None
+    logger.debug("celery_task_failure %s", sender)
+    headers = _get_headers(sender)
+
+    if "sentry-monitor-slug" not in headers:
+        return
+
+    monitor_config = headers.get("sentry-monitor-config", {})
+
+    start_timestamp_s = float(headers["sentry-monitor-start-timestamp-s"])
+
+    capture_checkin(
+        monitor_slug=headers["sentry-monitor-slug"],
+        monitor_config=monitor_config,
+        check_in_id=headers["sentry-monitor-check-in-id"],
+        duration=_now_seconds_since_epoch() - start_timestamp_s,
+        status=MonitorStatus.ERROR,
+    )
+
+
+def crons_task_retry(sender, **kwargs):
+    # type: (Task, Dict[Any, Any]) -> None
+    logger.debug("celery_task_retry %s", sender)
+    headers = _get_headers(sender)
+
+    if "sentry-monitor-slug" not in headers:
+        return
+
+    monitor_config = headers.get("sentry-monitor-config", {})
+
+    start_timestamp_s = float(headers["sentry-monitor-start-timestamp-s"])
+
+    capture_checkin(
+        monitor_slug=headers["sentry-monitor-slug"],
+        monitor_config=monitor_config,
+        check_in_id=headers["sentry-monitor-check-in-id"],
+        duration=_now_seconds_since_epoch() - start_timestamp_s,
+        status=MonitorStatus.ERROR,
+    )
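# The three handlers above close the check-in that `sentry_apply_entry`
# opened with status IN_PROGRESS; conceptually the closing call is
# (slug, id, and duration are illustrative):
capture_checkin(
    monitor_slug="my-scheduled-task",
    check_in_id="0123456789abcdef",
    duration=12.5,  # wall-clock seconds via _now_seconds_since_epoch()
    status=MonitorStatus.OK,  # or MonitorStatus.ERROR on failure/retry
)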
diff --git a/sentry_sdk/integrations/chalice.py b/sentry_sdk/integrations/chalice.py
index 109862bd90..25d8b4ac52 100644
--- a/sentry_sdk/integrations/chalice.py
+++ b/sentry_sdk/integrations/chalice.py
@@ -4,18 +4,20 @@
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.integrations.aws_lambda import _make_request_event_processor
+from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT
 from sentry_sdk.utils import (
     capture_internal_exceptions,
     event_from_exception,
+    parse_version,
 )
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk._functools import wraps
 
 import chalice  # type: ignore
 from chalice import Chalice, ChaliceViewError
 from chalice.app import EventSourceHandler as ChaliceEventSourceHandler  # type: ignore
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Dict
     from typing import TypeVar
@@ -65,7 +67,11 @@ def wrapped_view_function(**function_args):
         with hub.push_scope() as scope:
             with capture_internal_exceptions():
                 configured_time = app.lambda_context.get_remaining_time_in_millis()
-                scope.transaction = app.lambda_context.function_name
+                scope.set_transaction_name(
+                    app.lambda_context.function_name,
+                    source=TRANSACTION_SOURCE_COMPONENT,
+                )
+
                 scope.add_event_processor(
                     _make_request_event_processor(
                         app.current_request.to_dict(),
@@ -97,10 +103,12 @@ class ChaliceIntegration(Integration):
     @staticmethod
     def setup_once():
         # type: () -> None
-        try:
-            version = tuple(map(int, CHALICE_VERSION.split(".")[:3]))
-        except (ValueError, TypeError):
+
+        version = parse_version(CHALICE_VERSION)
+
+        if version is None:
             raise DidNotEnable("Unparsable Chalice version: {}".format(CHALICE_VERSION))
+
         if version < (1, 20):
             old_get_view_function_response = Chalice._get_view_function_response
         else:
diff --git a/sentry_sdk/integrations/clickhouse_driver.py b/sentry_sdk/integrations/clickhouse_driver.py
new file mode 100644
index 0000000000..a09e567118
--- /dev/null
+++ b/sentry_sdk/integrations/clickhouse_driver.py
@@ -0,0 +1,155 @@
+from sentry_sdk import Hub
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk.hub import _should_send_default_pii
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.tracing import Span
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.utils import capture_internal_exceptions
+
+from typing import TypeVar
+
+# Hack to get new Python features working in older Python versions
+# without introducing a hard dependency on `typing_extensions`
+# from: https://stackoverflow.com/a/71944042/300572
+if TYPE_CHECKING:
+    from typing import ParamSpec, Callable
+else:
+    # Fake ParamSpec
+    class ParamSpec:
+        def __init__(self, _):
+            self.args = None
+            self.kwargs = None
+
+    # Callable[anything] will return None
+    class _Callable:
+        def __getitem__(self, _):
+            return None
+
+    # Make instances
+    Callable = _Callable()
+
+
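# What the fallback above buys at runtime on Pythons without ParamSpec:
# annotations such as `Callable[P, T]` still evaluate, because the fake
# ParamSpec constructs a dummy object and `Callable[...]` yields None
# instead of raising.
P_example = ParamSpec("P_example")  # dummy; .args and .kwargs are None
NotAType = Callable[[int], str]     # None at runtime, a real type for mypy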
+try:
+    import clickhouse_driver  # type: ignore[import-not-found]
+
+except ImportError:
+    raise DidNotEnable("clickhouse-driver not installed.")
+
+if clickhouse_driver.VERSION < (0, 2, 0):
+    raise DidNotEnable("clickhouse-driver >= 0.2.0 required")
+
+
+class ClickhouseDriverIntegration(Integration):
+    identifier = "clickhouse_driver"
+
+    @staticmethod
+    def setup_once() -> None:
+        # Every query is done using the Connection's `send_query` function
+        clickhouse_driver.connection.Connection.send_query = _wrap_start(
+            clickhouse_driver.connection.Connection.send_query
+        )
+
+        # If the query contains parameters, the `send_data` function is
+        # used to send those parameters to ClickHouse
+        clickhouse_driver.client.Client.send_data = _wrap_send_data(
+            clickhouse_driver.client.Client.send_data
+        )
+
+        # Every query ends either with the Client's `receive_end_of_query` (no result expected)
+        # or its `receive_result` (result expected)
+        clickhouse_driver.client.Client.receive_end_of_query = _wrap_end(
+            clickhouse_driver.client.Client.receive_end_of_query
+        )
+        if hasattr(clickhouse_driver.client.Client, "receive_end_of_insert_query"):
+            # In 0.2.7, insert queries are handled separately via `receive_end_of_insert_query`
+            clickhouse_driver.client.Client.receive_end_of_insert_query = _wrap_end(
+                clickhouse_driver.client.Client.receive_end_of_insert_query
+            )
+        clickhouse_driver.client.Client.receive_result = _wrap_end(
+            clickhouse_driver.client.Client.receive_result
+        )
+
+
+P = ParamSpec("P")
+T = TypeVar("T")
+
+
+def _wrap_start(f: Callable[P, T]) -> Callable[P, T]:
+    def _inner(*args: P.args, **kwargs: P.kwargs) -> T:
+        hub = Hub.current
+        if hub.get_integration(ClickhouseDriverIntegration) is None:
+            return f(*args, **kwargs)
+        connection = args[0]
+        query = args[1]
+        query_id = args[2] if len(args) > 2 else kwargs.get("query_id")
+        params = args[3] if len(args) > 3 else kwargs.get("params")
+
+        span = hub.start_span(op=OP.DB, description=query)
+
+        connection._sentry_span = span  # type: ignore[attr-defined]
+
+        _set_db_data(span, connection)
+
+        span.set_data("query", query)
+
+        if query_id:
+            span.set_data("db.query_id", query_id)
+
+        if params and _should_send_default_pii():
+            span.set_data("db.params", params)
+
+        # run the original code
+        ret = f(*args, **kwargs)
+
+        return ret
+
+    return _inner
+
+
+def _wrap_end(f: Callable[P, T]) -> Callable[P, T]:
+    def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T:
+        res = f(*args, **kwargs)
+        instance = args[0]
+        span = instance.connection._sentry_span  # type: ignore[attr-defined]
+
+        if span is not None:
+            if res is not None and _should_send_default_pii():
+                span.set_data("db.result", res)
+
+            with capture_internal_exceptions():
+                span.hub.add_breadcrumb(
+                    message=span._data.pop("query"), category="query", data=span._data
+                )
+
+            span.finish()
+
+        return res
+
+    return _inner_end
+
+
+def _wrap_send_data(f: Callable[P, T]) -> Callable[P, T]:
+    def _inner_send_data(*args: P.args, **kwargs: P.kwargs) -> T:
+        instance = args[0]  # type: clickhouse_driver.client.Client
+        data = args[2]
+        span = instance.connection._sentry_span
+
+        _set_db_data(span, instance.connection)
+
+        if _should_send_default_pii():
+            db_params = span._data.get("db.params", [])
+            db_params.extend(data)
+            span.set_data("db.params", db_params)
+
+        return f(*args, **kwargs)
+
+    return _inner_send_data
+
+
+def _set_db_data(
+    span: Span, connection: clickhouse_driver.connection.Connection
+) -> None:
+    span.set_data(SPANDATA.DB_SYSTEM, "clickhouse")
+    span.set_data(SPANDATA.SERVER_ADDRESS, connection.host)
+    span.set_data(SPANDATA.SERVER_PORT, connection.port)
+    span.set_data(SPANDATA.DB_NAME, connection.database)
+    span.set_data(SPANDATA.DB_USER, connection.user)
diff --git a/sentry_sdk/integrations/cloud_resource_context.py b/sentry_sdk/integrations/cloud_resource_context.py
new file mode 100644
index 0000000000..695bf17d38
--- /dev/null
+++ b/sentry_sdk/integrations/cloud_resource_context.py
@@ -0,0 +1,258 @@
+import json
+import urllib3
+
+from sentry_sdk.integrations import Integration
+from sentry_sdk.api import set_context
+from sentry_sdk.utils import logger
+
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Dict
+
+
+CONTEXT_TYPE = "cloud_resource"
+
+AWS_METADATA_HOST = "169.254.169.254"
+AWS_TOKEN_URL = "http://{}/latest/api/token".format(AWS_METADATA_HOST)
+AWS_METADATA_URL = "http://{}/latest/dynamic/instance-identity/document".format(
+    AWS_METADATA_HOST
+)
+
+GCP_METADATA_HOST = "metadata.google.internal"
+GCP_METADATA_URL = "http://{}/computeMetadata/v1/?recursive=true".format(
+    GCP_METADATA_HOST
+)
+
+
+class CLOUD_PROVIDER:  # noqa: N801
+    """
+    Name of the cloud provider.
+    see https://opentelemetry.io/docs/reference/specification/resource/semantic_conventions/cloud/
+    """
+
+    ALIBABA = "alibaba_cloud"
+    AWS = "aws"
+    AZURE = "azure"
+    GCP = "gcp"
+    IBM = "ibm_cloud"
+    TENCENT = "tencent_cloud"
+
+
+class CLOUD_PLATFORM:  # noqa: N801
+    """
+    The cloud platform.
+    see https://opentelemetry.io/docs/reference/specification/resource/semantic_conventions/cloud/
+    """
+
+    AWS_EC2 = "aws_ec2"
+    GCP_COMPUTE_ENGINE = "gcp_compute_engine"
+
+
+class CloudResourceContextIntegration(Integration):
+    """
+    Adds cloud resource context to the Sentry scope
+    """
+
+    identifier = "cloudresourcecontext"
+
+    cloud_provider = ""
+
+    aws_token = ""
+    http = urllib3.PoolManager()
+
+    gcp_metadata = None
+
+    def __init__(self, cloud_provider=""):
+        # type: (str) -> None
+        CloudResourceContextIntegration.cloud_provider = cloud_provider
+
+    @classmethod
+    def _is_aws(cls):
+        # type: () -> bool
+        try:
+            r = cls.http.request(
+                "PUT",
+                AWS_TOKEN_URL,
+                headers={"X-aws-ec2-metadata-token-ttl-seconds": "60"},
+            )
+
+            if r.status != 200:
+                return False
+
+            cls.aws_token = r.data.decode()
+            return True
+
+        except Exception:
+            return False
+
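# The probe above is the AWS IMDSv2 handshake: a PUT to the link-local
# token endpoint only answers on EC2, so a 200 both detects AWS and yields
# the token for the metadata read that follows. A standalone sketch (the
# timeout is added here for illustration; the integration relies on the
# surrounding try/except instead):
import urllib3

http = urllib3.PoolManager(timeout=1.0)
try:
    r = http.request(
        "PUT",
        "http://169.254.169.254/latest/api/token",
        headers={"X-aws-ec2-metadata-token-ttl-seconds": "60"},
    )
    token = r.data.decode() if r.status == 200 else None
except Exception:
    token = None  # not on EC2, or IMDS unreachable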
+    @classmethod
+    def _get_aws_context(cls):
+        # type: () -> Dict[str, str]
+        ctx = {
+            "cloud.provider": CLOUD_PROVIDER.AWS,
+            "cloud.platform": CLOUD_PLATFORM.AWS_EC2,
+        }
+
+        try:
+            r = cls.http.request(
+                "GET",
+                AWS_METADATA_URL,
+                headers={"X-aws-ec2-metadata-token": cls.aws_token},
+            )
+
+            if r.status != 200:
+                return ctx
+
+            data = json.loads(r.data.decode("utf-8"))
+
+            try:
+                ctx["cloud.account.id"] = data["accountId"]
+            except Exception:
+                pass
+
+            try:
+                ctx["cloud.availability_zone"] = data["availabilityZone"]
+            except Exception:
+                pass
+
+            try:
+                ctx["cloud.region"] = data["region"]
+            except Exception:
+                pass
+
+            try:
+                ctx["host.id"] = data["instanceId"]
+            except Exception:
+                pass
+
+            try:
+                ctx["host.type"] = data["instanceType"]
+            except Exception:
+                pass
+
+        except Exception:
+            pass
+
+        return ctx
+
+    @classmethod
+    def _is_gcp(cls):
+        # type: () -> bool
+        try:
+            r = cls.http.request(
+                "GET",
+                GCP_METADATA_URL,
+                headers={"Metadata-Flavor": "Google"},
+            )
+
+            if r.status != 200:
+                return False
+
+            cls.gcp_metadata = json.loads(r.data.decode("utf-8"))
+            return True
+
+        except Exception:
+            return False
+
+    @classmethod
+    def _get_gcp_context(cls):
+        # type: () -> Dict[str, str]
+        ctx = {
+            "cloud.provider": CLOUD_PROVIDER.GCP,
+            "cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE,
+        }
+
+        try:
+            if cls.gcp_metadata is None:
+                r = cls.http.request(
+                    "GET",
+                    GCP_METADATA_URL,
+                    headers={"Metadata-Flavor": "Google"},
+                )
+
+                if r.status != 200:
+                    return ctx
+
+                cls.gcp_metadata = json.loads(r.data.decode("utf-8"))
+
+            try:
+                ctx["cloud.account.id"] = cls.gcp_metadata["project"]["projectId"]
+            except Exception:
+                pass
+
+            try:
+                ctx["cloud.availability_zone"] = cls.gcp_metadata["instance"][
+                    "zone"
+                ].split("/")[-1]
+            except Exception:
+                pass
+
+            try:
+                # Only populated in Google Cloud Run
+                ctx["cloud.region"] = cls.gcp_metadata["instance"]["region"].split("/")[
+                    -1
+                ]
+            except Exception:
+                pass
+
+            try:
+                ctx["host.id"] = cls.gcp_metadata["instance"]["id"]
+            except Exception:
+                pass
+
+        except Exception:
+            pass
+
+        return ctx
+
+    @classmethod
+    def _get_cloud_provider(cls):
+        # type: () -> str
+        if cls._is_aws():
+            return CLOUD_PROVIDER.AWS
+
+        if cls._is_gcp():
+            return CLOUD_PROVIDER.GCP
+
+        return ""
+
+    @classmethod
+    def _get_cloud_resource_context(cls):
+        # type: () -> Dict[str, str]
+        cloud_provider = (
+            cls.cloud_provider
+            if cls.cloud_provider != ""
+            else CloudResourceContextIntegration._get_cloud_provider()
+        )
+        if cloud_provider in context_getters.keys():
+            return context_getters[cloud_provider]()
+
+        return {}
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        cloud_provider = CloudResourceContextIntegration.cloud_provider
+        unsupported_cloud_provider = (
+            cloud_provider != "" and cloud_provider not in context_getters.keys()
+        )
+
+        if unsupported_cloud_provider:
+            logger.warning(
+                "Invalid value for cloud_provider: %s (must be in %s). Falling back to autodetection...",
+                CloudResourceContextIntegration.cloud_provider,
+                list(context_getters.keys()),
+            )
+
+        context = CloudResourceContextIntegration._get_cloud_resource_context()
+        if context != {}:
+            set_context(CONTEXT_TYPE, context)
+
+
+# Map of the currently supported cloud providers
+# to the functions that extract their context
+context_getters = {
+    CLOUD_PROVIDER.AWS: CloudResourceContextIntegration._get_aws_context,
+    CLOUD_PROVIDER.GCP: CloudResourceContextIntegration._get_gcp_context,
+}
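# Enabling the integration; passing cloud_provider explicitly skips the
# autodetection probes (DSN is a placeholder):
import sentry_sdk
from sentry_sdk.integrations.cloud_resource_context import (
    CloudResourceContextIntegration,
)

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
    integrations=[CloudResourceContextIntegration(cloud_provider="aws")],
)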
diff --git a/sentry_sdk/integrations/dedupe.py b/sentry_sdk/integrations/dedupe.py
index b023df2042..04208f608a 100644
--- a/sentry_sdk/integrations/dedupe.py
+++ b/sentry_sdk/integrations/dedupe.py
@@ -3,9 +3,9 @@
 from sentry_sdk.integrations import Integration
 from sentry_sdk.scope import add_global_event_processor
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Optional
 
     from sentry_sdk._types import Event, Hint
diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py
index d2ca12be4a..a38674f09d 100644
--- a/sentry_sdk/integrations/django/__init__.py
+++ b/sentry_sdk/integrations/django/__init__.py
@@ -1,18 +1,26 @@
 # -*- coding: utf-8 -*-
 from __future__ import absolute_import
 
+import inspect
 import sys
 import threading
 import weakref
+from importlib import import_module
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._compat import string_types, text_type
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk.db.explain_plan.django import attach_explain_plan_to_span
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.scope import add_global_event_processor
 from sentry_sdk.serializer import add_global_repr_processor
-from sentry_sdk.tracing_utils import record_sql_queries
+from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_URL
+from sentry_sdk.tracing_utils import add_query_source, record_sql_queries
 from sentry_sdk.utils import (
+    AnnotatedValue,
     HAS_REAL_CONTEXTVARS,
     CONTEXTVARS_ERROR_MESSAGE,
+    SENSITIVE_DATA_SUBSTITUTE,
     logger,
     capture_internal_exceptions,
     event_from_exception,
@@ -26,26 +34,45 @@
 
 try:
     from django import VERSION as DJANGO_VERSION
+    from django.conf import settings as django_settings
     from django.core import signals
+    from django.conf import settings
 
     try:
         from django.urls import resolve
     except ImportError:
         from django.core.urlresolvers import resolve
+
+    try:
+        from django.urls import Resolver404
+    except ImportError:
+        from django.core.urlresolvers import Resolver404
+
+    # Only available in Django 3.0+
+    try:
+        from django.core.handlers.asgi import ASGIRequest
+    except Exception:
+        ASGIRequest = None
+
 except ImportError:
     raise DidNotEnable("Django not installed")
 
-
 from sentry_sdk.integrations.django.transactions import LEGACY_RESOLVER
 from sentry_sdk.integrations.django.templates import (
     get_template_frame_from_exception,
     patch_templates,
 )
 from sentry_sdk.integrations.django.middleware import patch_django_middlewares
+from sentry_sdk.integrations.django.signals_handlers import patch_signals
 from sentry_sdk.integrations.django.views import patch_views
 
+if DJANGO_VERSION[:2] > (1, 8):
+    from sentry_sdk.integrations.django.caching import patch_caching
+else:
+    patch_caching = None  # type: ignore
+
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Callable
     from typing import Dict
@@ -58,6 +85,7 @@
     from django.http.request import QueryDict
     from django.utils.datastructures import MultiValueDict
 
+    from sentry_sdk.tracing import Span
     from sentry_sdk.scope import Scope
     from sentry_sdk.integrations.wsgi import _ScopedResponse
     from sentry_sdk._types import Event, Hint, EventProcessor, NotImplementedType
@@ -82,11 +110,21 @@ def is_authenticated(request_user):
 class DjangoIntegration(Integration):
     identifier = "django"
 
-    transaction_style = None
+    transaction_style = ""
     middleware_spans = None
-
-    def __init__(self, transaction_style="url", middleware_spans=True):
-        # type: (str, bool) -> None
+    signals_spans = None
+    cache_spans = None
+    signals_denylist = []  # type: list[signals.Signal]
+
+    def __init__(
+        self,
+        transaction_style="url",
+        middleware_spans=True,
+        signals_spans=True,
+        cache_spans=False,
+        signals_denylist=None,
+    ):
+        # type: (str, bool, bool, bool, Optional[list[signals.Signal]]) -> None
         if transaction_style not in TRANSACTION_STYLE_VALUES:
             raise ValueError(
                 "Invalid value for transaction_style: %s (must be in %s)"
@@ -94,6 +132,9 @@ def __init__(self, transaction_style="url", middleware_spans=True):
             )
         self.transaction_style = transaction_style
         self.middleware_spans = middleware_spans
+        self.signals_spans = signals_spans
+        self.cache_spans = cache_spans
+        self.signals_denylist = signals_denylist or []
 
     @staticmethod
     def setup_once():
@@ -211,6 +252,10 @@ def _django_queryset_repr(value, hint):
         patch_django_middlewares()
         patch_views()
         patch_templates()
+        patch_signals()
+
+        if patch_caching is not None:
+            patch_caching()
 
 
 _DRF_PATCHED = False
@@ -319,6 +364,48 @@ def _patch_django_asgi_handler():
     patch_django_asgi_handler_impl(ASGIHandler)
 
 
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (Scope, str, WSGIRequest) -> None
+    try:
+        transaction_name = None
+        if transaction_style == "function_name":
+            fn = resolve(request.path).func
+            transaction_name = transaction_from_function(getattr(fn, "view_class", fn))
+
+        elif transaction_style == "url":
+            if hasattr(request, "urlconf"):
+                transaction_name = LEGACY_RESOLVER.resolve(
+                    request.path_info, urlconf=request.urlconf
+                )
+            else:
+                transaction_name = LEGACY_RESOLVER.resolve(request.path_info)
+
+        if transaction_name is None:
+            transaction_name = request.path_info
+            source = TRANSACTION_SOURCE_URL
+        else:
+            source = SOURCE_FOR_STYLE[transaction_style]
+
+        scope.set_transaction_name(
+            transaction_name,
+            source=source,
+        )
+    except Resolver404:
+        urlconf = import_module(settings.ROOT_URLCONF)
+        # This exception only gets thrown when transaction_style is `function_name`
+        # So we don't check here what style is configured
+        if hasattr(urlconf, "handler404"):
+            handler = urlconf.handler404
+            if isinstance(handler, string_types):
+                scope.transaction = handler
+            else:
+                scope.transaction = transaction_from_function(
+                    getattr(handler, "view_class", handler)
+                )
+    except Exception:
+        pass
+
+
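# How the styles above name a request such as GET /polls/123/ resolving to
# polls.views.detail (paths illustrative):
#     transaction_style="url"           -> "/polls/{pk}/" via LEGACY_RESOLVER
#     transaction_style="function_name" -> "polls.views.detail"
#     unresolvable path                 -> raw "/polls/123/",
#                                          tagged TRANSACTION_SOURCE_URL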
 def _before_get_response(request):
     # type: (WSGIRequest) -> None
     hub = Hub.current
@@ -330,24 +417,15 @@ def _before_get_response(request):
 
     with hub.configure_scope() as scope:
         # Rely on WSGI middleware to start a trace
-        try:
-            if integration.transaction_style == "function_name":
-                fn = resolve(request.path).func
-                scope.transaction = transaction_from_function(
-                    getattr(fn, "view_class", fn)
-                )
-            elif integration.transaction_style == "url":
-                scope.transaction = LEGACY_RESOLVER.resolve(request.path_info)
-        except Exception:
-            pass
+        _set_transaction_name_and_source(scope, integration.transaction_style, request)
 
         scope.add_event_processor(
-            _make_event_processor(weakref.ref(request), integration)
+            _make_wsgi_request_event_processor(weakref.ref(request), integration)
         )
 
 
-def _attempt_resolve_again(request, scope):
-    # type: (WSGIRequest, Scope) -> None
+def _attempt_resolve_again(request, scope, transaction_style):
+    # type: (WSGIRequest, Scope, str) -> None
     """
     Some django middlewares overwrite request.urlconf
     so we need to respect that contract,
@@ -356,13 +434,7 @@ def _attempt_resolve_again(request, scope):
     if not hasattr(request, "urlconf"):
         return
 
-    try:
-        scope.transaction = LEGACY_RESOLVER.resolve(
-            request.path_info,
-            urlconf=request.urlconf,
-        )
-    except Exception:
-        pass
+    _set_transaction_name_and_source(scope, transaction_style, request)
 
 
 def _after_get_response(request):
@@ -373,7 +445,7 @@ def _after_get_response(request):
         return
 
     with hub.configure_scope() as scope:
-        _attempt_resolve_again(request, scope)
+        _attempt_resolve_again(request, scope, integration.transaction_style)
 
 
 def _patch_get_response():
@@ -400,10 +472,10 @@ def sentry_patched_get_response(self, request):
         patch_get_response_async(BaseHandler, _before_get_response)
 
 
-def _make_event_processor(weak_request, integration):
+def _make_wsgi_request_event_processor(weak_request, integration):
     # type: (Callable[[], WSGIRequest], DjangoIntegration) -> EventProcessor
-    def event_processor(event, hint):
-        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+    def wsgi_request_event_processor(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
         # if the request is gone we are fine not logging the data from
         # it.  This might happen if the processor is pushed away to
         # another thread.
@@ -411,6 +483,11 @@ def event_processor(event, hint):
         if request is None:
             return event
 
+        django_3 = ASGIRequest is not None
+        if django_3 and type(request) == ASGIRequest:
+            # We have an `asgi_request_event_processor` for this.
+            return event
+
         try:
             drf_request = request._sentry_drf_request_backref()
             if drf_request is not None:
@@ -427,7 +504,7 @@ def event_processor(event, hint):
 
         return event
 
-    return event_processor
+    return wsgi_request_event_processor
 
 
 def _got_request_exception(request=None, **kwargs):
@@ -435,10 +512,9 @@ def _got_request_exception(request=None, **kwargs):
     hub = Hub.current
     integration = hub.get_integration(DjangoIntegration)
     if integration is not None:
-
         if request is not None and integration.transaction_style == "url":
             with hub.configure_scope() as scope:
-                _attempt_resolve_again(request, scope)
+                _attempt_resolve_again(request, scope, integration.transaction_style)
 
         # If an integration is there, a client has to be there.
         client = hub.client  # type: Any
@@ -457,8 +533,20 @@ def env(self):
         return self.request.META
 
     def cookies(self):
-        # type: () -> Dict[str, str]
-        return self.request.COOKIES
+        # type: () -> Dict[str, Union[str, AnnotatedValue]]
+        privacy_cookies = [
+            django_settings.CSRF_COOKIE_NAME,
+            django_settings.SESSION_COOKIE_NAME,
+        ]
+
+        clean_cookies = {}  # type: Dict[str, Union[str, AnnotatedValue]]
+        for key, val in self.request.COOKIES.items():
+            if key in privacy_cookies:
+                clean_cookies[key] = SENSITIVE_DATA_SUBSTITUTE
+            else:
+                clean_cookies[key] = val
+
+        return clean_cookies
 
     def raw_data(self):
         # type: () -> bytes
@@ -485,7 +573,7 @@ def parsed_body(self):
 
 
 def _set_user_info(request, event):
-    # type: (WSGIRequest, Dict[str, Any]) -> None
+    # type: (WSGIRequest, Event) -> None
     user_info = event.setdefault("user", {})
 
     user = getattr(request, "user", None)
@@ -540,8 +628,25 @@ def execute(self, sql, params=None):
 
         with record_sql_queries(
             hub, self.cursor, sql, params, paramstyle="format", executemany=False
-        ):
-            return real_execute(self, sql, params)
+        ) as span:
+            _set_db_data(span, self)
+            if hub.client:
+                options = hub.client.options["_experiments"].get("attach_explain_plans")
+                if options is not None:
+                    attach_explain_plan_to_span(
+                        span,
+                        self.cursor.connection,
+                        sql,
+                        params,
+                        self.mogrify,
+                        options,
+                    )
+            result = real_execute(self, sql, params)
+
+        with capture_internal_exceptions():
+            add_query_source(hub, span)
+
+        return result
 
     def executemany(self, sql, param_list):
         # type: (CursorWrapper, Any, List[Any]) -> Any
@@ -551,8 +656,15 @@ def executemany(self, sql, param_list):
 
         with record_sql_queries(
             hub, self.cursor, sql, param_list, paramstyle="format", executemany=True
-        ):
-            return real_executemany(self, sql, param_list)
+        ) as span:
+            _set_db_data(span, self)
+
+            result = real_executemany(self, sql, param_list)
+
+        with capture_internal_exceptions():
+            add_query_source(hub, span)
+
+        return result
 
     def connect(self):
         # type: (BaseDatabaseWrapper) -> None
@@ -563,10 +675,59 @@ def connect(self):
         with capture_internal_exceptions():
             hub.add_breadcrumb(message="connect", category="query")
 
-        with hub.start_span(op="db", description="connect"):
+        with hub.start_span(op=OP.DB, description="connect") as span:
+            _set_db_data(span, self)
             return real_connect(self)
 
     CursorWrapper.execute = execute
     CursorWrapper.executemany = executemany
     BaseDatabaseWrapper.connect = connect
     ignore_logger("django.db.backends")
+
+
+def _set_db_data(span, cursor_or_db):
+    # type: (Span, Any) -> None
+
+    db = cursor_or_db.db if hasattr(cursor_or_db, "db") else cursor_or_db
+    vendor = db.vendor
+    span.set_data(SPANDATA.DB_SYSTEM, vendor)
+
+    # Some custom backends override `__getattr__`, making it look like `cursor_or_db`
+    # actually has a `connection` and the `connection` has a `get_dsn_parameters`
+    # attribute, only to throw an error once you actually want to call it.
+    # Hence the `inspect` check whether `get_dsn_parameters` is an actual callable
+    # function.
+    is_psycopg2 = (
+        hasattr(cursor_or_db, "connection")
+        and hasattr(cursor_or_db.connection, "get_dsn_parameters")
+        and inspect.isroutine(cursor_or_db.connection.get_dsn_parameters)
+    )
+    if is_psycopg2:
+        connection_params = cursor_or_db.connection.get_dsn_parameters()
+    else:
+        is_psycopg3 = (
+            hasattr(cursor_or_db, "connection")
+            and hasattr(cursor_or_db.connection, "info")
+            and hasattr(cursor_or_db.connection.info, "get_parameters")
+            and inspect.isroutine(cursor_or_db.connection.info.get_parameters)
+        )
+        if is_psycopg3:
+            connection_params = cursor_or_db.connection.info.get_parameters()
+        else:
+            connection_params = db.get_connection_params()
+
+    db_name = connection_params.get("dbname") or connection_params.get("database")
+    if db_name is not None:
+        span.set_data(SPANDATA.DB_NAME, db_name)
+
+    server_address = connection_params.get("host")
+    if server_address is not None:
+        span.set_data(SPANDATA.SERVER_ADDRESS, server_address)
+
+    server_port = connection_params.get("port")
+    if server_port is not None:
+        span.set_data(SPANDATA.SERVER_PORT, text_type(server_port))
+
+    server_socket_address = connection_params.get("unix_socket")
+    if server_socket_address is not None:
+        span.set_data(SPANDATA.SERVER_SOCKET_ADDRESS, server_socket_address)
diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py
index 79916e94fb..e1ba678011 100644
--- a/sentry_sdk/integrations/django/asgi.py
+++ b/sentry_sdk/integrations/django/asgi.py
@@ -8,18 +8,56 @@
 
 import asyncio
 
+from django.core.handlers.wsgi import WSGIRequest
+
 from sentry_sdk import Hub, _functools
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import OP
+from sentry_sdk.hub import _should_send_default_pii
 
 from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+from sentry_sdk.utils import capture_internal_exceptions
+
 
-if MYPY:
-    from typing import Any
-    from typing import Union
-    from typing import Callable
+if TYPE_CHECKING:
+    from collections.abc import Callable
+    from typing import Any, Union
 
+    from django.core.handlers.asgi import ASGIRequest
     from django.http.response import HttpResponse
 
+    from sentry_sdk._types import Event, EventProcessor
+
+
+def _make_asgi_request_event_processor(request):
+    # type: (ASGIRequest) -> EventProcessor
+    def asgi_request_event_processor(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        # if the request is gone we are fine not logging the data from
+        # it.  This might happen if the processor is pushed away to
+        # another thread.
+        from sentry_sdk.integrations.django import (
+            DjangoRequestExtractor,
+            _set_user_info,
+        )
+
+        if request is None:
+            return event
+
+        if type(request) == WSGIRequest:
+            return event
+
+        with capture_internal_exceptions():
+            DjangoRequestExtractor(request).extract_into_event(event)
+
+        if _should_send_default_pii():
+            with capture_internal_exceptions():
+                _set_user_info(request, event)
+
+        return event
+
+    return asgi_request_event_processor
+
 
 def patch_django_asgi_handler_impl(cls):
     # type: (Any) -> None
@@ -30,16 +68,38 @@ def patch_django_asgi_handler_impl(cls):
 
     async def sentry_patched_asgi_handler(self, scope, receive, send):
         # type: (Any, Any, Any, Any) -> Any
-        if Hub.current.get_integration(DjangoIntegration) is None:
+        hub = Hub.current
+        integration = hub.get_integration(DjangoIntegration)
+        if integration is None:
             return await old_app(self, scope, receive, send)
 
         middleware = SentryAsgiMiddleware(
             old_app.__get__(self, cls), unsafe_context_data=True
         )._run_asgi3
+
         return await middleware(scope, receive, send)
 
     cls.__call__ = sentry_patched_asgi_handler
 
+    modern_django_asgi_support = hasattr(cls, "create_request")
+    if modern_django_asgi_support:
+        old_create_request = cls.create_request
+
+        def sentry_patched_create_request(self, *args, **kwargs):
+            # type: (Any, *Any, **Any) -> Any
+            hub = Hub.current
+            integration = hub.get_integration(DjangoIntegration)
+            if integration is None:
+                return old_create_request(self, *args, **kwargs)
+
+            with hub.configure_scope() as scope:
+                request, error_response = old_create_request(self, *args, **kwargs)
+                scope.add_event_processor(_make_asgi_request_event_processor(request))
+
+                return request, error_response
+
+        cls.create_request = sentry_patched_create_request
+
 
 def patch_get_response_async(cls, _before_get_response):
     # type: (Any, Any) -> None
@@ -60,7 +120,6 @@ def patch_channels_asgi_handler_impl(cls):
     from sentry_sdk.integrations.django import DjangoIntegration
 
     if channels.__version__ < "3.0.0":
-
         old_app = cls.__call__
 
         async def sentry_patched_asgi_handler(self, receive, send):
@@ -88,10 +147,14 @@ def wrap_async_view(hub, callback):
     async def sentry_wrapped_callback(request, *args, **kwargs):
         # type: (Any, *Any, **Any) -> Any
 
-        with hub.start_span(
-            op="django.view", description=request.resolver_match.view_name
-        ):
-            return await callback(request, *args, **kwargs)
+        with hub.configure_scope() as sentry_scope:
+            if sentry_scope.profile is not None:
+                sentry_scope.profile.update_active_thread_id()
+
+            with hub.start_span(
+                op=OP.VIEW_RENDER, description=request.resolver_match.view_name
+            ):
+                return await callback(request, *args, **kwargs)
 
     return sentry_wrapped_callback
 
@@ -104,7 +167,7 @@ def _asgi_middleware_mixin_factory(_check_middleware_span):
     """
 
     class SentryASGIMixin:
-        if MYPY:
+        if TYPE_CHECKING:
             _inner = None
 
         def __init__(self, get_response):
diff --git a/sentry_sdk/integrations/django/caching.py b/sentry_sdk/integrations/django/caching.py
new file mode 100644
index 0000000000..921f8e485d
--- /dev/null
+++ b/sentry_sdk/integrations/django/caching.py
@@ -0,0 +1,117 @@
+import functools
+from typing import TYPE_CHECKING
+
+from django import VERSION as DJANGO_VERSION
+from django.core.cache import CacheHandler
+
+from sentry_sdk import Hub
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk._compat import text_type
+
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Callable
+
+
+METHODS_TO_INSTRUMENT = [
+    "get",
+    "get_many",
+]
+
+
+def _get_span_description(method_name, args, kwargs):
+    # type: (str, Any, Any) -> str
+    description = "{} ".format(method_name)
+
+    if args is not None and len(args) >= 1:
+        description += text_type(args[0])
+    elif kwargs is not None and "key" in kwargs:
+        description += text_type(kwargs["key"])
+
+    return description
+
+
+def _patch_cache_method(cache, method_name):
+    # type: (CacheHandler, str) -> None
+    from sentry_sdk.integrations.django import DjangoIntegration
+
+    def _instrument_call(cache, method_name, original_method, args, kwargs):
+        # type: (CacheHandler, str, Callable[..., Any], Any, Any) -> Any
+        hub = Hub.current
+        integration = hub.get_integration(DjangoIntegration)
+        if integration is None or not integration.cache_spans:
+            return original_method(*args, **kwargs)
+
+        description = _get_span_description(method_name, args, kwargs)
+
+        with hub.start_span(op=OP.CACHE_GET_ITEM, description=description) as span:
+            value = original_method(*args, **kwargs)
+
+            if value:
+                span.set_data(SPANDATA.CACHE_HIT, True)
+
+                size = len(text_type(value))
+                span.set_data(SPANDATA.CACHE_ITEM_SIZE, size)
+
+            else:
+                span.set_data(SPANDATA.CACHE_HIT, False)
+
+            return value
+
+    original_method = getattr(cache, method_name)
+
+    @functools.wraps(original_method)
+    def sentry_method(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        return _instrument_call(cache, method_name, original_method, args, kwargs)
+
+    setattr(cache, method_name, sentry_method)
+
+
+def _patch_cache(cache):
+    # type: (CacheHandler) -> None
+    if not hasattr(cache, "_sentry_patched"):
+        for method_name in METHODS_TO_INSTRUMENT:
+            _patch_cache_method(cache, method_name)
+        cache._sentry_patched = True
+
+
+def patch_caching():
+    # type: () -> None
+    from sentry_sdk.integrations.django import DjangoIntegration
+
+    if not hasattr(CacheHandler, "_sentry_patched"):
+        if DJANGO_VERSION < (3, 2):
+            original_get_item = CacheHandler.__getitem__
+
+            @functools.wraps(original_get_item)
+            def sentry_get_item(self, alias):
+                # type: (CacheHandler, str) -> Any
+                cache = original_get_item(self, alias)
+
+                integration = Hub.current.get_integration(DjangoIntegration)
+                if integration and integration.cache_spans:
+                    _patch_cache(cache)
+
+                return cache
+
+            CacheHandler.__getitem__ = sentry_get_item
+            CacheHandler._sentry_patched = True
+
+        else:
+            original_create_connection = CacheHandler.create_connection
+
+            @functools.wraps(original_create_connection)
+            def sentry_create_connection(self, alias):
+                # type: (CacheHandler, str) -> Any
+                cache = original_create_connection(self, alias)
+
+                integration = Hub.current.get_integration(DjangoIntegration)
+                if integration and integration.cache_spans:
+                    _patch_cache(cache)
+
+                return cache
+
+            CacheHandler.create_connection = sentry_create_connection
+            CacheHandler._sentry_patched = True
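
Note on the caching instrumentation above: spans are only created when cache spans are enabled on the integration. A minimal usage sketch, assuming `DjangoIntegration` exposes the `cache_spans` flag that `_instrument_call` checks for (the DSN is a placeholder):

    import sentry_sdk
    from sentry_sdk.integrations.django import DjangoIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        traces_sample_rate=1.0,
        integrations=[DjangoIntegration(cache_spans=True)],
    )

    # Inside a transaction, `django.core.cache.cache.get("my-key")` now
    # produces a span with op=OP.CACHE_GET_ITEM and description "get my-key",
    # carrying CACHE_HIT and (on hits) CACHE_ITEM_SIZE data.
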
diff --git a/sentry_sdk/integrations/django/middleware.py b/sentry_sdk/integrations/django/middleware.py
index c9001cdbf4..aa8023dbd4 100644
--- a/sentry_sdk/integrations/django/middleware.py
+++ b/sentry_sdk/integrations/django/middleware.py
@@ -6,14 +6,15 @@
 
 from sentry_sdk import Hub
 from sentry_sdk._functools import wraps
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import OP
 from sentry_sdk.utils import (
     ContextVar,
     transaction_from_function,
     capture_internal_exceptions,
 )
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Callable
     from typing import Optional
@@ -88,7 +89,7 @@ def _check_middleware_span(old_method):
             description = "{}.{}".format(description, function_basename)
 
         middleware_span = hub.start_span(
-            op="django.middleware", description=description
+            op=OP.MIDDLEWARE_DJANGO, description=description
         )
         middleware_span.set_tag("django.function_name", function_name)
         middleware_span.set_tag("django.middleware_name", middleware_name)
@@ -125,7 +126,6 @@ def sentry_wrapped_method(*args, **kwargs):
     class SentryWrappingMiddleware(
         _asgi_middleware_mixin_factory(_check_middleware_span)  # type: ignore
     ):
-
         async_capable = getattr(middleware, "async_capable", False)
 
         def __init__(self, get_response=None, *args, **kwargs):
diff --git a/sentry_sdk/integrations/django/signals_handlers.py b/sentry_sdk/integrations/django/signals_handlers.py
new file mode 100644
index 0000000000..3d1aadab1f
--- /dev/null
+++ b/sentry_sdk/integrations/django/signals_handlers.py
@@ -0,0 +1,94 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+from django.dispatch import Signal
+
+from sentry_sdk import Hub
+from sentry_sdk._functools import wraps
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations.django import DJANGO_VERSION
+
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+    from typing import Any, Union
+
+
+def _get_receiver_name(receiver):
+    # type: (Callable[..., Any]) -> str
+    name = ""
+
+    if hasattr(receiver, "__qualname__"):
+        name = receiver.__qualname__
+    elif hasattr(receiver, "__name__"):  # Python 2.7 has no __qualname__
+        name = receiver.__name__
+    elif hasattr(
+        receiver, "func"
+    ):  # certain functions (like partials) don't have a name
+        if hasattr(receiver.func, "__name__"):
+            name = "partial()"
+
+    if (
+        name == ""
+    ):  # In case nothing was found, return the string representation (this is the slowest case)
+        return str(receiver)
+
+    if hasattr(receiver, "__module__"):  # prepend with module, if there is one
+        name = receiver.__module__ + "." + name
+
+    return name
+
+
+def patch_signals():
+    # type: () -> None
+    """
+    Patch django signal receivers to create a span.
+
+    This only wraps sync receivers. Django>=5.0 introduced async receivers, but
+    since we don't create transactions for ASGI Django, we don't wrap them.
+    """
+    from sentry_sdk.integrations.django import DjangoIntegration
+
+    old_live_receivers = Signal._live_receivers
+
+    def _sentry_live_receivers(self, sender):
+        # type: (Signal, Any) -> Union[tuple[list[Callable[..., Any]], list[Callable[..., Any]]], list[Callable[..., Any]]]
+        hub = Hub.current
+
+        if DJANGO_VERSION >= (5, 0):
+            sync_receivers, async_receivers = old_live_receivers(self, sender)
+        else:
+            sync_receivers = old_live_receivers(self, sender)
+            async_receivers = []
+
+        def sentry_sync_receiver_wrapper(receiver):
+            # type: (Callable[..., Any]) -> Callable[..., Any]
+            @wraps(receiver)
+            def wrapper(*args, **kwargs):
+                # type: (Any, Any) -> Any
+                signal_name = _get_receiver_name(receiver)
+                with hub.start_span(
+                    op=OP.EVENT_DJANGO,
+                    description=signal_name,
+                ) as span:
+                    span.set_data("signal", signal_name)
+                    return receiver(*args, **kwargs)
+
+            return wrapper
+
+        integration = hub.get_integration(DjangoIntegration)
+        if (
+            integration
+            and integration.signals_spans
+            and self not in integration.signals_denylist
+        ):
+            for idx, receiver in enumerate(sync_receivers):
+                sync_receivers[idx] = sentry_sync_receiver_wrapper(receiver)
+
+        if DJANGO_VERSION >= (5, 0):
+            return sync_receivers, async_receivers
+        else:
+            return sync_receivers
+
+    Signal._live_receivers = _sentry_live_receivers
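
For illustration, a sketch of what the signal wrapper above produces, assuming the `signals_spans` and `signals_denylist` options it checks for exist on `DjangoIntegration`:

    from django.core.signals import request_finished
    from django.dispatch import receiver

    @receiver(request_finished)
    def on_request_finished(sender, **kwargs):
        pass

    # While a transaction is active, dispatching request_finished yields a
    # span with op=OP.EVENT_DJANGO whose description is the receiver name
    # resolved by _get_receiver_name, e.g. "myapp.signals.on_request_finished"
    # (the module path is prepended when available).
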
diff --git a/sentry_sdk/integrations/django/templates.py b/sentry_sdk/integrations/django/templates.py
index 2ff9d1b184..e6c83b5bf2 100644
--- a/sentry_sdk/integrations/django/templates.py
+++ b/sentry_sdk/integrations/django/templates.py
@@ -1,10 +1,12 @@
 from django.template import TemplateSyntaxError
+from django.utils.safestring import mark_safe
 from django import VERSION as DJANGO_VERSION
 
 from sentry_sdk import _functools, Hub
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import OP
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Dict
     from typing import Optional
@@ -66,7 +68,7 @@ def rendered_content(self):
             return real_rendered_content.fget(self)
 
         with hub.start_span(
-            op="django.template.render",
+            op=OP.TEMPLATE_RENDER,
             description=_get_template_name_description(self.template_name),
         ) as span:
             span.set_data("context", self.context_data)
@@ -87,8 +89,13 @@ def render(request, template_name, context=None, *args, **kwargs):
         if hub.get_integration(DjangoIntegration) is None:
             return real_render(request, template_name, context, *args, **kwargs)
 
+        # Inject trace meta tags into template context
+        context = context or {}
+        if "sentry_trace_meta" not in context:
+            context["sentry_trace_meta"] = mark_safe(hub.trace_propagation_meta())
+
         with hub.start_span(
-            op="django.template.render",
+            op=OP.TEMPLATE_RENDER,
             description=_get_template_name_description(template_name),
         ) as span:
             span.set_data("context", context)
diff --git a/sentry_sdk/integrations/django/transactions.py b/sentry_sdk/integrations/django/transactions.py
index b0f88e916a..b2e200b832 100644
--- a/sentry_sdk/integrations/django/transactions.py
+++ b/sentry_sdk/integrations/django/transactions.py
@@ -1,15 +1,17 @@
 """
-Copied from raven-python. Used for
-`DjangoIntegration(transaction_fron="raven_legacy")`.
+Copied from raven-python.
+
+Despite being called "legacy" in some places this resolver is very much still
+in use.
 """
 
 from __future__ import absolute_import
 
 import re
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from django.urls.resolvers import URLResolver
     from typing import Dict
     from typing import List
@@ -19,6 +21,13 @@
     from typing import Union
     from re import Pattern
 
+from django import VERSION as DJANGO_VERSION
+
+if DJANGO_VERSION >= (2, 0):
+    from django.urls.resolvers import RoutePattern
+else:
+    RoutePattern = None
+
 try:
     from django.urls import get_resolver
 except ImportError:
@@ -36,6 +45,9 @@ def get_regex(resolver_or_pattern):
 
 
 class RavenResolver(object):
+    _new_style_group_matcher = re.compile(
+        r"<(?:([^>:]+):)?([^>]+)>"
+    )  # https://github.com/django/django/blob/21382e2743d06efbf5623e7c9b6dccf2a325669b/django/urls/resolvers.py#L245-L247
     _optional_group_matcher = re.compile(r"\(\?\:([^\)]+)\)")
     _named_group_matcher = re.compile(r"\(\?P<(\w+)>[^\)]+\)+")
     _non_named_group_matcher = re.compile(r"\([^\)]+\)")
@@ -46,7 +58,7 @@ class RavenResolver(object):
     _cache = {}  # type: Dict[URLPattern, str]
 
     def _simplify(self, pattern):
-        # type: (str) -> str
+        # type: (Union[URLPattern, URLResolver]) -> str
         r"""
         Clean up urlpattern regexes into something readable by humans:
 
@@ -56,11 +68,24 @@ def _simplify(self, pattern):
         To:
         > "{sport_slug}/athletes/{athlete_slug}/"
         """
+        # "new-style" path patterns can be parsed directly without turning them
+        # into regexes first
+        if (
+            RoutePattern is not None
+            and hasattr(pattern, "pattern")
+            and isinstance(pattern.pattern, RoutePattern)
+        ):
+            return self._new_style_group_matcher.sub(
+                lambda m: "{%s}" % m.group(2), pattern.pattern._route
+            )
+
+        result = get_regex(pattern).pattern
+
         # remove optional params
         # TODO(dcramer): it'd be nice to change these into [%s] but it currently
         # conflicts with the other rules because we're doing regexp matches
         # rather than parsing tokens
-        result = self._optional_group_matcher.sub(lambda m: "%s" % m.group(1), pattern)
+        result = self._optional_group_matcher.sub(lambda m: "%s" % m.group(1), result)
 
         # handle named groups first
         result = self._named_group_matcher.sub(lambda m: "{%s}" % m.group(1), result)
@@ -113,8 +138,8 @@ def _resolve(self, resolver, path, parents=None):
             except KeyError:
                 pass
 
-            prefix = "".join(self._simplify(get_regex(p).pattern) for p in parents)
-            result = prefix + self._simplify(get_regex(pattern).pattern)
+            prefix = "".join(self._simplify(p) for p in parents)
+            result = prefix + self._simplify(pattern)
             if not result.startswith("/"):
                 result = "/" + result
             self._cache[pattern] = result
@@ -127,10 +152,10 @@ def resolve(
         path,  # type: str
         urlconf=None,  # type: Union[None, Tuple[URLPattern, URLPattern, URLResolver], Tuple[URLPattern]]
     ):
-        # type: (...) -> str
+        # type: (...) -> Optional[str]
         resolver = get_resolver(urlconf)
         match = self._resolve(resolver, path)
-        return match or path
+        return match
 
 
 LEGACY_RESOLVER = RavenResolver()
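
The `_new_style_group_matcher` addition lets the resolver handle Django 2.0+ `path()` routes directly from the route string instead of going through the compiled regex. A quick self-contained illustration of the substitution used above:

    import re

    _new_style_group_matcher = re.compile(r"<(?:([^>:]+):)?([^>]+)>")

    route = "polls/<int:question_id>/results/"
    print(_new_style_group_matcher.sub(lambda m: "{%s}" % m.group(2), route))
    # -> "polls/{question_id}/results/"
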
diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py
index 51f1abc8fb..d918afad66 100644
--- a/sentry_sdk/integrations/django/views.py
+++ b/sentry_sdk/integrations/django/views.py
@@ -1,8 +1,9 @@
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk import _functools
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
 
 
@@ -22,9 +23,19 @@ def patch_views():
     # type: () -> None
 
     from django.core.handlers.base import BaseHandler
+    from django.template.response import SimpleTemplateResponse
     from sentry_sdk.integrations.django import DjangoIntegration
 
     old_make_view_atomic = BaseHandler.make_view_atomic
+    old_render = SimpleTemplateResponse.render
+
+    def sentry_patched_render(self):
+        # type: (SimpleTemplateResponse) -> Any
+        hub = Hub.current
+        with hub.start_span(
+            op=OP.VIEW_RESPONSE_RENDER, description="serialize response"
+        ):
+            return old_render(self)
 
     @_functools.wraps(old_make_view_atomic)
     def sentry_patched_make_view_atomic(self, *args, **kwargs):
@@ -36,14 +47,13 @@ def sentry_patched_make_view_atomic(self, *args, **kwargs):
 
         hub = Hub.current
         integration = hub.get_integration(DjangoIntegration)
-
         if integration is not None and integration.middleware_spans:
-
-            if (
+            is_async_view = (
                 iscoroutinefunction is not None
                 and wrap_async_view is not None
                 and iscoroutinefunction(callback)
-            ):
+            )
+            if is_async_view:
                 sentry_wrapped_callback = wrap_async_view(hub, callback)
             else:
                 sentry_wrapped_callback = _wrap_sync_view(hub, callback)
@@ -53,6 +63,7 @@ def sentry_patched_make_view_atomic(self, *args, **kwargs):
 
         return sentry_wrapped_callback
 
+    SimpleTemplateResponse.render = sentry_patched_render
     BaseHandler.make_view_atomic = sentry_patched_make_view_atomic
 
 
@@ -61,9 +72,15 @@ def _wrap_sync_view(hub, callback):
     @_functools.wraps(callback)
     def sentry_wrapped_callback(request, *args, **kwargs):
         # type: (Any, *Any, **Any) -> Any
-        with hub.start_span(
-            op="django.view", description=request.resolver_match.view_name
-        ):
-            return callback(request, *args, **kwargs)
+        with hub.configure_scope() as sentry_scope:
+            # set the active thread id to the handler thread for sync views
+            # this isn't necessary for async views since that runs on main
+            if sentry_scope.profile is not None:
+                sentry_scope.profile.update_active_thread_id()
+
+            with hub.start_span(
+                op=OP.VIEW_RENDER, description=request.resolver_match.view_name
+            ):
+                return callback(request, *args, **kwargs)
 
     return sentry_wrapped_callback
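
Taken together with the middleware patch above, one Django request now produces roughly the span tree below (transaction and view names are illustrative):

    transaction "GET /polls/{question_id}/"
      |-- op=OP.MIDDLEWARE_DJANGO       description="..."
      |-- op=OP.VIEW_RENDER             description="polls.views.detail"
      `-- op=OP.VIEW_RESPONSE_RENDER    description="serialize response"
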
diff --git a/sentry_sdk/integrations/excepthook.py b/sentry_sdk/integrations/excepthook.py
index 1f16ff0b06..514e082b31 100644
--- a/sentry_sdk/integrations/excepthook.py
+++ b/sentry_sdk/integrations/excepthook.py
@@ -4,9 +4,9 @@
 from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
 from sentry_sdk.integrations import Integration
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Callable
     from typing import Any
     from typing import Type
diff --git a/sentry_sdk/integrations/executing.py b/sentry_sdk/integrations/executing.py
index 4fbf729bb1..e8636b61f8 100644
--- a/sentry_sdk/integrations/executing.py
+++ b/sentry_sdk/integrations/executing.py
@@ -1,12 +1,12 @@
 from __future__ import absolute_import
 
 from sentry_sdk import Hub
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.scope import add_global_event_processor
 from sentry_sdk.utils import walk_exception_chain, iter_stacks
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Optional
 
     from sentry_sdk._types import Event, Hint
diff --git a/sentry_sdk/integrations/falcon.py b/sentry_sdk/integrations/falcon.py
index 8129fab46b..d5e2480485 100644
--- a/sentry_sdk/integrations/falcon.py
+++ b/sentry_sdk/integrations/falcon.py
@@ -4,25 +4,45 @@
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.integrations._wsgi_common import RequestExtractor
 from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
-from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.tracing import SOURCE_FOR_STYLE
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    parse_version,
+)
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Dict
     from typing import Optional
 
-    from sentry_sdk._types import EventProcessor
+    from sentry_sdk._types import Event, EventProcessor
+
+# In Falcon 3.0 `falcon.api_helpers` is renamed to `falcon.app_helpers`
+# and `falcon.API` to `falcon.App`
 
 try:
     import falcon  # type: ignore
-    import falcon.api_helpers  # type: ignore
 
     from falcon import __version__ as FALCON_VERSION
 except ImportError:
     raise DidNotEnable("Falcon not installed")
 
+try:
+    import falcon.app_helpers  # type: ignore
+
+    falcon_helpers = falcon.app_helpers
+    falcon_app_class = falcon.App
+    FALCON3 = True
+except ImportError:
+    import falcon.api_helpers  # type: ignore
+
+    falcon_helpers = falcon.api_helpers
+    falcon_app_class = falcon.API
+    FALCON3 = False
+
 
 class FalconRequestExtractor(RequestExtractor):
     def env(self):
@@ -54,16 +74,27 @@ def raw_data(self):
         else:
             return None
 
-    def json(self):
-        # type: () -> Optional[Dict[str, Any]]
-        try:
-            return self.request.media
-        except falcon.errors.HTTPBadRequest:
-            # NOTE(jmagnusson): We return `falcon.Request._media` here because
-            # falcon 1.4 doesn't do proper type checking in
-            # `falcon.Request.media`. This has been fixed in 2.0.
-            # Relevant code: https://github.com/falconry/falcon/blob/1.4.1/falcon/request.py#L953
-            return self.request._media
+    if FALCON3:
+
+        def json(self):
+            # type: () -> Optional[Dict[str, Any]]
+            try:
+                return self.request.media
+            except falcon.errors.HTTPBadRequest:
+                return None
+
+    else:
+
+        def json(self):
+            # type: () -> Optional[Dict[str, Any]]
+            try:
+                return self.request.media
+            except falcon.errors.HTTPBadRequest:
+                # NOTE(jmagnusson): We return `falcon.Request._media` here because
+                # falcon 1.4 doesn't do proper type checking in
+                # `falcon.Request.media`. This has been fixed in 2.0.
+                # Relevant code: https://github.com/falconry/falcon/blob/1.4.1/falcon/request.py#L953
+                return self.request._media
 
 
 class SentryFalconMiddleware(object):
@@ -87,7 +118,7 @@ def process_request(self, req, resp, *args, **kwargs):
 class FalconIntegration(Integration):
     identifier = "falcon"
 
-    transaction_style = None
+    transaction_style = ""
 
     def __init__(self, transaction_style="uri_template"):
         # type: (str) -> None
@@ -101,9 +132,10 @@ def __init__(self, transaction_style="uri_template"):
     @staticmethod
     def setup_once():
         # type: () -> None
-        try:
-            version = tuple(map(int, FALCON_VERSION.split(".")))
-        except (ValueError, TypeError):
+
+        version = parse_version(FALCON_VERSION)
+
+        if version is None:
             raise DidNotEnable("Unparsable Falcon version: {}".format(FALCON_VERSION))
 
         if version < (1, 4):
@@ -116,7 +148,7 @@ def setup_once():
 
 def _patch_wsgi_app():
     # type: () -> None
-    original_wsgi_app = falcon.API.__call__
+    original_wsgi_app = falcon_app_class.__call__
 
     def sentry_patched_wsgi_app(self, env, start_response):
         # type: (falcon.API, Any, Any) -> Any
@@ -131,29 +163,37 @@ def sentry_patched_wsgi_app(self, env, start_response):
 
         return sentry_wrapped(env, start_response)
 
-    falcon.API.__call__ = sentry_patched_wsgi_app
+    falcon_app_class.__call__ = sentry_patched_wsgi_app
 
 
 def _patch_handle_exception():
     # type: () -> None
-    original_handle_exception = falcon.API._handle_exception
+    original_handle_exception = falcon_app_class._handle_exception
 
     def sentry_patched_handle_exception(self, *args):
         # type: (falcon.API, *Any) -> Any
         # NOTE(jmagnusson): falcon 2.0 changed falcon.API._handle_exception
         # method signature from `(ex, req, resp, params)` to
         # `(req, resp, ex, params)`
-        if isinstance(args[0], Exception):
-            ex = args[0]
-        else:
-            ex = args[2]
+        ex = response = None
+        with capture_internal_exceptions():
+            ex = next(argument for argument in args if isinstance(argument, Exception))
+            response = next(
+                argument for argument in args if isinstance(argument, falcon.Response)
+            )
 
         was_handled = original_handle_exception(self, *args)
 
+        if ex is None or response is None:
+            # Both ex and response should have a non-None value at this point; otherwise,
+            # there is an error with the SDK that will have been captured in the
+            # capture_internal_exceptions block above.
+            return was_handled
+
         hub = Hub.current
         integration = hub.get_integration(FalconIntegration)
 
-        if integration is not None and _exception_leads_to_http_5xx(ex):
+        if integration is not None and _exception_leads_to_http_5xx(ex, response):
             # If an integration is there, a client has to be there.
             client = hub.client  # type: Any
 
@@ -166,50 +206,77 @@ def sentry_patched_handle_exception(self, *args):
 
         return was_handled
 
-    falcon.API._handle_exception = sentry_patched_handle_exception
+    falcon_app_class._handle_exception = sentry_patched_handle_exception
 
 
 def _patch_prepare_middleware():
     # type: () -> None
-    original_prepare_middleware = falcon.api_helpers.prepare_middleware
+    original_prepare_middleware = falcon_helpers.prepare_middleware
 
     def sentry_patched_prepare_middleware(
-        middleware=None, independent_middleware=False
+        middleware=None, independent_middleware=False, asgi=False
     ):
-        # type: (Any, Any) -> Any
+        # type: (Any, Any, bool) -> Any
+        if asgi:
+            # We don't support ASGI Falcon apps, so we don't patch anything here
+            return original_prepare_middleware(middleware, independent_middleware, asgi)
+
         hub = Hub.current
         integration = hub.get_integration(FalconIntegration)
         if integration is not None:
             middleware = [SentryFalconMiddleware()] + (middleware or [])
+
+        # We intentionally omit the asgi argument here, since the default is False anyway,
+        # and this way, we remain backwards-compatible with pre-3.0.0 Falcon versions.
         return original_prepare_middleware(middleware, independent_middleware)
 
-    falcon.api_helpers.prepare_middleware = sentry_patched_prepare_middleware
+    falcon_helpers.prepare_middleware = sentry_patched_prepare_middleware
 
 
-def _exception_leads_to_http_5xx(ex):
-    # type: (Exception) -> bool
+def _exception_leads_to_http_5xx(ex, response):
+    # type: (Exception, falcon.Response) -> bool
     is_server_error = isinstance(ex, falcon.HTTPError) and (ex.status or "").startswith(
         "5"
     )
     is_unhandled_error = not isinstance(
         ex, (falcon.HTTPError, falcon.http_status.HTTPStatus)
     )
-    return is_server_error or is_unhandled_error
+
+    # We only check the HTTP status on Falcon 3 because in Falcon 2, the status on the
+    # response at the stage where we capture it is reported as 200, even though we would
+    # expect a 500. Since Falcon 2 is about 4 years old at the time of this change, we
+    # have decided to only perform this check on Falcon 3+, despite the risk that some
+    # handled errors might be reported to Sentry as unhandled on Falcon 2.
+    return (is_server_error or is_unhandled_error) and (
+        not FALCON3 or _has_http_5xx_status(response)
+    )
+
+
+def _has_http_5xx_status(response):
+    # type: (falcon.Response) -> bool
+    return response.status.startswith("5")
+
+
+def _set_transaction_name_and_source(event, transaction_style, request):
+    # type: (Event, str, falcon.Request) -> None
+    name_for_style = {
+        "uri_template": request.uri_template,
+        "path": request.path,
+    }
+    event["transaction"] = name_for_style[transaction_style]
+    event["transaction_info"] = {"source": SOURCE_FOR_STYLE[transaction_style]}
 
 
 def _make_request_event_processor(req, integration):
     # type: (falcon.Request, FalconIntegration) -> EventProcessor
 
-    def inner(event, hint):
-        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
-        if integration.transaction_style == "uri_template":
-            event["transaction"] = req.uri_template
-        elif integration.transaction_style == "path":
-            event["transaction"] = req.path
+    def event_processor(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        _set_transaction_name_and_source(event, integration.transaction_style, req)
 
         with capture_internal_exceptions():
             FalconRequestExtractor(req).extract_into_event(event)
 
         return event
 
-    return inner
+    return event_processor
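
The `uri_template`/`path` naming handled by `_set_transaction_name_and_source` above maps onto the integration like this (a sketch; the DSN is a placeholder):

    import sentry_sdk
    from sentry_sdk.integrations.falcon import FalconIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
        integrations=[FalconIntegration(transaction_style="uri_template")],
    )

    # For a request to /things/42 served by the route "/things/{thing_id}":
    #   "uri_template" -> event["transaction"] == "/things/{thing_id}"
    #   "path"         -> event["transaction"] == "/things/42"
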
diff --git a/sentry_sdk/integrations/fastapi.py b/sentry_sdk/integrations/fastapi.py
new file mode 100644
index 0000000000..33a5591cc4
--- /dev/null
+++ b/sentry_sdk/integrations/fastapi.py
@@ -0,0 +1,141 @@
+import asyncio
+from copy import deepcopy
+
+from sentry_sdk._functools import wraps
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable
+from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE
+from sentry_sdk.utils import transaction_from_function, logger
+
+if TYPE_CHECKING:
+    from typing import Any, Callable, Dict
+    from sentry_sdk.scope import Scope
+    from sentry_sdk._types import Event
+
+try:
+    from sentry_sdk.integrations.starlette import (
+        StarletteIntegration,
+        StarletteRequestExtractor,
+    )
+except DidNotEnable:
+    raise DidNotEnable("Starlette is not installed")
+
+try:
+    import fastapi  # type: ignore
+except ImportError:
+    raise DidNotEnable("FastAPI is not installed")
+
+
+_DEFAULT_TRANSACTION_NAME = "generic FastAPI request"
+
+
+class FastApiIntegration(StarletteIntegration):
+    identifier = "fastapi"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        patch_get_request_handler()
+
+
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (Scope, str, Any) -> None
+    name = ""
+
+    if transaction_style == "endpoint":
+        endpoint = request.scope.get("endpoint")
+        if endpoint:
+            name = transaction_from_function(endpoint) or ""
+
+    elif transaction_style == "url":
+        route = request.scope.get("route")
+        if route:
+            path = getattr(route, "path", None)
+            if path is not None:
+                name = path
+
+    if not name:
+        name = _DEFAULT_TRANSACTION_NAME
+        source = TRANSACTION_SOURCE_ROUTE
+    else:
+        source = SOURCE_FOR_STYLE[transaction_style]
+
+    scope.set_transaction_name(name, source=source)
+    logger.debug(
+        "[FastAPI] Set transaction name and source on scope: %s / %s", name, source
+    )
+
+
+def patch_get_request_handler():
+    # type: () -> None
+    old_get_request_handler = fastapi.routing.get_request_handler
+
+    def _sentry_get_request_handler(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        dependant = kwargs.get("dependant")
+        if (
+            dependant
+            and dependant.call is not None
+            and not asyncio.iscoroutinefunction(dependant.call)
+        ):
+            old_call = dependant.call
+
+            @wraps(old_call)
+            def _sentry_call(*args, **kwargs):
+                # type: (*Any, **Any) -> Any
+                hub = Hub.current
+                with hub.configure_scope() as sentry_scope:
+                    if sentry_scope.profile is not None:
+                        sentry_scope.profile.update_active_thread_id()
+                    return old_call(*args, **kwargs)
+
+            dependant.call = _sentry_call
+
+        old_app = old_get_request_handler(*args, **kwargs)
+
+        async def _sentry_app(*args, **kwargs):
+            # type: (*Any, **Any) -> Any
+            hub = Hub.current
+            integration = hub.get_integration(FastApiIntegration)
+            if integration is None:
+                return await old_app(*args, **kwargs)
+
+            with hub.configure_scope() as sentry_scope:
+                request = args[0]
+
+                _set_transaction_name_and_source(
+                    sentry_scope, integration.transaction_style, request
+                )
+
+                extractor = StarletteRequestExtractor(request)
+                info = await extractor.extract_request_info()
+
+                def _make_request_event_processor(req, integration):
+                    # type: (Any, Any) -> Callable[[Event, Dict[str, Any]], Event]
+                    def event_processor(event, hint):
+                        # type: (Event, Dict[str, Any]) -> Event
+
+                        # Extract information from request
+                        request_info = event.get("request", {})
+                        if info:
+                            if "cookies" in info and _should_send_default_pii():
+                                request_info["cookies"] = info["cookies"]
+                            if "data" in info:
+                                request_info["data"] = info["data"]
+                        event["request"] = deepcopy(request_info)
+
+                        return event
+
+                    return event_processor
+
+                sentry_scope._name = FastApiIntegration.identifier
+                sentry_scope.add_event_processor(
+                    _make_request_event_processor(request, integration)
+                )
+
+            return await old_app(*args, **kwargs)
+
+        return _sentry_app
+
+    fastapi.routing.get_request_handler = _sentry_get_request_handler
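
A sketch of how the transaction naming above resolves for a typical route (the fallback behavior follows `_set_transaction_name_and_source`):

    from fastapi import FastAPI

    app = FastAPI()

    @app.get("/items/{item_id}")
    async def read_item(item_id: int):
        return {"item_id": item_id}

    # transaction_style "endpoint" -> name derived from the endpoint function
    #     via transaction_from_function (roughly "read_item", module-qualified)
    # transaction_style "url"      -> the route template "/items/{item_id}"
    # If neither yields a name, it falls back to "generic FastAPI request"
    # with source TRANSACTION_SOURCE_ROUTE.
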
diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py
index 5aade50a94..f0bc3d7750 100644
--- a/sentry_sdk/integrations/flask.py
+++ b/sentry_sdk/integrations/flask.py
@@ -1,23 +1,24 @@
 from __future__ import absolute_import
 
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.hub import Hub, _should_send_default_pii
-from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
-from sentry_sdk.integrations import Integration, DidNotEnable
-from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.integrations import DidNotEnable, Integration
 from sentry_sdk.integrations._wsgi_common import RequestExtractor
-
-from sentry_sdk._types import MYPY
-
-if MYPY:
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.scope import Scope
+from sentry_sdk.tracing import SOURCE_FOR_STYLE
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    package_version,
+)
+
+if TYPE_CHECKING:
+    from typing import Any, Callable, Dict, Union
+
+    from sentry_sdk._types import Event, EventProcessor
     from sentry_sdk.integrations.wsgi import _ScopedResponse
-    from typing import Any
-    from typing import Dict
-    from werkzeug.datastructures import ImmutableMultiDict
-    from werkzeug.datastructures import FileStorage
-    from typing import Union
-    from typing import Callable
-
-    from sentry_sdk._types import EventProcessor
+    from werkzeug.datastructures import FileStorage, ImmutableMultiDict
 
 
 try:
@@ -26,19 +27,14 @@
     flask_login = None
 
 try:
-    from flask import (  # type: ignore
-        Markup,
-        Request,
-        Flask,
-        _request_ctx_stack,
-        _app_ctx_stack,
-        __version__ as FLASK_VERSION,
-    )
+    from flask import Flask, Request  # type: ignore
+    from flask import request as flask_request
     from flask.signals import (
         before_render_template,
         got_request_exception,
         request_started,
     )
+    from markupsafe import Markup
 except ImportError:
     raise DidNotEnable("Flask is not installed")
 
@@ -53,7 +49,7 @@
 class FlaskIntegration(Integration):
     identifier = "flask"
 
-    transaction_style = None
+    transaction_style = ""
 
     def __init__(self, transaction_style="endpoint"):
         # type: (str) -> None
@@ -67,17 +63,13 @@ def __init__(self, transaction_style="endpoint"):
     @staticmethod
     def setup_once():
         # type: () -> None
+        version = package_version("flask")
 
-        # This version parsing is absolutely naive but the alternative is to
-        # import pkg_resources which slows down the SDK a lot.
-        try:
-            version = tuple(map(int, FLASK_VERSION.split(".")[:3]))
-        except (ValueError, TypeError):
-            # It's probably a release candidate, we assume it's fine.
-            pass
-        else:
-            if version < (0, 10):
-                raise DidNotEnable("Flask 0.10 or newer is required.")
+        if version is None:
+            raise DidNotEnable("Unparsable Flask version.")
+
+        if version < (0, 10):
+            raise DidNotEnable("Flask 0.10 or newer is required.")
 
         before_render_template.connect(_add_sentry_trace)
         request_started.connect(_request_started)
@@ -99,42 +91,42 @@ def sentry_patched_wsgi_app(self, environ, start_response):
 
 def _add_sentry_trace(sender, template, context, **extra):
     # type: (Flask, Any, Dict[str, Any], **Any) -> None
-
     if "sentry_trace" in context:
         return
 
-    sentry_span = Hub.current.scope.span
-    context["sentry_trace"] = (
-        Markup(
-            '<meta name="sentry-trace" content="%s" />'
-            % (sentry_span.to_traceparent(),)
+    hub = Hub.current
+    trace_meta = Markup(hub.trace_propagation_meta())
+    context["sentry_trace"] = trace_meta  # for backwards compatibility
+    context["sentry_trace_meta"] = trace_meta
+
+
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (Scope, str, Request) -> None
+    try:
+        name_for_style = {
+            "url": request.url_rule.rule,
+            "endpoint": request.url_rule.endpoint,
+        }
+        scope.set_transaction_name(
+            name_for_style[transaction_style],
+            source=SOURCE_FOR_STYLE[transaction_style],
         )
-        if sentry_span
-        else ""
-    )
+    except Exception:
+        pass
 
 
-def _request_started(sender, **kwargs):
+def _request_started(app, **kwargs):
     # type: (Flask, **Any) -> None
     hub = Hub.current
     integration = hub.get_integration(FlaskIntegration)
     if integration is None:
         return
 
-    app = _app_ctx_stack.top.app
     with hub.configure_scope() as scope:
-        request = _request_ctx_stack.top.request
-
-        # Set the transaction name here, but rely on WSGI middleware to actually
-        # start the transaction
-        try:
-            if integration.transaction_style == "endpoint":
-                scope.transaction = request.url_rule.endpoint
-            elif integration.transaction_style == "url":
-                scope.transaction = request.url_rule.rule
-        except Exception:
-            pass
-
+        # Set the transaction name and source here,
+        # but rely on WSGI middleware to actually start the transaction
+        request = flask_request._get_current_object()
+        _set_transaction_name_and_source(scope, integration.transaction_style, request)
         evt_processor = _make_request_event_processor(app, request, integration)
         scope.add_event_processor(evt_processor)
 
@@ -169,7 +161,7 @@ def is_json(self):
 
     def json(self):
         # type: () -> Any
-        return self.request.get_json()
+        return self.request.get_json(silent=True)
 
     def size_of_file(self, file):
         # type: (FileStorage) -> int
@@ -180,7 +172,7 @@ def _make_request_event_processor(app, request, integration):
     # type: (Flask, Callable[[], Request], FlaskIntegration) -> EventProcessor
 
     def inner(event, hint):
-        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+        # type: (Event, dict[str, Any]) -> Event
 
         # if the request is gone we are fine not logging the data from
         # it.  This might happen if the processor is pushed away to
@@ -219,7 +211,7 @@ def _capture_exception(sender, exception, **kwargs):
 
 
 def _add_user_to_event(event):
-    # type: (Dict[str, Any]) -> None
+    # type: (Event) -> None
     if flask_login is None:
         return
 
@@ -257,6 +249,5 @@ def _add_user_to_event(event):
 
         try:
             user_info.setdefault("username", user.username)
-            user_info.setdefault("username", user.email)
         except Exception:
             pass
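
The `sentry_trace_meta` context key set in `_add_sentry_trace` above can be emitted directly from a template; a minimal sketch (placeholder DSN):

    import sentry_sdk
    from sentry_sdk import Hub

    sentry_sdk.init(dsn="https://examplePublicKey@o0.ingest.sentry.io/0")

    meta = Hub.current.trace_propagation_meta()
    # `meta` holds the HTML meta tag(s) carrying the current trace
    # information; in a Jinja template the same markup is available as
    # {{ sentry_trace_meta }}, with {{ sentry_trace }} kept for backwards
    # compatibility.
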
diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py
index 118970e9d8..819c7ac93d 100644
--- a/sentry_sdk/integrations/gcp.py
+++ b/sentry_sdk/integrations/gcp.py
@@ -1,10 +1,13 @@
-from datetime import datetime, timedelta
-from os import environ
 import sys
+from copy import deepcopy
+from datetime import timedelta
+from os import environ
 
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub, _should_send_default_pii
-from sentry_sdk.tracing import Transaction
-from sentry_sdk._compat import reraise
+from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT
+from sentry_sdk._compat import datetime_utcnow, duration_in_milliseconds, reraise
 from sentry_sdk.utils import (
     AnnotatedValue,
     capture_internal_exceptions,
@@ -15,13 +18,14 @@
 from sentry_sdk.integrations import Integration
 from sentry_sdk.integrations._wsgi_common import _filter_headers
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
 # Constants
 TIMEOUT_WARNING_BUFFER = 1.5  # Buffer time required to send timeout warning to Sentry
 MILLIS_TO_SECONDS = 1000.0
 
-if MYPY:
+if TYPE_CHECKING:
+    from datetime import datetime
     from typing import Any
     from typing import TypeVar
     from typing import Callable
@@ -54,7 +58,7 @@ def sentry_func(functionhandler, gcp_event, *args, **kwargs):
 
         configured_time = int(configured_time)
 
-        initial_time = datetime.utcnow()
+        initial_time = datetime_utcnow()
 
         with hub.push_scope() as scope:
             with capture_internal_exceptions():
@@ -80,8 +84,12 @@ def sentry_func(functionhandler, gcp_event, *args, **kwargs):
             headers = {}
             if hasattr(gcp_event, "headers"):
                 headers = gcp_event.headers
-            transaction = Transaction.continue_from_headers(
-                headers, op="serverless.function", name=environ.get("FUNCTION_NAME", "")
+
+            transaction = continue_trace(
+                headers,
+                op=OP.FUNCTION_GCP,
+                name=environ.get("FUNCTION_NAME", ""),
+                source=TRANSACTION_SOURCE_COMPONENT,
             )
             sampling_context = {
                 "gcp_env": {
@@ -147,10 +155,10 @@ def _make_request_event_processor(gcp_event, configured_timeout, initial_time):
     def event_processor(event, hint):
         # type: (Event, Hint) -> Optional[Event]
 
-        final_time = datetime.utcnow()
+        final_time = datetime_utcnow()
         time_diff = final_time - initial_time
 
-        execution_duration_in_millis = time_diff.microseconds / MILLIS_TO_SECONDS
+        execution_duration_in_millis = duration_in_milliseconds(time_diff)
 
         extra = event.setdefault("extra", {})
         extra["google cloud functions"] = {
@@ -187,9 +195,9 @@ def event_processor(event, hint):
             if hasattr(gcp_event, "data"):
                 # Unfortunately couldn't find a way to get structured body from GCP
                 # event. Meaning every body is unstructured to us.
-                request["data"] = AnnotatedValue("", {"rem": [["!raw", "x", 0, 0]]})
+                request["data"] = AnnotatedValue.removed_because_raw_data()
 
-        event["request"] = request
+        event["request"] = deepcopy(request)
 
         return event
 
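The move from `time_diff.microseconds` to `duration_in_milliseconds` fixes a real measurement bug: `timedelta.microseconds` only holds the sub-second remainder, so any whole seconds were silently dropped. A sketch of the corrected arithmetic, assuming the helper is equivalent to `total_seconds() * 1000`:

    from datetime import timedelta

    time_diff = timedelta(seconds=2, microseconds=500_000)

    old = time_diff.microseconds / 1000.0     # 500.0 -- loses the 2 whole seconds
    new = time_diff.total_seconds() * 1000.0  # 2500.0 -- the full duration in ms
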
diff --git a/sentry_sdk/integrations/gnu_backtrace.py b/sentry_sdk/integrations/gnu_backtrace.py
index e0ec110547..f8321a6cd7 100644
--- a/sentry_sdk/integrations/gnu_backtrace.py
+++ b/sentry_sdk/integrations/gnu_backtrace.py
@@ -5,11 +5,11 @@
 from sentry_sdk.scope import add_global_event_processor
 from sentry_sdk.utils import capture_internal_exceptions
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
-    from typing import Dict
+    from sentry_sdk._types import Event
 
 
 MODULE_RE = r"[a-zA-Z0-9/._:\\-]+"
@@ -42,13 +42,13 @@ def setup_once():
         # type: () -> None
         @add_global_event_processor
         def process_gnu_backtrace(event, hint):
-            # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+            # type: (Event, dict[str, Any]) -> Event
             with capture_internal_exceptions():
                 return _process_gnu_backtrace(event, hint)
 
 
 def _process_gnu_backtrace(event, hint):
-    # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+    # type: (Event, dict[str, Any]) -> Event
     if Hub.current.get_integration(GnuBacktraceIntegration) is None:
         return event
 
diff --git a/sentry_sdk/integrations/gql.py b/sentry_sdk/integrations/gql.py
new file mode 100644
index 0000000000..9db6632a4a
--- /dev/null
+++ b/sentry_sdk/integrations/gql.py
@@ -0,0 +1,142 @@
+from sentry_sdk.utils import event_from_exception, parse_version
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+
+try:
+    import gql  # type: ignore[import-not-found]
+    from graphql import print_ast, get_operation_ast, DocumentNode, VariableDefinitionNode  # type: ignore[import-not-found]
+    from gql.transport import Transport, AsyncTransport  # type: ignore[import-not-found]
+    from gql.transport.exceptions import TransportQueryError  # type: ignore[import-not-found]
+except ImportError:
+    raise DidNotEnable("gql is not installed")
+
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Dict, Tuple, Union
+    from sentry_sdk._types import Event, EventProcessor
+
+    EventDataType = Dict[str, Union[str, Tuple[VariableDefinitionNode, ...]]]
+
+MIN_GQL_VERSION = (3, 4, 1)
+
+
+class GQLIntegration(Integration):
+    identifier = "gql"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        gql_version = parse_version(gql.__version__)
+        if gql_version is None or gql_version < MIN_GQL_VERSION:
+            raise DidNotEnable(
+                "GQLIntegration is only supported for GQL versions %s and above."
+                % ".".join(str(num) for num in MIN_GQL_VERSION)
+            )
+        _patch_execute()
+
+
+def _data_from_document(document):
+    # type: (DocumentNode) -> EventDataType
+    try:
+        operation_ast = get_operation_ast(document)
+        data = {"query": print_ast(document)}  # type: EventDataType
+
+        if operation_ast is not None:
+            data["variables"] = operation_ast.variable_definitions
+            if operation_ast.name is not None:
+                data["operationName"] = operation_ast.name.value
+
+        return data
+    except (AttributeError, TypeError):
+        return dict()
+
+
+def _transport_method(transport):
+    # type: (Union[Transport, AsyncTransport]) -> str
+    """
+    The RequestsHTTPTransport allows defining the HTTP method; all
+    other transports use POST.
+    """
+    try:
+        return transport.method
+    except AttributeError:
+        return "POST"
+
+
+def _request_info_from_transport(transport):
+    # type: (Union[Transport, AsyncTransport, None]) -> Dict[str, str]
+    if transport is None:
+        return {}
+
+    request_info = {
+        "method": _transport_method(transport),
+    }
+
+    try:
+        request_info["url"] = transport.url
+    except AttributeError:
+        pass
+
+    return request_info
+
+
+def _patch_execute():
+    # type: () -> None
+    real_execute = gql.Client.execute
+
+    def sentry_patched_execute(self, document, *args, **kwargs):
+        # type: (gql.Client, DocumentNode, Any, Any) -> Any
+        hub = Hub.current
+        if hub.get_integration(GQLIntegration) is None:
+            return real_execute(self, document, *args, **kwargs)
+
+        with hub.configure_scope() as scope:
+            scope.add_event_processor(_make_gql_event_processor(self, document))
+
+        try:
+            return real_execute(self, document, *args, **kwargs)
+        except TransportQueryError as e:
+            event, hint = event_from_exception(
+                e,
+                client_options=hub.client.options if hub.client is not None else None,
+                mechanism={"type": "gql", "handled": False},
+            )
+
+            hub.capture_event(event, hint)
+            raise e
+
+    gql.Client.execute = sentry_patched_execute
+
+
+def _make_gql_event_processor(client, document):
+    # type: (gql.Client, DocumentNode) -> EventProcessor
+    def processor(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        try:
+            errors = hint["exc_info"][1].errors
+        except (AttributeError, KeyError):
+            errors = None
+
+        request = event.setdefault("request", {})
+        request.update(
+            {
+                "api_target": "graphql",
+                **_request_info_from_transport(client.transport),
+            }
+        )
+
+        if _should_send_default_pii():
+            request["data"] = _data_from_document(document)
+            contexts = event.setdefault("contexts", {})
+            response = contexts.setdefault("response", {})
+            response.update(
+                {
+                    "data": {"errors": errors},
+                    "type": response,
+                }
+            )
+
+        return event
+
+    return processor
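
Since `setup_once` patches `gql.Client.execute` globally, no per-client setup is needed. A usage sketch (placeholder DSN and URL):

    import sentry_sdk
    from gql import Client, gql
    from gql.transport.requests import RequestsHTTPTransport
    from sentry_sdk.integrations.gql import GQLIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        integrations=[GQLIntegration()],
    )

    transport = RequestsHTTPTransport(url="https://example.com/graphql")
    client = Client(transport=transport)

    # A failing execution still raises TransportQueryError, and is also
    # reported to Sentry with the request info attached by the event
    # processor (plus the query document and variables when
    # send_default_pii is enabled).
    client.execute(gql("query { viewer { login } }"))
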
diff --git a/sentry_sdk/integrations/graphene.py b/sentry_sdk/integrations/graphene.py
new file mode 100644
index 0000000000..b9c3b26018
--- /dev/null
+++ b/sentry_sdk/integrations/graphene.py
@@ -0,0 +1,112 @@
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    package_version,
+)
+from sentry_sdk._types import TYPE_CHECKING
+
+
+try:
+    from graphene.types import schema as graphene_schema  # type: ignore
+except ImportError:
+    raise DidNotEnable("graphene is not installed")
+
+
+if TYPE_CHECKING:
+    from typing import Any, Dict, Union
+    from graphene.language.source import Source  # type: ignore
+    from graphql.execution import ExecutionResult  # type: ignore
+    from graphql.type import GraphQLSchema  # type: ignore
+    from sentry_sdk._types import Event
+
+
+class GrapheneIntegration(Integration):
+    identifier = "graphene"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        version = package_version("graphene")
+
+        if version is None:
+            raise DidNotEnable("Unparsable graphene version.")
+
+        if version < (3, 3):
+            raise DidNotEnable("graphene 3.3 or newer required.")
+
+        _patch_graphql()
+
+
+def _patch_graphql():
+    # type: () -> None
+    old_graphql_sync = graphene_schema.graphql_sync
+    old_graphql_async = graphene_schema.graphql
+
+    def _sentry_patched_graphql_sync(schema, source, *args, **kwargs):
+        # type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult
+        hub = Hub.current
+        integration = hub.get_integration(GrapheneIntegration)
+        if integration is None:
+            return old_graphql_sync(schema, source, *args, **kwargs)
+
+        with hub.configure_scope() as scope:
+            scope.add_event_processor(_event_processor)
+
+        result = old_graphql_sync(schema, source, *args, **kwargs)
+
+        with capture_internal_exceptions():
+            for error in result.errors or []:
+                event, hint = event_from_exception(
+                    error,
+                    client_options=hub.client.options if hub.client else None,
+                    mechanism={
+                        "type": integration.identifier,
+                        "handled": False,
+                    },
+                )
+                hub.capture_event(event, hint=hint)
+
+        return result
+
+    async def _sentry_patched_graphql_async(schema, source, *args, **kwargs):
+        # type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult
+        hub = Hub.current
+        integration = hub.get_integration(GrapheneIntegration)
+        if integration is None:
+            return await old_graphql_async(schema, source, *args, **kwargs)
+
+        with hub.configure_scope() as scope:
+            scope.add_event_processor(_event_processor)
+
+        result = await old_graphql_async(schema, source, *args, **kwargs)
+
+        with capture_internal_exceptions():
+            for error in result.errors or []:
+                event, hint = event_from_exception(
+                    error,
+                    client_options=hub.client.options if hub.client else None,
+                    mechanism={
+                        "type": integration.identifier,
+                        "handled": False,
+                    },
+                )
+                hub.capture_event(event, hint=hint)
+
+        return result
+
+    graphene_schema.graphql_sync = _sentry_patched_graphql_sync
+    graphene_schema.graphql = _sentry_patched_graphql_async
+
+
+def _event_processor(event, hint):
+    # type: (Event, Dict[str, Any]) -> Event
+    if _should_send_default_pii():
+        request_info = event.setdefault("request", {})
+        request_info["api_target"] = "graphql"
+
+    elif event.get("request", {}).get("data"):
+        del event["request"]["data"]
+
+    return event
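
Because graphene's `Schema.execute` funnels through the patched `graphene_schema.graphql_sync`, resolver errors are reported without any extra wiring. A usage sketch (placeholder DSN):

    import graphene
    import sentry_sdk
    from sentry_sdk.integrations.graphene import GrapheneIntegration

    sentry_sdk.init(
        dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
        integrations=[GrapheneIntegration()],
    )

    class Query(graphene.ObjectType):
        hello = graphene.String()

        def resolve_hello(root, info):
            raise RuntimeError("boom")

    schema = graphene.Schema(query=Query)
    result = schema.execute("{ hello }")
    # result.errors is non-empty, and each error is captured via
    # event_from_exception with mechanism type "graphene", handled=False.
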
diff --git a/sentry_sdk/integrations/grpc/__init__.py b/sentry_sdk/integrations/grpc/__init__.py
new file mode 100644
index 0000000000..2cb7c8192a
--- /dev/null
+++ b/sentry_sdk/integrations/grpc/__init__.py
@@ -0,0 +1,150 @@
+from functools import wraps
+
+import grpc
+from grpc import Channel, Server, intercept_channel
+from grpc.aio import Channel as AsyncChannel
+from grpc.aio import Server as AsyncServer
+
+from sentry_sdk.integrations import Integration
+from sentry_sdk._types import TYPE_CHECKING
+
+from .client import ClientInterceptor
+from .server import ServerInterceptor
+from .aio.server import ServerInterceptor as AsyncServerInterceptor
+from .aio.client import (
+    SentryUnaryUnaryClientInterceptor as AsyncUnaryUnaryClientInterceptor,
+)
+from .aio.client import (
+    SentryUnaryStreamClientInterceptor as AsyncUnaryStreamClientInterceptor,
+)
+
+from typing import Any, Optional, Sequence
+
+# Hack to get new Python features working in older versions
+# without introducing a hard dependency on `typing_extensions`
+# from: https://stackoverflow.com/a/71944042/300572
+if TYPE_CHECKING:
+    from typing import ParamSpec, Callable
+else:
+    # Fake ParamSpec
+    class ParamSpec:
+        def __init__(self, _):
+            self.args = None
+            self.kwargs = None
+
+    # Callable[anything] will return None
+    class _Callable:
+        def __getitem__(self, _):
+            return None
+
+    # Make instances
+    Callable = _Callable()
+
+P = ParamSpec("P")
+
+
+def _wrap_channel_sync(func: Callable[P, Channel]) -> Callable[P, Channel]:
+    "Wrapper for synchronous secure and insecure channel."
+
+    @wraps(func)
+    def patched_channel(*args: Any, **kwargs: Any) -> Channel:
+        channel = func(*args, **kwargs)
+        if not ClientInterceptor._is_intercepted:
+            ClientInterceptor._is_intercepted = True
+            return intercept_channel(channel, ClientInterceptor())
+        else:
+            return channel
+
+    return patched_channel
+
+
+def _wrap_intercept_channel(func: Callable[P, Channel]) -> Callable[P, Channel]:
+    @wraps(func)
+    def patched_intercept_channel(
+        channel: Channel, *interceptors: grpc.ServerInterceptor
+    ) -> Channel:
+        if ClientInterceptor._is_intercepted:
+            # Avoid stacking duplicate Sentry client interceptors when a
+            # channel is intercepted again; others pass through untouched.
+            interceptors = tuple(
+                interceptor
+                for interceptor in interceptors
+                if not isinstance(interceptor, ClientInterceptor)
+            )
+        return intercept_channel(channel, *interceptors)
+
+    return patched_intercept_channel  # type: ignore
+
+
+def _wrap_channel_async(func: Callable[P, AsyncChannel]) -> Callable[P, AsyncChannel]:
+    "Wrapper for asynchronous secure and insecure channel."
+
+    @wraps(func)
+    def patched_channel(
+        *args: P.args,
+        interceptors: Optional[Sequence[grpc.aio.ClientInterceptor]] = None,
+        **kwargs: P.kwargs,
+    ) -> Channel:
+        sentry_interceptors = [
+            AsyncUnaryUnaryClientInterceptor(),
+            AsyncUnaryStreamClientInterceptor(),
+        ]
+        interceptors = [*sentry_interceptors, *(interceptors or [])]
+        return func(*args, interceptors=interceptors, **kwargs)  # type: ignore
+
+    return patched_channel  # type: ignore
+
+
+def _wrap_sync_server(func: Callable[P, Server]) -> Callable[P, Server]:
+    """Wrapper for synchronous server."""
+
+    @wraps(func)
+    def patched_server(
+        *args: P.args,
+        interceptors: Optional[Sequence[grpc.ServerInterceptor]] = None,
+        **kwargs: P.kwargs,
+    ) -> Server:
+        interceptors = [
+            interceptor
+            for interceptor in interceptors or []
+            if not isinstance(interceptor, ServerInterceptor)
+        ]
+        server_interceptor = ServerInterceptor()
+        interceptors = [server_interceptor, *interceptors]
+        return func(*args, interceptors=interceptors, **kwargs)  # type: ignore
+
+    return patched_server  # type: ignore
+
+
+def _wrap_async_server(func: Callable[P, AsyncServer]) -> Callable[P, AsyncServer]:
+    """Wrapper for asynchronous server."""
+
+    @wraps(func)
+    def patched_aio_server(
+        *args: P.args,
+        interceptors: Optional[Sequence[grpc.ServerInterceptor]] = None,
+        **kwargs: P.kwargs,
+    ) -> AsyncServer:
+        server_interceptor = AsyncServerInterceptor()
+        interceptors = [server_interceptor, *(interceptors or [])]
+        return func(*args, interceptors=interceptors, **kwargs)  # type: ignore
+
+    return patched_aio_server  # type: ignore
+
+
+class GRPCIntegration(Integration):
+    identifier = "grpc"
+
+    @staticmethod
+    def setup_once() -> None:
+        import grpc
+
+        grpc.insecure_channel = _wrap_channel_sync(grpc.insecure_channel)
+        grpc.secure_channel = _wrap_channel_sync(grpc.secure_channel)
+        grpc.intercept_channel = _wrap_intercept_channel(grpc.intercept_channel)
+
+        grpc.aio.insecure_channel = _wrap_channel_async(grpc.aio.insecure_channel)
+        grpc.aio.secure_channel = _wrap_channel_async(grpc.aio.secure_channel)
+
+        grpc.server = _wrap_sync_server(grpc.server)
+        grpc.aio.server = _wrap_async_server(grpc.aio.server)
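+
+
+# Usage sketch (illustrative only; the DSN is a placeholder):
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.grpc import GRPCIntegration
+#
+#     sentry_sdk.init(dsn="...", integrations=[GRPCIntegration()])
+#
+# After init(), channels returned by grpc.insecure_channel()/secure_channel()
+# and servers from grpc.server() (plus their grpc.aio counterparts) are
+# instrumented automatically; no per-channel setup is needed.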
diff --git a/sentry_sdk/integrations/grpc/aio/__init__.py b/sentry_sdk/integrations/grpc/aio/__init__.py
new file mode 100644
index 0000000000..59bfd502e5
--- /dev/null
+++ b/sentry_sdk/integrations/grpc/aio/__init__.py
@@ -0,0 +1,2 @@
+from .server import ServerInterceptor  # noqa: F401
+from .client import ClientInterceptor  # noqa: F401
diff --git a/sentry_sdk/integrations/grpc/aio/client.py b/sentry_sdk/integrations/grpc/aio/client.py
new file mode 100644
index 0000000000..e0b36541f3
--- /dev/null
+++ b/sentry_sdk/integrations/grpc/aio/client.py
@@ -0,0 +1,91 @@
+from typing import Callable, Union, AsyncIterable, Any
+
+from grpc.aio import (
+    UnaryUnaryClientInterceptor,
+    UnaryStreamClientInterceptor,
+    ClientCallDetails,
+    UnaryUnaryCall,
+    UnaryStreamCall,
+)
+from google.protobuf.message import Message
+
+from sentry_sdk import Hub
+from sentry_sdk.consts import OP
+
+
+class ClientInterceptor:
+    @staticmethod
+    def _update_client_call_details_metadata_from_hub(
+        client_call_details: ClientCallDetails, hub: Hub
+    ) -> ClientCallDetails:
+        metadata = (
+            list(client_call_details.metadata) if client_call_details.metadata else []
+        )
+        for key, value in hub.iter_trace_propagation_headers():
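+            # typically the `sentry-trace` and `baggage` headers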
+            metadata.append((key, value))
+
+        client_call_details = ClientCallDetails(
+            method=client_call_details.method,
+            timeout=client_call_details.timeout,
+            metadata=metadata,
+            credentials=client_call_details.credentials,
+            wait_for_ready=client_call_details.wait_for_ready,
+        )
+
+        return client_call_details
+
+
+class SentryUnaryUnaryClientInterceptor(ClientInterceptor, UnaryUnaryClientInterceptor):  # type: ignore
+    async def intercept_unary_unary(
+        self,
+        continuation: Callable[[ClientCallDetails, Message], UnaryUnaryCall],
+        client_call_details: ClientCallDetails,
+        request: Message,
+    ) -> Union[UnaryUnaryCall, Message]:
+        hub = Hub.current
+        method = client_call_details.method
+
+        with hub.start_span(
+            op=OP.GRPC_CLIENT, description="unary unary call to %s" % method.decode()
+        ) as span:
+            span.set_data("type", "unary unary")
+            span.set_data("method", method)
+
+            client_call_details = self._update_client_call_details_metadata_from_hub(
+                client_call_details, hub
+            )
+
+            response = await continuation(client_call_details, request)
+            status_code = await response.code()
+            span.set_data("code", status_code.name)
+
+            return response
+
+
+class SentryUnaryStreamClientInterceptor(
+    ClientInterceptor, UnaryStreamClientInterceptor  # type: ignore
+):
+    async def intercept_unary_stream(
+        self,
+        continuation: Callable[[ClientCallDetails, Message], UnaryStreamCall],
+        client_call_details: ClientCallDetails,
+        request: Message,
+    ) -> Union[AsyncIterable[Any], UnaryStreamCall]:
+        hub = Hub.current
+        method = client_call_details.method
+
+        with hub.start_span(
+            op=OP.GRPC_CLIENT, description="unary stream call to %s" % method.decode()
+        ) as span:
+            span.set_data("type", "unary stream")
+            span.set_data("method", method)
+
+            client_call_details = self._update_client_call_details_metadata_from_hub(
+                client_call_details, hub
+            )
+
+            response = await continuation(client_call_details, request)
+            # Awaiting the status code on unary-stream calls leads to
+            # execution getting stuck, so it is intentionally not recorded
+            # (see the matching comment in the sync client interceptor):
+            # status_code = await response.code()
+            # span.set_data("code", status_code)
+
+            return response
diff --git a/sentry_sdk/integrations/grpc/aio/server.py b/sentry_sdk/integrations/grpc/aio/server.py
new file mode 100644
index 0000000000..ba19eb947c
--- /dev/null
+++ b/sentry_sdk/integrations/grpc/aio/server.py
@@ -0,0 +1,97 @@
+from sentry_sdk import Hub
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import DidNotEnable
+from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_CUSTOM
+from sentry_sdk.utils import event_from_exception
+
+if TYPE_CHECKING:
+    from collections.abc import Awaitable, Callable
+    from typing import Any, Optional
+
+
+try:
+    import grpc
+    from grpc import HandlerCallDetails, RpcMethodHandler
+    from grpc.aio import AbortError, ServicerContext
+except ImportError:
+    raise DidNotEnable("grpcio is not installed")
+
+
+class ServerInterceptor(grpc.aio.ServerInterceptor):  # type: ignore
+    def __init__(self, find_name=None):
+        # type: (ServerInterceptor, Optional[Callable[[ServicerContext], str]]) -> None
+        self._find_method_name = find_name or self._find_name
+
+        super(ServerInterceptor, self).__init__()
+
+    async def intercept_service(self, continuation, handler_call_details):
+        # type: (ServerInterceptor, Callable[[HandlerCallDetails], Awaitable[RpcMethodHandler]], HandlerCallDetails) -> Awaitable[RpcMethodHandler]
+        self._handler_call_details = handler_call_details
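+        # NOTE: stashing the call details on the (shared) interceptor
+        # instance assumes interceptions do not overlap; concurrent calls
+        # could race on this attribute.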
+        handler = await continuation(handler_call_details)
+
+        if not handler.request_streaming and not handler.response_streaming:
+            handler_factory = grpc.unary_unary_rpc_method_handler
+
+            async def wrapped(request, context):
+                # type: (Any, ServicerContext) -> Any
+                name = self._find_method_name(context)
+                if not name:
+                    return await handler.unary_unary(request, context)
+
+                hub = Hub.current
+
+                # continue_from_headers starts a fresh trace when the
+                # propagation headers are missing or empty.
+                transaction = Transaction.continue_from_headers(
+                    dict(context.invocation_metadata()),
+                    op=OP.GRPC_SERVER,
+                    name=name,
+                    source=TRANSACTION_SOURCE_CUSTOM,
+                )
+
+                with hub.start_transaction(transaction=transaction):
+                    try:
+                        return await handler.unary_unary(request, context)
+                    except AbortError:
+                        raise
+                    except Exception as exc:
+                        event, hint = event_from_exception(
+                            exc,
+                            mechanism={"type": "grpc", "handled": False},
+                        )
+                        hub.capture_event(event, hint=hint)
+                        raise
+
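+        # The streaming variants below are re-wrapped only to preserve the
+        # handler shape; transactions are started for unary-unary calls only.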
+        elif not handler.request_streaming and handler.response_streaming:
+            handler_factory = grpc.unary_stream_rpc_method_handler
+
+            async def wrapped(request, context):  # type: ignore
+                # type: (Any, ServicerContext) -> Any
+                async for r in handler.unary_stream(request, context):
+                    yield r
+
+        elif handler.request_streaming and not handler.response_streaming:
+            handler_factory = grpc.stream_unary_rpc_method_handler
+
+            async def wrapped(request, context):
+                # type: (Any, ServicerContext) -> Any
+                response = handler.stream_unary(request, context)
+                return await response
+
+        elif handler.request_streaming and handler.response_streaming:
+            handler_factory = grpc.stream_stream_rpc_method_handler
+
+            async def wrapped(request, context):  # type: ignore
+                # type: (Any, ServicerContext) -> Any
+                async for r in handler.stream_stream(request, context):
+                    yield r
+
+        return handler_factory(
+            wrapped,
+            request_deserializer=handler.request_deserializer,
+            response_serializer=handler.response_serializer,
+        )
+
+    def _find_name(self, context):
+        # type: (ServicerContext) -> str
+        return self._handler_call_details.method
diff --git a/sentry_sdk/integrations/grpc/client.py b/sentry_sdk/integrations/grpc/client.py
new file mode 100644
index 0000000000..955c3c4217
--- /dev/null
+++ b/sentry_sdk/integrations/grpc/client.py
@@ -0,0 +1,85 @@
+from sentry_sdk import Hub
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import DidNotEnable
+
+if TYPE_CHECKING:
+    from typing import Any, Callable, Iterator, Iterable, Union
+
+try:
+    import grpc
+    from grpc import ClientCallDetails, Call
+    from grpc._interceptor import _UnaryOutcome
+    from grpc.aio._interceptor import UnaryStreamCall
+    from google.protobuf.message import Message
+except ImportError:
+    raise DidNotEnable("grpcio is not installed")
+
+
+class ClientInterceptor(
+    grpc.UnaryUnaryClientInterceptor, grpc.UnaryStreamClientInterceptor  # type: ignore
+):
+    _is_intercepted = False
+
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        # type: (ClientInterceptor, Callable[[ClientCallDetails, Message], _UnaryOutcome], ClientCallDetails, Message) -> _UnaryOutcome
+        hub = Hub.current
+        method = client_call_details.method
+
+        with hub.start_span(
+            op=OP.GRPC_CLIENT, description="unary unary call to %s" % method
+        ) as span:
+            span.set_data("type", "unary unary")
+            span.set_data("method", method)
+
+            client_call_details = self._update_client_call_details_metadata_from_hub(
+                client_call_details, hub
+            )
+
+            response = continuation(client_call_details, request)
+            span.set_data("code", response.code().name)
+
+            return response
+
+    def intercept_unary_stream(self, continuation, client_call_details, request):
+        # type: (ClientInterceptor, Callable[[ClientCallDetails, Message], Union[Iterable[Any], UnaryStreamCall]], ClientCallDetails, Message) -> Union[Iterator[Message], Call]
+        hub = Hub.current
+        method = client_call_details.method
+
+        with hub.start_span(
+            op=OP.GRPC_CLIENT, description="unary stream call to %s" % method
+        ) as span:
+            span.set_data("type", "unary stream")
+            span.set_data("method", method)
+
+            client_call_details = self._update_client_call_details_metadata_from_hub(
+                client_call_details, hub
+            )
+
+            response = continuation(
+                client_call_details, request
+            )  # type: UnaryStreamCall
+            # Setting code on unary-stream leads to execution getting stuck
+            # span.set_data("code", response.code().name)
+
+            return response
+
+    @staticmethod
+    def _update_client_call_details_metadata_from_hub(client_call_details, hub):
+        # type: (ClientCallDetails, Hub) -> ClientCallDetails
+        metadata = (
+            list(client_call_details.metadata) if client_call_details.metadata else []
+        )
+        for key, value in hub.iter_trace_propagation_headers():
+            metadata.append((key, value))
+
+        client_call_details = grpc._interceptor._ClientCallDetails(
+            method=client_call_details.method,
+            timeout=client_call_details.timeout,
+            metadata=metadata,
+            credentials=client_call_details.credentials,
+            wait_for_ready=client_call_details.wait_for_ready,
+            compression=client_call_details.compression,
+        )
+
+        return client_call_details
diff --git a/sentry_sdk/integrations/grpc/server.py b/sentry_sdk/integrations/grpc/server.py
new file mode 100644
index 0000000000..ce7c2f2a58
--- /dev/null
+++ b/sentry_sdk/integrations/grpc/server.py
@@ -0,0 +1,64 @@
+from sentry_sdk import Hub
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import DidNotEnable
+from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_CUSTOM
+
+if TYPE_CHECKING:
+    from typing import Callable, Optional
+    from google.protobuf.message import Message
+
+try:
+    import grpc
+    from grpc import ServicerContext, HandlerCallDetails, RpcMethodHandler
+except ImportError:
+    raise DidNotEnable("grpcio is not installed")
+
+
+class ServerInterceptor(grpc.ServerInterceptor):  # type: ignore
+    def __init__(self, find_name=None):
+        # type: (ServerInterceptor, Optional[Callable[[ServicerContext], str]]) -> None
+        self._find_method_name = find_name or ServerInterceptor._find_name
+
+        super(ServerInterceptor, self).__init__()
+
+    def intercept_service(self, continuation, handler_call_details):
+        # type: (ServerInterceptor, Callable[[HandlerCallDetails], RpcMethodHandler], HandlerCallDetails) -> RpcMethodHandler
+        handler = continuation(handler_call_details)
+        if not handler or not handler.unary_unary:
+            return handler
+
+        def behavior(request, context):
+            # type: (Message, ServicerContext) -> Message
+            hub = Hub(Hub.current)
+
+            name = self._find_method_name(context)
+
+            if name:
+                metadata = dict(context.invocation_metadata())
+
+                transaction = Transaction.continue_from_headers(
+                    metadata,
+                    op=OP.GRPC_SERVER,
+                    name=name,
+                    source=TRANSACTION_SOURCE_CUSTOM,
+                )
+
+                with hub.start_transaction(transaction=transaction):
+                    try:
+                        return handler.unary_unary(request, context)
+                    except BaseException:
+                        raise
+            else:
+                return handler.unary_unary(request, context)
+
+        return grpc.unary_unary_rpc_method_handler(
+            behavior,
+            request_deserializer=handler.request_deserializer,
+            response_serializer=handler.response_serializer,
+        )
+
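+    # NOTE: the default name lookup below reaches into grpc's private
+    # `_rpc_event` attribute; pass a custom `find_name` callable to the
+    # interceptor if this ever breaks.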
+    @staticmethod
+    def _find_name(context):
+        # type: (ServicerContext) -> str
+        return context._rpc_event.call_details.method.decode()
diff --git a/sentry_sdk/integrations/httpx.py b/sentry_sdk/integrations/httpx.py
index 3d4bbf8300..04db5047b4 100644
--- a/sentry_sdk/integrations/httpx.py
+++ b/sentry_sdk/integrations/httpx.py
@@ -1,10 +1,18 @@
 from sentry_sdk import Hub
+from sentry_sdk.consts import OP, SPANDATA
 from sentry_sdk.integrations import Integration, DidNotEnable
-from sentry_sdk.utils import logger
-
-from sentry_sdk._types import MYPY
-
-if MYPY:
+from sentry_sdk.tracing import BAGGAGE_HEADER_NAME
+from sentry_sdk.tracing_utils import should_propagate_trace
+from sentry_sdk.utils import (
+    SENSITIVE_DATA_SUBSTITUTE,
+    capture_internal_exceptions,
+    logger,
+    parse_url,
+)
+
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
     from typing import Any
 
 
@@ -40,23 +48,44 @@ def send(self, request, **kwargs):
         if hub.get_integration(HttpxIntegration) is None:
             return real_send(self, request, **kwargs)
 
+        parsed_url = None
+        with capture_internal_exceptions():
+            parsed_url = parse_url(str(request.url), sanitize=False)
+
         with hub.start_span(
-            op="http", description="%s %s" % (request.method, request.url)
+            op=OP.HTTP_CLIENT,
+            description="%s %s"
+            % (
+                request.method,
+                parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE,
+            ),
         ) as span:
-            span.set_data("method", request.method)
-            span.set_data("url", str(request.url))
-            for key, value in hub.iter_trace_propagation_headers():
-                logger.debug(
-                    "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
-                        key=key, value=value, url=request.url
+            span.set_data(SPANDATA.HTTP_METHOD, request.method)
+            if parsed_url is not None:
+                span.set_data("url", parsed_url.url)
+                span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
+                span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)
+
+            if should_propagate_trace(hub, str(request.url)):
+                for key, value in hub.iter_trace_propagation_headers():
+                    logger.debug(
+                        "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
+                            key=key, value=value, url=request.url
+                        )
                     )
-                )
-                request.headers[key] = value
+                    if key == BAGGAGE_HEADER_NAME and request.headers.get(
+                        BAGGAGE_HEADER_NAME
+                    ):
+                        # do not overwrite any existing baggage, just append to it
+                        request.headers[key] += "," + value
+                    else:
+                        request.headers[key] = value
+
             rv = real_send(self, request, **kwargs)
 
-            span.set_data("status_code", rv.status_code)
             span.set_http_status(rv.status_code)
             span.set_data("reason", rv.reason_phrase)
+
             return rv
 
     Client.send = send
@@ -72,23 +101,44 @@ async def send(self, request, **kwargs):
         if hub.get_integration(HttpxIntegration) is None:
             return await real_send(self, request, **kwargs)
 
+        parsed_url = None
+        with capture_internal_exceptions():
+            parsed_url = parse_url(str(request.url), sanitize=False)
+
         with hub.start_span(
-            op="http", description="%s %s" % (request.method, request.url)
+            op=OP.HTTP_CLIENT,
+            description="%s %s"
+            % (
+                request.method,
+                parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE,
+            ),
         ) as span:
-            span.set_data("method", request.method)
-            span.set_data("url", str(request.url))
-            for key, value in hub.iter_trace_propagation_headers():
-                logger.debug(
-                    "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
-                        key=key, value=value, url=request.url
+            span.set_data(SPANDATA.HTTP_METHOD, request.method)
+            if parsed_url is not None:
+                span.set_data("url", parsed_url.url)
+                span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
+                span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)
+
+            if should_propagate_trace(hub, str(request.url)):
+                for key, value in hub.iter_trace_propagation_headers():
+                    logger.debug(
+                        "[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
+                            key=key, value=value, url=request.url
+                        )
                     )
-                )
-                request.headers[key] = value
+                    if key == BAGGAGE_HEADER_NAME and request.headers.get(
+                        BAGGAGE_HEADER_NAME
+                    ):
+                        # do not overwrite any existing baggage, just append to it
+                        request.headers[key] += "," + value
+                    else:
+                        request.headers[key] = value
+
             rv = await real_send(self, request, **kwargs)
 
-            span.set_data("status_code", rv.status_code)
             span.set_http_status(rv.status_code)
             span.set_data("reason", rv.reason_phrase)
+
             return rv
 
     AsyncClient.send = send
diff --git a/sentry_sdk/integrations/huey.py b/sentry_sdk/integrations/huey.py
new file mode 100644
index 0000000000..43c03936b1
--- /dev/null
+++ b/sentry_sdk/integrations/huey.py
@@ -0,0 +1,178 @@
+from __future__ import absolute_import
+
+import sys
+from datetime import datetime
+
+from sentry_sdk._compat import reraise
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk import Hub
+from sentry_sdk.api import continue_trace, get_baggage, get_traceparent
+from sentry_sdk.consts import OP
+from sentry_sdk.hub import _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.tracing import (
+    BAGGAGE_HEADER_NAME,
+    SENTRY_TRACE_HEADER_NAME,
+    TRANSACTION_SOURCE_TASK,
+)
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    SENSITIVE_DATA_SUBSTITUTE,
+)
+
+if TYPE_CHECKING:
+    from typing import Any, Callable, Optional, Union, TypeVar
+
+    from sentry_sdk._types import EventProcessor, Event, Hint
+    from sentry_sdk.utils import ExcInfo
+
+    F = TypeVar("F", bound=Callable[..., Any])
+
+try:
+    from huey.api import Huey, Result, ResultGroup, Task, PeriodicTask
+    from huey.exceptions import CancelExecution, RetryTask, TaskLockedException
+except ImportError:
+    raise DidNotEnable("Huey is not installed")
+
+
+HUEY_CONTROL_FLOW_EXCEPTIONS = (CancelExecution, RetryTask, TaskLockedException)
+
+
+class HueyIntegration(Integration):
+    identifier = "huey"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        patch_enqueue()
+        patch_execute()
+
+
+def patch_enqueue():
+    # type: () -> None
+    old_enqueue = Huey.enqueue
+
+    def _sentry_enqueue(self, task):
+        # type: (Huey, Task) -> Optional[Union[Result, ResultGroup]]
+        hub = Hub.current
+
+        if hub.get_integration(HueyIntegration) is None:
+            return old_enqueue(self, task)
+
+        with hub.start_span(op=OP.QUEUE_SUBMIT_HUEY, description=task.name):
+            if not isinstance(task, PeriodicTask):
+                # Attach trace propagation data to task kwargs. We do
+                # not do this for periodic tasks, as these don't
+                # really have an originating transaction.
+                task.kwargs["sentry_headers"] = {
+                    BAGGAGE_HEADER_NAME: get_baggage(),
+                    SENTRY_TRACE_HEADER_NAME: get_traceparent(),
+                }
+            return old_enqueue(self, task)
+
+    Huey.enqueue = _sentry_enqueue
+
+
+def _make_event_processor(task):
+    # type: (Any) -> EventProcessor
+    def event_processor(event, hint):
+        # type: (Event, Hint) -> Optional[Event]
+
+        with capture_internal_exceptions():
+            tags = event.setdefault("tags", {})
+            tags["huey_task_id"] = task.id
+            tags["huey_task_retry"] = task.default_retries > task.retries
+            extra = event.setdefault("extra", {})
+            extra["huey-job"] = {
+                "task": task.name,
+                "args": (
+                    task.args
+                    if _should_send_default_pii()
+                    else SENSITIVE_DATA_SUBSTITUTE
+                ),
+                "kwargs": (
+                    task.kwargs
+                    if _should_send_default_pii()
+                    else SENSITIVE_DATA_SUBSTITUTE
+                ),
+                "retry": (task.default_retries or 0) - task.retries,
+            }
+
+        return event
+
+    return event_processor
+
+
+def _capture_exception(exc_info):
+    # type: (ExcInfo) -> None
+    hub = Hub.current
+
+    if exc_info[0] in HUEY_CONTROL_FLOW_EXCEPTIONS:
+        hub.scope.transaction.set_status("aborted")
+        return
+
+    hub.scope.transaction.set_status("internal_error")
+    event, hint = event_from_exception(
+        exc_info,
+        client_options=hub.client.options if hub.client else None,
+        mechanism={"type": HueyIntegration.identifier, "handled": False},
+    )
+    hub.capture_event(event, hint=hint)
+
+
+def _wrap_task_execute(func):
+    # type: (F) -> F
+    def _sentry_execute(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        hub = Hub.current
+        if hub.get_integration(HueyIntegration) is None:
+            return func(*args, **kwargs)
+
+        try:
+            result = func(*args, **kwargs)
+        except Exception:
+            exc_info = sys.exc_info()
+            _capture_exception(exc_info)
+            reraise(*exc_info)
+
+        return result
+
+    return _sentry_execute  # type: ignore
+
+
+def patch_execute():
+    # type: () -> None
+    old_execute = Huey._execute
+
+    def _sentry_execute(self, task, timestamp=None):
+        # type: (Huey, Task, Optional[datetime]) -> Any
+        hub = Hub.current
+
+        if hub.get_integration(HueyIntegration) is None:
+            return old_execute(self, task, timestamp)
+
+        with hub.push_scope() as scope:
+            with capture_internal_exceptions():
+                scope._name = "huey"
+                scope.clear_breadcrumbs()
+                scope.add_event_processor(_make_event_processor(task))
+
+            sentry_headers = task.kwargs.pop("sentry_headers", None)
+
+            transaction = continue_trace(
+                sentry_headers or {},
+                name=task.name,
+                op=OP.QUEUE_TASK_HUEY,
+                source=TRANSACTION_SOURCE_TASK,
+            )
+            transaction.set_status("ok")
+
+            if not getattr(task, "_sentry_is_patched", False):
+                task.execute = _wrap_task_execute(task.execute)
+                task._sentry_is_patched = True
+
+            with hub.start_transaction(transaction):
+                return old_execute(self, task, timestamp)
+
+    Huey._execute = _sentry_execute
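+
+
+# Usage sketch (illustrative only; the DSN is a placeholder):
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.huey import HueyIntegration
+#
+#     sentry_sdk.init(dsn="...", integrations=[HueyIntegration()])
+#
+# Each executed task then becomes a transaction (op QUEUE_TASK_HUEY), and
+# tasks enqueued inside another transaction are linked to it through the
+# `sentry_headers` kwarg attached in patch_enqueue().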
diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py
index e9f3fe9dbb..d455983fc5 100644
--- a/sentry_sdk/integrations/logging.py
+++ b/sentry_sdk/integrations/logging.py
@@ -1,7 +1,6 @@
 from __future__ import absolute_import
 
 import logging
-import datetime
 from fnmatch import fnmatch
 
 from sentry_sdk.hub import Hub
@@ -12,11 +11,12 @@
     capture_internal_exceptions,
 )
 from sentry_sdk.integrations import Integration
-from sentry_sdk._compat import iteritems
+from sentry_sdk._compat import iteritems, utc_from_timestamp
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
+    from collections.abc import MutableMapping
     from logging import LogRecord
     from typing import Any
     from typing import Dict
@@ -24,6 +24,16 @@
 
 DEFAULT_LEVEL = logging.INFO
 DEFAULT_EVENT_LEVEL = logging.ERROR
+LOGGING_TO_EVENT_LEVEL = {
+    logging.NOTSET: "notset",
+    logging.DEBUG: "debug",
+    logging.INFO: "info",
+    logging.WARN: "warning",  # WARN is the same as WARNING
+    logging.WARNING: "warning",
+    logging.ERROR: "error",
+    logging.FATAL: "fatal",
+    logging.CRITICAL: "fatal",  # CRITICAL is the same as FATAL
+}
 
 # Capturing events from those loggers causes recursion errors. We cannot allow
 # the user to unconditionally create events from those loggers under any
@@ -82,6 +92,10 @@ def setup_once():
 
         def sentry_patched_callhandlers(self, record):
             # type: (Any, LogRecord) -> Any
+            # keeping a local reference because the
+            # global might be discarded on shutdown
+            ignored_loggers = _IGNORED_LOGGERS
+
             try:
                 return old_callhandlers(self, record)
             finally:
@@ -89,7 +103,7 @@ def sentry_patched_callhandlers(self, record):
                 # the integration.  Otherwise we have a high chance of getting
                 # into a recursion error when the integration is resolved
                 # (this also is slower).
-                if record.name not in _IGNORED_LOGGERS:
+                if ignored_loggers is not None and record.name not in ignored_loggers:
                     integration = Hub.current.get_integration(LoggingIntegration)
                     if integration is not None:
                         integration._handle_record(record)
@@ -97,73 +111,62 @@ def sentry_patched_callhandlers(self, record):
         logging.Logger.callHandlers = sentry_patched_callhandlers  # type: ignore
 
 
-def _can_record(record):
-    # type: (LogRecord) -> bool
-    """Prevents ignored loggers from recording"""
-    for logger in _IGNORED_LOGGERS:
-        if fnmatch(record.name, logger):
-            return False
-    return True
-
-
-def _breadcrumb_from_record(record):
-    # type: (LogRecord) -> Dict[str, Any]
-    return {
-        "type": "log",
-        "level": _logging_to_event_level(record.levelname),
-        "category": record.name,
-        "message": record.message,
-        "timestamp": datetime.datetime.utcfromtimestamp(record.created),
-        "data": _extra_from_record(record),
-    }
-
-
-def _logging_to_event_level(levelname):
-    # type: (str) -> str
-    return {"critical": "fatal"}.get(levelname.lower(), levelname.lower())
-
-
-COMMON_RECORD_ATTRS = frozenset(
-    (
-        "args",
-        "created",
-        "exc_info",
-        "exc_text",
-        "filename",
-        "funcName",
-        "levelname",
-        "levelno",
-        "linenno",
-        "lineno",
-        "message",
-        "module",
-        "msecs",
-        "msg",
-        "name",
-        "pathname",
-        "process",
-        "processName",
-        "relativeCreated",
-        "stack",
-        "tags",
-        "thread",
-        "threadName",
-        "stack_info",
+class _BaseHandler(logging.Handler, object):
+    COMMON_RECORD_ATTRS = frozenset(
+        (
+            "args",
+            "created",
+            "exc_info",
+            "exc_text",
+            "filename",
+            "funcName",
+            "levelname",
+            "levelno",
+            "linenno",
+            "lineno",
+            "message",
+            "module",
+            "msecs",
+            "msg",
+            "name",
+            "pathname",
+            "process",
+            "processName",
+            "relativeCreated",
+            "stack",
+            "tags",
+            "taskName",
+            "thread",
+            "threadName",
+            "stack_info",
+        )
     )
-)
 
+    def _can_record(self, record):
+        # type: (LogRecord) -> bool
+        """Prevents ignored loggers from recording"""
+        for logger in _IGNORED_LOGGERS:
+            if fnmatch(record.name, logger):
+                return False
+        return True
+
+    def _logging_to_event_level(self, record):
+        # type: (LogRecord) -> str
+        return LOGGING_TO_EVENT_LEVEL.get(
+            record.levelno, record.levelname.lower() if record.levelname else ""
+        )
 
-def _extra_from_record(record):
-    # type: (LogRecord) -> Dict[str, None]
-    return {
-        k: v
-        for k, v in iteritems(vars(record))
-        if k not in COMMON_RECORD_ATTRS
-        and (not isinstance(k, str) or not k.startswith("_"))
-    }
+    def _extra_from_record(self, record):
+        # type: (LogRecord) -> MutableMapping[str, object]
+        return {
+            k: v
+            for k, v in iteritems(vars(record))
+            if k not in self.COMMON_RECORD_ATTRS
+            and (not isinstance(k, str) or not k.startswith("_"))
+        }
 
 
-class EventHandler(logging.Handler, object):
+class EventHandler(_BaseHandler):
     """
     A logging handler that emits Sentry events for each log record
 
@@ -178,7 +181,7 @@ def emit(self, record):
 
     def _emit(self, record):
         # type: (LogRecord) -> None
-        if not _can_record(record):
+        if not self._can_record(record):
             return
 
         hub = Hub.current
@@ -207,7 +210,10 @@ def _emit(self, record):
                     "values": [
                         {
                             "stacktrace": current_stacktrace(
-                                client_options["with_locals"]
+                                include_local_variables=client_options[
+                                    "include_local_variables"
+                                ],
+                                max_value_length=client_options["max_value_length"],
                             ),
                             "crashed": False,
                             "current": True,
@@ -220,7 +226,9 @@ def _emit(self, record):
 
         hint["log_record"] = record
 
-        event["level"] = _logging_to_event_level(record.levelname)
+        level = self._logging_to_event_level(record)
+        if level in {"debug", "info", "warning", "error", "critical", "fatal"}:
+            event["level"] = level  # type: ignore[typeddict-item]
         event["logger"] = record.name
 
         # Log records from `warnings` module as separate issues
@@ -243,7 +251,7 @@ def _emit(self, record):
                 "params": record.args,
             }
 
-        event["extra"] = _extra_from_record(record)
+        event["extra"] = self._extra_from_record(record)
 
         hub.capture_event(event, hint=hint)
 
@@ -252,7 +260,7 @@ def _emit(self, record):
 SentryHandler = EventHandler
 
 
-class BreadcrumbHandler(logging.Handler, object):
+class BreadcrumbHandler(_BaseHandler):
     """
     A logging handler that records breadcrumbs for each log record.
 
@@ -267,9 +275,20 @@ def emit(self, record):
 
     def _emit(self, record):
         # type: (LogRecord) -> None
-        if not _can_record(record):
+        if not self._can_record(record):
             return
 
         Hub.current.add_breadcrumb(
-            _breadcrumb_from_record(record), hint={"log_record": record}
+            self._breadcrumb_from_record(record), hint={"log_record": record}
         )
+
+    def _breadcrumb_from_record(self, record):
+        # type: (LogRecord) -> Dict[str, Any]
+        return {
+            "type": "log",
+            "level": self._logging_to_event_level(record),
+            "category": record.name,
+            "message": record.message,
+            "timestamp": utc_from_timestamp(record.created),
+            "data": self._extra_from_record(record),
+        }
diff --git a/sentry_sdk/integrations/loguru.py b/sentry_sdk/integrations/loguru.py
new file mode 100644
index 0000000000..b1ee2a681f
--- /dev/null
+++ b/sentry_sdk/integrations/loguru.py
@@ -0,0 +1,101 @@
+from __future__ import absolute_import
+
+import enum
+
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations.logging import (
+    BreadcrumbHandler,
+    EventHandler,
+    _BaseHandler,
+)
+
+if TYPE_CHECKING:
+    from logging import LogRecord
+    from typing import Optional, Tuple, Union
+
+try:
+    import loguru
+    from loguru import logger
+    from loguru._defaults import LOGURU_FORMAT as DEFAULT_FORMAT
+except ImportError:
+    raise DidNotEnable("LOGURU is not installed")
+
+
+class LoggingLevels(enum.IntEnum):
+    TRACE = 5
+    DEBUG = 10
+    INFO = 20
+    SUCCESS = 25
+    WARNING = 30
+    ERROR = 40
+    CRITICAL = 50
+
+
+DEFAULT_LEVEL = LoggingLevels.INFO.value
+DEFAULT_EVENT_LEVEL = LoggingLevels.ERROR.value
+# We need to save the handlers to be able to remove them later
+# (tests call `LoguruIntegration.__init__` multiple times, and we
+# cannot use `setup_once` because it runs before we receive the
+# configuration).
+_ADDED_HANDLERS = (None, None)  # type: Tuple[Optional[int], Optional[int]]
+
+
+class LoguruIntegration(Integration):
+    identifier = "loguru"
+
+    def __init__(
+        self,
+        level=DEFAULT_LEVEL,
+        event_level=DEFAULT_EVENT_LEVEL,
+        breadcrumb_format=DEFAULT_FORMAT,
+        event_format=DEFAULT_FORMAT,
+    ):
+        # type: (Optional[int], Optional[int], Union[str, loguru.FormatFunction], Union[str, loguru.FormatFunction]) -> None
+        global _ADDED_HANDLERS
+        breadcrumb_handler, event_handler = _ADDED_HANDLERS
+
+        if breadcrumb_handler is not None:
+            logger.remove(breadcrumb_handler)
+            breadcrumb_handler = None
+        if event_handler is not None:
+            logger.remove(event_handler)
+            event_handler = None
+
+        if level is not None:
+            breadcrumb_handler = logger.add(
+                LoguruBreadcrumbHandler(level=level),
+                level=level,
+                format=breadcrumb_format,
+            )
+
+        if event_level is not None:
+            event_handler = logger.add(
+                LoguruEventHandler(level=event_level),
+                level=event_level,
+                format=event_format,
+            )
+
+        _ADDED_HANDLERS = (breadcrumb_handler, event_handler)
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        pass  # we do everything in __init__
+
+
+class _LoguruBaseHandler(_BaseHandler):
+    def _logging_to_event_level(self, record):
+        # type: (LogRecord) -> str
+        try:
+            return LoggingLevels(record.levelno).name.lower()
+        except ValueError:
+            return record.levelname.lower() if record.levelname else ""
+
+
+class LoguruEventHandler(_LoguruBaseHandler, EventHandler):
+    """Modified version of :class:`sentry_sdk.integrations.logging.EventHandler` to use loguru's level names."""
+
+
+class LoguruBreadcrumbHandler(_LoguruBaseHandler, BreadcrumbHandler):
+    """Modified version of :class:`sentry_sdk.integrations.logging.BreadcrumbHandler` to use loguru's level names."""
diff --git a/sentry_sdk/integrations/modules.py b/sentry_sdk/integrations/modules.py
index 3d78cb89bb..fa0fbf8936 100644
--- a/sentry_sdk/integrations/modules.py
+++ b/sentry_sdk/integrations/modules.py
@@ -3,40 +3,15 @@
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration
 from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.utils import _get_installed_modules
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
-    from typing import Dict
-    from typing import Tuple
-    from typing import Iterator
-
     from sentry_sdk._types import Event
 
 
-_installed_modules = None
-
-
-def _generate_installed_modules():
-    # type: () -> Iterator[Tuple[str, str]]
-    try:
-        import pkg_resources
-    except ImportError:
-        return
-
-    for info in pkg_resources.working_set:
-        yield info.key, info.version
-
-
-def _get_installed_modules():
-    # type: () -> Dict[str, str]
-    global _installed_modules
-    if _installed_modules is None:
-        _installed_modules = dict(_generate_installed_modules())
-    return _installed_modules
-
-
 class ModulesIntegration(Integration):
     identifier = "modules"
 
@@ -45,7 +20,7 @@ def setup_once():
         # type: () -> None
         @add_global_event_processor
         def processor(event, hint):
-            # type: (Event, Any) -> Dict[str, Any]
+            # type: (Event, Any) -> Event
             if event.get("type") == "transaction":
                 return event
 
diff --git a/sentry_sdk/integrations/openai.py b/sentry_sdk/integrations/openai.py
new file mode 100644
index 0000000000..0e71029b60
--- /dev/null
+++ b/sentry_sdk/integrations/openai.py
@@ -0,0 +1,304 @@
+from sentry_sdk import consts
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any, Iterable, List, Optional, Callable, Iterator
+    from sentry_sdk.tracing import Span
+
+import sentry_sdk
+from sentry_sdk._functools import wraps
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.utils import logger, capture_internal_exceptions, event_from_exception
+
+try:
+    from openai.resources.chat.completions import Completions
+    from openai.resources import Embeddings
+
+    if TYPE_CHECKING:
+        from openai.types.chat import ChatCompletionMessageParam, ChatCompletionChunk
+except ImportError:
+    raise DidNotEnable("OpenAI not installed")
+
+try:
+    import tiktoken  # type: ignore
+
+    enc = tiktoken.get_encoding("cl100k_base")
+
+    def count_tokens(s):
+        # type: (str) -> int
+        return len(enc.encode_ordinary(s))
+
+    logger.debug("[OpenAI] using tiktoken to count tokens")
+except ImportError:
+    logger.info(
+        "The Sentry Python SDK requires 'tiktoken' in order to measure token usage from some OpenAI APIs. "
+        "Please install 'tiktoken' if you aren't receiving token usage in Sentry. "
+        "See https://docs.sentry.io/platforms/python/integrations/openai/ for more information."
+    )
+
+    def count_tokens(s):
+        # type: (str) -> int
+        return 0
+
+
+COMPLETION_TOKENS_USED = "ai.completion_tоkens.used"
+PROMPT_TOKENS_USED = "ai.prompt_tоkens.used"
+TOTAL_TOKENS_USED = "ai.total_tоkens.used"
+
+
+class OpenAIIntegration(Integration):
+    identifier = "openai"
+
+    def __init__(self, include_prompts=True):
+        # type: (OpenAIIntegration, bool) -> None
+        self.include_prompts = include_prompts
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        Completions.create = _wrap_chat_completion_create(Completions.create)
+        Embeddings.create = _wrap_embeddings_create(Embeddings.create)
+
+
+def _capture_exception(hub, exc):
+    # type: (Hub, Any) -> None
+
+    if hub.client is not None:
+        event, hint = event_from_exception(
+            exc,
+            client_options=hub.client.options,
+            mechanism={"type": "openai", "handled": False},
+        )
+        hub.capture_event(event, hint=hint)
+
+
+def _normalize_data(data):
+    # type: (Any) -> Any
+
+    # convert pydantic data (e.g. OpenAI v1+) to json compatible format
+    if hasattr(data, "model_dump"):
+        try:
+            return data.model_dump()
+        except Exception as e:
+            logger.warning("Could not convert pydantic data to JSON: %s", e)
+            return data
+    if isinstance(data, list):
+        return list(_normalize_data(x) for x in data)
+    if isinstance(data, dict):
+        return {k: _normalize_data(v) for (k, v) in data.items()}
+    return data
+
+
+def set_data_normalized(span, key, value):
+    # type: (Span, str, Any) -> None
+    span.set_data(key, _normalize_data(value))
+
+
+def _calculate_chat_completion_usage(
+    messages, response, span, streaming_message_responses=None
+):
+    # type: (Iterable[ChatCompletionMessageParam], Any, Span, Optional[List[str]]) -> None
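+    # Prefer token counts reported by the API response; fall back to
+    # tiktoken-based estimates via count_tokens() when no usage data is
+    # available, e.g. for streamed responses.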
+    completion_tokens = 0
+    prompt_tokens = 0
+    total_tokens = 0
+    if hasattr(response, "usage"):
+        if hasattr(response.usage, "completion_tokens") and isinstance(
+            response.usage.completion_tokens, int
+        ):
+            completion_tokens = response.usage.completion_tokens
+        if hasattr(response.usage, "prompt_tokens") and isinstance(
+            response.usage.prompt_tokens, int
+        ):
+            prompt_tokens = response.usage.prompt_tokens
+        if hasattr(response.usage, "total_tokens") and isinstance(
+            response.usage.total_tokens, int
+        ):
+            total_tokens = response.usage.total_tokens
+
+    if prompt_tokens == 0:
+        for message in messages:
+            if "content" in message:
+                prompt_tokens += count_tokens(message["content"])
+
+    if completion_tokens == 0:
+        if streaming_message_responses is not None:
+            for message in streaming_message_responses:
+                completion_tokens += count_tokens(message)
+        elif hasattr(response, "choices"):
+            for choice in response.choices:
+                if hasattr(choice, "message"):
+                    completion_tokens += count_tokens(choice.message)
+
+    if total_tokens == 0:
+        total_tokens = prompt_tokens + completion_tokens
+
+    if completion_tokens != 0:
+        set_data_normalized(span, COMPLETION_TOKENS_USED, completion_tokens)
+    if prompt_tokens != 0:
+        set_data_normalized(span, PROMPT_TOKENS_USED, prompt_tokens)
+    if total_tokens != 0:
+        set_data_normalized(span, TOTAL_TOKENS_USED, total_tokens)
+
+
+def _wrap_chat_completion_create(f):
+    # type: (Callable[..., Any]) -> Callable[..., Any]
+    @wraps(f)
+    def new_chat_completion(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        hub = Hub.current
+        if not hub:
+            return f(*args, **kwargs)
+
+        integration = hub.get_integration(OpenAIIntegration)  # type: OpenAIIntegration
+        if not integration:
+            return f(*args, **kwargs)
+
+        if "messages" not in kwargs:
+            # invalid call (in all versions of openai), let it return error
+            return f(*args, **kwargs)
+
+        try:
+            iter(kwargs["messages"])
+        except TypeError:
+            # invalid call (in all versions), messages must be iterable
+            return f(*args, **kwargs)
+
+        kwargs["messages"] = list(kwargs["messages"])
+        messages = kwargs["messages"]
+        model = kwargs.get("model")
+        streaming = kwargs.get("stream")
+
+        span = sentry_sdk.start_span(
+            op=consts.OP.OPENAI_CHAT_COMPLETIONS_CREATE, description="Chat Completion"
+        )
+        span.__enter__()
+        try:
+            res = f(*args, **kwargs)
+        except Exception as e:
+            _capture_exception(Hub.current, e)
+            span.__exit__(None, None, None)
+            raise e from None
+
+        with capture_internal_exceptions():
+            if _should_send_default_pii() and integration.include_prompts:
+                set_data_normalized(span, "ai.input_messages", messages)
+
+            set_data_normalized(span, "ai.model_id", model)
+            set_data_normalized(span, "ai.streaming", streaming)
+
+            if hasattr(res, "choices"):
+                if _should_send_default_pii() and integration.include_prompts:
+                    set_data_normalized(
+                        span,
+                        "ai.responses",
+                        list(map(lambda x: x.message, res.choices)),
+                    )
+                _calculate_chat_completion_usage(messages, res, span)
+                span.__exit__(None, None, None)
+            elif hasattr(res, "_iterator"):
+                data_buf = []  # type: List[List[str]]  # one per choice
+
+                old_iterator = res._iterator  # type: Iterator[ChatCompletionChunk]
+
+                def new_iterator():
+                    # type: () -> Iterator[ChatCompletionChunk]
+                    with capture_internal_exceptions():
+                        for x in old_iterator:
+                            if hasattr(x, "choices"):
+                                choice_index = 0
+                                for choice in x.choices:
+                                    if hasattr(choice, "delta") and hasattr(
+                                        choice.delta, "content"
+                                    ):
+                                        content = choice.delta.content
+                                        if len(data_buf) <= choice_index:
+                                            data_buf.append([])
+                                        data_buf[choice_index].append(content or "")
+                                    choice_index += 1
+                            yield x
+                        if len(data_buf) > 0:
+                            all_responses = list(
+                                map(lambda chunk: "".join(chunk), data_buf)
+                            )
+                            if (
+                                _should_send_default_pii()
+                                and integration.include_prompts
+                            ):
+                                set_data_normalized(span, "ai.responses", all_responses)
+                            _calculate_chat_completion_usage(
+                                messages, res, span, all_responses
+                            )
+                    span.__exit__(None, None, None)
+
+                res._iterator = new_iterator()
+            else:
+                set_data_normalized(span, "unknown_response", True)
+                span.__exit__(None, None, None)
+            return res
+
+    return new_chat_completion
+
+
+def _wrap_embeddings_create(f):
+    # type: (Callable[..., Any]) -> Callable[..., Any]
+
+    @wraps(f)
+    def new_embeddings_create(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+
+        hub = Hub.current
+        if not hub:
+            return f(*args, **kwargs)
+
+        integration = hub.get_integration(OpenAIIntegration)  # type: OpenAIIntegration
+        if not integration:
+            return f(*args, **kwargs)
+
+        with sentry_sdk.start_span(
+            op=consts.OP.OPENAI_EMBEDDINGS_CREATE,
+            description="OpenAI Embedding Creation",
+        ) as span:
+            if "input" in kwargs and (
+                _should_send_default_pii() and integration.include_prompts
+            ):
+                if isinstance(kwargs["input"], str):
+                    set_data_normalized(span, "ai.input_messages", [kwargs["input"]])
+                elif (
+                    isinstance(kwargs["input"], list)
+                    and len(kwargs["input"]) > 0
+                    and isinstance(kwargs["input"][0], str)
+                ):
+                    set_data_normalized(span, "ai.input_messages", kwargs["input"])
+            if "model" in kwargs:
+                set_data_normalized(span, "ai.model_id", kwargs["model"])
+            try:
+                response = f(*args, **kwargs)
+            except Exception as e:
+                _capture_exception(Hub.current, e)
+                raise e from None
+
+            prompt_tokens = 0
+            total_tokens = 0
+            if hasattr(response, "usage"):
+                if hasattr(response.usage, "prompt_tokens") and isinstance(
+                    response.usage.prompt_tokens, int
+                ):
+                    prompt_tokens = response.usage.prompt_tokens
+                if hasattr(response.usage, "total_tokens") and isinstance(
+                    response.usage.total_tokens, int
+                ):
+                    total_tokens = response.usage.total_tokens
+
+            if prompt_tokens == 0 and isinstance(kwargs.get("input"), str):
+                prompt_tokens = count_tokens(kwargs["input"])
+
+            if total_tokens == 0:
+                total_tokens = prompt_tokens
+
+            set_data_normalized(span, PROMPT_TOKENS_USED, prompt_tokens)
+            set_data_normalized(span, TOTAL_TOKENS_USED, total_tokens)
+
+            return response
+
+    return new_embeddings_create
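+
+
+# Usage sketch (illustrative only; prompts and responses are only recorded
+# when send_default_pii=True and include_prompts is left enabled):
+#
+#     import sentry_sdk
+#     from sentry_sdk.integrations.openai import OpenAIIntegration
+#
+#     sentry_sdk.init(
+#         dsn="...",
+#         send_default_pii=True,
+#         integrations=[OpenAIIntegration(include_prompts=True)],
+#     )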
diff --git a/sentry_sdk/integrations/opentelemetry/__init__.py b/sentry_sdk/integrations/opentelemetry/__init__.py
new file mode 100644
index 0000000000..e0020204d5
--- /dev/null
+++ b/sentry_sdk/integrations/opentelemetry/__init__.py
@@ -0,0 +1,7 @@
+from sentry_sdk.integrations.opentelemetry.span_processor import (  # noqa: F401
+    SentrySpanProcessor,
+)
+
+from sentry_sdk.integrations.opentelemetry.propagator import (  # noqa: F401
+    SentryPropagator,
+)
diff --git a/sentry_sdk/integrations/opentelemetry/consts.py b/sentry_sdk/integrations/opentelemetry/consts.py
new file mode 100644
index 0000000000..79663dd670
--- /dev/null
+++ b/sentry_sdk/integrations/opentelemetry/consts.py
@@ -0,0 +1,6 @@
+from opentelemetry.context import (  # type: ignore
+    create_key,
+)
+
+SENTRY_TRACE_KEY = create_key("sentry-trace")
+SENTRY_BAGGAGE_KEY = create_key("sentry-baggage")
diff --git a/sentry_sdk/integrations/opentelemetry/integration.py b/sentry_sdk/integrations/opentelemetry/integration.py
new file mode 100644
index 0000000000..9e62d1feca
--- /dev/null
+++ b/sentry_sdk/integrations/opentelemetry/integration.py
@@ -0,0 +1,174 @@
+"""
+IMPORTANT: The contents of this file are part of a proof of concept and as such
+are experimental and not suitable for production use. They may be changed or
+removed at any time without prior notice.
+"""
+
+import sys
+from importlib import import_module
+
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor
+from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator
+from sentry_sdk.utils import logger, _get_installed_modules
+from sentry_sdk._types import TYPE_CHECKING
+
+try:
+    from opentelemetry import trace  # type: ignore
+    from opentelemetry.instrumentation.auto_instrumentation._load import (  # type: ignore
+        _load_distro,
+        _load_instrumentors,
+    )
+    from opentelemetry.propagate import set_global_textmap  # type: ignore
+    from opentelemetry.sdk.trace import TracerProvider  # type: ignore
+except ImportError:
+    raise DidNotEnable("opentelemetry not installed")
+
+if TYPE_CHECKING:
+    from typing import Dict
+
+
+CLASSES_TO_INSTRUMENT = {
+    # A mapping of packages to their entry point class that will be instrumented.
+    # This is used to post-instrument any classes that were imported before OTel
+    # instrumentation took place.
+    "fastapi": "fastapi.FastAPI",
+    "flask": "flask.Flask",
+}
+
+
+class OpenTelemetryIntegration(Integration):
+    identifier = "opentelemetry"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        logger.warning(
+            "[OTel] Initializing highly experimental OpenTelemetry support. "
+            "Use at your own risk."
+        )
+
+        original_classes = _record_unpatched_classes()
+
+        try:
+            distro = _load_distro()
+            distro.configure()
+            _load_instrumentors(distro)
+        except Exception:
+            logger.exception("[OTel] Failed to auto-initialize OpenTelemetry")
+
+        try:
+            _patch_remaining_classes(original_classes)
+        except Exception:
+            logger.exception(
+                "[OTel] Failed to post-patch instrumented classes. "
+                "You might have to make sure sentry_sdk.init() is called before importing anything else."
+            )
+
+        _setup_sentry_tracing()
+
+        logger.debug("[OTel] Finished setting up OpenTelemetry integration")
+
+
+def _record_unpatched_classes():
+    # type: () -> Dict[str, type]
+    """
+    Keep references to classes that are about to be instrumented.
+
+    Used to search for unpatched classes after the instrumentation has run so
+    that they can be patched manually.
+    """
+    installed_packages = _get_installed_modules()
+
+    original_classes = {}
+
+    for package, orig_path in CLASSES_TO_INSTRUMENT.items():
+        if package in installed_packages:
+            try:
+                original_cls = _import_by_path(orig_path)
+            except (AttributeError, ImportError):
+                logger.debug("[OTel] Failed to import %s", orig_path)
+                continue
+
+            original_classes[package] = original_cls
+
+    return original_classes
+
+
+def _patch_remaining_classes(original_classes):
+    # type: (Dict[str, type]) -> None
+    """
+    Best-effort attempt to patch any uninstrumented classes in sys.modules.
+
+    This makes the relative order of imports and sentry_sdk.init() in user
+    code irrelevant. If e.g. the Flask class had been imported before
+    sentry_sdk was init()ed (and therefore before the OTel instrumentation
+    ran), it would not be instrumented. This function goes over the remaining
+    uninstrumented occurrences of the class in sys.modules and replaces them
+    with the instrumented class.
+
+    Since this looks for exact matches only, it will not work in some
+    scenarios (e.g. if someone inherits from the class instead of using it
+    explicitly). In those cases it is still necessary to call
+    sentry_sdk.init() before importing anything that is supposed to be
+    instrumented.
+    """
+    # check which classes have actually been instrumented
+    instrumented_classes = {}
+
+    for package in list(original_classes.keys()):
+        original_path = CLASSES_TO_INSTRUMENT[package]
+
+        try:
+            cls = _import_by_path(original_path)
+        except (AttributeError, ImportError):
+            logger.debug(
+                "[OTel] Failed to check if class has been instrumented: %s",
+                original_path,
+            )
+            del original_classes[package]
+            continue
+
+        if not cls.__module__.startswith("opentelemetry."):
+            del original_classes[package]
+            continue
+
+        instrumented_classes[package] = cls
+
+    if not instrumented_classes:
+        return
+
+    # replace occurrences of the original unpatched class in sys.modules
+    for module_name, module in sys.modules.copy().items():
+        if (
+            module_name.startswith("sentry_sdk")
+            or module_name in sys.builtin_module_names
+        ):
+            continue
+
+        for package, original_cls in original_classes.items():
+            for var_name, var in vars(module).copy().items():
+                if var == original_cls:
+                    logger.debug(
+                        "[OTel] Additionally patching %s from %s",
+                        original_cls,
+                        module_name,
+                    )
+
+                    setattr(module, var_name, instrumented_classes[package])
+
+
+def _import_by_path(path):
+    # type: (str) -> type
+    parts = path.rsplit(".", maxsplit=1)
+    return getattr(import_module(parts[0]), parts[-1])
+
+
+def _setup_sentry_tracing():
+    # type: () -> None
+    provider = TracerProvider()
+
+    provider.add_span_processor(SentrySpanProcessor())
+
+    trace.set_tracer_provider(provider)
+
+    set_global_textmap(SentryPropagator())
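
A hedged usage sketch for the integration above: the span processor and propagator only act when the SDK runs with the OTel instrumenter, so both settings belong together. The DSN below is a placeholder, and per _patch_remaining_classes() it is safest to call init() before importing the framework objects that should be instrumented.

import sentry_sdk
from sentry_sdk.integrations.opentelemetry.integration import OpenTelemetryIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
    instrumenter="otel",
    integrations=[OpenTelemetryIntegration()],
)
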
diff --git a/sentry_sdk/integrations/opentelemetry/propagator.py b/sentry_sdk/integrations/opentelemetry/propagator.py
new file mode 100644
index 0000000000..e1bcc3b13e
--- /dev/null
+++ b/sentry_sdk/integrations/opentelemetry/propagator.py
@@ -0,0 +1,115 @@
+from opentelemetry import trace  # type: ignore
+from opentelemetry.context import (  # type: ignore
+    Context,
+    get_current,
+    set_value,
+)
+from opentelemetry.propagators.textmap import (  # type: ignore
+    CarrierT,
+    Getter,
+    Setter,
+    TextMapPropagator,
+    default_getter,
+    default_setter,
+)
+from opentelemetry.trace import (  # type: ignore
+    NonRecordingSpan,
+    SpanContext,
+    TraceFlags,
+)
+from sentry_sdk.integrations.opentelemetry.consts import (
+    SENTRY_BAGGAGE_KEY,
+    SENTRY_TRACE_KEY,
+)
+from sentry_sdk.integrations.opentelemetry.span_processor import (
+    SentrySpanProcessor,
+)
+
+from sentry_sdk.tracing import (
+    BAGGAGE_HEADER_NAME,
+    SENTRY_TRACE_HEADER_NAME,
+)
+from sentry_sdk.tracing_utils import Baggage, extract_sentrytrace_data
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Optional
+    from typing import Set
+
+
+class SentryPropagator(TextMapPropagator):  # type: ignore
+    """
+    Propagates tracing headers for Sentry's tracing system in a way OTel understands.
+    """
+
+    def extract(self, carrier, context=None, getter=default_getter):
+        # type: (CarrierT, Optional[Context], Getter) -> Context
+        if context is None:
+            context = get_current()
+
+        sentry_trace = getter.get(carrier, SENTRY_TRACE_HEADER_NAME)
+        if not sentry_trace:
+            return context
+
+        sentrytrace = extract_sentrytrace_data(sentry_trace[0])
+        if not sentrytrace:
+            return context
+
+        context = set_value(SENTRY_TRACE_KEY, sentrytrace, context)
+
+        trace_id, span_id = sentrytrace["trace_id"], sentrytrace["parent_span_id"]
+
+        span_context = SpanContext(
+            trace_id=int(trace_id, 16),  # type: ignore
+            span_id=int(span_id, 16),  # type: ignore
+            # We simulate a sampled trace on the OTel side and leave the sampling decision to Sentry.
+            trace_flags=TraceFlags(TraceFlags.SAMPLED),
+            is_remote=True,
+        )
+
+        baggage_header = getter.get(carrier, BAGGAGE_HEADER_NAME)
+
+        if baggage_header:
+            baggage = Baggage.from_incoming_header(baggage_header[0])
+        else:
+            # If there's an incoming sentry-trace but no incoming baggage header,
+            # for instance in traces coming from older SDKs,
+            # baggage will be empty and frozen and won't be populated as head SDK.
+            baggage = Baggage(sentry_items={})
+
+        baggage.freeze()
+        context = set_value(SENTRY_BAGGAGE_KEY, baggage, context)
+
+        span = NonRecordingSpan(span_context)
+        modified_context = trace.set_span_in_context(span, context)
+        return modified_context
+
+    def inject(self, carrier, context=None, setter=default_setter):
+        # type: (CarrierT, Optional[Context], Setter) -> None
+        if context is None:
+            context = get_current()
+
+        current_span = trace.get_current_span(context)
+        current_span_context = current_span.get_span_context()
+
+        if not current_span_context.is_valid:
+            return
+
+        span_id = trace.format_span_id(current_span_context.span_id)
+
+        span_map = SentrySpanProcessor().otel_span_map
+        sentry_span = span_map.get(span_id, None)
+        if not sentry_span:
+            return
+
+        setter.set(carrier, SENTRY_TRACE_HEADER_NAME, sentry_span.to_traceparent())
+
+        if sentry_span.containing_transaction:
+            baggage = sentry_span.containing_transaction.get_baggage()
+            if baggage:
+                setter.set(carrier, BAGGAGE_HEADER_NAME, baggage.serialize())
+
+    @property
+    def fields(self):
+        # type: () -> Set[str]
+        return {SENTRY_TRACE_HEADER_NAME, BAGGAGE_HEADER_NAME}
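
A small sketch of extract() in action, using a synthetic sentry-trace header (the hex values are made up): the returned context carries a NonRecordingSpan with the remote trace id, plus the parsed Sentry baggage.

from opentelemetry import trace

from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator

carrier = {
    "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1",
    "baggage": "sentry-trace_id=771a43a4192642f0b136d5159a501700",
}
ctx = SentryPropagator().extract(carrier)
span = trace.get_current_span(ctx)
print(trace.format_trace_id(span.get_span_context().trace_id))
# 771a43a4192642f0b136d5159a501700
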
diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py
new file mode 100644
index 0000000000..0db698e239
--- /dev/null
+++ b/sentry_sdk/integrations/opentelemetry/span_processor.py
@@ -0,0 +1,370 @@
+from time import time
+
+from opentelemetry.context import get_value  # type: ignore
+from opentelemetry.sdk.trace import SpanProcessor  # type: ignore
+from opentelemetry.semconv.trace import SpanAttributes  # type: ignore
+from opentelemetry.trace import (  # type: ignore
+    format_span_id,
+    format_trace_id,
+    get_current_span,
+    SpanContext,
+    Span as OTelSpan,
+    SpanKind,
+)
+from opentelemetry.trace.span import (  # type: ignore
+    INVALID_SPAN_ID,
+    INVALID_TRACE_ID,
+)
+from sentry_sdk._compat import utc_from_timestamp
+from sentry_sdk.consts import INSTRUMENTER
+from sentry_sdk.hub import Hub
+from sentry_sdk.integrations.opentelemetry.consts import (
+    SENTRY_BAGGAGE_KEY,
+    SENTRY_TRACE_KEY,
+)
+from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.tracing import Transaction, Span as SentrySpan
+from sentry_sdk.utils import Dsn
+from sentry_sdk._types import TYPE_CHECKING
+
+from urllib3.util import parse_url as urlparse
+
+if TYPE_CHECKING:
+    from typing import Any, Dict, Optional, Set, Union
+
+    from sentry_sdk._types import Event, Hint
+
+OPEN_TELEMETRY_CONTEXT = "otel"
+SPAN_MAX_TIME_OPEN_MINUTES = 10
+
+
+def link_trace_context_to_error_event(event, otel_span_map):
+    # type: (Event, Dict[str, Union[Transaction, SentrySpan]]) -> Event
+    hub = Hub.current
+    if not hub:
+        return event
+
+    if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL:
+        return event
+
+    if event.get("type") == "transaction":
+        return event
+
+    otel_span = get_current_span()
+    if not otel_span:
+        return event
+
+    ctx = otel_span.get_span_context()
+    trace_id = format_trace_id(ctx.trace_id)
+    span_id = format_span_id(ctx.span_id)
+
+    if trace_id == INVALID_TRACE_ID or span_id == INVALID_SPAN_ID:
+        return event
+
+    sentry_span = otel_span_map.get(span_id, None)
+    if not sentry_span:
+        return event
+
+    contexts = event.setdefault("contexts", {})
+    contexts.setdefault("trace", {}).update(sentry_span.get_trace_context())
+
+    return event
+
+
+class SentrySpanProcessor(SpanProcessor):  # type: ignore
+    """
+    Converts OTel spans into Sentry spans so they can be sent to the Sentry backend.
+    """
+
+    # The mapping from otel span ids to sentry spans
+    otel_span_map = {}  # type: Dict[str, Union[Transaction, SentrySpan]]
+
+    # The currently open spans. Elements will be discarded after SPAN_MAX_TIME_OPEN_MINUTES
+    open_spans = {}  # type: Dict[int, Set[str]]
+
+    def __new__(cls):
+        # type: () -> SentrySpanProcessor
+        if not hasattr(cls, "instance"):
+            cls.instance = super(SentrySpanProcessor, cls).__new__(cls)
+
+        return cls.instance
+
+    def __init__(self):
+        # type: () -> None
+        @add_global_event_processor
+        def global_event_processor(event, hint):
+            # type: (Event, Hint) -> Event
+            return link_trace_context_to_error_event(event, self.otel_span_map)
+
+    def _prune_old_spans(self):
+        # type: (SentrySpanProcessor) -> None
+        """
+        Prune spans that have been open for too long.
+        """
+        current_time_minutes = int(time() / 60)
+        for span_start_minutes in list(
+            self.open_spans.keys()
+        ):  # copy the keys because we mutate the dict while iterating
+            # prune empty open spans buckets
+            if self.open_spans[span_start_minutes] == set():
+                self.open_spans.pop(span_start_minutes)
+
+            # prune old buckets
+            elif current_time_minutes - span_start_minutes > SPAN_MAX_TIME_OPEN_MINUTES:
+                for span_id in self.open_spans.pop(span_start_minutes):
+                    self.otel_span_map.pop(span_id, None)
+
+    def on_start(self, otel_span, parent_context=None):
+        # type: (OTelSpan, Optional[SpanContext]) -> None
+        hub = Hub.current
+        if not hub:
+            return
+
+        if not hub.client or not hub.client.dsn:
+            return
+
+        try:
+            _ = Dsn(hub.client.dsn or "")
+        except Exception:
+            return
+
+        if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL:
+            return
+
+        if not otel_span.get_span_context().is_valid:
+            return
+
+        if self._is_sentry_span(hub, otel_span):
+            return
+
+        trace_data = self._get_trace_data(otel_span, parent_context)
+
+        parent_span_id = trace_data["parent_span_id"]
+        sentry_parent_span = (
+            self.otel_span_map.get(parent_span_id, None) if parent_span_id else None
+        )
+
+        sentry_span = None
+        if sentry_parent_span:
+            sentry_span = sentry_parent_span.start_child(
+                span_id=trace_data["span_id"],
+                description=otel_span.name,
+                start_timestamp=utc_from_timestamp(
+                    otel_span.start_time / 1e9
+                ),  # OTel spans have nanosecond precision
+                instrumenter=INSTRUMENTER.OTEL,
+            )
+        else:
+            sentry_span = hub.start_transaction(
+                name=otel_span.name,
+                span_id=trace_data["span_id"],
+                parent_span_id=parent_span_id,
+                trace_id=trace_data["trace_id"],
+                baggage=trace_data["baggage"],
+                start_timestamp=utc_from_timestamp(
+                    otel_span.start_time / 1e9
+                ),  # OTel spans have nanosecond precision
+                instrumenter=INSTRUMENTER.OTEL,
+            )
+
+        self.otel_span_map[trace_data["span_id"]] = sentry_span
+
+        span_start_in_minutes = int(
+            otel_span.start_time / 1e9 / 60
+        )  # OTel spans have nanosecond precision
+        self.open_spans.setdefault(span_start_in_minutes, set()).add(
+            trace_data["span_id"]
+        )
+        self._prune_old_spans()
+
+    def on_end(self, otel_span):
+        # type: (OTelSpan) -> None
+        hub = Hub.current
+        if not hub:
+            return
+
+        if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL:
+            return
+
+        span_context = otel_span.get_span_context()
+        if not span_context.is_valid:
+            return
+
+        span_id = format_span_id(span_context.span_id)
+        sentry_span = self.otel_span_map.pop(span_id, None)
+        if not sentry_span:
+            return
+
+        sentry_span.op = otel_span.name
+
+        self._update_span_with_otel_status(sentry_span, otel_span)
+
+        if isinstance(sentry_span, Transaction):
+            sentry_span.name = otel_span.name
+            sentry_span.set_context(
+                OPEN_TELEMETRY_CONTEXT, self._get_otel_context(otel_span)
+            )
+            self._update_transaction_with_otel_data(sentry_span, otel_span)
+
+        else:
+            self._update_span_with_otel_data(sentry_span, otel_span)
+
+        sentry_span.finish(
+            end_timestamp=utc_from_timestamp(otel_span.end_time / 1e9)
+        )  # OTel spans have nanosecond precision
+
+        span_start_in_minutes = int(
+            otel_span.start_time / 1e9 / 60
+        )  # OTel spans have nanosecond precision
+        self.open_spans.setdefault(span_start_in_minutes, set()).discard(span_id)
+        self._prune_old_spans()
+
+    def _is_sentry_span(self, hub, otel_span):
+        # type: (Hub, OTelSpan) -> bool
+        """
+        Break infinite loop:
+        HTTP requests to Sentry are caught by OTel and sent to Sentry again.
+        """
+        otel_span_url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None)
+        dsn_url = hub.client and Dsn(hub.client.dsn or "").netloc
+
+        if otel_span_url and dsn_url in otel_span_url:
+            return True
+
+        return False
+
+    def _get_otel_context(self, otel_span):
+        # type: (OTelSpan) -> Dict[str, Any]
+        """
+        Returns the OTel context for Sentry.
+        See: https://develop.sentry.dev/sdk/performance/opentelemetry/#step-5-add-opentelemetry-context
+        """
+        ctx = {}
+
+        if otel_span.attributes:
+            ctx["attributes"] = dict(otel_span.attributes)
+
+        if otel_span.resource.attributes:
+            ctx["resource"] = dict(otel_span.resource.attributes)
+
+        return ctx
+
+    def _get_trace_data(self, otel_span, parent_context):
+        # type: (OTelSpan, SpanContext) -> Dict[str, Any]
+        """
+        Extracts tracing information from one OTel span and its parent OTel context.
+        """
+        trace_data = {}
+        span_context = otel_span.get_span_context()
+
+        span_id = format_span_id(span_context.span_id)
+        trace_data["span_id"] = span_id
+
+        trace_id = format_trace_id(span_context.trace_id)
+        trace_data["trace_id"] = trace_id
+
+        parent_span_id = (
+            format_span_id(otel_span.parent.span_id) if otel_span.parent else None
+        )
+        trace_data["parent_span_id"] = parent_span_id
+
+        sentry_trace_data = get_value(SENTRY_TRACE_KEY, parent_context)
+        trace_data["parent_sampled"] = (
+            sentry_trace_data["parent_sampled"] if sentry_trace_data else None
+        )
+
+        baggage = get_value(SENTRY_BAGGAGE_KEY, parent_context)
+        trace_data["baggage"] = baggage
+
+        return trace_data
+
+    def _update_span_with_otel_status(self, sentry_span, otel_span):
+        # type: (SentrySpan, OTelSpan) -> None
+        """
+        Set the Sentry span status from the OTel span
+        """
+        if otel_span.status.is_unset:
+            return
+
+        if otel_span.status.is_ok:
+            sentry_span.set_status("ok")
+            return
+
+        sentry_span.set_status("internal_error")
+
+    def _update_span_with_otel_data(self, sentry_span, otel_span):
+        # type: (SentrySpan, OTelSpan) -> None
+        """
+        Convert OTel span data and update the Sentry span with it.
+        This should eventually happen on the server when ingesting the spans.
+        """
+        for key, val in otel_span.attributes.items():
+            sentry_span.set_data(key, val)
+
+        sentry_span.set_data("otel.kind", otel_span.kind)
+
+        op = otel_span.name
+        description = otel_span.name
+
+        http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD, None)
+        db_query = otel_span.attributes.get(SpanAttributes.DB_SYSTEM, None)
+
+        if http_method:
+            op = "http"
+
+            if otel_span.kind == SpanKind.SERVER:
+                op += ".server"
+            elif otel_span.kind == SpanKind.CLIENT:
+                op += ".client"
+
+            description = http_method
+
+            peer_name = otel_span.attributes.get(SpanAttributes.NET_PEER_NAME, None)
+            if peer_name:
+                description += " {}".format(peer_name)
+
+            target = otel_span.attributes.get(SpanAttributes.HTTP_TARGET, None)
+            if target:
+                description += " {}".format(target)
+
+            if not peer_name and not target:
+                url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None)
+                if url:
+                    parsed_url = urlparse(url)
+                    url = "{}://{}{}".format(
+                        parsed_url.scheme, parsed_url.netloc, parsed_url.path
+                    )
+                    description += " {}".format(url)
+
+            status_code = otel_span.attributes.get(
+                SpanAttributes.HTTP_STATUS_CODE, None
+            )
+            if status_code:
+                sentry_span.set_http_status(status_code)
+
+        elif db_query:
+            op = "db"
+            statement = otel_span.attributes.get(SpanAttributes.DB_STATEMENT, None)
+            if statement:
+                description = statement
+
+        sentry_span.op = op
+        sentry_span.description = description
+
+    def _update_transaction_with_otel_data(self, sentry_span, otel_span):
+        # type: (SentrySpan, OTelSpan) -> None
+        http_method = otel_span.attributes.get(SpanAttributes.HTTP_METHOD)
+
+        if http_method:
+            status_code = otel_span.attributes.get(SpanAttributes.HTTP_STATUS_CODE)
+            if status_code:
+                sentry_span.set_http_status(status_code)
+
+            op = "http"
+
+            if otel_span.kind == SpanKind.SERVER:
+                op += ".server"
+            elif otel_span.kind == SpanKind.CLIENT:
+                op += ".client"
+
+            sentry_span.op = op
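
The bookkeeping behind _prune_old_spans() is easiest to see in isolation: span ids are bucketed by the minute their span started in, so expiry is a whole-bucket operation rather than a per-span scan. A standalone sketch with a made-up span id:

from time import time

SPAN_MAX_TIME_OPEN_MINUTES = 10

otel_span_map = {"deadbeefdeadbeef": "sentry-span"}
open_spans = {int(time() / 60) - 11: {"deadbeefdeadbeef"}}  # started 11 min ago

current_minute = int(time() / 60)
for start_minute in list(open_spans):  # list() because we mutate the dict
    if current_minute - start_minute > SPAN_MAX_TIME_OPEN_MINUTES:
        for span_id in open_spans.pop(start_minute):
            otel_span_map.pop(span_id, None)

assert otel_span_map == {}
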
diff --git a/sentry_sdk/integrations/pure_eval.py b/sentry_sdk/integrations/pure_eval.py
index 9d3fe66822..5a2419c267 100644
--- a/sentry_sdk/integrations/pure_eval.py
+++ b/sentry_sdk/integrations/pure_eval.py
@@ -3,12 +3,12 @@
 import ast
 
 from sentry_sdk import Hub, serializer
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.scope import add_global_event_processor
 from sentry_sdk.utils import walk_exception_chain, iter_stacks
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Optional, Dict, Any, Tuple, List
     from types import FrameType
 
@@ -116,7 +116,7 @@ def start(n):
             return (n.lineno, n.col_offset)
 
         nodes_before_stmt = [
-            node for node in nodes if start(node) < stmt.last_token.end
+            node for node in nodes if start(node) < stmt.last_token.end  # type: ignore
         ]
         if nodes_before_stmt:
             # The position of the last node before or in the statement
diff --git a/sentry_sdk/integrations/pymongo.py b/sentry_sdk/integrations/pymongo.py
new file mode 100644
index 0000000000..59001bb937
--- /dev/null
+++ b/sentry_sdk/integrations/pymongo.py
@@ -0,0 +1,206 @@
+from __future__ import absolute_import
+import copy
+
+from sentry_sdk import Hub
+from sentry_sdk.consts import SPANDATA
+from sentry_sdk.hub import _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.tracing import Span
+from sentry_sdk.utils import capture_internal_exceptions
+
+from sentry_sdk._types import TYPE_CHECKING
+
+try:
+    from pymongo import monitoring
+except ImportError:
+    raise DidNotEnable("Pymongo not installed")
+
+if TYPE_CHECKING:
+    from typing import Any, Dict, Union
+
+    from pymongo.monitoring import (
+        CommandFailedEvent,
+        CommandStartedEvent,
+        CommandSucceededEvent,
+    )
+
+
+SAFE_COMMAND_ATTRIBUTES = [
+    "insert",
+    "ordered",
+    "find",
+    "limit",
+    "singleBatch",
+    "aggregate",
+    "createIndexes",
+    "indexes",
+    "delete",
+    "findAndModify",
+    "renameCollection",
+    "to",
+    "drop",
+]
+
+
+def _strip_pii(command):
+    # type: (Dict[str, Any]) -> Dict[str, Any]
+    for key in command:
+        is_safe_field = key in SAFE_COMMAND_ATTRIBUTES
+        if is_safe_field:
+            # Skip if safe key
+            continue
+
+        update_db_command = key == "update" and "findAndModify" not in command
+        if update_db_command:
+            # Also skip "update" db command because it is save.
+            # There is also an "update" key in the "findAndModify" command, which is NOT safe!
+            continue
+
+        # Special stripping for documents
+        is_document = key == "documents"
+        if is_document:
+            for doc in command[key]:
+                for doc_key in doc:
+                    doc[doc_key] = "%s"
+            continue
+
+        # Special stripping for dict style fields
+        is_dict_field = key in ["filter", "query", "update"]
+        if is_dict_field:
+            for item_key in command[key]:
+                command[key][item_key] = "%s"
+            continue
+
+        # For pipeline fields strip the `$match` dict
+        is_pipeline_field = key == "pipeline"
+        if is_pipeline_field:
+            for pipeline in command[key]:
+                for match_key in pipeline.get("$match", {}):
+                    pipeline["$match"][match_key] = "%s"
+            continue
+
+        # Default stripping
+        command[key] = "%s"
+
+    return command
+
+
+def _get_db_data(event):
+    # type: (Any) -> Dict[str, Any]
+    data = {}
+
+    data[SPANDATA.DB_SYSTEM] = "mongodb"
+
+    db_name = event.database_name
+    if db_name is not None:
+        data[SPANDATA.DB_NAME] = db_name
+
+    server_address = event.connection_id[0]
+    if server_address is not None:
+        data[SPANDATA.SERVER_ADDRESS] = server_address
+
+    server_port = event.connection_id[1]
+    if server_port is not None:
+        data[SPANDATA.SERVER_PORT] = server_port
+
+    return data
+
+
+class CommandTracer(monitoring.CommandListener):
+    def __init__(self):
+        # type: () -> None
+        self._ongoing_operations = {}  # type: Dict[int, Span]
+
+    def _operation_key(self, event):
+        # type: (Union[CommandFailedEvent, CommandStartedEvent, CommandSucceededEvent]) -> int
+        return event.request_id
+
+    def started(self, event):
+        # type: (CommandStartedEvent) -> None
+        hub = Hub.current
+        if hub.get_integration(PyMongoIntegration) is None:
+            return
+        with capture_internal_exceptions():
+            command = dict(copy.deepcopy(event.command))
+
+            command.pop("$db", None)
+            command.pop("$clusterTime", None)
+            command.pop("$signature", None)
+
+            op = "db.query"
+
+            tags = {
+                "db.name": event.database_name,
+                SPANDATA.DB_SYSTEM: "mongodb",
+                SPANDATA.DB_OPERATION: event.command_name,
+            }
+
+            try:
+                tags["net.peer.name"] = event.connection_id[0]
+                tags["net.peer.port"] = str(event.connection_id[1])
+            except TypeError:
+                pass
+
+            data = {"operation_ids": {}}  # type: Dict[str, Any]
+            data["operation_ids"]["operation"] = event.operation_id
+            data["operation_ids"]["request"] = event.request_id
+
+            data.update(_get_db_data(event))
+
+            try:
+                lsid = command.pop("lsid")["id"]
+                data["operation_ids"]["session"] = str(lsid)
+            except KeyError:
+                pass
+
+            if not _should_send_default_pii():
+                command = _strip_pii(command)
+
+            query = "{} {}".format(event.command_name, command)
+            span = hub.start_span(op=op, description=query)
+
+            for tag, value in tags.items():
+                span.set_tag(tag, value)
+
+            for key, value in data.items():
+                span.set_data(key, value)
+
+            with capture_internal_exceptions():
+                hub.add_breadcrumb(message=query, category="query", type=op, data=tags)
+
+            self._ongoing_operations[self._operation_key(event)] = span.__enter__()
+
+    def failed(self, event):
+        # type: (CommandFailedEvent) -> None
+        hub = Hub.current
+        if hub.get_integration(PyMongoIntegration) is None:
+            return
+
+        try:
+            span = self._ongoing_operations.pop(self._operation_key(event))
+            span.set_status("internal_error")
+            span.__exit__(None, None, None)
+        except KeyError:
+            return
+
+    def succeeded(self, event):
+        # type: (CommandSucceededEvent) -> None
+        hub = Hub.current
+        if hub.get_integration(PyMongoIntegration) is None:
+            return
+
+        try:
+            span = self._ongoing_operations.pop(self._operation_key(event))
+            span.set_status("ok")
+            span.__exit__(None, None, None)
+        except KeyError:
+            pass
+
+
+class PyMongoIntegration(Integration):
+    identifier = "pymongo"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        monitoring.register(CommandTracer())
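
What _strip_pii() does is easiest to show on a concrete command (a made-up find query): with send_default_pii disabled, safe attributes are kept, while dict-style fields keep their keys but lose their values.

from sentry_sdk.integrations.pymongo import _strip_pii

command = {"find": "users", "limit": 1, "filter": {"email": "a@b.com"}}
print(_strip_pii(command))
# {'find': 'users', 'limit': 1, 'filter': {'email': '%s'}}
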
diff --git a/sentry_sdk/integrations/pyramid.py b/sentry_sdk/integrations/pyramid.py
index 07142254d2..3b9b2fdb96 100644
--- a/sentry_sdk/integrations/pyramid.py
+++ b/sentry_sdk/integrations/pyramid.py
@@ -5,7 +5,12 @@
 import weakref
 
 from sentry_sdk.hub import Hub, _should_send_default_pii
-from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.scope import Scope
+from sentry_sdk.tracing import SOURCE_FOR_STYLE
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+)
 from sentry_sdk._compat import reraise, iteritems
 
 from sentry_sdk.integrations import Integration, DidNotEnable
@@ -18,9 +23,9 @@
 except ImportError:
     raise DidNotEnable("Pyramid not installed")
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from pyramid.response import Response
     from typing import Any
     from sentry_sdk.integrations.wsgi import _ScopedResponse
@@ -31,7 +36,7 @@
     from webob.compat import cgi_FieldStorage  # type: ignore
 
     from sentry_sdk.utils import ExcInfo
-    from sentry_sdk._types import EventProcessor
+    from sentry_sdk._types import Event, EventProcessor
 
 
 if getattr(Request, "authenticated_userid", None):
@@ -51,7 +56,7 @@ def authenticated_userid(request):
 class PyramidIntegration(Integration):
     identifier = "pyramid"
 
-    transaction_style = None
+    transaction_style = ""
 
     def __init__(self, transaction_style="route_name"):
         # type: (str) -> None
@@ -76,14 +81,9 @@ def sentry_patched_call_view(registry, request, *args, **kwargs):
 
             if integration is not None:
                 with hub.configure_scope() as scope:
-                    try:
-                        if integration.transaction_style == "route_name":
-                            scope.transaction = request.matched_route.name
-                        elif integration.transaction_style == "route_pattern":
-                            scope.transaction = request.matched_route.pattern
-                    except Exception:
-                        pass
-
+                    _set_transaction_name_and_source(
+                        scope, integration.transaction_style, request
+                    )
                     scope.add_event_processor(
                         _make_event_processor(weakref.ref(request), integration)
                     )
@@ -156,6 +156,21 @@ def _capture_exception(exc_info):
     hub.capture_event(event, hint=hint)
 
 
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (Scope, str, Request) -> None
+    try:
+        name_for_style = {
+            "route_name": request.matched_route.name,
+            "route_pattern": request.matched_route.pattern,
+        }
+        scope.set_transaction_name(
+            name_for_style[transaction_style],
+            source=SOURCE_FOR_STYLE[transaction_style],
+        )
+    except Exception:
+        pass
+
+
 class PyramidRequestExtractor(RequestExtractor):
     def url(self):
         # type: () -> str
@@ -200,8 +215,8 @@ def size_of_file(self, postdata):
 
 def _make_event_processor(weak_request, integration):
     # type: (Callable[[], Request], PyramidIntegration) -> EventProcessor
-    def event_processor(event, hint):
-        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+    def pyramid_event_processor(event, hint):
+        # type: (Event, Dict[str, Any]) -> Event
         request = weak_request()
         if request is None:
             return event
@@ -216,4 +231,4 @@ def event_processor(event, hint):
 
         return event
 
-    return event_processor
+    return pyramid_event_processor
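
A hedged configuration sketch for the renaming logic above: transaction_style selects between request.matched_route.name and request.matched_route.pattern (the DSN is a placeholder).

import sentry_sdk
from sentry_sdk.integrations.pyramid import PyramidIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
    integrations=[PyramidIntegration(transaction_style="route_pattern")],
)
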
diff --git a/sentry_sdk/integrations/quart.py b/sentry_sdk/integrations/quart.py
index 411817c708..8803fa7cea 100644
--- a/sentry_sdk/integrations/quart.py
+++ b/sentry_sdk/integrations/quart.py
@@ -1,19 +1,28 @@
 from __future__ import absolute_import
 
+import asyncio
+import inspect
+import threading
+
 from sentry_sdk.hub import _should_send_default_pii, Hub
 from sentry_sdk.integrations import DidNotEnable, Integration
 from sentry_sdk.integrations._wsgi_common import _filter_headers
 from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
-from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.scope import Scope
+from sentry_sdk.tracing import SOURCE_FOR_STYLE
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+)
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._functools import wraps
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
-    from typing import Dict
     from typing import Union
 
-    from sentry_sdk._types import EventProcessor
+    from sentry_sdk._types import Event, EventProcessor
 
 try:
     import quart_auth  # type: ignore
@@ -22,11 +31,12 @@
 
 try:
     from quart import (  # type: ignore
+        has_request_context,
+        has_websocket_context,
         Request,
         Quart,
-        _request_ctx_stack,
-        _websocket_ctx_stack,
-        _app_ctx_stack,
+        request,
+        websocket,
     )
     from quart.signals import (  # type: ignore
         got_background_exception,
@@ -37,6 +47,12 @@
     )
 except ImportError:
     raise DidNotEnable("Quart is not installed")
+else:
+    # Quart 0.19 is based on Flask and hence no longer has its own Scaffold
+    try:
+        from quart.scaffold import Scaffold  # type: ignore
+    except ImportError:
+        from flask.sansio.scaffold import Scaffold  # type: ignore
 
 TRANSACTION_STYLE_VALUES = ("endpoint", "url")
 
@@ -44,7 +60,7 @@
 class QuartIntegration(Integration):
     identifier = "quart"
 
-    transaction_style = None
+    transaction_style = ""
 
     def __init__(self, transaction_style="endpoint"):
         # type: (str) -> None
@@ -65,43 +81,100 @@ def setup_once():
         got_request_exception.connect(_capture_exception)
         got_websocket_exception.connect(_capture_exception)
 
-        old_app = Quart.__call__
+        patch_asgi_app()
+        patch_scaffold_route()
+
+
+def patch_asgi_app():
+    # type: () -> None
+    old_app = Quart.__call__
+
+    async def sentry_patched_asgi_app(self, scope, receive, send):
+        # type: (Any, Any, Any, Any) -> Any
+        if Hub.current.get_integration(QuartIntegration) is None:
+            return await old_app(self, scope, receive, send)
+
+        middleware = SentryAsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw))
+        middleware.__call__ = middleware._run_asgi3
+        return await middleware(scope, receive, send)
+
+    Quart.__call__ = sentry_patched_asgi_app
+
+
+def patch_scaffold_route():
+    # type: () -> None
+    old_route = Scaffold.route
+
+    def _sentry_route(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        old_decorator = old_route(*args, **kwargs)
+
+        def decorator(old_func):
+            # type: (Any) -> Any
 
-        async def sentry_patched_asgi_app(self, scope, receive, send):
-            # type: (Any, Any, Any, Any) -> Any
-            if Hub.current.get_integration(QuartIntegration) is None:
-                return await old_app(self, scope, receive, send)
+            if inspect.isfunction(old_func) and not asyncio.iscoroutinefunction(
+                old_func
+            ):
 
-            middleware = SentryAsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw))
-            middleware.__call__ = middleware._run_asgi3
-            return await middleware(scope, receive, send)
+                @wraps(old_func)
+                def _sentry_func(*args, **kwargs):
+                    # type: (*Any, **Any) -> Any
+                    hub = Hub.current
+                    integration = hub.get_integration(QuartIntegration)
+                    if integration is None:
+                        return old_func(*args, **kwargs)
 
-        Quart.__call__ = sentry_patched_asgi_app
+                    with hub.configure_scope() as sentry_scope:
+                        if sentry_scope.profile is not None:
+                            sentry_scope.profile.active_thread_id = (
+                                threading.current_thread().ident
+                            )
 
+                        return old_func(*args, **kwargs)
 
-def _request_websocket_started(sender, **kwargs):
+                return old_decorator(_sentry_func)
+
+            return old_decorator(old_func)
+
+        return decorator
+
+    Scaffold.route = _sentry_route
+
+
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (Scope, str, Request) -> None
+
+    try:
+        name_for_style = {
+            "url": request.url_rule.rule,
+            "endpoint": request.url_rule.endpoint,
+        }
+        scope.set_transaction_name(
+            name_for_style[transaction_style],
+            source=SOURCE_FOR_STYLE[transaction_style],
+        )
+    except Exception:
+        pass
+
+
+async def _request_websocket_started(app, **kwargs):
     # type: (Quart, **Any) -> None
     hub = Hub.current
     integration = hub.get_integration(QuartIntegration)
     if integration is None:
         return
 
-    app = _app_ctx_stack.top.app
     with hub.configure_scope() as scope:
-        if _request_ctx_stack.top is not None:
-            request_websocket = _request_ctx_stack.top.request
-        if _websocket_ctx_stack.top is not None:
-            request_websocket = _websocket_ctx_stack.top.websocket
+        if has_request_context():
+            request_websocket = request._get_current_object()
+        if has_websocket_context():
+            request_websocket = websocket._get_current_object()
 
         # Set the transaction name here, but rely on ASGI middleware
         # to actually start the transaction
-        try:
-            if integration.transaction_style == "endpoint":
-                scope.transaction = request_websocket.url_rule.endpoint
-            elif integration.transaction_style == "url":
-                scope.transaction = request_websocket.url_rule.rule
-        except Exception:
-            pass
+        _set_transaction_name_and_source(
+            scope, integration.transaction_style, request_websocket
+        )
 
         evt_processor = _make_request_event_processor(
             app, request_websocket, integration
@@ -112,7 +185,7 @@ def _request_websocket_started(sender, **kwargs):
 def _make_request_event_processor(app, request, integration):
     # type: (Quart, Request, QuartIntegration) -> EventProcessor
     def inner(event, hint):
-        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+        # type: (Event, dict[str, Any]) -> Event
         # if the request is gone we are fine not logging the data from
         # it.  This might happen if the processor is pushed away to
         # another thread.
@@ -138,7 +211,7 @@ def inner(event, hint):
     return inner
 
 
-def _capture_exception(sender, exception, **kwargs):
+async def _capture_exception(sender, exception, **kwargs):
     # type: (Quart, Union[ValueError, BaseException], **Any) -> None
     hub = Hub.current
     if hub.get_integration(QuartIntegration) is None:
@@ -157,7 +230,7 @@ def _capture_exception(sender, exception, **kwargs):
 
 
 def _add_user_to_event(event):
-    # type: (Dict[str, Any]) -> None
+    # type: (Event) -> None
     if quart_auth is None:
         return
 
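
A hedged usage sketch (placeholder DSN): with the patches above, plain def route handlers, which Quart runs in a worker thread, report the correct active thread id to the profiler, and the connected signals capture exceptions.

import sentry_sdk
from quart import Quart
from sentry_sdk.integrations.quart import QuartIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder
    integrations=[QuartIntegration(transaction_style="url")],
)

app = Quart(__name__)

@app.route("/")
async def index():
    return "ok"
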
diff --git a/sentry_sdk/integrations/redis.py b/sentry_sdk/integrations/redis.py
deleted file mode 100644
index df7cbae7bb..0000000000
--- a/sentry_sdk/integrations/redis.py
+++ /dev/null
@@ -1,106 +0,0 @@
-from __future__ import absolute_import
-
-from sentry_sdk import Hub
-from sentry_sdk.utils import capture_internal_exceptions, logger
-from sentry_sdk.integrations import Integration, DidNotEnable
-
-from sentry_sdk._types import MYPY
-
-if MYPY:
-    from typing import Any
-
-_SINGLE_KEY_COMMANDS = frozenset(
-    ["decr", "decrby", "get", "incr", "incrby", "pttl", "set", "setex", "setnx", "ttl"]
-)
-_MULTI_KEY_COMMANDS = frozenset(["del", "touch", "unlink"])
-
-
-def _patch_rediscluster():
-    # type: () -> None
-    try:
-        import rediscluster  # type: ignore
-    except ImportError:
-        return
-
-    patch_redis_client(rediscluster.RedisCluster)
-
-    # up to v1.3.6, __version__ attribute is a tuple
-    # from v2.0.0, __version__ is a string and VERSION a tuple
-    version = getattr(rediscluster, "VERSION", rediscluster.__version__)
-
-    # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0
-    # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst
-    if (0, 2, 0) < version < (2, 0, 0):
-        patch_redis_client(rediscluster.StrictRedisCluster)
-
-
-class RedisIntegration(Integration):
-    identifier = "redis"
-
-    @staticmethod
-    def setup_once():
-        # type: () -> None
-        try:
-            import redis
-        except ImportError:
-            raise DidNotEnable("Redis client not installed")
-
-        patch_redis_client(redis.StrictRedis)
-
-        try:
-            import rb.clients  # type: ignore
-        except ImportError:
-            pass
-        else:
-            patch_redis_client(rb.clients.FanoutClient)
-            patch_redis_client(rb.clients.MappingClient)
-            patch_redis_client(rb.clients.RoutingClient)
-
-        try:
-            _patch_rediscluster()
-        except Exception:
-            logger.exception("Error occurred while patching `rediscluster` library")
-
-
-def patch_redis_client(cls):
-    # type: (Any) -> None
-    """
-    This function can be used to instrument custom redis client classes or
-    subclasses.
-    """
-
-    old_execute_command = cls.execute_command
-
-    def sentry_patched_execute_command(self, name, *args, **kwargs):
-        # type: (Any, str, *Any, **Any) -> Any
-        hub = Hub.current
-
-        if hub.get_integration(RedisIntegration) is None:
-            return old_execute_command(self, name, *args, **kwargs)
-
-        description = name
-
-        with capture_internal_exceptions():
-            description_parts = [name]
-            for i, arg in enumerate(args):
-                if i > 10:
-                    break
-
-                description_parts.append(repr(arg))
-
-            description = " ".join(description_parts)
-
-        with hub.start_span(op="redis", description=description) as span:
-            if name:
-                span.set_tag("redis.command", name)
-
-            if name and args:
-                name_low = name.lower()
-                if (name_low in _SINGLE_KEY_COMMANDS) or (
-                    name_low in _MULTI_KEY_COMMANDS and len(args) == 1
-                ):
-                    span.set_tag("redis.key", args[0])
-
-            return old_execute_command(self, name, *args, **kwargs)
-
-    cls.execute_command = sentry_patched_execute_command
diff --git a/sentry_sdk/integrations/redis/__init__.py b/sentry_sdk/integrations/redis/__init__.py
new file mode 100644
index 0000000000..e09f9ccea4
--- /dev/null
+++ b/sentry_sdk/integrations/redis/__init__.py
@@ -0,0 +1,387 @@
+from __future__ import absolute_import
+
+from sentry_sdk import Hub
+from sentry_sdk.consts import OP, SPANDATA
+from sentry_sdk._compat import text_type
+from sentry_sdk.hub import _should_send_default_pii
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.utils import (
+    SENSITIVE_DATA_SUBSTITUTE,
+    capture_internal_exceptions,
+    logger,
+)
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+    from typing import Any, Dict, Sequence
+    from redis import Redis, RedisCluster
+    from redis.asyncio.cluster import (
+        RedisCluster as AsyncRedisCluster,
+        ClusterPipeline as AsyncClusterPipeline,
+    )
+    from sentry_sdk.tracing import Span
+
+_SINGLE_KEY_COMMANDS = frozenset(
+    ["decr", "decrby", "get", "incr", "incrby", "pttl", "set", "setex", "setnx", "ttl"],
+)
+_MULTI_KEY_COMMANDS = frozenset(
+    ["del", "touch", "unlink"],
+)
+_COMMANDS_INCLUDING_SENSITIVE_DATA = [
+    "auth",
+]
+_MAX_NUM_ARGS = 10  # Trim argument lists to this many values
+_MAX_NUM_COMMANDS = 10  # Trim command lists to this many values
+_DEFAULT_MAX_DATA_SIZE = 1024
+
+
+def _get_safe_command(name, args):
+    # type: (str, Sequence[Any]) -> str
+    command_parts = [name]
+
+    for i, arg in enumerate(args):
+        if i >= _MAX_NUM_ARGS:
+            break
+
+        name_low = name.lower()
+
+        if name_low in _COMMANDS_INCLUDING_SENSITIVE_DATA:
+            command_parts.append(SENSITIVE_DATA_SUBSTITUTE)
+            continue
+
+        arg_is_the_key = i == 0
+        if arg_is_the_key:
+            command_parts.append(repr(arg))
+
+        else:
+            if _should_send_default_pii():
+                command_parts.append(repr(arg))
+            else:
+                command_parts.append(SENSITIVE_DATA_SUBSTITUTE)
+
+    command = " ".join(command_parts)
+    return command
+
+
+def _get_span_description(name, *args):
+    # type: (str, *Any) -> str
+    description = name
+
+    with capture_internal_exceptions():
+        description = _get_safe_command(name, args)
+
+    return description
+
+
+def _get_redis_command_args(command):
+    # type: (Any) -> Sequence[Any]
+    return command[0]
+
+
+def _parse_rediscluster_command(command):
+    # type: (Any) -> Sequence[Any]
+    return command.args
+
+
+def _set_pipeline_data(
+    span, is_cluster, get_command_args_fn, is_transaction, command_stack
+):
+    # type: (Span, bool, Any, bool, Sequence[Any]) -> None
+    span.set_tag("redis.is_cluster", is_cluster)
+    span.set_tag("redis.transaction", is_transaction)
+
+    commands = []
+    for i, arg in enumerate(command_stack):
+        if i >= _MAX_NUM_COMMANDS:
+            break
+
+        command = get_command_args_fn(arg)
+        commands.append(_get_safe_command(command[0], command[1:]))
+
+    span.set_data(
+        "redis.commands",
+        {
+            "count": len(command_stack),
+            "first_ten": commands,
+        },
+    )
+
+
+def _set_client_data(span, is_cluster, name, *args):
+    # type: (Span, bool, str, *Any) -> None
+    span.set_tag("redis.is_cluster", is_cluster)
+    if name:
+        span.set_tag("redis.command", name)
+        span.set_tag(SPANDATA.DB_OPERATION, name)
+
+    if name and args:
+        name_low = name.lower()
+        if (name_low in _SINGLE_KEY_COMMANDS) or (
+            name_low in _MULTI_KEY_COMMANDS and len(args) == 1
+        ):
+            span.set_tag("redis.key", args[0])
+
+
+def _set_db_data_on_span(span, connection_params):
+    # type: (Span, Dict[str, Any]) -> None
+    span.set_data(SPANDATA.DB_SYSTEM, "redis")
+
+    db = connection_params.get("db")
+    if db is not None:
+        span.set_data(SPANDATA.DB_NAME, text_type(db))
+
+    host = connection_params.get("host")
+    if host is not None:
+        span.set_data(SPANDATA.SERVER_ADDRESS, host)
+
+    port = connection_params.get("port")
+    if port is not None:
+        span.set_data(SPANDATA.SERVER_PORT, port)
+
+
+def _set_db_data(span, redis_instance):
+    # type: (Span, Redis[Any]) -> None
+    try:
+        _set_db_data_on_span(span, redis_instance.connection_pool.connection_kwargs)
+    except AttributeError:
+        pass  # connection_kwargs may be missing in some cases
+
+
+def _set_cluster_db_data(span, redis_cluster_instance):
+    # type: (Span, RedisCluster[Any]) -> None
+    default_node = redis_cluster_instance.get_default_node()
+    if default_node is not None:
+        _set_db_data_on_span(
+            span, {"host": default_node.host, "port": default_node.port}
+        )
+
+
+def _set_async_cluster_db_data(span, async_redis_cluster_instance):
+    # type: (Span, AsyncRedisCluster[Any]) -> None
+    default_node = async_redis_cluster_instance.get_default_node()
+    if default_node is not None and default_node.connection_kwargs is not None:
+        _set_db_data_on_span(span, default_node.connection_kwargs)
+
+
+def _set_async_cluster_pipeline_db_data(span, async_redis_cluster_pipeline_instance):
+    # type: (Span, AsyncClusterPipeline[Any]) -> None
+    with capture_internal_exceptions():
+        _set_async_cluster_db_data(
+            span,
+            # AsyncClusterPipeline has always had a private `_client` attribute,
+            # which mypy does not recognize - see
+            # https://github.com/redis/redis-py/blame/v5.0.0/redis/asyncio/cluster.py#L1386
+            async_redis_cluster_pipeline_instance._client,  # type: ignore[attr-defined]
+        )
+
+
+def patch_redis_pipeline(pipeline_cls, is_cluster, get_command_args_fn, set_db_data_fn):
+    # type: (Any, bool, Any, Callable[[Span, Any], None]) -> None
+    old_execute = pipeline_cls.execute
+
+    def sentry_patched_execute(self, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        hub = Hub.current
+
+        if hub.get_integration(RedisIntegration) is None:
+            return old_execute(self, *args, **kwargs)
+
+        with hub.start_span(
+            op=OP.DB_REDIS, description="redis.pipeline.execute"
+        ) as span:
+            with capture_internal_exceptions():
+                set_db_data_fn(span, self)
+                _set_pipeline_data(
+                    span,
+                    is_cluster,
+                    get_command_args_fn,
+                    False if is_cluster else self.transaction,
+                    self.command_stack,
+                )
+
+            return old_execute(self, *args, **kwargs)
+
+    pipeline_cls.execute = sentry_patched_execute
+
+
+def patch_redis_client(cls, is_cluster, set_db_data_fn):
+    # type: (Any, bool, Callable[[Span, Any], None]) -> None
+    """
+    This function can be used to instrument custom redis client classes or
+    subclasses.
+    """
+    old_execute_command = cls.execute_command
+
+    def sentry_patched_execute_command(self, name, *args, **kwargs):
+        # type: (Any, str, *Any, **Any) -> Any
+        hub = Hub.current
+        integration = hub.get_integration(RedisIntegration)
+
+        if integration is None:
+            return old_execute_command(self, name, *args, **kwargs)
+
+        description = _get_span_description(name, *args)
+
+        data_should_be_truncated = (
+            integration.max_data_size and len(description) > integration.max_data_size
+        )
+        if data_should_be_truncated:
+            description = description[: integration.max_data_size - len("...")] + "..."
+
+        with hub.start_span(op=OP.DB_REDIS, description=description) as span:
+            set_db_data_fn(span, self)
+            _set_client_data(span, is_cluster, name, *args)
+
+            return old_execute_command(self, name, *args, **kwargs)
+
+    cls.execute_command = sentry_patched_execute_command
+
+
+def _patch_redis(StrictRedis, client):  # noqa: N803
+    # type: (Any, Any) -> None
+    patch_redis_client(StrictRedis, is_cluster=False, set_db_data_fn=_set_db_data)
+    patch_redis_pipeline(client.Pipeline, False, _get_redis_command_args, _set_db_data)
+    try:
+        strict_pipeline = client.StrictPipeline
+    except AttributeError:
+        pass
+    else:
+        patch_redis_pipeline(
+            strict_pipeline, False, _get_redis_command_args, _set_db_data
+        )
+
+    try:
+        import redis.asyncio
+    except ImportError:
+        pass
+    else:
+        from sentry_sdk.integrations.redis.asyncio import (
+            patch_redis_async_client,
+            patch_redis_async_pipeline,
+        )
+
+        patch_redis_async_client(
+            redis.asyncio.client.StrictRedis,
+            is_cluster=False,
+            set_db_data_fn=_set_db_data,
+        )
+        patch_redis_async_pipeline(
+            redis.asyncio.client.Pipeline,
+            False,
+            _get_redis_command_args,
+            set_db_data_fn=_set_db_data,
+        )
+
+
+def _patch_redis_cluster():
+    # type: () -> None
+    """Patches the cluster module on redis SDK (as opposed to rediscluster library)"""
+    try:
+        from redis import RedisCluster, cluster
+    except ImportError:
+        pass
+    else:
+        patch_redis_client(RedisCluster, True, _set_cluster_db_data)
+        patch_redis_pipeline(
+            cluster.ClusterPipeline,
+            True,
+            _parse_rediscluster_command,
+            _set_cluster_db_data,
+        )
+
+    try:
+        from redis.asyncio import cluster as async_cluster
+    except ImportError:
+        pass
+    else:
+        from sentry_sdk.integrations.redis.asyncio import (
+            patch_redis_async_client,
+            patch_redis_async_pipeline,
+        )
+
+        patch_redis_async_client(
+            async_cluster.RedisCluster,
+            is_cluster=True,
+            set_db_data_fn=_set_async_cluster_db_data,
+        )
+        patch_redis_async_pipeline(
+            async_cluster.ClusterPipeline,
+            True,
+            _parse_rediscluster_command,
+            set_db_data_fn=_set_async_cluster_pipeline_db_data,
+        )
+
+
+def _patch_rb():
+    # type: () -> None
+    try:
+        import rb.clients  # type: ignore
+    except ImportError:
+        pass
+    else:
+        patch_redis_client(
+            rb.clients.FanoutClient, is_cluster=False, set_db_data_fn=_set_db_data
+        )
+        patch_redis_client(
+            rb.clients.MappingClient, is_cluster=False, set_db_data_fn=_set_db_data
+        )
+        patch_redis_client(
+            rb.clients.RoutingClient, is_cluster=False, set_db_data_fn=_set_db_data
+        )
+
+
+def _patch_rediscluster():
+    # type: () -> None
+    try:
+        import rediscluster  # type: ignore
+    except ImportError:
+        return
+
+    patch_redis_client(
+        rediscluster.RedisCluster, is_cluster=True, set_db_data_fn=_set_db_data
+    )
+
+    # up to v1.3.6, __version__ attribute is a tuple
+    # from v2.0.0, __version__ is a string and VERSION a tuple
+    version = getattr(rediscluster, "VERSION", rediscluster.__version__)
+
+    # StrictRedisCluster was introduced in v0.2.0 and removed in v2.0.0
+    # https://github.com/Grokzen/redis-py-cluster/blob/master/docs/release-notes.rst
+    if (0, 2, 0) < version < (2, 0, 0):
+        pipeline_cls = rediscluster.pipeline.StrictClusterPipeline
+        patch_redis_client(
+            rediscluster.StrictRedisCluster,
+            is_cluster=True,
+            set_db_data_fn=_set_db_data,
+        )
+    else:
+        pipeline_cls = rediscluster.pipeline.ClusterPipeline
+
+    patch_redis_pipeline(
+        pipeline_cls, True, _parse_rediscluster_command, set_db_data_fn=_set_db_data
+    )
+
+
+class RedisIntegration(Integration):
+    identifier = "redis"
+
+    def __init__(self, max_data_size=_DEFAULT_MAX_DATA_SIZE):
+        # type: (int) -> None
+        self.max_data_size = max_data_size
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        try:
+            from redis import StrictRedis, client
+        except ImportError:
+            raise DidNotEnable("Redis client not installed")
+
+        _patch_redis(StrictRedis, client)
+        _patch_redis_cluster()
+        _patch_rb()
+
+        try:
+            _patch_rediscluster()
+        except Exception:
+            logger.exception("Error occurred while patching `rediscluster` library")
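
The PII-safe span descriptions built above can be sketched directly (made-up key and value; with send_default_pii off, only the key argument survives and other values become the substitute marker):

from sentry_sdk.integrations.redis import _get_safe_command

print(_get_safe_command("SET", ("session:42", "secret-token")))
# SET 'session:42' [Filtered]

Long descriptions are additionally truncated to the integration's max_data_size, which can be tuned via RedisIntegration(max_data_size=...) at init time.
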
diff --git a/sentry_sdk/integrations/redis/asyncio.py b/sentry_sdk/integrations/redis/asyncio.py
new file mode 100644
index 0000000000..09fad3426a
--- /dev/null
+++ b/sentry_sdk/integrations/redis/asyncio.py
@@ -0,0 +1,72 @@
+from __future__ import absolute_import
+
+from sentry_sdk import Hub
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations.redis import (
+    RedisIntegration,
+    _get_span_description,
+    _set_client_data,
+    _set_pipeline_data,
+)
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.tracing import Span
+from sentry_sdk.utils import capture_internal_exceptions
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+    from typing import Any, Union
+    from redis.asyncio.client import Pipeline, StrictRedis
+    from redis.asyncio.cluster import ClusterPipeline, RedisCluster
+
+
+def patch_redis_async_pipeline(
+    pipeline_cls, is_cluster, get_command_args_fn, set_db_data_fn
+):
+    # type: (Union[type[Pipeline[Any]], type[ClusterPipeline[Any]]], bool, Any, Callable[[Span, Any], None]) -> None
+    old_execute = pipeline_cls.execute
+
+    async def _sentry_execute(self, *args, **kwargs):
+        # type: (Any, *Any, **Any) -> Any
+        hub = Hub.current
+
+        if hub.get_integration(RedisIntegration) is None:
+            return await old_execute(self, *args, **kwargs)
+
+        with hub.start_span(
+            op=OP.DB_REDIS, description="redis.pipeline.execute"
+        ) as span:
+            with capture_internal_exceptions():
+                set_db_data_fn(span, self)
+                _set_pipeline_data(
+                    span,
+                    is_cluster,
+                    get_command_args_fn,
+                    False if is_cluster else self.is_transaction,
+                    self._command_stack if is_cluster else self.command_stack,
+                )
+
+            return await old_execute(self, *args, **kwargs)
+
+    pipeline_cls.execute = _sentry_execute  # type: ignore[method-assign]
+
+
+def patch_redis_async_client(cls, is_cluster, set_db_data_fn):
+    # type: (Union[type[StrictRedis[Any]], type[RedisCluster[Any]]], bool, Callable[[Span, Any], None]) -> None
+    old_execute_command = cls.execute_command
+
+    async def _sentry_execute_command(self, name, *args, **kwargs):
+        # type: (Any, str, *Any, **Any) -> Any
+        hub = Hub.current
+
+        if hub.get_integration(RedisIntegration) is None:
+            return await old_execute_command(self, name, *args, **kwargs)
+
+        description = _get_span_description(name, *args)
+
+        with hub.start_span(op=OP.DB_REDIS, description=description) as span:
+            set_db_data_fn(span, self)
+            _set_client_data(span, is_cluster, name, *args)
+
+            return await old_execute_command(self, name, *args, **kwargs)
+
+    cls.execute_command = _sentry_execute_command  # type: ignore[method-assign]
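
A hedged sketch of what the async patching above buys you (assumes redis-py >= 4.2 and an SDK initialized with `RedisIntegration` as shown earlier; host and transaction name are placeholders):

```python
import asyncio

import sentry_sdk
from redis import asyncio as aioredis


async def main():
    # type: () -> None
    r = aioredis.Redis(host="localhost", port=6379)  # placeholder host

    with sentry_sdk.start_transaction(name="redis-asyncio-demo"):
        await r.set("counter", 0)  # one `db.redis` span per command
        async with r.pipeline() as pipe:
            pipe.incr("counter")
            pipe.expire("counter", 60)
            await pipe.execute()  # "redis.pipeline.execute" span


asyncio.run(main())
```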
diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py
index f4c77d7df2..2b32e59880 100644
--- a/sentry_sdk/integrations/rq.py
+++ b/sentry_sdk/integrations/rq.py
@@ -1,27 +1,35 @@
 from __future__ import absolute_import
 
 import weakref
+from sentry_sdk.consts import OP
 
+from sentry_sdk.api import continue_trace
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import DidNotEnable, Integration
 from sentry_sdk.integrations.logging import ignore_logger
-from sentry_sdk.tracing import Transaction
-from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+from sentry_sdk.tracing import TRANSACTION_SOURCE_TASK
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    format_timestamp,
+    parse_version,
+)
 
 try:
     from rq.queue import Queue
     from rq.timeouts import JobTimeoutException
     from rq.version import VERSION as RQ_VERSION
     from rq.worker import Worker
+    from rq.job import JobStatus
 except ImportError:
     raise DidNotEnable("RQ not installed")
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
-    from typing import Any, Callable, Dict
+if TYPE_CHECKING:
+    from typing import Any, Callable
 
-    from sentry_sdk._types import EventProcessor
+    from sentry_sdk._types import Event, EventProcessor
     from sentry_sdk.utils import ExcInfo
 
     from rq.job import Job
@@ -34,9 +42,9 @@ class RqIntegration(Integration):
     def setup_once():
         # type: () -> None
 
-        try:
-            version = tuple(map(int, RQ_VERSION.split(".")[:3]))
-        except (ValueError, TypeError):
+        version = parse_version(RQ_VERSION)
+
+        if version is None:
             raise DidNotEnable("Unparsable RQ version: {}".format(RQ_VERSION))
 
         if version < (0, 6):
@@ -59,10 +67,11 @@ def sentry_patched_perform_job(self, job, *args, **kwargs):
                 scope.clear_breadcrumbs()
                 scope.add_event_processor(_make_event_processor(weakref.ref(job)))
 
-                transaction = Transaction.continue_from_headers(
+                transaction = continue_trace(
                     job.meta.get("_sentry_trace_headers") or {},
-                    op="rq.task",
+                    op=OP.QUEUE_TASK_RQ,
                     name="unknown RQ task",
+                    source=TRANSACTION_SOURCE_TASK,
                 )
 
                 with capture_internal_exceptions():
@@ -87,8 +96,10 @@ def sentry_patched_perform_job(self, job, *args, **kwargs):
 
         def sentry_patched_handle_exception(self, job, *exc_info, **kwargs):
             # type: (Worker, Any, *Any, **Any) -> Any
-            if job.is_failed:
-                _capture_exception(exc_info)  # type: ignore
+            # Note: the order of the `or` here is important,
+            # because calling `job.is_failed` will change `_status`.
+            if job._status == JobStatus.FAILED or job.is_failed:
+                _capture_exception(exc_info)
 
             return old_handle_exception(self, job, *exc_info, **kwargs)
 
@@ -100,9 +111,10 @@ def sentry_patched_enqueue_job(self, job, **kwargs):
             # type: (Queue, Any, **Any) -> Any
             hub = Hub.current
             if hub.get_integration(RqIntegration) is not None:
-                job.meta["_sentry_trace_headers"] = dict(
-                    hub.iter_trace_propagation_headers()
-                )
+                if hub.scope.span is not None:
+                    job.meta["_sentry_trace_headers"] = dict(
+                        hub.iter_trace_propagation_headers()
+                    )
 
             return old_enqueue_job(self, job, **kwargs)
 
@@ -114,12 +126,12 @@ def sentry_patched_enqueue_job(self, job, **kwargs):
 def _make_event_processor(weak_job):
     # type: (Callable[[], Job]) -> EventProcessor
     def event_processor(event, hint):
-        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+        # type: (Event, dict[str, Any]) -> Event
         job = weak_job()
         if job is not None:
             with capture_internal_exceptions():
                 extra = event.setdefault("extra", {})
-                extra["rq-job"] = {
+                rq_job = {
                     "job_id": job.id,
                     "func": job.func_name,
                     "args": job.args,
@@ -127,6 +139,13 @@ def event_processor(event, hint):
                     "description": job.description,
                 }
 
+                if job.enqueued_at:
+                    rq_job["enqueued_at"] = format_timestamp(job.enqueued_at)
+                if job.started_at:
+                    rq_job["started_at"] = format_timestamp(job.started_at)
+
+                extra["rq-job"] = rq_job
+
         if "exc_info" in hint:
             with capture_internal_exceptions():
                 if issubclass(hint["exc_info"][0], JobTimeoutException):
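
To make the enqueue-side change concrete (a sketch, not part of the diff): trace headers are now only written to `job.meta` when a span is active, so cross-process tracing into the worker requires enqueuing inside a transaction. The task module below is hypothetical:

```python
import sentry_sdk
from redis import Redis
from rq import Queue

from myapp.tasks import send_email  # hypothetical task

queue = Queue(connection=Redis())

with sentry_sdk.start_transaction(op="queue.submit", name="enqueue-demo"):
    # With an active span, sentry_patched_enqueue_job stores
    # `_sentry_trace_headers` in job.meta; the worker side then
    # resumes the trace via continue_trace().
    queue.enqueue(send_email, "user@example.com")
```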
diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py
index 4e20cc9ece..7e0c690da0 100644
--- a/sentry_sdk/integrations/sanic.py
+++ b/sentry_sdk/integrations/sanic.py
@@ -2,21 +2,26 @@
 import weakref
 from inspect import isawaitable
 
+from sentry_sdk import continue_trace
 from sentry_sdk._compat import urlparse, reraise
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub
+from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_URL
 from sentry_sdk.utils import (
     capture_internal_exceptions,
     event_from_exception,
     HAS_REAL_CONTEXTVARS,
     CONTEXTVARS_ERROR_MESSAGE,
+    parse_version,
 )
 from sentry_sdk.integrations import Integration, DidNotEnable
 from sentry_sdk.integrations._wsgi_common import RequestExtractor, _filter_headers
 from sentry_sdk.integrations.logging import ignore_logger
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
+    from collections.abc import Container
     from typing import Any
     from typing import Callable
     from typing import Optional
@@ -25,6 +30,7 @@
     from typing import Dict
 
     from sanic.request import Request, RequestParameters
+    from sanic.response import BaseHTTPResponse
 
     from sentry_sdk._types import Event, EventProcessor, Hint
     from sanic.router import Route
@@ -50,15 +56,25 @@
 
 class SanicIntegration(Integration):
     identifier = "sanic"
-    version = (0, 0)  # type: Tuple[int, ...]
+    version = None
+
+    def __init__(self, unsampled_statuses=frozenset({404})):
+        # type: (Optional[Container[int]]) -> None
+        """
+        The unsampled_statuses parameter can be used to specify the HTTP statuses for
+        which transactions should not be sent to Sentry. By default, transactions are
+        sent for all HTTP statuses except 404. Set unsampled_statuses to None to send
+        transactions for all HTTP statuses, including 404.
+        """
+        self._unsampled_statuses = unsampled_statuses or set()
 
     @staticmethod
     def setup_once():
         # type: () -> None
 
-        try:
-            SanicIntegration.version = tuple(map(int, SANIC_VERSION.split(".")))
-        except (TypeError, ValueError):
+        SanicIntegration.version = parse_version(SANIC_VERSION)
+
+        if SanicIntegration.version is None:
             raise DidNotEnable("Unparsable Sanic version: {}".format(SANIC_VERSION))
 
         if SanicIntegration.version < (0, 8):
@@ -178,20 +194,51 @@ async def _hub_enter(request):
         scope.clear_breadcrumbs()
         scope.add_event_processor(_make_request_processor(weak_request))
 
+    transaction = continue_trace(
+        dict(request.headers),
+        op=OP.HTTP_SERVER,
+        # Unless the request results in a 404 error, the name and source will get overwritten in _set_transaction
+        name=request.path,
+        source=TRANSACTION_SOURCE_URL,
+    )
+    request.ctx._sentry_transaction = request.ctx._sentry_hub.start_transaction(
+        transaction
+    ).__enter__()
+
 
-async def _hub_exit(request, **_):
-    # type: (Request, **Any) -> None
-    request.ctx._sentry_hub.__exit__(None, None, None)
+async def _hub_exit(request, response=None):
+    # type: (Request, Optional[BaseHTTPResponse]) -> None
+    with capture_internal_exceptions():
+        if not request.ctx._sentry_do_integration:
+            return
 
+        integration = Hub.current.get_integration(SanicIntegration)  # type: Integration
 
-async def _set_transaction(request, route, **kwargs):
+        response_status = None if response is None else response.status
+
+        # This capture_internal_exceptions block has been intentionally nested here, so that in case an exception
+        # happens while trying to end the transaction, we still attempt to exit the hub.
+        with capture_internal_exceptions():
+            request.ctx._sentry_transaction.set_http_status(response_status)
+            request.ctx._sentry_transaction.sampled &= (
+                isinstance(integration, SanicIntegration)
+                and response_status not in integration._unsampled_statuses
+            )
+            request.ctx._sentry_transaction.__exit__(None, None, None)
+
+        request.ctx._sentry_hub.__exit__(None, None, None)
+
+
+async def _set_transaction(request, route, **_):
     # type: (Request, Route, **Any) -> None
     hub = Hub.current
-    if hub.get_integration(SanicIntegration) is not None:
+    if request.ctx._sentry_do_integration:
         with capture_internal_exceptions():
             with hub.configure_scope() as scope:
                 route_name = route.name.replace(request.app.name, "").strip(".")
-                scope.transaction = route_name
+                scope.set_transaction_name(
+                    route_name, source=TRANSACTION_SOURCE_COMPONENT
+                )
 
 
 def _sentry_error_handler_lookup(self, exception, *args, **kwargs):
@@ -222,7 +269,7 @@ async def sentry_wrapped_error_handler(request, exception):
         finally:
             # As mentioned in previous comment in _startup, this can be removed
             # after https://github.com/sanic-org/sanic/issues/2297 is resolved
-            if SanicIntegration.version == (21, 9):
+            if SanicIntegration.version and SanicIntegration.version == (21, 9):
                 await _hub_exit(request)
 
     return sentry_wrapped_error_handler
@@ -268,9 +315,14 @@ def _legacy_router_get(self, *args):
                         # Format: app_name.route_name
                         sanic_route = sanic_route[len(sanic_app_name) + 1 :]
 
-                    scope.transaction = sanic_route
+                    scope.set_transaction_name(
+                        sanic_route, source=TRANSACTION_SOURCE_COMPONENT
+                    )
                 else:
-                    scope.transaction = rv[0].__name__
+                    scope.set_transaction_name(
+                        rv[0].__name__, source=TRANSACTION_SOURCE_COMPONENT
+                    )
+
     return rv
 
 
@@ -290,6 +342,8 @@ def _capture_exception(exception):
             client_options=client.options,
             mechanism={"type": "sanic", "handled": False},
         )
+        if hint and hasattr(hint["exc_info"][0], "quiet") and hint["exc_info"][0].quiet:
+            return
         hub.capture_event(event, hint=hint)
 
 
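A minimal sketch of the new `unsampled_statuses` option (the DSN and app name are placeholders):

```python
import sentry_sdk
from sanic import Sanic
from sentry_sdk.integrations.sanic import SanicIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sample_rate=1.0,
    integrations=[
        # Drop transactions for 404 and 403 responses; pass
        # unsampled_statuses=None to keep everything, including 404s.
        SanicIntegration(unsampled_statuses={403, 404}),
    ],
)

app = Sanic("my-app")  # placeholder app name
```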
diff --git a/sentry_sdk/integrations/serverless.py b/sentry_sdk/integrations/serverless.py
index c46f8cee31..534034547a 100644
--- a/sentry_sdk/integrations/serverless.py
+++ b/sentry_sdk/integrations/serverless.py
@@ -6,9 +6,9 @@
 from sentry_sdk._functools import wraps
 
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Callable
     from typing import TypeVar
@@ -27,7 +27,7 @@ def overload(x):
 
 
 @overload
-def serverless_function(f, flush=True):  # noqa: F811
+def serverless_function(f, flush=True):
     # type: (F, bool) -> F
     pass
 
diff --git a/sentry_sdk/integrations/socket.py b/sentry_sdk/integrations/socket.py
new file mode 100644
index 0000000000..7a4e358185
--- /dev/null
+++ b/sentry_sdk/integrations/socket.py
@@ -0,0 +1,91 @@
+from __future__ import absolute_import
+
+import socket
+from sentry_sdk import Hub
+from sentry_sdk._types import MYPY
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import Integration
+
+if MYPY:
+    from socket import AddressFamily, SocketKind
+    from typing import Tuple, Optional, Union, List
+
+__all__ = ["SocketIntegration"]
+
+
+class SocketIntegration(Integration):
+    identifier = "socket"
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        """
+        Patches two of the most commonly used socket functions: create_connection and getaddrinfo (DNS resolver).
+        """
+        _patch_create_connection()
+        _patch_getaddrinfo()
+
+
+def _get_span_description(host, port):
+    # type: (Union[bytes, str, None], Union[str, int, None]) -> str
+
+    try:
+        host = host.decode()  # type: ignore
+    except (UnicodeDecodeError, AttributeError):
+        pass
+
+    description = "%s:%s" % (host, port)  # type: ignore
+
+    return description
+
+
+def _patch_create_connection():
+    # type: () -> None
+    real_create_connection = socket.create_connection
+
+    def create_connection(
+        address,
+        timeout=socket._GLOBAL_DEFAULT_TIMEOUT,  # type: ignore
+        source_address=None,
+    ):
+        # type: (Tuple[Optional[str], int], Optional[float], Optional[Tuple[Union[bytearray, bytes, str], int]])-> socket.socket
+        hub = Hub.current
+        if hub.get_integration(SocketIntegration) is None:
+            return real_create_connection(
+                address=address, timeout=timeout, source_address=source_address
+            )
+
+        with hub.start_span(
+            op=OP.SOCKET_CONNECTION,
+            description=_get_span_description(address[0], address[1]),
+        ) as span:
+            span.set_data("address", address)
+            span.set_data("timeout", timeout)
+            span.set_data("source_address", source_address)
+
+            return real_create_connection(
+                address=address, timeout=timeout, source_address=source_address
+            )
+
+    socket.create_connection = create_connection  # type: ignore
+
+
+def _patch_getaddrinfo():
+    # type: () -> None
+    real_getaddrinfo = socket.getaddrinfo
+
+    def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0):
+        # type: (Union[bytes, str, None], Union[str, int, None], int, int, int, int) -> List[Tuple[AddressFamily, SocketKind, int, str, Union[Tuple[str, int], Tuple[str, int, int, int]]]]
+        hub = Hub.current
+        if hub.get_integration(SocketIntegration) is None:
+            return real_getaddrinfo(host, port, family, type, proto, flags)
+
+        with hub.start_span(
+            op=OP.SOCKET_DNS, description=_get_span_description(host, port)
+        ) as span:
+            span.set_data("host", host)
+            span.set_data("port", port)
+
+            return real_getaddrinfo(host, port, family, type, proto, flags)
+
+    socket.getaddrinfo = getaddrinfo  # type: ignore
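
A short usage sketch for the new integration (it is not enabled by default, so it must be passed explicitly; the host is a placeholder):

```python
import socket

import sentry_sdk
from sentry_sdk.integrations.socket import SocketIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sample_rate=1.0,
    integrations=[SocketIntegration()],
)

with sentry_sdk.start_transaction(name="socket-demo"):
    socket.getaddrinfo("example.com", 443)  # OP.SOCKET_DNS span
    conn = socket.create_connection(("example.com", 443), timeout=5)  # OP.SOCKET_CONNECTION span
    conn.close()
```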
diff --git a/sentry_sdk/integrations/spark/spark_driver.py b/sentry_sdk/integrations/spark/spark_driver.py
index ea43c37821..b3085fc4af 100644
--- a/sentry_sdk/integrations/spark/spark_driver.py
+++ b/sentry_sdk/integrations/spark/spark_driver.py
@@ -3,9 +3,9 @@
 from sentry_sdk.integrations import Integration
 from sentry_sdk.utils import capture_internal_exceptions
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Optional
 
diff --git a/sentry_sdk/integrations/spark/spark_worker.py b/sentry_sdk/integrations/spark/spark_worker.py
index 2c27647dab..632e870973 100644
--- a/sentry_sdk/integrations/spark/spark_worker.py
+++ b/sentry_sdk/integrations/spark/spark_worker.py
@@ -13,9 +13,9 @@
     event_hint_with_exc_info,
 )
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Optional
 
@@ -58,7 +58,7 @@ def _capture_exception(exc_info, hub):
     if rv:
         rv.reverse()
         hint = event_hint_with_exc_info(exc_info)
-        event = {"level": "error", "exception": {"values": rv}}
+        event = {"level": "error", "exception": {"values": rv}}  # type: Event
 
         _tag_task_context()
 
diff --git a/sentry_sdk/integrations/sqlalchemy.py b/sentry_sdk/integrations/sqlalchemy.py
index deb97c05ad..5850237e97 100644
--- a/sentry_sdk/integrations/sqlalchemy.py
+++ b/sentry_sdk/integrations/sqlalchemy.py
@@ -1,9 +1,13 @@
 from __future__ import absolute_import
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._compat import text_type
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import SPANDATA
+from sentry_sdk.db.explain_plan.sqlalchemy import attach_explain_plan_to_span
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration, DidNotEnable
-from sentry_sdk.tracing_utils import record_sql_queries
+from sentry_sdk.tracing_utils import add_query_source, record_sql_queries
+from sentry_sdk.utils import capture_internal_exceptions, parse_version
 
 try:
     from sqlalchemy.engine import Engine  # type: ignore
@@ -12,7 +16,7 @@
 except ImportError:
     raise DidNotEnable("SQLAlchemy not installed.")
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import ContextManager
     from typing import Optional
@@ -27,9 +31,9 @@ class SqlalchemyIntegration(Integration):
     def setup_once():
         # type: () -> None
 
-        try:
-            version = tuple(map(int, SQLALCHEMY_VERSION.split("b")[0].split(".")))
-        except (TypeError, ValueError):
+        version = parse_version(SQLALCHEMY_VERSION)
+
+        if version is None:
             raise DidNotEnable(
                 "Unparsable SQLAlchemy version: {}".format(SQLALCHEMY_VERSION)
             )
@@ -63,11 +67,26 @@ def _before_cursor_execute(
     span = ctx_mgr.__enter__()
 
     if span is not None:
+        _set_db_data(span, conn)
+        if hub.client:
+            options = hub.client.options["_experiments"].get("attach_explain_plans")
+            if options is not None:
+                attach_explain_plan_to_span(
+                    span,
+                    conn,
+                    statement,
+                    parameters,
+                    options,
+                )
         context._sentry_sql_span = span
 
 
 def _after_cursor_execute(conn, cursor, statement, parameters, context, *args):
     # type: (Any, Any, Any, Any, Any, *Any) -> None
+    hub = Hub.current
+    if hub.get_integration(SqlalchemyIntegration) is None:
+        return
+
     ctx_mgr = getattr(
         context, "_sentry_sql_span_manager", None
     )  # type: Optional[ContextManager[Any]]
@@ -76,6 +95,11 @@ def _after_cursor_execute(conn, cursor, statement, parameters, context, *args):
         context._sentry_sql_span_manager = None
         ctx_mgr.__exit__(None, None, None)
 
+    span = getattr(context, "_sentry_sql_span", None)  # type: Optional[Span]
+    if span is not None:
+        with capture_internal_exceptions():
+            add_query_source(hub, span)
+
 
 def _handle_error(context, *args):
     # type: (Any, *Any) -> None
@@ -98,3 +122,48 @@ def _handle_error(context, *args):
     if ctx_mgr is not None:
         execution_context._sentry_sql_span_manager = None
         ctx_mgr.__exit__(None, None, None)
+
+
+# See: https://docs.sqlalchemy.org/en/20/dialects/index.html
+def _get_db_system(name):
+    # type: (str) -> Optional[str]
+    name = text_type(name)
+
+    if "sqlite" in name:
+        return "sqlite"
+
+    if "postgres" in name:
+        return "postgresql"
+
+    if "mariadb" in name:
+        return "mariadb"
+
+    if "mysql" in name:
+        return "mysql"
+
+    if "oracle" in name:
+        return "oracle"
+
+    return None
+
+
+def _set_db_data(span, conn):
+    # type: (Span, Any) -> None
+    db_system = _get_db_system(conn.engine.name)
+    if db_system is not None:
+        span.set_data(SPANDATA.DB_SYSTEM, db_system)
+
+    if conn.engine.url is None:
+        return
+
+    db_name = conn.engine.url.database
+    if db_name is not None:
+        span.set_data(SPANDATA.DB_NAME, db_name)
+
+    server_address = conn.engine.url.host
+    if server_address is not None:
+        span.set_data(SPANDATA.SERVER_ADDRESS, server_address)
+
+    server_port = conn.engine.url.port
+    if server_port is not None:
+        span.set_data(SPANDATA.SERVER_PORT, server_port)
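
To illustrate what `_get_db_system` and `_set_db_data` derive from an engine (a sketch; the connection string is illustrative and assumes the psycopg2 driver is installed):

```python
from sqlalchemy import create_engine

engine = create_engine("postgresql://app:secret@db.example.com:5432/orders")

# For query spans on this engine, _set_db_data would record roughly:
#   SPANDATA.DB_SYSTEM      -> "postgresql"      (_get_db_system(engine.name))
#   SPANDATA.DB_NAME        -> "orders"          (engine.url.database)
#   SPANDATA.SERVER_ADDRESS -> "db.example.com"  (engine.url.host)
#   SPANDATA.SERVER_PORT    -> 5432              (engine.url.port)
print(engine.name, engine.url.database, engine.url.host, engine.url.port)
```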
diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py
new file mode 100644
index 0000000000..79bb18aa78
--- /dev/null
+++ b/sentry_sdk/integrations/starlette.py
@@ -0,0 +1,688 @@
+from __future__ import absolute_import
+
+import asyncio
+import functools
+from copy import deepcopy
+
+from sentry_sdk._compat import iteritems
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import OP
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.integrations._wsgi_common import (
+    _is_json_content_type,
+    request_body_within_bounds,
+)
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+from sentry_sdk.tracing import (
+    SOURCE_FOR_STYLE,
+    TRANSACTION_SOURCE_COMPONENT,
+    TRANSACTION_SOURCE_ROUTE,
+)
+from sentry_sdk.utils import (
+    AnnotatedValue,
+    capture_internal_exceptions,
+    event_from_exception,
+    logger,
+    parse_version,
+    transaction_from_function,
+)
+
+if TYPE_CHECKING:
+    from typing import Any, Awaitable, Callable, Dict, Optional, Tuple
+
+    from sentry_sdk.scope import Scope as SentryScope
+    from sentry_sdk._types import Event
+
+try:
+    import starlette  # type: ignore
+    from starlette import __version__ as STARLETTE_VERSION
+    from starlette.applications import Starlette  # type: ignore
+    from starlette.datastructures import UploadFile  # type: ignore
+    from starlette.middleware import Middleware  # type: ignore
+    from starlette.middleware.authentication import (  # type: ignore
+        AuthenticationMiddleware,
+    )
+    from starlette.requests import Request  # type: ignore
+    from starlette.routing import Match  # type: ignore
+    from starlette.types import ASGIApp, Receive, Scope as StarletteScope, Send  # type: ignore
+except ImportError:
+    raise DidNotEnable("Starlette is not installed")
+
+try:
+    # Starlette 0.20
+    from starlette.middleware.exceptions import ExceptionMiddleware  # type: ignore
+except ImportError:
+    # Starlette 0.19.1
+    from starlette.exceptions import ExceptionMiddleware  # type: ignore
+
+try:
+    # Optional dependency of Starlette to parse form data.
+    import multipart  # type: ignore
+except ImportError:
+    multipart = None
+
+
+_DEFAULT_TRANSACTION_NAME = "generic Starlette request"
+
+TRANSACTION_STYLE_VALUES = ("endpoint", "url")
+
+
+class StarletteIntegration(Integration):
+    identifier = "starlette"
+
+    transaction_style = ""
+
+    def __init__(self, transaction_style="url"):
+        # type: (str) -> None
+        if transaction_style not in TRANSACTION_STYLE_VALUES:
+            raise ValueError(
+                "Invalid value for transaction_style: %s (must be in %s)"
+                % (transaction_style, TRANSACTION_STYLE_VALUES)
+            )
+        self.transaction_style = transaction_style
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        version = parse_version(STARLETTE_VERSION)
+
+        if version is None:
+            raise DidNotEnable(
+                "Unparsable Starlette version: {}".format(STARLETTE_VERSION)
+            )
+
+        patch_middlewares()
+        patch_asgi_app()
+        patch_request_response()
+
+        if version >= (0, 24):
+            patch_templates()
+
+
+def _enable_span_for_middleware(middleware_class):
+    # type: (Any) -> type
+    old_call = middleware_class.__call__
+
+    async def _create_span_call(app, scope, receive, send, **kwargs):
+        # type: (Any, Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]], Any) -> None
+        hub = Hub.current
+        integration = hub.get_integration(StarletteIntegration)
+        if integration is not None:
+            middleware_name = app.__class__.__name__
+
+            # Update transaction name with middleware name
+            with hub.configure_scope() as sentry_scope:
+                name, source = _get_transaction_from_middleware(app, scope, integration)
+                if name is not None:
+                    sentry_scope.set_transaction_name(
+                        name,
+                        source=source,
+                    )
+
+            with hub.start_span(
+                op=OP.MIDDLEWARE_STARLETTE, description=middleware_name
+            ) as middleware_span:
+                middleware_span.set_tag("starlette.middleware_name", middleware_name)
+
+                # Creating spans for the "receive" callback
+                async def _sentry_receive(*args, **kwargs):
+                    # type: (*Any, **Any) -> Any
+                    hub = Hub.current
+                    with hub.start_span(
+                        op=OP.MIDDLEWARE_STARLETTE_RECEIVE,
+                        description=getattr(receive, "__qualname__", str(receive)),
+                    ) as span:
+                        span.set_tag("starlette.middleware_name", middleware_name)
+                        return await receive(*args, **kwargs)
+
+                receive_name = getattr(receive, "__name__", str(receive))
+                receive_patched = receive_name == "_sentry_receive"
+                new_receive = _sentry_receive if not receive_patched else receive
+
+                # Creating spans for the "send" callback
+                async def _sentry_send(*args, **kwargs):
+                    # type: (*Any, **Any) -> Any
+                    hub = Hub.current
+                    with hub.start_span(
+                        op=OP.MIDDLEWARE_STARLETTE_SEND,
+                        description=getattr(send, "__qualname__", str(send)),
+                    ) as span:
+                        span.set_tag("starlette.middleware_name", middleware_name)
+                        return await send(*args, **kwargs)
+
+                send_name = getattr(send, "__name__", str(send))
+                send_patched = send_name == "_sentry_send"
+                new_send = _sentry_send if not send_patched else send
+
+                return await old_call(app, scope, new_receive, new_send, **kwargs)
+
+        else:
+            return await old_call(app, scope, receive, send, **kwargs)
+
+    not_yet_patched = old_call.__name__ not in [
+        "_create_span_call",
+        "_sentry_authenticationmiddleware_call",
+        "_sentry_exceptionmiddleware_call",
+    ]
+
+    if not_yet_patched:
+        middleware_class.__call__ = _create_span_call
+
+    return middleware_class
+
+
+def _capture_exception(exception, handled=False):
+    # type: (BaseException, bool) -> None
+    hub = Hub.current
+    if hub.get_integration(StarletteIntegration) is None:
+        return
+
+    event, hint = event_from_exception(
+        exception,
+        client_options=hub.client.options if hub.client else None,
+        mechanism={"type": StarletteIntegration.identifier, "handled": handled},
+    )
+
+    hub.capture_event(event, hint=hint)
+
+
+def patch_exception_middleware(middleware_class):
+    # type: (Any) -> None
+    """
+    Capture all exceptions in Starlette app and
+    also extract user information.
+    """
+    old_middleware_init = middleware_class.__init__
+
+    not_yet_patched = "_sentry_middleware_init" not in str(old_middleware_init)
+
+    if not_yet_patched:
+
+        def _sentry_middleware_init(self, *args, **kwargs):
+            # type: (Any, Any, Any) -> None
+            old_middleware_init(self, *args, **kwargs)
+
+            # Patch existing exception handlers
+            old_handlers = self._exception_handlers.copy()
+
+            async def _sentry_patched_exception_handler(self, *args, **kwargs):
+                # type: (Any, Any, Any) -> None
+                exp = args[0]
+
+                is_http_server_error = (
+                    hasattr(exp, "status_code")
+                    and isinstance(exp.status_code, int)
+                    and exp.status_code >= 500
+                )
+                if is_http_server_error:
+                    _capture_exception(exp, handled=True)
+
+                # Find a matching handler
+                old_handler = None
+                for cls in type(exp).__mro__:
+                    if cls in old_handlers:
+                        old_handler = old_handlers[cls]
+                        break
+
+                if old_handler is None:
+                    return
+
+                if _is_async_callable(old_handler):
+                    return await old_handler(self, *args, **kwargs)
+                else:
+                    return old_handler(self, *args, **kwargs)
+
+            for key in self._exception_handlers.keys():
+                self._exception_handlers[key] = _sentry_patched_exception_handler
+
+        middleware_class.__init__ = _sentry_middleware_init
+
+        old_call = middleware_class.__call__
+
+        async def _sentry_exceptionmiddleware_call(self, scope, receive, send):
+            # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None
+            # Also add the user (that was possibly set by the Authentication
+            # middleware that ran before this one). This is done because the
+            # authentication middleware sets the user in the scope and then (in the
+            # same function) calls this exception middleware. In case there is no
+            # exception (or no handler for the type of exception occurring), the
+            # exception bubbles up, the user information is set into the Sentry scope
+            # in the auth middleware, and the ASGI middleware then sends everything
+            # to Sentry, which is fine.
+            # But if an exception happens that the exception middleware here has a
+            # handler for, it will send the exception directly to Sentry, so we need
+            # the user information right now. This is why we do it here.
+            _add_user_to_sentry_scope(scope)
+            await old_call(self, scope, receive, send)
+
+        middleware_class.__call__ = _sentry_exceptionmiddleware_call
+
+
+def _add_user_to_sentry_scope(scope):
+    # type: (Dict[str, Any]) -> None
+    """
+    Extracts user information from the ASGI scope and
+    adds it to Sentry's scope.
+    """
+    if "user" not in scope:
+        return
+
+    if not _should_send_default_pii():
+        return
+
+    hub = Hub.current
+    if hub.get_integration(StarletteIntegration) is None:
+        return
+
+    with hub.configure_scope() as sentry_scope:
+        user_info = {}  # type: Dict[str, Any]
+        starlette_user = scope["user"]
+
+        username = getattr(starlette_user, "username", None)
+        if username:
+            user_info.setdefault("username", starlette_user.username)
+
+        user_id = getattr(starlette_user, "id", None)
+        if user_id:
+            user_info.setdefault("id", starlette_user.id)
+
+        email = getattr(starlette_user, "email", None)
+        if email:
+            user_info.setdefault("email", starlette_user.email)
+
+        sentry_scope.user = user_info
+
+
+def patch_authentication_middleware(middleware_class):
+    # type: (Any) -> None
+    """
+    Add user information to Sentry scope.
+    """
+    old_call = middleware_class.__call__
+
+    not_yet_patched = "_sentry_authenticationmiddleware_call" not in str(old_call)
+
+    if not_yet_patched:
+
+        async def _sentry_authenticationmiddleware_call(self, scope, receive, send):
+            # type: (Dict[str, Any], Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]]) -> None
+            await old_call(self, scope, receive, send)
+            _add_user_to_sentry_scope(scope)
+
+        middleware_class.__call__ = _sentry_authenticationmiddleware_call
+
+
+def patch_middlewares():
+    # type: () -> None
+    """
+    Patches Starlette's `Middleware` class to record
+    spans for every middleware invoked.
+    """
+    old_middleware_init = Middleware.__init__
+
+    not_yet_patched = "_sentry_middleware_init" not in str(old_middleware_init)
+
+    if not_yet_patched:
+
+        def _sentry_middleware_init(self, cls, **options):
+            # type: (Any, Any, Any) -> None
+            if cls == SentryAsgiMiddleware:
+                return old_middleware_init(self, cls, **options)
+
+            span_enabled_cls = _enable_span_for_middleware(cls)
+            old_middleware_init(self, span_enabled_cls, **options)
+
+            if cls == AuthenticationMiddleware:
+                patch_authentication_middleware(cls)
+
+            if cls == ExceptionMiddleware:
+                patch_exception_middleware(cls)
+
+        Middleware.__init__ = _sentry_middleware_init
+
+
+def patch_asgi_app():
+    # type: () -> None
+    """
+    Instrument Starlette ASGI app using the SentryAsgiMiddleware.
+    """
+    old_app = Starlette.__call__
+
+    async def _sentry_patched_asgi_app(self, scope, receive, send):
+        # type: (Starlette, StarletteScope, Receive, Send) -> None
+        integration = Hub.current.get_integration(StarletteIntegration)
+        if integration is None:
+            return await old_app(self, scope, receive, send)
+
+        middleware = SentryAsgiMiddleware(
+            lambda *a, **kw: old_app(self, *a, **kw),
+            mechanism_type=StarletteIntegration.identifier,
+            transaction_style=integration.transaction_style,
+        )
+
+        middleware.__call__ = middleware._run_asgi3
+        return await middleware(scope, receive, send)
+
+    Starlette.__call__ = _sentry_patched_asgi_app
+
+
+# This was vendored in from Starlette to support Starlette 0.19.1 because
+# this function was only introduced in 0.20.x
+def _is_async_callable(obj):
+    # type: (Any) -> bool
+    while isinstance(obj, functools.partial):
+        obj = obj.func
+
+    return asyncio.iscoroutinefunction(obj) or (
+        callable(obj) and asyncio.iscoroutinefunction(obj.__call__)
+    )
+
+
+def patch_request_response():
+    # type: () -> None
+    old_request_response = starlette.routing.request_response
+
+    def _sentry_request_response(func):
+        # type: (Callable[[Any], Any]) -> ASGIApp
+        old_func = func
+
+        is_coroutine = _is_async_callable(old_func)
+        if is_coroutine:
+
+            async def _sentry_async_func(*args, **kwargs):
+                # type: (*Any, **Any) -> Any
+                hub = Hub.current
+                integration = hub.get_integration(StarletteIntegration)
+                if integration is None:
+                    return await old_func(*args, **kwargs)
+
+                with hub.configure_scope() as sentry_scope:
+                    request = args[0]
+
+                    _set_transaction_name_and_source(
+                        sentry_scope, integration.transaction_style, request
+                    )
+
+                    extractor = StarletteRequestExtractor(request)
+                    info = await extractor.extract_request_info()
+
+                    def _make_request_event_processor(req, integration):
+                        # type: (Any, Any) -> Callable[[Event, dict[str, Any]], Event]
+                        def event_processor(event, hint):
+                            # type: (Event, Dict[str, Any]) -> Event
+
+                            # Add info from request to event
+                            request_info = event.get("request", {})
+                            if info:
+                                if "cookies" in info:
+                                    request_info["cookies"] = info["cookies"]
+                                if "data" in info:
+                                    request_info["data"] = info["data"]
+                            event["request"] = deepcopy(request_info)
+
+                            return event
+
+                        return event_processor
+
+                sentry_scope._name = StarletteIntegration.identifier
+                sentry_scope.add_event_processor(
+                    _make_request_event_processor(request, integration)
+                )
+
+                return await old_func(*args, **kwargs)
+
+            func = _sentry_async_func
+        else:
+
+            def _sentry_sync_func(*args, **kwargs):
+                # type: (*Any, **Any) -> Any
+                hub = Hub.current
+                integration = hub.get_integration(StarletteIntegration)
+                if integration is None:
+                    return old_func(*args, **kwargs)
+
+                with hub.configure_scope() as sentry_scope:
+                    if sentry_scope.profile is not None:
+                        sentry_scope.profile.update_active_thread_id()
+
+                    request = args[0]
+
+                    _set_transaction_name_and_source(
+                        sentry_scope, integration.transaction_style, request
+                    )
+
+                    extractor = StarletteRequestExtractor(request)
+                    cookies = extractor.extract_cookies_from_request()
+
+                    def _make_request_event_processor(req, integration):
+                        # type: (Any, Any) -> Callable[[Event, dict[str, Any]], Event]
+                        def event_processor(event, hint):
+                            # type: (Event, dict[str, Any]) -> Event
+
+                            # Extract information from request
+                            request_info = event.get("request", {})
+                            if cookies:
+                                request_info["cookies"] = cookies
+
+                            event["request"] = deepcopy(request_info)
+
+                            return event
+
+                        return event_processor
+
+                sentry_scope._name = StarletteIntegration.identifier
+                sentry_scope.add_event_processor(
+                    _make_request_event_processor(request, integration)
+                )
+
+                return old_func(*args, **kwargs)
+
+            func = _sentry_sync_func
+
+        return old_request_response(func)
+
+    starlette.routing.request_response = _sentry_request_response
+
+
+def patch_templates():
+    # type: () -> None
+
+    # If markupsafe is not installed, then Jinja2 is not installed
+    # (markupsafe is a dependency of Jinja2)
+    # In this case we do not need to patch the Jinja2Templates class
+    try:
+        from markupsafe import Markup
+    except ImportError:
+        return  # Nothing to do
+
+    from starlette.templating import Jinja2Templates  # type: ignore
+
+    old_jinja2templates_init = Jinja2Templates.__init__
+
+    not_yet_patched = "_sentry_jinja2templates_init" not in str(
+        old_jinja2templates_init
+    )
+
+    if not_yet_patched:
+
+        def _sentry_jinja2templates_init(self, *args, **kwargs):
+            # type: (Jinja2Templates, *Any, **Any) -> None
+            def add_sentry_trace_meta(request):
+                # type: (Request) -> Dict[str, Any]
+                hub = Hub.current
+                trace_meta = Markup(hub.trace_propagation_meta())
+                return {
+                    "sentry_trace_meta": trace_meta,
+                }
+
+            kwargs.setdefault("context_processors", [])
+
+            if add_sentry_trace_meta not in kwargs["context_processors"]:
+                kwargs["context_processors"].append(add_sentry_trace_meta)
+
+            return old_jinja2templates_init(self, *args, **kwargs)
+
+        Jinja2Templates.__init__ = _sentry_jinja2templates_init
+
+
+class StarletteRequestExtractor:
+    """
+    Extracts useful information from the Starlette request
+    (like form data or cookies) and adds it to the Sentry event.
+    """
+
+    request = None  # type: Request
+
+    def __init__(self, request):
+        # type: (StarletteRequestExtractor, Request) -> None
+        self.request = request
+
+    def extract_cookies_from_request(self):
+        # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
+        client = Hub.current.client
+        if client is None:
+            return None
+
+        cookies = None  # type: Optional[Dict[str, Any]]
+        if _should_send_default_pii():
+            cookies = self.cookies()
+
+        return cookies
+
+    async def extract_request_info(self):
+        # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
+        client = Hub.current.client
+        if client is None:
+            return None
+
+        request_info = {}  # type: Dict[str, Any]
+
+        with capture_internal_exceptions():
+            # Add cookies
+            if _should_send_default_pii():
+                request_info["cookies"] = self.cookies()
+
+            # If there is no body, just return the cookies
+            content_length = await self.content_length()
+            if not content_length:
+                return request_info
+
+            # Add annotation if body is too big
+            if content_length and not request_body_within_bounds(
+                client, content_length
+            ):
+                request_info["data"] = AnnotatedValue.removed_because_over_size_limit()
+                return request_info
+
+            # Add JSON body, if it is a JSON request
+            json = await self.json()
+            if json:
+                request_info["data"] = json
+                return request_info
+
+            # Add form as key/value pairs, if request has form data
+            form = await self.form()
+            if form:
+                form_data = {}
+                for key, val in iteritems(form):
+                    is_file = isinstance(val, UploadFile)
+                    form_data[key] = (
+                        val
+                        if not is_file
+                        else AnnotatedValue.removed_because_raw_data()
+                    )
+
+                request_info["data"] = form_data
+                return request_info
+
+            # Raw data: do not add the body, just an annotation
+            request_info["data"] = AnnotatedValue.removed_because_raw_data()
+            return request_info
+
+    async def content_length(self):
+        # type: (StarletteRequestExtractor) -> Optional[int]
+        if "content-length" in self.request.headers:
+            return int(self.request.headers["content-length"])
+
+        return None
+
+    def cookies(self):
+        # type: (StarletteRequestExtractor) -> Dict[str, Any]
+        return self.request.cookies
+
+    async def form(self):
+        # type: (StarletteRequestExtractor) -> Any
+        if multipart is None:
+            return None
+
+        # Parse the body first to get it cached, as Starlette does not cache form()
+        # as it does body() and json(): https://github.com/encode/starlette/discussions/1933
+        # Calling `.form()` without calling `.body()` first could
+        # potentially break the user's project.
+        await self.request.body()
+
+        return await self.request.form()
+
+    def is_json(self):
+        # type: (StarletteRequestExtractor) -> bool
+        return _is_json_content_type(self.request.headers.get("content-type"))
+
+    async def json(self):
+        # type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
+        if not self.is_json():
+            return None
+
+        return await self.request.json()
+
+
+def _transaction_name_from_router(scope):
+    # type: (StarletteScope) -> Optional[str]
+    router = scope.get("router")
+    if not router:
+        return None
+
+    for route in router.routes:
+        match = route.matches(scope)
+        if match[0] == Match.FULL:
+            return route.path
+
+    return None
+
+
+def _set_transaction_name_and_source(scope, transaction_style, request):
+    # type: (SentryScope, str, Any) -> None
+    name = None
+    source = SOURCE_FOR_STYLE[transaction_style]
+
+    if transaction_style == "endpoint":
+        endpoint = request.scope.get("endpoint")
+        if endpoint:
+            name = transaction_from_function(endpoint) or None
+
+    elif transaction_style == "url":
+        name = _transaction_name_from_router(request.scope)
+
+    if name is None:
+        name = _DEFAULT_TRANSACTION_NAME
+        source = TRANSACTION_SOURCE_ROUTE
+
+    scope.set_transaction_name(name, source=source)
+    logger.debug(
+        "[Starlette] Set transaction name and source on scope: %s / %s", name, source
+    )
+
+
+def _get_transaction_from_middleware(app, asgi_scope, integration):
+    # type: (Any, Dict[str, Any], StarletteIntegration) -> Tuple[Optional[str], Optional[str]]
+    name = None
+    source = None
+
+    if integration.transaction_style == "endpoint":
+        name = transaction_from_function(app.__class__)
+        source = TRANSACTION_SOURCE_COMPONENT
+    elif integration.transaction_style == "url":
+        name = _transaction_name_from_router(asgi_scope)
+        source = TRANSACTION_SOURCE_ROUTE
+
+    return name, source
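
A minimal configuration sketch for the integration above (DSN and route are placeholders):

```python
import sentry_sdk
from sentry_sdk.integrations.starlette import StarletteIntegration

from starlette.applications import Starlette
from starlette.responses import PlainTextResponse
from starlette.routing import Route

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    traces_sample_rate=1.0,
    # "url" names transactions after the matched route path ("/users/{id}"),
    # "endpoint" names them after the handler function.
    integrations=[StarletteIntegration(transaction_style="url")],
)


async def get_user(request):
    return PlainTextResponse("ok")


app = Starlette(routes=[Route("/users/{id}", get_user)])
```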
diff --git a/sentry_sdk/integrations/starlite.py b/sentry_sdk/integrations/starlite.py
new file mode 100644
index 0000000000..070675c2e7
--- /dev/null
+++ b/sentry_sdk/integrations/starlite.py
@@ -0,0 +1,274 @@
+from typing import TYPE_CHECKING
+
+from pydantic import BaseModel  # type: ignore
+from sentry_sdk.consts import OP
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE
+from sentry_sdk.utils import event_from_exception, transaction_from_function
+
+try:
+    from starlite import Request, Starlite, State  # type: ignore
+    from starlite.handlers.base import BaseRouteHandler  # type: ignore
+    from starlite.middleware import DefineMiddleware  # type: ignore
+    from starlite.plugins.base import get_plugin_for_value  # type: ignore
+    from starlite.routes.http import HTTPRoute  # type: ignore
+    from starlite.utils import ConnectionDataExtractor, is_async_callable, Ref  # type: ignore
+
+    if TYPE_CHECKING:
+        from typing import Any, Dict, List, Optional, Union
+        from starlite.types import (  # type: ignore
+            ASGIApp,
+            HTTPReceiveMessage,
+            HTTPScope,
+            Message,
+            Middleware,
+            Receive,
+            Scope,
+            Send,
+            WebSocketReceiveMessage,
+        )
+        from starlite import MiddlewareProtocol
+        from sentry_sdk._types import Event
+except ImportError:
+    raise DidNotEnable("Starlite is not installed")
+
+
+_DEFAULT_TRANSACTION_NAME = "generic Starlite request"
+
+
+class SentryStarliteASGIMiddleware(SentryAsgiMiddleware):
+    def __init__(self, app: "ASGIApp"):
+        super().__init__(
+            app=app,
+            unsafe_context_data=False,
+            transaction_style="endpoint",
+            mechanism_type="asgi",
+        )
+
+
+class StarliteIntegration(Integration):
+    identifier = "starlite"
+
+    @staticmethod
+    def setup_once() -> None:
+        patch_app_init()
+        patch_middlewares()
+        patch_http_route_handle()
+
+
+def patch_app_init() -> None:
+    """
+    Replaces the Starlite class's `__init__` function in order to inject `after_exception` handlers and set the
+    `SentryStarliteASGIMiddleware` as the outermost middleware in the stack.
+    See:
+    - https://starlite-api.github.io/starlite/usage/0-the-starlite-app/5-application-hooks/#after-exception
+    - https://starlite-api.github.io/starlite/usage/7-middleware/0-middleware-intro/
+    """
+    old__init__ = Starlite.__init__
+
+    def injection_wrapper(self: "Starlite", *args: "Any", **kwargs: "Any") -> None:
+        after_exception = kwargs.pop("after_exception", [])
+        kwargs.update(
+            after_exception=[
+                exception_handler,
+                *(
+                    after_exception
+                    if isinstance(after_exception, list)
+                    else [after_exception]
+                ),
+            ]
+        )
+
+        SentryStarliteASGIMiddleware.__call__ = SentryStarliteASGIMiddleware._run_asgi3  # type: ignore
+        middleware = kwargs.pop("middleware", None) or []
+        kwargs["middleware"] = [SentryStarliteASGIMiddleware, *middleware]
+        old__init__(self, *args, **kwargs)
+
+    Starlite.__init__ = injection_wrapper
+
+
+def patch_middlewares() -> None:
+    old__resolve_middleware_stack = BaseRouteHandler.resolve_middleware
+
+    def resolve_middleware_wrapper(self: "Any") -> "List[Middleware]":
+        return [
+            enable_span_for_middleware(middleware)
+            for middleware in old__resolve_middleware_stack(self)
+        ]
+
+    BaseRouteHandler.resolve_middleware = resolve_middleware_wrapper
+
+
+def enable_span_for_middleware(middleware: "Middleware") -> "Middleware":
+    if (
+        not hasattr(middleware, "__call__")  # noqa: B004
+        or middleware is SentryStarliteASGIMiddleware
+    ):
+        return middleware
+
+    if isinstance(middleware, DefineMiddleware):
+        old_call: "ASGIApp" = middleware.middleware.__call__
+    else:
+        old_call = middleware.__call__
+
+    async def _create_span_call(
+        self: "MiddlewareProtocol", scope: "Scope", receive: "Receive", send: "Send"
+    ) -> None:
+        hub = Hub.current
+        integration = hub.get_integration(StarliteIntegration)
+        if integration is not None:
+            middleware_name = self.__class__.__name__
+            with hub.start_span(
+                op=OP.MIDDLEWARE_STARLITE, description=middleware_name
+            ) as middleware_span:
+                middleware_span.set_tag("starlite.middleware_name", middleware_name)
+
+                # Creating spans for the "receive" callback
+                async def _sentry_receive(
+                    *args: "Any", **kwargs: "Any"
+                ) -> "Union[HTTPReceiveMessage, WebSocketReceiveMessage]":
+                    hub = Hub.current
+                    with hub.start_span(
+                        op=OP.MIDDLEWARE_STARLITE_RECEIVE,
+                        description=getattr(receive, "__qualname__", str(receive)),
+                    ) as span:
+                        span.set_tag("starlite.middleware_name", middleware_name)
+                        return await receive(*args, **kwargs)
+
+                receive_name = getattr(receive, "__name__", str(receive))
+                receive_patched = receive_name == "_sentry_receive"
+                new_receive = _sentry_receive if not receive_patched else receive
+
+                # Creating spans for the "send" callback
+                async def _sentry_send(message: "Message") -> None:
+                    hub = Hub.current
+                    with hub.start_span(
+                        op=OP.MIDDLEWARE_STARLITE_SEND,
+                        description=getattr(send, "__qualname__", str(send)),
+                    ) as span:
+                        span.set_tag("starlite.middleware_name", middleware_name)
+                        return await send(message)
+
+                send_name = getattr(send, "__name__", str(send))
+                send_patched = send_name == "_sentry_send"
+                new_send = _sentry_send if not send_patched else send
+
+                return await old_call(self, scope, new_receive, new_send)
+        else:
+            return await old_call(self, scope, receive, send)
+
+    not_yet_patched = old_call.__name__ not in ["_create_span_call"]
+
+    if not_yet_patched:
+        if isinstance(middleware, DefineMiddleware):
+            middleware.middleware.__call__ = _create_span_call
+        else:
+            middleware.__call__ = _create_span_call
+
+    return middleware
+
+
+def patch_http_route_handle() -> None:
+    old_handle = HTTPRoute.handle
+
+    async def handle_wrapper(
+        self: "HTTPRoute", scope: "HTTPScope", receive: "Receive", send: "Send"
+    ) -> None:
+        hub = Hub.current
+        integration: StarliteIntegration = hub.get_integration(StarliteIntegration)
+        if integration is None:
+            return await old_handle(self, scope, receive, send)
+
+        with hub.configure_scope() as sentry_scope:
+            request: "Request[Any, Any]" = scope["app"].request_class(
+                scope=scope, receive=receive, send=send
+            )
+            extracted_request_data = ConnectionDataExtractor(
+                parse_body=True, parse_query=True
+            )(request)
+            body = extracted_request_data.pop("body")
+
+            request_data = await body
+
+            def event_processor(event: "Event", _: "Dict[str, Any]") -> "Event":
+                route_handler = scope.get("route_handler")
+
+                request_info = event.get("request", {})
+                request_info["content_length"] = len(scope.get("_body", b""))
+                if _should_send_default_pii():
+                    request_info["cookies"] = extracted_request_data["cookies"]
+                if request_data is not None:
+                    request_info["data"] = request_data
+
+                func = None
+                if route_handler.name is not None:
+                    tx_name = route_handler.name
+                elif isinstance(route_handler.fn, Ref):
+                    func = route_handler.fn.value
+                else:
+                    func = route_handler.fn
+                if func is not None:
+                    tx_name = transaction_from_function(func)
+
+                tx_info = {"source": SOURCE_FOR_STYLE["endpoint"]}
+
+                if not tx_name:
+                    tx_name = _DEFAULT_TRANSACTION_NAME
+                    tx_info = {"source": TRANSACTION_SOURCE_ROUTE}
+
+                event.update(
+                    {
+                        "request": request_info,
+                        "transaction": tx_name,
+                        "transaction_info": tx_info,
+                    }
+                )
+                return event
+
+            sentry_scope._name = StarliteIntegration.identifier
+            sentry_scope.add_event_processor(event_processor)
+
+            return await old_handle(self, scope, receive, send)
+
+    HTTPRoute.handle = handle_wrapper
+
+
+def retrieve_user_from_scope(scope: "Scope") -> "Optional[Dict[str, Any]]":
+    scope_user = scope.get("user", {})
+    if not scope_user:
+        return None
+    if isinstance(scope_user, dict):
+        return scope_user
+    if isinstance(scope_user, BaseModel):
+        return scope_user.dict()
+    if hasattr(scope_user, "asdict"):  # dataclasses
+        return scope_user.asdict()
+
+    plugin = get_plugin_for_value(scope_user)
+    if plugin and not is_async_callable(plugin.to_dict):
+        return plugin.to_dict(scope_user)
+
+    return None
+
+
+def exception_handler(exc: Exception, scope: "Scope", _: "State") -> None:
+    hub = Hub.current
+    if hub.get_integration(StarliteIntegration) is None:
+        return
+
+    user_info: "Optional[Dict[str, Any]]" = None
+    if _should_send_default_pii():
+        user_info = retrieve_user_from_scope(scope)
+    if user_info and isinstance(user_info, dict):
+        with hub.configure_scope() as sentry_scope:
+            sentry_scope.set_user(user_info)
+
+    event, hint = event_from_exception(
+        exc,
+        client_options=hub.client.options if hub.client else None,
+        mechanism={"type": StarliteIntegration.identifier, "handled": False},
+    )
+
+    hub.capture_event(event, hint=hint)
diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py
index 9495d406dc..4e4b411868 100644
--- a/sentry_sdk/integrations/stdlib.py
+++ b/sentry_sdk/integrations/stdlib.py
@@ -2,16 +2,24 @@
 import subprocess
 import sys
 import platform
+from sentry_sdk.consts import OP, SPANDATA
 
 from sentry_sdk.hub import Hub
 from sentry_sdk.integrations import Integration
 from sentry_sdk.scope import add_global_event_processor
-from sentry_sdk.tracing_utils import EnvironHeaders
-from sentry_sdk.utils import capture_internal_exceptions, logger, safe_repr
-
-from sentry_sdk._types import MYPY
-
-if MYPY:
+from sentry_sdk.tracing_utils import EnvironHeaders, should_propagate_trace
+from sentry_sdk.utils import (
+    SENSITIVE_DATA_SUBSTITUTE,
+    capture_internal_exceptions,
+    is_sentry_url,
+    logger,
+    safe_repr,
+    parse_url,
+)
+
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
     from typing import Any
     from typing import Callable
     from typing import Dict
@@ -31,7 +39,7 @@
     "name": platform.python_implementation(),
     "version": "%s.%s.%s" % (sys.version_info[:3]),
     "build": sys.version,
-}
+}  # type: dict[str, object]
 
 
 class StdlibIntegration(Integration):
@@ -62,15 +70,16 @@ def _install_httplib():
     def putrequest(self, method, url, *args, **kwargs):
         # type: (HTTPConnection, str, str, *Any, **Any) -> Any
         hub = Hub.current
-        if hub.get_integration(StdlibIntegration) is None:
-            return real_putrequest(self, method, url, *args, **kwargs)
 
         host = self.host
         port = self.port
         default_port = self.default_port
 
+        if hub.get_integration(StdlibIntegration) is None or is_sentry_url(hub, host):
+            return real_putrequest(self, method, url, *args, **kwargs)
+
         real_url = url
-        if not real_url.startswith(("http://", "https://")):
+        if real_url is None or not real_url.startswith(("http://", "https://")):
             real_url = "%s://%s%s%s" % (
                 default_port == 443 and "https" or "http",
                 host,
@@ -78,20 +87,32 @@ def putrequest(self, method, url, *args, **kwargs):
                 url,
             )
 
-        span = hub.start_span(op="http", description="%s %s" % (method, real_url))
+        parsed_url = None
+        with capture_internal_exceptions():
+            parsed_url = parse_url(real_url, sanitize=False)
+
+        span = hub.start_span(
+            op=OP.HTTP_CLIENT,
+            description="%s %s"
+            % (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE),
+        )
 
-        span.set_data("method", method)
-        span.set_data("url", real_url)
+        span.set_data(SPANDATA.HTTP_METHOD, method)
+        if parsed_url is not None:
+            span.set_data("url", parsed_url.url)
+            span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
+            span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)
 
         rv = real_putrequest(self, method, url, *args, **kwargs)
 
-        for key, value in hub.iter_trace_propagation_headers(span):
-            logger.debug(
-                "[Tracing] Adding `{key}` header {value} to outgoing request to {real_url}.".format(
-                    key=key, value=value, real_url=real_url
+        if should_propagate_trace(hub, real_url):
+            for key, value in hub.iter_trace_propagation_headers(span):
+                logger.debug(
+                    "[Tracing] Adding `{key}` header {value} to outgoing request to {real_url}.".format(
+                        key=key, value=value, real_url=real_url
+                    )
                 )
-            )
-            self.putheader(key, value)
+                self.putheader(key, value)
 
         self._sentrysdk_span = span
 
@@ -106,7 +127,6 @@ def getresponse(self, *args, **kwargs):
 
         rv = real_getresponse(self, *args, **kwargs)
 
-        span.set_data("status_code", rv.status)
         span.set_http_status(int(rv.status))
         span.set_data("reason", rv.reason)
         span.finish()
@@ -183,12 +203,15 @@ def sentry_patched_popen_init(self, *a, **kw):
 
         env = None
 
-        with hub.start_span(op="subprocess", description=description) as span:
-
+        with hub.start_span(op=OP.SUBPROCESS, description=description) as span:
             for k, v in hub.iter_trace_propagation_headers(span):
                 if env is None:
                     env = _init_argument(
-                        a, kw, "env", 10, lambda x: dict(x or os.environ)
+                        a,
+                        kw,
+                        "env",
+                        10,
+                        lambda x: dict(x if x is not None else os.environ),
                     )
                 env["SUBPROCESS_" + k.upper().replace("-", "_")] = v
 
@@ -211,7 +234,7 @@ def sentry_patched_popen_wait(self, *a, **kw):
         if hub.get_integration(StdlibIntegration) is None:
             return old_popen_wait(self, *a, **kw)
 
-        with hub.start_span(op="subprocess.wait") as span:
+        with hub.start_span(op=OP.SUBPROCESS_WAIT) as span:
             span.set_tag("subprocess.pid", self.pid)
             return old_popen_wait(self, *a, **kw)
 
@@ -226,7 +249,7 @@ def sentry_patched_popen_communicate(self, *a, **kw):
         if hub.get_integration(StdlibIntegration) is None:
             return old_popen_communicate(self, *a, **kw)
 
-        with hub.start_span(op="subprocess.communicate") as span:
+        with hub.start_span(op=OP.SUBPROCESS_COMMUNICATE) as span:
             span.set_tag("subprocess.pid", self.pid)
             return old_popen_communicate(self, *a, **kw)
 
diff --git a/sentry_sdk/integrations/strawberry.py b/sentry_sdk/integrations/strawberry.py
new file mode 100644
index 0000000000..5bc4184bee
--- /dev/null
+++ b/sentry_sdk/integrations/strawberry.py
@@ -0,0 +1,406 @@
+import hashlib
+from functools import cached_property
+from inspect import isawaitable
+from sentry_sdk import configure_scope, start_span
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    event_from_exception,
+    logger,
+    package_version,
+    _get_installed_modules,
+)
+from sentry_sdk._types import TYPE_CHECKING
+
+try:
+    import strawberry.schema.schema as strawberry_schema  # type: ignore
+    from strawberry import Schema
+    from strawberry.extensions import SchemaExtension  # type: ignore
+    from strawberry.extensions.tracing.utils import should_skip_tracing as strawberry_should_skip_tracing  # type: ignore
+    from strawberry.extensions.tracing import (  # type: ignore
+        SentryTracingExtension as StrawberrySentryAsyncExtension,
+        SentryTracingExtensionSync as StrawberrySentrySyncExtension,
+    )
+    from strawberry.http import async_base_view, sync_base_view  # type: ignore
+except ImportError:
+    raise DidNotEnable("strawberry-graphql is not installed")
+
+if TYPE_CHECKING:
+    from typing import Any, Callable, Generator, List, Optional
+    from graphql import GraphQLError, GraphQLResolveInfo  # type: ignore
+    from strawberry.http import GraphQLHTTPResponse
+    from strawberry.types import ExecutionContext, ExecutionResult  # type: ignore
+    from sentry_sdk._types import Event, EventProcessor
+
+
+ignore_logger("strawberry.execution")
+
+
+class StrawberryIntegration(Integration):
+    identifier = "strawberry"
+
+    def __init__(self, async_execution=None):
+        # type: (Optional[bool]) -> None
+        if async_execution not in (None, False, True):
+            raise ValueError(
+                'Invalid value for async_execution: "{}" (must be bool)'.format(
+                    async_execution
+                )
+            )
+        self.async_execution = async_execution
+
+    @staticmethod
+    def setup_once():
+        # type: () -> None
+        version = package_version("strawberry-graphql")
+
+        if version is None:
+            raise DidNotEnable(
+                "Unparsable strawberry-graphql version: {}".format(version)
+            )
+
+        if version < (0, 209, 5):
+            raise DidNotEnable("strawberry-graphql 0.209.5 or newer required.")
+
+        _patch_schema_init()
+        _patch_execute()
+        _patch_views()
+
+
+def _patch_schema_init():
+    # type: () -> None
+    old_schema_init = Schema.__init__
+
+    def _sentry_patched_schema_init(self, *args, **kwargs):
+        # type: (Schema, Any, Any) -> None
+        integration = Hub.current.get_integration(StrawberryIntegration)
+        if integration is None:
+            return old_schema_init(self, *args, **kwargs)
+
+        extensions = kwargs.get("extensions") or []
+
+        if integration.async_execution is not None:
+            should_use_async_extension = integration.async_execution
+        else:
+            # try to figure it out ourselves
+            should_use_async_extension = _guess_if_using_async(extensions)
+
+            logger.info(
+                "Assuming strawberry is running %s. If not, initialize it as StrawberryIntegration(async_execution=%s).",
+                "async" if should_use_async_extension else "sync",
+                "False" if should_use_async_extension else "True",
+            )
+
+        # remove the built-in Strawberry Sentry extension, if present
+        extensions = [
+            extension
+            for extension in extensions
+            if extension
+            not in (StrawberrySentryAsyncExtension, StrawberrySentrySyncExtension)
+        ]
+
+        # add our extension
+        extensions.append(
+            SentryAsyncExtension if should_use_async_extension else SentrySyncExtension
+        )
+
+        kwargs["extensions"] = extensions
+
+        return old_schema_init(self, *args, **kwargs)
+
+    Schema.__init__ = _sentry_patched_schema_init
+
+
+class SentryAsyncExtension(SchemaExtension):  # type: ignore
+    def __init__(
+        self,
+        *,
+        execution_context=None,
+    ):
+        # type: (Any, Optional[ExecutionContext]) -> None
+        if execution_context:
+            self.execution_context = execution_context
+
+    @cached_property
+    def _resource_name(self):
+        # type: () -> str
+        query_hash = self.hash_query(self.execution_context.query)
+
+        if self.execution_context.operation_name:
+            return "{}:{}".format(self.execution_context.operation_name, query_hash)
+
+        return query_hash
+
+    def hash_query(self, query):
+        # type: (str) -> str
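+        # md5 is used only to derive a stable, compact resource name from the
+        # query text; it is not a security measure.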
+        return hashlib.md5(query.encode("utf-8")).hexdigest()
+
+    def on_operation(self):
+        # type: () -> Generator[None, None, None]
+        self._operation_name = self.execution_context.operation_name
+
+        operation_type = "query"
+        op = OP.GRAPHQL_QUERY
+
+        if self.execution_context.query is None:
+            self.execution_context.query = ""
+
+        if self.execution_context.query.strip().startswith("mutation"):
+            operation_type = "mutation"
+            op = OP.GRAPHQL_MUTATION
+        elif self.execution_context.query.strip().startswith("subscription"):
+            operation_type = "subscription"
+            op = OP.GRAPHQL_SUBSCRIPTION
+
+        description = operation_type
+        if self._operation_name:
+            description += " {}".format(self._operation_name)
+
+        Hub.current.add_breadcrumb(
+            category="graphql.operation",
+            data={
+                "operation_name": self._operation_name,
+                "operation_type": operation_type,
+            },
+        )
+
+        with configure_scope() as scope:
+            if scope.span:
+                self.graphql_span = scope.span.start_child(
+                    op=op, description=description
+                )
+            else:
+                self.graphql_span = start_span(op=op, description=description)
+
+        self.graphql_span.set_data("graphql.operation.type", operation_type)
+        self.graphql_span.set_data("graphql.operation.name", self._operation_name)
+        self.graphql_span.set_data("graphql.document", self.execution_context.query)
+        self.graphql_span.set_data("graphql.resource_name", self._resource_name)
+
+        yield
+
+        self.graphql_span.finish()
+
+    def on_validate(self):
+        # type: () -> Generator[None, None, None]
+        self.validation_span = self.graphql_span.start_child(
+            op=OP.GRAPHQL_VALIDATE, description="validation"
+        )
+
+        yield
+
+        self.validation_span.finish()
+
+    def on_parse(self):
+        # type: () -> Generator[None, None, None]
+        self.parsing_span = self.graphql_span.start_child(
+            op=OP.GRAPHQL_PARSE, description="parsing"
+        )
+
+        yield
+
+        self.parsing_span.finish()
+
+    def should_skip_tracing(self, _next, info):
+        # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], GraphQLResolveInfo) -> bool
+        return strawberry_should_skip_tracing(_next, info)
+
+    async def _resolve(self, _next, root, info, *args, **kwargs):
+        # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any
+        result = _next(root, info, *args, **kwargs)
+
+        if isawaitable(result):
+            result = await result
+
+        return result
+
+    async def resolve(self, _next, root, info, *args, **kwargs):
+        # type: (Callable[[Any, GraphQLResolveInfo, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any
+        if self.should_skip_tracing(_next, info):
+            return await self._resolve(_next, root, info, *args, **kwargs)
+
+        field_path = "{}.{}".format(info.parent_type, info.field_name)
+
+        with self.graphql_span.start_child(
+            op=OP.GRAPHQL_RESOLVE, description="resolving {}".format(field_path)
+        ) as span:
+            span.set_data("graphql.field_name", info.field_name)
+            span.set_data("graphql.parent_type", info.parent_type.name)
+            span.set_data("graphql.field_path", field_path)
+            span.set_data("graphql.path", ".".join(map(str, info.path.as_list())))
+
+            return await self._resolve(_next, root, info, *args, **kwargs)
+
+
+class SentrySyncExtension(SentryAsyncExtension):
+    def resolve(self, _next, root, info, *args, **kwargs):
+        # type: (Callable[[Any, Any, Any, Any], Any], Any, GraphQLResolveInfo, str, Any) -> Any
+        if self.should_skip_tracing(_next, info):
+            return _next(root, info, *args, **kwargs)
+
+        field_path = "{}.{}".format(info.parent_type, info.field_name)
+
+        with self.graphql_span.start_child(
+            op=OP.GRAPHQL_RESOLVE, description="resolving {}".format(field_path)
+        ) as span:
+            span.set_data("graphql.field_name", info.field_name)
+            span.set_data("graphql.parent_type", info.parent_type.name)
+            span.set_data("graphql.field_path", field_path)
+            span.set_data("graphql.path", ".".join(map(str, info.path.as_list())))
+
+            return _next(root, info, *args, **kwargs)
+
+
+def _patch_execute():
+    # type: () -> None
+    old_execute_async = strawberry_schema.execute
+    old_execute_sync = strawberry_schema.execute_sync
+
+    async def _sentry_patched_execute_async(*args, **kwargs):
+        # type: (Any, Any) -> ExecutionResult
+        hub = Hub.current
+        integration = hub.get_integration(StrawberryIntegration)
+        if integration is None:
+            return await old_execute_async(*args, **kwargs)
+
+        result = await old_execute_async(*args, **kwargs)
+
+        if "execution_context" in kwargs and result.errors:
+            with hub.configure_scope() as scope:
+                event_processor = _make_request_event_processor(
+                    kwargs["execution_context"]
+                )
+                scope.add_event_processor(event_processor)
+
+        return result
+
+    def _sentry_patched_execute_sync(*args, **kwargs):
+        # type: (Any, Any) -> ExecutionResult
+        hub = Hub.current
+        integration = hub.get_integration(StrawberryIntegration)
+        if integration is None:
+            return old_execute_sync(*args, **kwargs)
+
+        result = old_execute_sync(*args, **kwargs)
+
+        if "execution_context" in kwargs and result.errors:
+            with hub.configure_scope() as scope:
+                event_processor = _make_request_event_processor(
+                    kwargs["execution_context"]
+                )
+                scope.add_event_processor(event_processor)
+
+        return result
+
+    strawberry_schema.execute = _sentry_patched_execute_async
+    strawberry_schema.execute_sync = _sentry_patched_execute_sync
+
+
+def _patch_views():
+    # type: () -> None
+    old_async_view_handle_errors = async_base_view.AsyncBaseHTTPView._handle_errors
+    old_sync_view_handle_errors = sync_base_view.SyncBaseHTTPView._handle_errors
+
+    def _sentry_patched_async_view_handle_errors(self, errors, response_data):
+        # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None
+        old_async_view_handle_errors(self, errors, response_data)
+        _sentry_patched_handle_errors(self, errors, response_data)
+
+    def _sentry_patched_sync_view_handle_errors(self, errors, response_data):
+        # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None
+        old_sync_view_handle_errors(self, errors, response_data)
+        _sentry_patched_handle_errors(self, errors, response_data)
+
+    def _sentry_patched_handle_errors(self, errors, response_data):
+        # type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None
+        hub = Hub.current
+        integration = hub.get_integration(StrawberryIntegration)
+        if integration is None:
+            return
+
+        if not errors:
+            return
+
+        with hub.configure_scope() as scope:
+            event_processor = _make_response_event_processor(response_data)
+            scope.add_event_processor(event_processor)
+
+        with capture_internal_exceptions():
+            for error in errors:
+                event, hint = event_from_exception(
+                    error,
+                    client_options=hub.client.options if hub.client else None,
+                    mechanism={
+                        "type": integration.identifier,
+                        "handled": False,
+                    },
+                )
+                hub.capture_event(event, hint=hint)
+
+    async_base_view.AsyncBaseHTTPView._handle_errors = (
+        _sentry_patched_async_view_handle_errors
+    )
+    sync_base_view.SyncBaseHTTPView._handle_errors = (
+        _sentry_patched_sync_view_handle_errors
+    )
+
+
+def _make_request_event_processor(execution_context):
+    # type: (ExecutionContext) -> EventProcessor
+
+    def inner(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        with capture_internal_exceptions():
+            if _should_send_default_pii():
+                request_data = event.setdefault("request", {})
+                request_data["api_target"] = "graphql"
+
+                if not request_data.get("data"):
+                    data = {"query": execution_context.query}
+
+                    if execution_context.variables:
+                        data["variables"] = execution_context.variables
+                    if execution_context.operation_name:
+                        data["operationName"] = execution_context.operation_name
+
+                    request_data["data"] = data
+
+            else:
+                try:
+                    del event["request"]["data"]
+                except (KeyError, TypeError):
+                    pass
+
+        return event
+
+    return inner
+
+
+def _make_response_event_processor(response_data):
+    # type: (GraphQLHTTPResponse) -> EventProcessor
+
+    def inner(event, hint):
+        # type: (Event, dict[str, Any]) -> Event
+        with capture_internal_exceptions():
+            if _should_send_default_pii():
+                contexts = event.setdefault("contexts", {})
+                contexts["response"] = {"data": response_data}
+
+        return event
+
+    return inner
+
+
+def _guess_if_using_async(extensions):
+    # type: (List[SchemaExtension]) -> bool
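+    # Heuristic: an explicitly configured Strawberry Sentry extension decides;
+    # otherwise assume async execution when one of the common async-capable
+    # frameworks is installed.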
+    if StrawberrySentryAsyncExtension in extensions:
+        return True
+    elif StrawberrySentrySyncExtension in extensions:
+        return False
+
+    return bool(
+        {"starlette", "starlite", "litestar", "fastapi"} & set(_get_installed_modules())
+    )
diff --git a/sentry_sdk/integrations/threading.py b/sentry_sdk/integrations/threading.py
index f29e5e8797..499cf85e6d 100644
--- a/sentry_sdk/integrations/threading.py
+++ b/sentry_sdk/integrations/threading.py
@@ -1,15 +1,16 @@
 from __future__ import absolute_import
 
 import sys
+from functools import wraps
 from threading import Thread, current_thread
 
 from sentry_sdk import Hub
 from sentry_sdk._compat import reraise
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.integrations import Integration
 from sentry_sdk.utils import event_from_exception, capture_internal_exceptions
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import TypeVar
     from typing import Callable
@@ -32,6 +33,7 @@ def setup_once():
         # type: () -> None
         old_start = Thread.start
 
+        @wraps(old_start)
         def sentry_start(self, *a, **kw):
             # type: (Thread, *Any, **Any) -> Any
             hub = Hub.current
@@ -58,6 +60,7 @@ def sentry_start(self, *a, **kw):
 
 def _wrap_run(parent_hub, old_run_func):
     # type: (Optional[Hub], F) -> F
+    @wraps(old_run_func)
     def run(*a, **kw):
         # type: (*Any, **Any) -> Any
         hub = parent_hub or Hub.current
diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py
index 443ebefaa8..c6f7700f12 100644
--- a/sentry_sdk/integrations/tornado.py
+++ b/sentry_sdk/integrations/tornado.py
@@ -2,8 +2,13 @@
 import contextlib
 from inspect import iscoroutinefunction
 
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub, _should_send_default_pii
-from sentry_sdk.tracing import Transaction
+from sentry_sdk.tracing import (
+    TRANSACTION_SOURCE_COMPONENT,
+    TRANSACTION_SOURCE_ROUTE,
+)
 from sentry_sdk.utils import (
     HAS_REAL_CONTEXTVARS,
     CONTEXTVARS_ERROR_MESSAGE,
@@ -27,16 +32,16 @@
 except ImportError:
     raise DidNotEnable("Tornado not installed")
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Optional
     from typing import Dict
     from typing import Callable
     from typing import Generator
 
-    from sentry_sdk._types import EventProcessor
+    from sentry_sdk._types import Event, EventProcessor
 
 
 class TornadoIntegration(Integration):
@@ -73,7 +78,7 @@ async def sentry_execute_request_handler(self, *args, **kwargs):
         else:
 
             @coroutine  # type: ignore
-            def sentry_execute_request_handler(self, *args, **kwargs):  # type: ignore
+            def sentry_execute_request_handler(self, *args, **kwargs):
                 # type: (RequestHandler, *Any, **Any) -> Any
                 with _handle_request_impl(self):
                     result = yield from old_execute(self, *args, **kwargs)
@@ -103,19 +108,22 @@ def _handle_request_impl(self):
     weak_handler = weakref.ref(self)
 
     with Hub(hub) as hub:
+        headers = self.request.headers
+
         with hub.configure_scope() as scope:
             scope.clear_breadcrumbs()
             processor = _make_event_processor(weak_handler)
             scope.add_event_processor(processor)
 
-        transaction = Transaction.continue_from_headers(
-            self.request.headers,
-            op="http.server",
+        transaction = continue_trace(
+            headers,
+            op=OP.HTTP_SERVER,
             # Like with all other integrations, this is our
             # fallback transaction in case there is no route.
             # sentry_urldispatcher_resolve is responsible for
             # setting a transaction name later.
             name="generic Tornado request",
+            source=TRANSACTION_SOURCE_ROUTE,
         )
 
         with hub.start_transaction(
@@ -147,7 +155,7 @@ def _capture_exception(ty, value, tb):
 def _make_event_processor(weak_handler):
     # type: (Callable[[], RequestHandler]) -> EventProcessor
     def tornado_processor(event, hint):
-        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+        # type: (Event, dict[str, Any]) -> Event
         handler = weak_handler()
         if handler is None:
             return event
@@ -156,7 +164,8 @@ def tornado_processor(event, hint):
 
         with capture_internal_exceptions():
             method = getattr(handler, handler.request.method.lower())
-            event["transaction"] = transaction_from_function(method)
+            event["transaction"] = transaction_from_function(method) or ""
+            event["transaction_info"] = {"source": TRANSACTION_SOURCE_COMPONENT}
 
         with capture_internal_exceptions():
             extractor = TornadoRequestExtractor(request)
diff --git a/sentry_sdk/integrations/trytond.py b/sentry_sdk/integrations/trytond.py
index 062a756993..6f1aff2f15 100644
--- a/sentry_sdk/integrations/trytond.py
+++ b/sentry_sdk/integrations/trytond.py
@@ -2,12 +2,12 @@
 import sentry_sdk.utils
 import sentry_sdk.integrations
 import sentry_sdk.integrations.wsgi
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
 from trytond.exceptions import TrytonException  # type: ignore
 from trytond.wsgi import app  # type: ignore
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
 
 
@@ -22,7 +22,6 @@ def __init__(self):  # type: () -> None
 
     @staticmethod
     def setup_once():  # type: () -> None
-
         app.wsgi_app = sentry_sdk.integrations.wsgi.SentryWsgiMiddleware(app.wsgi_app)
 
         def error_handler(e):  # type: (Exception) -> None
diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py
index 803406fb6d..e7fd0da66d 100644
--- a/sentry_sdk/integrations/wsgi.py
+++ b/sentry_sdk/integrations/wsgi.py
@@ -1,20 +1,22 @@
 import sys
 
+from sentry_sdk._compat import PY2, reraise
 from sentry_sdk._functools import partial
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk._werkzeug import get_host, _get_headers
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP
 from sentry_sdk.hub import Hub, _should_send_default_pii
 from sentry_sdk.utils import (
     ContextVar,
     capture_internal_exceptions,
     event_from_exception,
 )
-from sentry_sdk._compat import PY2, reraise, iteritems
-from sentry_sdk.tracing import Transaction
+from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_ROUTE
 from sentry_sdk.sessions import auto_session_tracking
 from sentry_sdk.integrations._wsgi_common import _filter_headers
 
-from sentry_sdk._types import MYPY
-
-if MYPY:
+if TYPE_CHECKING:
     from typing import Callable
     from typing import Dict
     from typing import Iterator
@@ -25,14 +27,14 @@
     from typing import Protocol
 
     from sentry_sdk.utils import ExcInfo
-    from sentry_sdk._types import EventProcessor
+    from sentry_sdk._types import Event, EventProcessor
 
     WsgiResponseIter = TypeVar("WsgiResponseIter")
     WsgiResponseHeaders = TypeVar("WsgiResponseHeaders")
     WsgiExcInfo = TypeVar("WsgiExcInfo")
 
     class StartResponse(Protocol):
-        def __call__(self, status, response_headers, exc_info=None):
+        def __call__(self, status, response_headers, exc_info=None):  # type: ignore
             # type: (str, WsgiResponseHeaders, Optional[WsgiExcInfo]) -> WsgiResponseIter
             pass
 
@@ -53,35 +55,6 @@ def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
         return s.encode("latin1").decode(charset, errors)
 
 
-def get_host(environ, use_x_forwarded_for=False):
-    # type: (Dict[str, str], bool) -> str
-    """Return the host for the given WSGI environment. Yanked from Werkzeug."""
-    if use_x_forwarded_for and "HTTP_X_FORWARDED_HOST" in environ:
-        rv = environ["HTTP_X_FORWARDED_HOST"]
-        if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"):
-            rv = rv[:-3]
-        elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"):
-            rv = rv[:-4]
-    elif environ.get("HTTP_HOST"):
-        rv = environ["HTTP_HOST"]
-        if environ["wsgi.url_scheme"] == "http" and rv.endswith(":80"):
-            rv = rv[:-3]
-        elif environ["wsgi.url_scheme"] == "https" and rv.endswith(":443"):
-            rv = rv[:-4]
-    elif environ.get("SERVER_NAME"):
-        rv = environ["SERVER_NAME"]
-        if (environ["wsgi.url_scheme"], environ["SERVER_PORT"]) not in (
-            ("https", "443"),
-            ("http", "80"),
-        ):
-            rv += ":" + environ["SERVER_PORT"]
-    else:
-        # In spite of the WSGI spec, SERVER_NAME might not be present.
-        rv = "unknown"
-
-    return rv
-
-
 def get_request_url(environ, use_x_forwarded_for=False):
     # type: (Dict[str, str], bool) -> str
     """Return the absolute URL without query string for the given WSGI
@@ -121,8 +94,11 @@ def __call__(self, environ, start_response):
                                 )
                             )
 
-                    transaction = Transaction.continue_from_environ(
-                        environ, op="http.server", name="generic WSGI request"
+                    transaction = continue_trace(
+                        environ,
+                        op=OP.HTTP_SERVER,
+                        name="generic WSGI request",
+                        source=TRANSACTION_SOURCE_ROUTE,
                     )
 
                     with hub.start_transaction(
@@ -143,7 +119,7 @@ def __call__(self, environ, start_response):
         return _ScopedResponse(hub, rv)
 
 
-def _sentry_start_response(
+def _sentry_start_response(  # type: ignore
     old_start_response,  # type: StartResponse
     transaction,  # type: Transaction
     status,  # type: str
@@ -181,27 +157,6 @@ def _get_environ(environ):
             yield key, environ[key]
 
 
-# `get_headers` comes from `werkzeug.datastructures.EnvironHeaders`
-#
-# We need this function because Django does not give us a "pure" http header
-# dict. So we might as well use it for all WSGI integrations.
-def _get_headers(environ):
-    # type: (Dict[str, str]) -> Iterator[Tuple[str, str]]
-    """
-    Returns only proper HTTP headers.
-
-    """
-    for key, value in iteritems(environ):
-        key = str(key)
-        if key.startswith("HTTP_") and key not in (
-            "HTTP_CONTENT_TYPE",
-            "HTTP_CONTENT_LENGTH",
-        ):
-            yield key[5:].replace("_", "-").title(), value
-        elif key in ("CONTENT_TYPE", "CONTENT_LENGTH"):
-            yield key.replace("_", "-").title(), value
-
-
 def get_client_ip(environ):
     # type: (Dict[str, str]) -> Optional[Any]
     """
@@ -299,7 +254,7 @@ def _make_wsgi_event_processor(environ, use_x_forwarded_for):
     headers = _filter_headers(dict(_get_headers(environ)))
 
     def event_processor(event, hint):
-        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
+        # type: (Event, Dict[str, Any]) -> Event
         with capture_internal_exceptions():
             # if the code below fails halfway through we at least have some data
             request_info = event.setdefault("request", {})
diff --git a/sentry_sdk/metrics.py b/sentry_sdk/metrics.py
new file mode 100644
index 0000000000..1e4f5a532e
--- /dev/null
+++ b/sentry_sdk/metrics.py
@@ -0,0 +1,963 @@
+import io
+import os
+import random
+import re
+import sys
+import threading
+import time
+import zlib
+from contextlib import contextmanager
+from datetime import datetime
+from functools import wraps, partial
+
+import sentry_sdk
+from sentry_sdk._compat import text_type, utc_from_timestamp, iteritems
+from sentry_sdk.utils import (
+    ContextVar,
+    now,
+    nanosecond_time,
+    to_timestamp,
+    serialize_frame,
+    json_dumps,
+)
+from sentry_sdk.envelope import Envelope, Item
+from sentry_sdk.tracing import (
+    TRANSACTION_SOURCE_ROUTE,
+    TRANSACTION_SOURCE_VIEW,
+    TRANSACTION_SOURCE_COMPONENT,
+    TRANSACTION_SOURCE_TASK,
+)
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Callable
+    from typing import Dict
+    from typing import Generator
+    from typing import Iterable
+    from typing import List
+    from typing import Optional
+    from typing import Set
+    from typing import Tuple
+    from typing import Union
+
+    from sentry_sdk._types import BucketKey
+    from sentry_sdk._types import DurationUnit
+    from sentry_sdk._types import FlushedMetricValue
+    from sentry_sdk._types import MeasurementUnit
+    from sentry_sdk._types import MetricMetaKey
+    from sentry_sdk._types import MetricTagValue
+    from sentry_sdk._types import MetricTags
+    from sentry_sdk._types import MetricTagsInternal
+    from sentry_sdk._types import MetricType
+    from sentry_sdk._types import MetricValue
+
+
+_in_metrics = ContextVar("in_metrics", default=False)
+_set = set  # set is shadowed below
+
+GOOD_TRANSACTION_SOURCES = frozenset(
+    [
+        TRANSACTION_SOURCE_ROUTE,
+        TRANSACTION_SOURCE_VIEW,
+        TRANSACTION_SOURCE_COMPONENT,
+        TRANSACTION_SOURCE_TASK,
+    ]
+)
+
+_sanitize_unit = partial(re.compile(r"[^a-zA-Z0-9_]+").sub, "")
+_sanitize_metric_key = partial(re.compile(r"[^a-zA-Z0-9_\-.]+").sub, "_")
+_sanitize_tag_key = partial(re.compile(r"[^a-zA-Z0-9_\-.\/]+").sub, "")
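+# Tag values are escaped rather than stripped: characters such as "|", ","
+# and newlines delimit fields, tags and lines in the statsd-like format
+# emitted by _encode_metrics below.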
+_TAG_VALUE_SANITIZATION_TABLE = {
+    "\n": "\\n",
+    "\r": "\\r",
+    "\t": "\\t",
+    "\\": "\\\\",
+    "|": "\\u{7c}",
+    ",": "\\u{2c}",
+}
+
+
+def _sanitize_tag_value(value):
+    # type: (str) -> str
+    return "".join(
+        [
+            (
+                _TAG_VALUE_SANITIZATION_TABLE[char]
+                if char in _TAG_VALUE_SANITIZATION_TABLE
+                else char
+            )
+            for char in value
+        ]
+    )
+
+
+def get_code_location(stacklevel):
+    # type: (int) -> Optional[Dict[str, Any]]
+    try:
+        frm = sys._getframe(stacklevel)
+    except Exception:
+        return None
+
+    return serialize_frame(
+        frm, include_local_variables=False, include_source_context=True
+    )
+
+
+@contextmanager
+def recursion_protection():
+    # type: () -> Generator[bool, None, None]
+    """Enters recursion protection and returns the old flag."""
+    old_in_metrics = _in_metrics.get()
+    _in_metrics.set(True)
+    try:
+        yield old_in_metrics
+    finally:
+        _in_metrics.set(old_in_metrics)
+
+
+def metrics_noop(func):
+    # type: (Any) -> Any
+    """Convenient decorator that uses `recursion_protection` to
+    make a function a noop.
+    """
+
+    @wraps(func)
+    def new_func(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+        with recursion_protection() as in_metrics:
+            if not in_metrics:
+                return func(*args, **kwargs)
+
+    return new_func
+
+
+class Metric(object):
+    __slots__ = ()
+
+    @property
+    def weight(self):
+        # type: (...) -> int
+        raise NotImplementedError()
+
+    def add(
+        self, value  # type: MetricValue
+    ):
+        # type: (...) -> None
+        raise NotImplementedError()
+
+    def serialize_value(self):
+        # type: (...) -> Iterable[FlushedMetricValue]
+        raise NotImplementedError()
+
+
+class CounterMetric(Metric):
+    __slots__ = ("value",)
+
+    def __init__(
+        self, first  # type: MetricValue
+    ):
+        # type: (...) -> None
+        self.value = float(first)
+
+    @property
+    def weight(self):
+        # type: (...) -> int
+        return 1
+
+    def add(
+        self, value  # type: MetricValue
+    ):
+        # type: (...) -> None
+        self.value += float(value)
+
+    def serialize_value(self):
+        # type: (...) -> Iterable[FlushedMetricValue]
+        return (self.value,)
+
+
+class GaugeMetric(Metric):
+    __slots__ = (
+        "last",
+        "min",
+        "max",
+        "sum",
+        "count",
+    )
+
+    def __init__(
+        self, first  # type: MetricValue
+    ):
+        # type: (...) -> None
+        first = float(first)
+        self.last = first
+        self.min = first
+        self.max = first
+        self.sum = first
+        self.count = 1
+
+    @property
+    def weight(self):
+        # type: (...) -> int
+        # A gauge is flushed as five serialized values: last, min, max, sum, count.
+        return 5
+
+    def add(
+        self, value  # type: MetricValue
+    ):
+        # type: (...) -> None
+        value = float(value)
+        self.last = value
+        self.min = min(self.min, value)
+        self.max = max(self.max, value)
+        self.sum += value
+        self.count += 1
+
+    def serialize_value(self):
+        # type: (...) -> Iterable[FlushedMetricValue]
+        return (
+            self.last,
+            self.min,
+            self.max,
+            self.sum,
+            self.count,
+        )
+
+
+class DistributionMetric(Metric):
+    __slots__ = ("value",)
+
+    def __init__(
+        self, first  # type: MetricValue
+    ):
+        # type: (...) -> None
+        self.value = [float(first)]
+
+    @property
+    def weight(self):
+        # type: (...) -> int
+        return len(self.value)
+
+    def add(
+        self, value  # type: MetricValue
+    ):
+        # type: (...) -> None
+        self.value.append(float(value))
+
+    def serialize_value(self):
+        # type: (...) -> Iterable[FlushedMetricValue]
+        return self.value
+
+
+class SetMetric(Metric):
+    __slots__ = ("value",)
+
+    def __init__(
+        self, first  # type: MetricValue
+    ):
+        # type: (...) -> None
+        self.value = {first}
+
+    @property
+    def weight(self):
+        # type: (...) -> int
+        return len(self.value)
+
+    def add(
+        self, value  # type: MetricValue
+    ):
+        # type: (...) -> None
+        self.value.add(value)
+
+    def serialize_value(self):
+        # type: (...) -> Iterable[FlushedMetricValue]
+        def _hash(x):
+            # type: (MetricValue) -> int
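+            # Strings are hashed to a stable 32-bit value so set elements
+            # serialize to fixed-size integers; numeric values are truncated.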
+            if isinstance(x, str):
+                return zlib.crc32(x.encode("utf-8")) & 0xFFFFFFFF
+            return int(x)
+
+        return (_hash(value) for value in self.value)
+
+
+def _encode_metrics(flushable_buckets):
+    # type: (Iterable[Tuple[int, Dict[BucketKey, Metric]]]) -> bytes
+    out = io.BytesIO()
+    _write = out.write
+
+    # Note on sanitization: we intentionally sanitize on emission (serialization)
+    # and not during aggregation, for performance reasons.  This means that the
+    # envelope can in fact contain duplicate buckets.  This is acceptable for
+    # relay-side emission and should not happen commonly.
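+    # The per-bucket wire format is
+    #   <name>@<unit>:<value>[:<value>...]|<type>[|#<tag>:<value>,...]|T<timestamp>
+    # e.g. a counter "hits" (unit "none") with value 2.0 and tag env:prod in the
+    # bucket at timestamp 1700000000 serializes to:
+    #   hits@none:2.0|c|#env:prod|T1700000000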
+
+    for timestamp, buckets in flushable_buckets:
+        for bucket_key, metric in iteritems(buckets):
+            metric_type, metric_name, metric_unit, metric_tags = bucket_key
+            metric_name = _sanitize_metric_key(metric_name)
+            metric_unit = _sanitize_unit(metric_unit)
+            _write(metric_name.encode("utf-8"))
+            _write(b"@")
+            _write(metric_unit.encode("utf-8"))
+
+            for serialized_value in metric.serialize_value():
+                _write(b":")
+                _write(str(serialized_value).encode("utf-8"))
+
+            _write(b"|")
+            _write(metric_type.encode("ascii"))
+
+            if metric_tags:
+                _write(b"|#")
+                first = True
+                for tag_key, tag_value in metric_tags:
+                    tag_key = _sanitize_tag_key(tag_key)
+                    if not tag_key:
+                        continue
+                    if first:
+                        first = False
+                    else:
+                        _write(b",")
+                    _write(tag_key.encode("utf-8"))
+                    _write(b":")
+                    _write(_sanitize_tag_value(tag_value).encode("utf-8"))
+
+            _write(b"|T")
+            _write(str(timestamp).encode("ascii"))
+            _write(b"\n")
+
+    return out.getvalue()
+
+
+def _encode_locations(timestamp, code_locations):
+    # type: (int, Iterable[Tuple[MetricMetaKey, Dict[str, Any]]]) -> bytes
+    mapping = {}  # type: Dict[str, List[Any]]
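+    # Group code locations by metric resource identifier (MRI), which has the
+    # form "<type>:<name>@<unit>".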
+
+    for key, loc in code_locations:
+        metric_type, name, unit = key
+        mri = "{}:{}@{}".format(
+            metric_type, _sanitize_metric_key(name), _sanitize_unit(unit)
+        )
+
+        loc["type"] = "location"
+        mapping.setdefault(mri, []).append(loc)
+
+    return json_dumps({"timestamp": timestamp, "mapping": mapping})
+
+
+METRIC_TYPES = {
+    "c": CounterMetric,
+    "g": GaugeMetric,
+    "d": DistributionMetric,
+    "s": SetMetric,
+}
+
+# Functions returning the current time in the given duration unit.  Some of
+# these units are admittedly impractical for timing.
+TIMING_FUNCTIONS = {
+    "nanosecond": nanosecond_time,
+    "microsecond": lambda: nanosecond_time() / 1000.0,
+    "millisecond": lambda: nanosecond_time() / 1000000.0,
+    "second": now,
+    "minute": lambda: now() / 60.0,
+    "hour": lambda: now() / 3600.0,
+    "day": lambda: now() / 3600.0 / 24.0,
+    "week": lambda: now() / 3600.0 / 24.0 / 7.0,
+}
+
+
+class LocalAggregator(object):
+    __slots__ = ("_measurements",)
+
+    def __init__(self):
+        # type: (...) -> None
+        self._measurements = (
+            {}
+        )  # type: Dict[Tuple[str, MetricTagsInternal], Tuple[float, float, int, float]]
+
+    def add(
+        self,
+        ty,  # type: MetricType
+        key,  # type: str
+        value,  # type: float
+        unit,  # type: MeasurementUnit
+        tags,  # type: MetricTagsInternal
+    ):
+        # type: (...) -> None
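+        # Fold the value into a running (min, max, count, sum) summary keyed by
+        # the exported metric name and its serialized tags.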
+        export_key = "%s:%s@%s" % (ty, key, unit)
+        bucket_key = (export_key, tags)
+
+        old = self._measurements.get(bucket_key)
+        if old is not None:
+            v_min, v_max, v_count, v_sum = old
+            v_min = min(v_min, value)
+            v_max = max(v_max, value)
+            v_count += 1
+            v_sum += value
+        else:
+            v_min = v_max = v_sum = value
+            v_count = 1
+        self._measurements[bucket_key] = (v_min, v_max, v_count, v_sum)
+
+    def to_json(self):
+        # type: (...) -> Dict[str, Any]
+        rv = {}  # type: Any
+        for (export_key, tags), (
+            v_min,
+            v_max,
+            v_count,
+            v_sum,
+        ) in self._measurements.items():
+            rv.setdefault(export_key, []).append(
+                {
+                    "tags": _tags_to_dict(tags),
+                    "min": v_min,
+                    "max": v_max,
+                    "count": v_count,
+                    "sum": v_sum,
+                }
+            )
+        return rv
+
+
+class MetricsAggregator(object):
+    ROLLUP_IN_SECONDS = 10.0
+    MAX_WEIGHT = 100000
+    FLUSHER_SLEEP_TIME = 5.0
+
+    def __init__(
+        self,
+        capture_func,  # type: Callable[[Envelope], None]
+        enable_code_locations=False,  # type: bool
+    ):
+        # type: (...) -> None
+        self.buckets = {}  # type: Dict[int, Any]
+        self._enable_code_locations = enable_code_locations
+        self._seen_locations = _set()  # type: Set[Tuple[int, MetricMetaKey]]
+        self._pending_locations = {}  # type: Dict[int, List[Tuple[MetricMetaKey, Any]]]
+        self._buckets_total_weight = 0
+        self._capture_func = capture_func
+        self._running = True
+        self._lock = threading.Lock()
+
+        self._flush_event = threading.Event()  # type: threading.Event
+        self._force_flush = False
+
+        # The aggregator shifts its flushing by up to an entire rollup window to
+        # avoid multiple clients trampling on the end of a 10-second window, as
+        # all buckets are anchored to multiples of ROLLUP_IN_SECONDS.  We randomize
+        # this number once per aggregator boot to achieve some level of offsetting
+        # across a fleet of deployed SDKs.  Relay itself will also apply independent
+        # jittering.
+        self._flush_shift = random.random() * self.ROLLUP_IN_SECONDS
+
+        self._flusher = None  # type: Optional[threading.Thread]
+        self._flusher_pid = None  # type: Optional[int]
+
+    def _ensure_thread(self):
+        # type: (...) -> bool
+        """For forking processes we might need to restart this thread.
+        This ensures that our process actually has that thread running.
+        """
+        if not self._running:
+            return False
+
+        pid = os.getpid()
+        if self._flusher_pid == pid:
+            return True
+
+        with self._lock:
+            # Recheck to make sure another thread didn't get here and start
+            # the flusher in the meantime.
+            if self._flusher_pid == pid:
+                return True
+
+            self._flusher_pid = pid
+
+            self._flusher = threading.Thread(target=self._flush_loop)
+            self._flusher.daemon = True
+
+            try:
+                self._flusher.start()
+            except RuntimeError:
+                # Unfortunately at this point the interpreter is in a state that no
+                # longer allows us to spawn a thread and we have to bail.
+                self._running = False
+                return False
+
+        return True
+
+    def _flush_loop(self):
+        # type: (...) -> None
+        _in_metrics.set(True)
+        while self._running or self._force_flush:
+            if self._running:
+                self._flush_event.wait(self.FLUSHER_SLEEP_TIME)
+            self._flush()
+
+    def _flush(self):
+        # type: (...) -> None
+        self._emit(self._flushable_buckets(), self._flushable_locations())
+
+    def _flushable_buckets(self):
+        # type: (...) -> Iterable[Tuple[int, Dict[BucketKey, Metric]]]
+        with self._lock:
+            force_flush = self._force_flush
+            cutoff = time.time() - self.ROLLUP_IN_SECONDS - self._flush_shift
+            flushable_buckets = ()  # type: Iterable[Tuple[int, Dict[BucketKey, Metric]]]
+            weight_to_remove = 0
+
+            if force_flush:
+                flushable_buckets = self.buckets.items()
+                self.buckets = {}
+                self._buckets_total_weight = 0
+                self._force_flush = False
+            else:
+                flushable_buckets = []
+                for buckets_timestamp, buckets in iteritems(self.buckets):
+                    # If the timestamp of the bucket is newer than the cutoff, it is
+                    # still within its rollup window and we skip it.
+                    if buckets_timestamp <= cutoff:
+                        flushable_buckets.append((buckets_timestamp, buckets))
+
+                # We will clear the elements while holding the lock, in order to avoid requesting it downstream again.
+                for buckets_timestamp, buckets in flushable_buckets:
+                    for _, metric in iteritems(buckets):
+                        weight_to_remove += metric.weight
+                    del self.buckets[buckets_timestamp]
+
+                self._buckets_total_weight -= weight_to_remove
+
+        return flushable_buckets
+
+    def _flushable_locations(self):
+        # type: (...) -> Dict[int, List[Tuple[MetricMetaKey, Dict[str, Any]]]]
+        with self._lock:
+            locations = self._pending_locations
+            self._pending_locations = {}
+        return locations
+
+    @metrics_noop
+    def add(
+        self,
+        ty,  # type: MetricType
+        key,  # type: str
+        value,  # type: MetricValue
+        unit,  # type: MeasurementUnit
+        tags,  # type: Optional[MetricTags]
+        timestamp=None,  # type: Optional[Union[float, datetime]]
+        local_aggregator=None,  # type: Optional[LocalAggregator]
+        stacklevel=0,  # type: Optional[int]
+    ):
+        # type: (...) -> None
+        if not self._ensure_thread() or self._flusher is None:
+            return None
+
+        if timestamp is None:
+            timestamp = time.time()
+        elif isinstance(timestamp, datetime):
+            timestamp = to_timestamp(timestamp)
+
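+        # Truncate the timestamp down to the start of its rollup window so that
+        # all values within the same 10-second bucket aggregate together.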
+        bucket_timestamp = int(
+            (timestamp // self.ROLLUP_IN_SECONDS) * self.ROLLUP_IN_SECONDS
+        )
+        serialized_tags = _serialize_tags(tags)
+        bucket_key = (
+            ty,
+            key,
+            unit,
+            serialized_tags,
+        )
+
+        with self._lock:
+            local_buckets = self.buckets.setdefault(bucket_timestamp, {})
+            metric = local_buckets.get(bucket_key)
+            if metric is not None:
+                previous_weight = metric.weight
+                metric.add(value)
+            else:
+                metric = local_buckets[bucket_key] = METRIC_TYPES[ty](value)
+                previous_weight = 0
+
+            added = metric.weight - previous_weight
+
+            if stacklevel is not None:
+                self.record_code_location(ty, key, unit, stacklevel + 2, timestamp)
+
+        # Given the new weight we consider whether we want to force flush.
+        self._consider_force_flush()
+
+        # For sets, we only record that a value has been added to the set but not which one.
+        # See develop docs: https://develop.sentry.dev/sdk/metrics/#sets
+        if local_aggregator is not None:
+            local_value = float(added if ty == "s" else value)
+            local_aggregator.add(ty, key, local_value, unit, serialized_tags)
+
+    def record_code_location(
+        self,
+        ty,  # type: MetricType
+        key,  # type: str
+        unit,  # type: MeasurementUnit
+        stacklevel,  # type: int
+        timestamp=None,  # type: Optional[float]
+    ):
+        # type: (...) -> None
+        if not self._enable_code_locations:
+            return
+        if timestamp is None:
+            timestamp = time.time()
+        meta_key = (ty, key, unit)
+        start_of_day = utc_from_timestamp(timestamp).replace(
+            hour=0, minute=0, second=0, microsecond=0, tzinfo=None
+        )
+        start_of_day = int(to_timestamp(start_of_day))
+
+        if (start_of_day, meta_key) not in self._seen_locations:
+            self._seen_locations.add((start_of_day, meta_key))
+            loc = get_code_location(stacklevel + 3)
+            if loc is not None:
+                # Group metadata by day to make flushing more efficient.
+                # There needs to be one envelope item per timestamp.
+                self._pending_locations.setdefault(start_of_day, []).append(
+                    (meta_key, loc)
+                )
+
+    @metrics_noop
+    def need_code_location(
+        self,
+        ty,  # type: MetricType
+        key,  # type: str
+        unit,  # type: MeasurementUnit
+        timestamp,  # type: float
+    ):
+        # type: (...) -> bool
+        if self._enable_code_locations:
+            return False
+        meta_key = (ty, key, unit)
+        start_of_day = utc_from_timestamp(timestamp).replace(
+            hour=0, minute=0, second=0, microsecond=0, tzinfo=None
+        )
+        start_of_day = int(to_timestamp(start_of_day))
+        return (start_of_day, meta_key) not in self._seen_locations
+
+    def kill(self):
+        # type: (...) -> None
+        if self._flusher is None:
+            return
+
+        self._running = False
+        self._flush_event.set()
+        self._flusher = None
+
+    @metrics_noop
+    def flush(self):
+        # type: (...) -> None
+        self._force_flush = True
+        self._flush()
+
+    def _consider_force_flush(self):
+        # type: (...) -> None
+        # It's important to acquire a lock around this method, since it will touch shared data structures.
+        total_weight = len(self.buckets) + self._buckets_total_weight
+        if total_weight >= self.MAX_WEIGHT:
+            self._force_flush = True
+            self._flush_event.set()
+
+    def _emit(
+        self,
+        flushable_buckets,  # type: Iterable[Tuple[int, Dict[BucketKey, Metric]]]
+        code_locations,  # type: Dict[int, List[Tuple[MetricMetaKey, Dict[str, Any]]]]
+    ):
+        # type: (...) -> Optional[Envelope]
+        envelope = Envelope()
+
+        if flushable_buckets:
+            encoded_metrics = _encode_metrics(flushable_buckets)
+            envelope.add_item(Item(payload=encoded_metrics, type="statsd"))
+
+        for timestamp, locations in iteritems(code_locations):
+            encoded_locations = _encode_locations(timestamp, locations)
+            envelope.add_item(Item(payload=encoded_locations, type="metric_meta"))
+
+        if envelope.items:
+            self._capture_func(envelope)
+            return envelope
+        return None
+
+
+def _serialize_tags(
+    tags,  # type: Optional[MetricTags]
+):
+    # type: (...) -> MetricTagsInternal
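+    # Normalize tags into a sorted, hashable tuple usable as part of a bucket
+    # key, e.g. {"region": ["eu", "us"]} -> (("region", "eu"), ("region", "us")).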
+    if not tags:
+        return ()
+
+    rv = []
+    for key, value in iteritems(tags):
+        # If the value is a collection, we want to flatten it.
+        if isinstance(value, (list, tuple)):
+            for inner_value in value:
+                if inner_value is not None:
+                    rv.append((key, text_type(inner_value)))
+        elif value is not None:
+            rv.append((key, text_type(value)))
+
+    # It's very important to sort the tags in order to obtain the
+    # same bucket key.
+    return tuple(sorted(rv))
+
+
+def _tags_to_dict(tags):
+    # type: (MetricTagsInternal) -> Dict[str, Any]
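+    # Inverse of _serialize_tags: repeated tag keys are folded back into lists.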
+    rv = {}  # type: Dict[str, Any]
+    for tag_name, tag_value in tags:
+        old_value = rv.get(tag_name)
+        if old_value is not None:
+            if isinstance(old_value, list):
+                old_value.append(tag_value)
+            else:
+                rv[tag_name] = [old_value, tag_value]
+        else:
+            rv[tag_name] = tag_value
+    return rv
+
+
+def _get_aggregator():
+    # type: () -> Optional[MetricsAggregator]
+    hub = sentry_sdk.Hub.current
+    client = hub.client
+    return (
+        client.metrics_aggregator
+        if client is not None and client.metrics_aggregator is not None
+        else None
+    )
+
+
+def _get_aggregator_and_update_tags(key, value, unit, tags):
+    # type: (str, Optional[MetricValue], MeasurementUnit, Optional[MetricTags]) -> Tuple[Optional[MetricsAggregator], Optional[LocalAggregator], Optional[MetricTags]]
+    hub = sentry_sdk.Hub.current
+    client = hub.client
+    if client is None or client.metrics_aggregator is None:
+        return None, None, tags
+
+    updated_tags = dict(tags or ())  # type: Dict[str, MetricTagValue]
+    updated_tags.setdefault("release", client.options["release"])
+    updated_tags.setdefault("environment", client.options["environment"])
+
+    scope = hub.scope
+    local_aggregator = None
+
+    # We go with the low-level scope API here to access transaction information,
+    # as it is the same whether the SDK runs with errors only or with
+    # errors + performance.
+    transaction_source = scope._transaction_info.get("source")
+    if transaction_source in GOOD_TRANSACTION_SOURCES:
+        transaction_name = scope._transaction
+        if transaction_name:
+            updated_tags.setdefault("transaction", transaction_name)
+        if scope._span is not None:
+            local_aggregator = scope._span._get_local_aggregator()
+
+    experiments = client.options.get("_experiments", {})
+    before_emit_callback = experiments.get("before_emit_metric")
+    if before_emit_callback is not None:
+        with recursion_protection() as in_metrics:
+            if not in_metrics:
+                if not before_emit_callback(key, value, unit, updated_tags):
+                    return None, None, updated_tags
+
+    return client.metrics_aggregator, local_aggregator, updated_tags
+
+
+def increment(
+    key,  # type: str
+    value=1.0,  # type: float
+    unit="none",  # type: MeasurementUnit
+    tags=None,  # type: Optional[MetricTags]
+    timestamp=None,  # type: Optional[Union[float, datetime]]
+    stacklevel=0,  # type: int
+):
+    # type: (...) -> None
+    """Increments a counter."""
+    aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(
+        key, value, unit, tags
+    )
+    if aggregator is not None:
+        aggregator.add(
+            "c", key, value, unit, tags, timestamp, local_aggregator, stacklevel
+        )
+
+
+# alias as incr is relatively common in python
+incr = increment
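+
+# A minimal usage sketch (hypothetical metric key and tags; assumes a client
+# initialized with the metrics aggregator enabled):
+#
+#     from sentry_sdk import metrics
+#
+#     metrics.increment("button_click", 1, tags={"browser": "firefox"})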
+
+
+class _Timing(object):
+    def __init__(
+        self,
+        key,  # type: str
+        tags,  # type: Optional[MetricTags]
+        timestamp,  # type: Optional[Union[float, datetime]]
+        value,  # type: Optional[float]
+        unit,  # type: DurationUnit
+        stacklevel,  # type: int
+    ):
+        # type: (...) -> None
+        self.key = key
+        self.tags = tags
+        self.timestamp = timestamp
+        self.value = value
+        self.unit = unit
+        self.entered = None  # type: Optional[float]
+        self._span = None  # type: Optional[sentry_sdk.tracing.Span]
+        self.stacklevel = stacklevel
+
+    def _validate_invocation(self, context):
+        # type: (str) -> None
+        if self.value is not None:
+            raise TypeError(
+                "cannot use timing as %s when a value is provided" % context
+            )
+
+    def __enter__(self):
+        # type: (...) -> _Timing
+        self.entered = TIMING_FUNCTIONS[self.unit]()
+        self._validate_invocation("context-manager")
+        self._span = sentry_sdk.start_span(op="metric.timing", description=self.key)
+        if self.tags:
+            for key, value in self.tags.items():
+                if isinstance(value, (tuple, list)):
+                    value = ",".join(sorted(map(str, value)))
+                self._span.set_tag(key, value)
+        self._span.__enter__()
+
+        # report code locations here for better accuracy
+        aggregator = _get_aggregator()
+        if aggregator is not None:
+            aggregator.record_code_location("d", self.key, self.unit, self.stacklevel)
+
+        return self
+
+    def __exit__(self, exc_type, exc_value, tb):
+        # type: (Any, Any, Any) -> None
+        assert self._span, "did not enter"
+        aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(
+            self.key,
+            self.value,
+            self.unit,
+            self.tags,
+        )
+        if aggregator is not None:
+            elapsed = TIMING_FUNCTIONS[self.unit]() - self.entered  # type: ignore
+            aggregator.add(
+                "d",
+                self.key,
+                elapsed,
+                self.unit,
+                tags,
+                self.timestamp,
+                local_aggregator,
+                None,  # code locations are reported in __enter__
+            )
+
+        self._span.__exit__(exc_type, exc_value, tb)
+        self._span = None
+
+    def __call__(self, f):
+        # type: (Any) -> Any
+        self._validate_invocation("decorator")
+
+        @wraps(f)
+        def timed_func(*args, **kwargs):
+            # type: (*Any, **Any) -> Any
+            with timing(
+                key=self.key,
+                tags=self.tags,
+                timestamp=self.timestamp,
+                unit=self.unit,
+                stacklevel=self.stacklevel + 1,
+            ):
+                return f(*args, **kwargs)
+
+        return timed_func
+
+
+def timing(
+    key,  # type: str
+    value=None,  # type: Optional[float]
+    unit="second",  # type: DurationUnit
+    tags=None,  # type: Optional[MetricTags]
+    timestamp=None,  # type: Optional[Union[float, datetime]]
+    stacklevel=0,  # type: int
+):
+    # type: (...) -> _Timing
+    """Emits a distribution with the time it takes to run the given code block.
+
+    This method supports three forms of invocation:
+
+    - when a `value` is provided, it functions similarly to `distribution`, the
+      only difference being that some additional tags are supplied
+    - it can be used as a context manager
+    - it can be used as a decorator
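+
+    A minimal sketch of the latter two forms (hypothetical metric key):
+
+        with timing("task.duration"):
+            ...
+
+        @timing("task.duration")
+        def task():
+            ...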
+    """
+    if value is not None:
+        aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(
+            key, value, unit, tags
+        )
+        if aggregator is not None:
+            aggregator.add(
+                "d", key, value, unit, tags, timestamp, local_aggregator, stacklevel
+            )
+    return _Timing(key, tags, timestamp, value, unit, stacklevel)
+
+
+def distribution(
+    key,  # type: str
+    value,  # type: float
+    unit="none",  # type: MeasurementUnit
+    tags=None,  # type: Optional[MetricTags]
+    timestamp=None,  # type: Optional[Union[float, datetime]]
+    stacklevel=0,  # type: int
+):
+    # type: (...) -> None
+    """Emits a distribution."""
+    aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(
+        key, value, unit, tags
+    )
+    if aggregator is not None:
+        aggregator.add(
+            "d", key, value, unit, tags, timestamp, local_aggregator, stacklevel
+        )
+
+
+def set(
+    key,  # type: str
+    value,  # type: MetricValue
+    unit="none",  # type: MeasurementUnit
+    tags=None,  # type: Optional[MetricTags]
+    timestamp=None,  # type: Optional[Union[float, datetime]]
+    stacklevel=0,  # type: int
+):
+    # type: (...) -> None
+    """Emits a set."""
+    aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(
+        key, value, unit, tags
+    )
+    if aggregator is not None:
+        aggregator.add(
+            "s", key, value, unit, tags, timestamp, local_aggregator, stacklevel
+        )
+
+
+def gauge(
+    key,  # type: str
+    value,  # type: float
+    unit="none",  # type: MeasurementUnit
+    tags=None,  # type: Optional[MetricTags]
+    timestamp=None,  # type: Optional[Union[float, datetime]]
+    stacklevel=0,  # type: int
+):
+    # type: (...) -> None
+    """Emits a gauge."""
+    aggregator, local_aggregator, tags = _get_aggregator_and_update_tags(
+        key, value, unit, tags
+    )
+    if aggregator is not None:
+        aggregator.add(
+            "g", key, value, unit, tags, timestamp, local_aggregator, stacklevel
+        )
diff --git a/sentry_sdk/monitor.py b/sentry_sdk/monitor.py
new file mode 100644
index 0000000000..71ca5e6c31
--- /dev/null
+++ b/sentry_sdk/monitor.py
@@ -0,0 +1,123 @@
+import os
+import time
+from threading import Thread, Lock
+
+import sentry_sdk
+from sentry_sdk.utils import logger
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Optional
+
+
+MAX_DOWNSAMPLE_FACTOR = 10
+
+
+class Monitor(object):
+    """
+    Performs health checks in a separate thread once every interval seconds
+    and updates the internal state. Other parts of the SDK only read this state
+    and act accordingly.
+    """
+
+    name = "sentry.monitor"
+
+    def __init__(self, transport, interval=10):
+        # type: (sentry_sdk.transport.Transport, float) -> None
+        self.transport = transport  # type: sentry_sdk.transport.Transport
+        self.interval = interval  # type: float
+
+        self._healthy = True
+        self._downsample_factor = 0  # type: int
+
+        self._thread = None  # type: Optional[Thread]
+        self._thread_lock = Lock()
+        self._thread_for_pid = None  # type: Optional[int]
+        self._running = True
+
+    def _ensure_running(self):
+        # type: () -> None
+        """
+        Check that the monitor has an active thread to run in, or create one if not.
+
+        Note that this might fail (e.g. in Python 3.12 it's not possible to
+        spawn new threads at interpreter shutdown). In that case self._running
+        will be False after running this function.
+        """
+        if self._thread_for_pid == os.getpid() and self._thread is not None:
+            return None
+
+        with self._thread_lock:
+            if self._thread_for_pid == os.getpid() and self._thread is not None:
+                return None
+
+            def _thread():
+                # type: (...) -> None
+                while self._running:
+                    time.sleep(self.interval)
+                    if self._running:
+                        self.run()
+
+            thread = Thread(name=self.name, target=_thread)
+            thread.daemon = True
+            try:
+                thread.start()
+            except RuntimeError:
+                # Unfortunately at this point the interpreter is in a state that no
+                # longer allows us to spawn a thread and we have to bail.
+                self._running = False
+                return None
+
+            self._thread = thread
+            self._thread_for_pid = os.getpid()
+
+        return None
+
+    def run(self):
+        # type: () -> None
+        self.check_health()
+        self.set_downsample_factor()
+
+    def set_downsample_factor(self):
+        # type: () -> None
+        if self._healthy:
+            if self._downsample_factor > 0:
+                logger.debug(
+                    "[Monitor] health check positive, reverting to normal sampling"
+                )
+            self._downsample_factor = 0
+        else:
+            if self._downsample_factor < MAX_DOWNSAMPLE_FACTOR:
+                self._downsample_factor += 1
+            logger.debug(
+                "[Monitor] health check negative, downsampling with a factor of %d",
+                self._downsample_factor,
+            )
+
+    def check_health(self):
+        # type: () -> None
+        """
+        Perform the actual health checks;
+        currently this only checks whether the transport is rate-limited.
+        TODO: augment in the future with more checks.
+        """
+        self._healthy = self.transport.is_healthy()
+
+    def is_healthy(self):
+        # type: () -> bool
+        self._ensure_running()
+        return self._healthy
+
+    @property
+    def downsample_factor(self):
+        # type: () -> int
+        self._ensure_running()
+        return self._downsample_factor
+
+    def kill(self):
+        # type: () -> None
+        self._running = False
+
+    def __del__(self):
+        # type: () -> None
+        self.kill()
diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
new file mode 100644
index 0000000000..da5a4a8228
--- /dev/null
+++ b/sentry_sdk/profiler.py
@@ -0,0 +1,987 @@
+"""
+This file is originally based on code from https://github.com/nylas/nylas-perftools,
+which is published under the following license:
+
+The MIT License (MIT)
+
+Copyright (c) 2014 Nylas
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+"""
+
+import atexit
+import os
+import platform
+import random
+import sys
+import threading
+import time
+import uuid
+from collections import deque
+
+import sentry_sdk
+from sentry_sdk._compat import PY33, PY311
+from sentry_sdk._lru_cache import LRUCache
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.utils import (
+    capture_internal_exception,
+    filename_for_module,
+    get_current_thread_meta,
+    is_gevent,
+    is_valid_sample_rate,
+    logger,
+    nanosecond_time,
+    set_in_app_in_frames,
+)
+
+if TYPE_CHECKING:
+    from types import FrameType
+    from typing import Any
+    from typing import Callable
+    from typing import Deque
+    from typing import Dict
+    from typing import List
+    from typing import Optional
+    from typing import Set
+    from typing import Sequence
+    from typing import Tuple
+    from typing_extensions import TypedDict
+
+    import sentry_sdk.tracing
+    from sentry_sdk._types import Event, SamplingContext, ProfilerMode
+
+    ThreadId = str
+
+    ProcessedSample = TypedDict(
+        "ProcessedSample",
+        {
+            "elapsed_since_start_ns": str,
+            "thread_id": ThreadId,
+            "stack_id": int,
+        },
+    )
+
+    ProcessedStack = List[int]
+
+    ProcessedFrame = TypedDict(
+        "ProcessedFrame",
+        {
+            "abs_path": str,
+            "filename": Optional[str],
+            "function": str,
+            "lineno": int,
+            "module": Optional[str],
+        },
+    )
+
+    ProcessedThreadMetadata = TypedDict(
+        "ProcessedThreadMetadata",
+        {"name": str},
+    )
+
+    ProcessedProfile = TypedDict(
+        "ProcessedProfile",
+        {
+            "frames": List[ProcessedFrame],
+            "stacks": List[ProcessedStack],
+            "samples": List[ProcessedSample],
+            "thread_metadata": Dict[ThreadId, ProcessedThreadMetadata],
+        },
+    )
+
+    ProfileContext = TypedDict(
+        "ProfileContext",
+        {"profile_id": str},
+    )
+
+    FrameId = Tuple[
+        str,  # abs_path
+        int,  # lineno
+        str,  # function
+    ]
+    FrameIds = Tuple[FrameId, ...]
+
+    # The exact value of this id is not very meaningful. The purpose
+    # of this id is to give us a compact and unique identifier for a
+    # raw stack that can be used as a key to a dictionary so that it
+    # can be used during the sampled format generation.
+    StackId = Tuple[int, int]
+
+    ExtractedStack = Tuple[StackId, FrameIds, List[ProcessedFrame]]
+    ExtractedSample = Sequence[Tuple[ThreadId, ExtractedStack]]
+
+
+try:
+    from gevent.monkey import get_original  # type: ignore
+    from gevent.threadpool import ThreadPool  # type: ignore
+
+    thread_sleep = get_original("time", "sleep")
+except ImportError:
+    thread_sleep = time.sleep
+
+    ThreadPool = None
+
+
+_scheduler = None  # type: Optional[Scheduler]
+
+# The default sampling frequency to use. This is set at 101 in order to
+# mitigate the effects of lockstep sampling.
+DEFAULT_SAMPLING_FREQUENCY = 101
+
+
+# The minimum number of unique samples that must exist in a profile to be
+# considered valid.
+PROFILE_MINIMUM_SAMPLES = 2
+
+
+def has_profiling_enabled(options):
+    # type: (Dict[str, Any]) -> bool
+    profiles_sampler = options["profiles_sampler"]
+    if profiles_sampler is not None:
+        return True
+
+    profiles_sample_rate = options["profiles_sample_rate"]
+    if profiles_sample_rate is not None and profiles_sample_rate > 0:
+        return True
+
+    profiles_sample_rate = options["_experiments"].get("profiles_sample_rate")
+    if profiles_sample_rate is not None and profiles_sample_rate > 0:
+        return True
+
+    return False
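+
+
+# A minimal sketch of enabling profiling from user code (hypothetical DSN;
+# these options feed `has_profiling_enabled` above):
+#
+#     import sentry_sdk
+#
+#     sentry_sdk.init(
+#         dsn="https://public@example.ingest.sentry.io/0",
+#         traces_sample_rate=1.0,
+#         profiles_sample_rate=1.0,
+#     )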
+
+
+def setup_profiler(options):
+    # type: (Dict[str, Any]) -> bool
+    global _scheduler
+
+    if _scheduler is not None:
+        logger.debug("[Profiling] Profiler is already setup")
+        return False
+
+    if not PY33:
+        logger.warning("[Profiling] Profiler requires Python >= 3.3")
+        return False
+
+    frequency = DEFAULT_SAMPLING_FREQUENCY
+
+    if is_gevent():
+        # If gevent has patched the threading modules then we cannot rely on
+        # them to spawn a native thread for sampling.
+        # Instead we default to the GeventScheduler which is capable of
+        # spawning native threads within gevent.
+        default_profiler_mode = GeventScheduler.mode
+    else:
+        default_profiler_mode = ThreadScheduler.mode
+
+    if options.get("profiler_mode") is not None:
+        profiler_mode = options["profiler_mode"]
+    else:
+        profiler_mode = (
+            options.get("_experiments", {}).get("profiler_mode")
+            or default_profiler_mode
+        )
+
+    if (
+        profiler_mode == ThreadScheduler.mode
+        # for legacy reasons, we'll keep supporting sleep mode for this scheduler
+        or profiler_mode == "sleep"
+    ):
+        _scheduler = ThreadScheduler(frequency=frequency)
+    elif profiler_mode == GeventScheduler.mode:
+        _scheduler = GeventScheduler(frequency=frequency)
+    else:
+        raise ValueError("Unknown profiler mode: {}".format(profiler_mode))
+
+    logger.debug(
+        "[Profiling] Setting up profiler in {mode} mode".format(mode=_scheduler.mode)
+    )
+    _scheduler.setup()
+
+    atexit.register(teardown_profiler)
+
+    return True
+
+
+def teardown_profiler():
+    # type: () -> None
+
+    global _scheduler
+
+    if _scheduler is not None:
+        _scheduler.teardown()
+
+    _scheduler = None
+
+
+# We want to impose a stack depth limit so that samples aren't too large.
+MAX_STACK_DEPTH = 128
+
+
+def extract_stack(
+    raw_frame,  # type: Optional[FrameType]
+    cache,  # type: LRUCache
+    cwd,  # type: str
+    max_stack_depth=MAX_STACK_DEPTH,  # type: int
+):
+    # type: (...) -> ExtractedStack
+    """
+    Extracts the stack starting at the specified frame. The extracted stack
+    assumes the specified frame is the top of the stack, and works back
+    to the bottom of the stack.
+
+    In the event that the stack is more than `MAX_STACK_DEPTH` frames deep,
+    only the first `MAX_STACK_DEPTH` frames will be returned.
+    """
+
+    raw_frames = deque(maxlen=max_stack_depth)  # type: Deque[FrameType]
+
+    while raw_frame is not None:
+        f_back = raw_frame.f_back
+        raw_frames.append(raw_frame)
+        raw_frame = f_back
+
+    frame_ids = tuple(frame_id(raw_frame) for raw_frame in raw_frames)
+    frames = []
+    for i, fid in enumerate(frame_ids):
+        frame = cache.get(fid)
+        if frame is None:
+            frame = extract_frame(fid, raw_frames[i], cwd)
+            cache.set(fid, frame)
+        frames.append(frame)
+
+    # Instead of mapping the stack into frame ids and hashing
+    # that as a tuple, we can directly hash the stack.
+    # This saves us from having to generate yet another list.
+    # Additionally, using the stack as the key directly is
+    # costly because the stack can be large, so we pre-hash
+    # the stack, and use the hash as the key since it will be
+    # needed a few times, which improves performance.
+    #
+    # To reduce the likelihood of hash collisions, we include
+    # the stack depth. This means that only stacks of the same
+    # depth can suffer from hash collisions.
+    stack_id = len(raw_frames), hash(frame_ids)
+
+    return stack_id, frame_ids, frames
+
+
+def frame_id(raw_frame):
+    # type: (FrameType) -> FrameId
+    return (
+        raw_frame.f_code.co_filename,
+        raw_frame.f_lineno,
+        get_frame_name(raw_frame),
+    )
+
+
+def extract_frame(fid, raw_frame, cwd):
+    # type: (FrameId, FrameType, str) -> ProcessedFrame
+    abs_path = raw_frame.f_code.co_filename
+
+    try:
+        module = raw_frame.f_globals["__name__"]
+    except Exception:
+        module = None
+
+    # namedtuples can be many times slower when initializing
+    # and accessing attributes, so we use a plain dict here instead
+    return {
+        # This originally was `os.path.abspath(abs_path)` but that had
+        # a large performance overhead.
+        #
+        # According to docs, this is equivalent to
+        # `os.path.normpath(os.path.join(os.getcwd(), path))`.
+        # The `os.getcwd()` call is slow here, so we precompute it.
+        #
+        # Additionally, since we are using normalized path already,
+        # we skip calling `os.path.normpath` entirely.
+        "abs_path": os.path.join(cwd, abs_path),
+        "module": module,
+        "filename": filename_for_module(module, abs_path) or None,
+        "function": fid[2],
+        "lineno": raw_frame.f_lineno,
+    }
+
+
+if PY311:
+
+    def get_frame_name(frame):
+        # type: (FrameType) -> str
+        return frame.f_code.co_qualname
+
+else:
+
+    def get_frame_name(frame):
+        # type: (FrameType) -> str
+
+        f_code = frame.f_code
+        co_varnames = f_code.co_varnames
+
+        # co_name only contains the frame name.  If the frame was a method,
+        # the class name will NOT be included.
+        name = f_code.co_name
+
+        # if it was a method, we can get the class name by inspecting
+        # the f_locals for the `self` argument
+        try:
+            if (
+                # the co_varnames start with the frame's positional arguments
+                # and we expect the first to be `self` if it's an instance method
+                co_varnames
+                and co_varnames[0] == "self"
+                and "self" in frame.f_locals
+            ):
+                for cls in frame.f_locals["self"].__class__.__mro__:
+                    if name in cls.__dict__:
+                        return "{}.{}".format(cls.__name__, name)
+        except (AttributeError, ValueError):
+            pass
+
+        # if it was a class method, (decorated with `@classmethod`)
+        # we can get the class name by inspecting the f_locals for the `cls` argument
+        try:
+            if (
+                # the co_varnames start with the frame's positional arguments
+                # and we expect the first to be `cls` if it's a class method
+                co_varnames
+                and co_varnames[0] == "cls"
+                and "cls" in frame.f_locals
+            ):
+                for cls in frame.f_locals["cls"].__mro__:
+                    if name in cls.__dict__:
+                        return "{}.{}".format(cls.__name__, name)
+        except (AttributeError, ValueError):
+            pass
+
+        # nothing we can do if it is a staticmethod (decorated with @staticmethod)
+
+        # we've done all we can, time to give up and return what we have
+        return name
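+
+# Note: for a method `Foo.bar` defined on a class, both branches aim to
+# return "Foo.bar", while plain functions yield just "bar". Staticmethods
+# fall back to the bare name on pre-3.11 interpreters, whereas `co_qualname`
+# includes the class.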
+
+
+MAX_PROFILE_DURATION_NS = int(3e10)  # 30 seconds
+
+
+class Profile(object):
+    def __init__(
+        self,
+        transaction,  # type: sentry_sdk.tracing.Transaction
+        hub=None,  # type: Optional[sentry_sdk.Hub]
+        scheduler=None,  # type: Optional[Scheduler]
+    ):
+        # type: (...) -> None
+        self.scheduler = _scheduler if scheduler is None else scheduler
+        self.hub = hub
+
+        self.event_id = uuid.uuid4().hex  # type: str
+
+        # Here, we assume that the sampling decision on the transaction has been finalized.
+        #
+        # We cannot keep a reference to the transaction around here because it'll create
+        # a reference cycle. So we opt to pull out just the necessary attributes.
+        self.sampled = transaction.sampled  # type: Optional[bool]
+
+        # Various framework integrations are capable of overwriting the active thread id.
+        # If it is set to `None` at the end of the profile, we fall back to the default.
+        self._default_active_thread_id = get_current_thread_meta()[0] or 0  # type: int
+        self.active_thread_id = None  # type: Optional[int]
+
+        try:
+            self.start_ns = transaction._start_timestamp_monotonic_ns  # type: int
+        except AttributeError:
+            self.start_ns = 0
+
+        self.stop_ns = 0  # type: int
+        self.active = False  # type: bool
+
+        self.indexed_frames = {}  # type: Dict[FrameId, int]
+        self.indexed_stacks = {}  # type: Dict[StackId, int]
+        self.frames = []  # type: List[ProcessedFrame]
+        self.stacks = []  # type: List[ProcessedStack]
+        self.samples = []  # type: List[ProcessedSample]
+
+        self.unique_samples = 0
+
+        transaction._profile = self
+
+    def update_active_thread_id(self):
+        # type: () -> None
+        self.active_thread_id = get_current_thread_meta()[0]
+        logger.debug(
+            "[Profiling] updating active thread id to {tid}".format(
+                tid=self.active_thread_id
+            )
+        )
+
+    def _set_initial_sampling_decision(self, sampling_context):
+        # type: (SamplingContext) -> None
+        """
+        Sets the profile's sampling decision according to the following
+        precedence rules:
+
+        1. If the transaction to be profiled is not sampled, that decision
+        will be used, regardless of anything else.
+
+        2. Use `profiles_sample_rate` to decide.
+        """
+
+        # The corresponding transaction was not sampled,
+        # so don't generate a profile for it.
+        if not self.sampled:
+            logger.debug(
+                "[Profiling] Discarding profile because transaction is discarded."
+            )
+            self.sampled = False
+            return
+
+        # The profiler hasn't been properly initialized.
+        if self.scheduler is None:
+            logger.debug(
+                "[Profiling] Discarding profile because profiler was not started."
+            )
+            self.sampled = False
+            return
+
+        hub = self.hub or sentry_sdk.Hub.current
+        client = hub.client
+
+        # The client is None, so we can't get the sample rate.
+        if client is None:
+            self.sampled = False
+            return
+
+        options = client.options
+
+        if callable(options.get("profiles_sampler")):
+            sample_rate = options["profiles_sampler"](sampling_context)
+        elif options["profiles_sample_rate"] is not None:
+            sample_rate = options["profiles_sample_rate"]
+        else:
+            sample_rate = options["_experiments"].get("profiles_sample_rate")
+
+        # The profiles_sample_rate option was not set, so profiling
+        # was never enabled.
+        if sample_rate is None:
+            logger.debug(
+                "[Profiling] Discarding profile because profiling was not enabled."
+            )
+            self.sampled = False
+            return
+
+        if not is_valid_sample_rate(sample_rate, source="Profiling"):
+            logger.warning(
+                "[Profiling] Discarding profile because of invalid sample rate."
+            )
+            self.sampled = False
+            return
+
+        # Now we roll the dice. random.random is inclusive of 0, but not of 1,
+        # so strict < is safe here. In case sample_rate is a boolean, cast it
+        # to a float (True becomes 1.0 and False becomes 0.0)
+        self.sampled = random.random() < float(sample_rate)
+
+        if self.sampled:
+            logger.debug("[Profiling] Initializing profile")
+        else:
+            logger.debug(
+                "[Profiling] Discarding profile because it's not included in the random sample (sample rate = {sample_rate})".format(
+                    sample_rate=float(sample_rate)
+                )
+            )
+
+    def start(self):
+        # type: () -> None
+        if not self.sampled or self.active:
+            return
+
+        assert self.scheduler, "No scheduler specified"
+        logger.debug("[Profiling] Starting profile")
+        self.active = True
+        if not self.start_ns:
+            self.start_ns = nanosecond_time()
+        self.scheduler.start_profiling(self)
+
+    def stop(self):
+        # type: () -> None
+        if not self.sampled or not self.active:
+            return
+
+        assert self.scheduler, "No scheduler specified"
+        logger.debug("[Profiling] Stopping profile")
+        self.active = False
+        self.scheduler.stop_profiling(self)
+        self.stop_ns = nanosecond_time()
+
+    def __enter__(self):
+        # type: () -> Profile
+        hub = self.hub or sentry_sdk.Hub.current
+
+        _, scope = hub._stack[-1]
+        old_profile = scope.profile
+        scope.profile = self
+
+        self._context_manager_state = (hub, scope, old_profile)
+
+        self.start()
+
+        return self
+
+    def __exit__(self, ty, value, tb):
+        # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
+        self.stop()
+
+        _, scope, old_profile = self._context_manager_state
+        del self._context_manager_state
+
+        scope.profile = old_profile
+
+    def write(self, ts, sample):
+        # type: (int, ExtractedSample) -> None
+        if not self.active:
+            return
+
+        if ts < self.start_ns:
+            return
+
+        offset = ts - self.start_ns
+        if offset > MAX_PROFILE_DURATION_NS:
+            self.stop()
+            return
+
+        self.unique_samples += 1
+
+        elapsed_since_start_ns = str(offset)
+
+        for tid, (stack_id, frame_ids, frames) in sample:
+            try:
+                # Check if the stack is indexed first, this lets us skip
+                # indexing frames if it's not necessary
+                if stack_id not in self.indexed_stacks:
+                    for i, frame_id in enumerate(frame_ids):
+                        if frame_id not in self.indexed_frames:
+                            self.indexed_frames[frame_id] = len(self.indexed_frames)
+                            self.frames.append(frames[i])
+
+                    self.indexed_stacks[stack_id] = len(self.indexed_stacks)
+                    self.stacks.append(
+                        [self.indexed_frames[frame_id] for frame_id in frame_ids]
+                    )
+
+                self.samples.append(
+                    {
+                        "elapsed_since_start_ns": elapsed_since_start_ns,
+                        "thread_id": tid,
+                        "stack_id": self.indexed_stacks[stack_id],
+                    }
+                )
+            except AttributeError:
+                # For some reason, the frame we get doesn't have certain attributes.
+                # When this happens, we abandon the current sample as it's bad.
+                capture_internal_exception(sys.exc_info())
+
+    def process(self):
+        # type: () -> ProcessedProfile
+
+        # This collects the thread metadata at the end of a profile. Doing it
+        # this way means that any threads that terminate before the profile ends
+        # will not have any metadata associated with them.
+        thread_metadata = {
+            str(thread.ident): {
+                "name": str(thread.name),
+            }
+            for thread in threading.enumerate()
+        }  # type: Dict[str, ProcessedThreadMetadata]
+
+        return {
+            "frames": self.frames,
+            "stacks": self.stacks,
+            "samples": self.samples,
+            "thread_metadata": thread_metadata,
+        }
+
+    def to_json(self, event_opt, options):
+        # type: (Event, Dict[str, Any]) -> Dict[str, Any]
+        profile = self.process()
+
+        set_in_app_in_frames(
+            profile["frames"],
+            options["in_app_exclude"],
+            options["in_app_include"],
+            options["project_root"],
+        )
+
+        return {
+            "environment": event_opt.get("environment"),
+            "event_id": self.event_id,
+            "platform": "python",
+            "profile": profile,
+            "release": event_opt.get("release", ""),
+            "timestamp": event_opt["start_timestamp"],
+            "version": "1",
+            "device": {
+                "architecture": platform.machine(),
+            },
+            "os": {
+                "name": platform.system(),
+                "version": platform.release(),
+            },
+            "runtime": {
+                "name": platform.python_implementation(),
+                "version": platform.python_version(),
+            },
+            "transactions": [
+                {
+                    "id": event_opt["event_id"],
+                    "name": event_opt["transaction"],
+                    # we start the transaction before the profile and this is
+                    # the transaction start time relative to the profile, so we
+                    # hardcode it to 0 until we can start the profile before
+                    "relative_start_ns": "0",
+                    # use the duration of the profile instead of the transaction
+                    # because we end the transaction after the profile
+                    "relative_end_ns": str(self.stop_ns - self.start_ns),
+                    "trace_id": event_opt["contexts"]["trace"]["trace_id"],
+                    "active_thread_id": str(
+                        self._default_active_thread_id
+                        if self.active_thread_id is None
+                        else self.active_thread_id
+                    ),
+                }
+            ],
+        }
+
+    def valid(self):
+        # type: () -> bool
+        hub = self.hub or sentry_sdk.Hub.current
+        client = hub.client
+        if client is None:
+            return False
+
+        if not has_profiling_enabled(client.options):
+            return False
+
+        if self.sampled is None or not self.sampled:
+            if client.transport:
+                client.transport.record_lost_event(
+                    "sample_rate", data_category="profile"
+                )
+            return False
+
+        if self.unique_samples < PROFILE_MINIMUM_SAMPLES:
+            if client.transport:
+                client.transport.record_lost_event(
+                    "insufficient_data", data_category="profile"
+                )
+            logger.debug("[Profiling] Discarding profile because insufficient samples.")
+            return False
+
+        return True
+
+
+class Scheduler(object):
+    mode = "unknown"  # type: ProfilerMode
+
+    def __init__(self, frequency):
+        # type: (int) -> None
+        self.interval = 1.0 / frequency
+
+        self.sampler = self.make_sampler()
+
+        # cap the number of new profiles at any time so it does not grow infinitely
+        self.new_profiles = deque(maxlen=128)  # type: Deque[Profile]
+        self.active_profiles = set()  # type: Set[Profile]
+
+    def __enter__(self):
+        # type: () -> Scheduler
+        self.setup()
+        return self
+
+    def __exit__(self, ty, value, tb):
+        # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
+        self.teardown()
+
+    def setup(self):
+        # type: () -> None
+        raise NotImplementedError
+
+    def teardown(self):
+        # type: () -> None
+        raise NotImplementedError
+
+    def ensure_running(self):
+        # type: () -> None
+        raise NotImplementedError
+
+    def start_profiling(self, profile):
+        # type: (Profile) -> None
+        self.ensure_running()
+        self.new_profiles.append(profile)
+
+    def stop_profiling(self, profile):
+        # type: (Profile) -> None
+        pass
+
+    def make_sampler(self):
+        # type: () -> Callable[..., None]
+        cwd = os.getcwd()
+
+        cache = LRUCache(max_size=256)
+
+        def _sample_stack(*args, **kwargs):
+            # type: (*Any, **Any) -> None
+            """
+            Take a sample of the stack on all the threads in the process.
+            This should be called at a regular interval to collect samples.
+            """
+            # no profiles taking place, so we can stop early
+            if not self.new_profiles and not self.active_profiles:
+                # nothing to do here; returning early also means we don't
+                # keep a reference to the last stack of frames around
+                return
+
+            # This is the number of profiles we want to pop off.
+            # It's possible another thread adds a new profile to
+            # the list and we spend longer than we want inside
+            # the loop below.
+            #
+            # Also make sure to set this value before extracting
+            # frames so we do not write to any new profiles that
+            # were started after this point.
+            new_profiles = len(self.new_profiles)
+
+            now = nanosecond_time()
+
+            try:
+                sample = [
+                    (str(tid), extract_stack(frame, cache, cwd))
+                    for tid, frame in sys._current_frames().items()
+                ]
+            except AttributeError:
+                # For some reason, the frame we get doesn't have certain attributes.
+                # When this happens, we abandon the current sample as it's bad.
+                capture_internal_exception(sys.exc_info())
+                return
+
+            # Move the new profiles into the active_profiles set.
+            #
+            # We cannot add directly to the active_profiles set
+            # in `start_profiling` because it is called from other
+            # threads, which can cause a RuntimeError when the
+            # set size changes during iteration without a lock.
+            #
+            # We also want to avoid using a lock here so threads
+            # that are starting profiles are not blocked while
+            # waiting to acquire the lock.
+            for _ in range(new_profiles):
+                self.active_profiles.add(self.new_profiles.popleft())
+
+            inactive_profiles = []
+
+            for profile in self.active_profiles:
+                if profile.active:
+                    profile.write(now, sample)
+                else:
+                    # If a profile is marked inactive, we buffer it
+                    # to `inactive_profiles` so it can be removed.
+                    # We cannot remove it here as it would result
+                    # in a RuntimeError.
+                    inactive_profiles.append(profile)
+
+            for profile in inactive_profiles:
+                self.active_profiles.remove(profile)
+
+        return _sample_stack
+
+
+class ThreadScheduler(Scheduler):
+    """
+    This scheduler is based on running a daemon thread that will call
+    the sampler at a regular interval.
+    """
+
+    mode = "thread"  # type: ProfilerMode
+    name = "sentry.profiler.ThreadScheduler"
+
+    def __init__(self, frequency):
+        # type: (int) -> None
+        super(ThreadScheduler, self).__init__(frequency=frequency)
+
+        # used to signal to the thread that it should stop
+        self.running = False
+        self.thread = None  # type: Optional[threading.Thread]
+        self.pid = None  # type: Optional[int]
+        self.lock = threading.Lock()
+
+    def setup(self):
+        # type: () -> None
+        pass
+
+    def teardown(self):
+        # type: () -> None
+        if self.running:
+            self.running = False
+            if self.thread is not None:
+                self.thread.join()
+
+    def ensure_running(self):
+        # type: () -> None
+        """
+        Check that the profiler has an active thread to run in, and start one if
+        that's not the case.
+
+        Note that this might fail (e.g. in Python 3.12 it's not possible to
+        spawn new threads at interpreter shutdown). In that case self.running
+        will be False after running this function.
+        """
+        pid = os.getpid()
+
+        # is running on the right process
+        if self.running and self.pid == pid:
+            return
+
+        with self.lock:
+            # another thread may have tried to acquire the lock
+            # at the same time so it may start another thread
+            # make sure to check again before proceeding
+            if self.running and self.pid == pid:
+                return
+
+            self.pid = pid
+            self.running = True
+
+            # make sure the thread is a daemon here otherwise this
+            # can keep the application running after other threads
+            # have exited
+            self.thread = threading.Thread(name=self.name, target=self.run, daemon=True)
+            try:
+                self.thread.start()
+            except RuntimeError:
+                # Unfortunately at this point the interpreter is in a state that no
+                # longer allows us to spawn a thread and we have to bail.
+                self.running = False
+                self.thread = None
+                return
+
+    def run(self):
+        # type: () -> None
+        last = time.perf_counter()
+
+        while self.running:
+            self.sampler()
+
+            # some time may have elapsed since the last time
+            # we sampled, so we need to account for that and
+            # not sleep for too long
+            elapsed = time.perf_counter() - last
+            if elapsed < self.interval:
+                thread_sleep(self.interval - elapsed)
+
+            # after sleeping, make sure to take the current
+            # timestamp so we can use it next iteration
+            last = time.perf_counter()
+
+
+class GeventScheduler(Scheduler):
+    """
+    This scheduler is based on the thread scheduler but adapted to work with
+    gevent. When using gevent, it may monkey patch the threading modules
+    (`threading` and `_thread`). This results in the use of greenlets instead
+    of native threads.
+
+    This is an issue because the sampler CANNOT run in a greenlet:
+    1. Other greenlets doing sync work will prevent the sampler from running
+    2. The greenlet runs in the same thread as other greenlets so when taking
+       a sample, other greenlets will have been evicted from the thread. This
+       results in a sample containing only the sampler's code.
+    """
+
+    mode = "gevent"  # type: ProfilerMode
+    name = "sentry.profiler.GeventScheduler"
+
+    def __init__(self, frequency):
+        # type: (int) -> None
+
+        if ThreadPool is None:
+            raise ValueError("Profiler mode: {} is not available".format(self.mode))
+
+        super(GeventScheduler, self).__init__(frequency=frequency)
+
+        # used to signal to the thread that it should stop
+        self.running = False
+        self.thread = None  # type: Optional[ThreadPool]
+        self.pid = None  # type: Optional[int]
+
+        # This intentionally uses the gevent patched threading.Lock.
+        # The lock will be required when first trying to start profiles
+        # as we need to spawn the profiler thread from the greenlets.
+        self.lock = threading.Lock()
+
+    def setup(self):
+        # type: () -> None
+        pass
+
+    def teardown(self):
+        # type: () -> None
+        if self.running:
+            self.running = False
+            if self.thread is not None:
+                self.thread.join()
+
+    def ensure_running(self):
+        # type: () -> None
+        pid = os.getpid()
+
+        # is running on the right process
+        if self.running and self.pid == pid:
+            return
+
+        with self.lock:
+            # another thread may have tried to acquire the lock
+            # at the same time so it may start another thread
+            # make sure to check again before proceeding
+            if self.running and self.pid == pid:
+                return
+
+            self.pid = pid
+            self.running = True
+
+            self.thread = ThreadPool(1)
+            try:
+                self.thread.spawn(self.run)
+            except RuntimeError:
+                # Unfortunately at this point the interpreter is in a state that no
+                # longer allows us to spawn a thread and we have to bail.
+                self.running = False
+                self.thread = None
+                return
+
+    def run(self):
+        # type: () -> None
+        last = time.perf_counter()
+
+        while self.running:
+            self.sampler()
+
+            # some time may have elapsed since the last time
+            # we sampled, so we need to account for that and
+            # not sleep for too long
+            elapsed = time.perf_counter() - last
+            if elapsed < self.interval:
+                thread_sleep(self.interval - elapsed)
+
+            # after sleeping, make sure to take the current
+            # timestamp so we can use it next iteration
+            last = time.perf_counter()
diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py
index bcfbf5c166..cd974e4a52 100644
--- a/sentry_sdk/scope.py
+++ b/sentry_sdk/scope.py
@@ -1,34 +1,65 @@
 from copy import copy
 from collections import deque
 from itertools import chain
+import os
+import sys
+import uuid
 
-from sentry_sdk._functools import wraps
-from sentry_sdk._types import MYPY
-from sentry_sdk.utils import logger, capture_internal_exceptions
-from sentry_sdk.tracing import Transaction
 from sentry_sdk.attachments import Attachment
+from sentry_sdk._compat import datetime_utcnow
+from sentry_sdk.consts import FALSE_VALUES, INSTRUMENTER
+from sentry_sdk._functools import wraps
+from sentry_sdk.profiler import Profile
+from sentry_sdk.session import Session
+from sentry_sdk.tracing_utils import (
+    Baggage,
+    extract_sentrytrace_data,
+    has_tracing_enabled,
+    normalize_incoming_data,
+)
+from sentry_sdk.tracing import (
+    BAGGAGE_HEADER_NAME,
+    SENTRY_TRACE_HEADER_NAME,
+    NoOpSpan,
+    Span,
+    Transaction,
+)
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.utils import (
+    event_from_exception,
+    exc_info_from_error,
+    logger,
+    capture_internal_exceptions,
+)
+
+if TYPE_CHECKING:
+    from collections.abc import MutableMapping
 
-if MYPY:
     from typing import Any
-    from typing import Dict
-    from typing import Optional
+    from typing import Callable
     from typing import Deque
+    from typing import Dict
+    from typing import Generator
+    from typing import Iterator
     from typing import List
-    from typing import Callable
+    from typing import Optional
+    from typing import Tuple
     from typing import TypeVar
+    from typing import Union
 
     from sentry_sdk._types import (
         Breadcrumb,
+        BreadcrumbHint,
+        ErrorProcessor,
         Event,
         EventProcessor,
-        ErrorProcessor,
         ExcInfo,
         Hint,
+        LogLevelStr,
         Type,
     )
 
-    from sentry_sdk.tracing import Span
-    from sentry_sdk.session import Session
+    import sentry_sdk
 
     F = TypeVar("F", bound=Callable[..., Any])
     T = TypeVar("T")
@@ -63,6 +94,28 @@ def wrapper(self, *args, **kwargs):
     return wrapper  # type: ignore
 
 
+def _merge_scopes(base, scope_change, scope_kwargs):
+    # type: (Scope, Optional[Any], Dict[str, Any]) -> Scope
+    if scope_change and scope_kwargs:
+        raise TypeError("cannot provide scope and kwargs")
+
+    if scope_change is not None:
+        final_scope = copy(base)
+        if callable(scope_change):
+            scope_change(final_scope)
+        else:
+            final_scope.update_from_scope(scope_change)
+
+    elif scope_kwargs:
+        final_scope = copy(base)
+        final_scope.update_from_kwargs(**scope_kwargs)
+
+    else:
+        final_scope = base
+
+    return final_scope
+
+
 class Scope(object):
     """The scope holds extra information that should be sent with all
     events that belong to it.
@@ -81,6 +134,7 @@ class Scope(object):
         # note that for legacy reasons, _transaction is the transaction *name*,
         # not a Transaction object (the object is stored in _span)
         "_transaction",
+        "_transaction_info",
         "_user",
         "_tags",
         "_contexts",
@@ -93,6 +147,8 @@ class Scope(object):
         "_session",
         "_attachments",
         "_force_auto_session_tracking",
+        "_profile",
+        "_propagation_context",
     )
 
     def __init__(self):
@@ -101,19 +157,275 @@ def __init__(self):
         self._error_processors = []  # type: List[ErrorProcessor]
 
         self._name = None  # type: Optional[str]
+        self._propagation_context = None  # type: Optional[Dict[str, Any]]
+
         self.clear()
 
+        incoming_trace_information = self._load_trace_data_from_env()
+        self.generate_propagation_context(incoming_data=incoming_trace_information)
+
+    def _load_trace_data_from_env(self):
+        # type: () -> Optional[Dict[str, str]]
+        """
+        Load Sentry trace id and baggage from environment variables.
+        Can be disabled by setting SENTRY_USE_ENVIRONMENT to "false".
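+
+        Example values (hypothetical; the sentry-trace format is
+        "<trace_id>-<span_id>[-<sampled>]"):
+
+            SENTRY_TRACE=771a43a4192642f0b136d5159a501700-1234567890abcdef-1
+            SENTRY_BAGGAGE=sentry-trace_id=771a43a4192642f0b136d5159a501700,sentry-sample_rate=0.01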
+        """
+        incoming_trace_information = None
+
+        sentry_use_environment = (
+            os.environ.get("SENTRY_USE_ENVIRONMENT") or ""
+        ).lower()
+        use_environment = sentry_use_environment not in FALSE_VALUES
+        if use_environment:
+            incoming_trace_information = {}
+
+            if os.environ.get("SENTRY_TRACE"):
+                incoming_trace_information[SENTRY_TRACE_HEADER_NAME] = (
+                    os.environ.get("SENTRY_TRACE") or ""
+                )
+
+            if os.environ.get("SENTRY_BAGGAGE"):
+                incoming_trace_information[BAGGAGE_HEADER_NAME] = (
+                    os.environ.get("SENTRY_BAGGAGE") or ""
+                )
+
+        return incoming_trace_information or None
+
+    def _extract_propagation_context(self, data):
+        # type: (Dict[str, Any]) -> Optional[Dict[str, Any]]
+        context = {}  # type: Dict[str, Any]
+        normalized_data = normalize_incoming_data(data)
+
+        baggage_header = normalized_data.get(BAGGAGE_HEADER_NAME)
+        if baggage_header:
+            context["dynamic_sampling_context"] = Baggage.from_incoming_header(
+                baggage_header
+            ).dynamic_sampling_context()
+
+        sentry_trace_header = normalized_data.get(SENTRY_TRACE_HEADER_NAME)
+        if sentry_trace_header:
+            sentrytrace_data = extract_sentrytrace_data(sentry_trace_header)
+            if sentrytrace_data is not None:
+                context.update(sentrytrace_data)
+
+        only_baggage_no_sentry_trace = (
+            "dynamic_sampling_context" in context and "trace_id" not in context
+        )
+        if only_baggage_no_sentry_trace:
+            context.update(self._create_new_propagation_context())
+
+        if context:
+            if not context.get("span_id"):
+                context["span_id"] = uuid.uuid4().hex[16:]
+
+            return context
+
+        return None
+
+    def _create_new_propagation_context(self):
+        # type: () -> Dict[str, Any]
+        return {
+            "trace_id": uuid.uuid4().hex,
+            "span_id": uuid.uuid4().hex[16:],
+            "parent_span_id": None,
+            "dynamic_sampling_context": None,
+        }
+
+    def set_new_propagation_context(self):
+        # type: () -> None
+        """
+        Creates a new propagation context and sets it as `_propagation_context`, overwriting any existing one.
+        """
+        self._propagation_context = self._create_new_propagation_context()
+        logger.debug(
+            "[Tracing] Create new propagation context: %s",
+            self._propagation_context,
+        )
+
+    def generate_propagation_context(self, incoming_data=None):
+        # type: (Optional[Dict[str, str]]) -> None
+        """
+        Makes sure `_propagation_context` is set.
+        If `incoming_data` is given, it overwrites any existing `_propagation_context`.
+        If there is no `incoming_data`, a new `_propagation_context` is created, but an existing one is NOT overwritten.
+        """
+        if incoming_data:
+            context = self._extract_propagation_context(incoming_data)
+
+            if context is not None:
+                self._propagation_context = context
+                logger.debug(
+                    "[Tracing] Extracted propagation context from incoming data: %s",
+                    self._propagation_context,
+                )
+
+        if self._propagation_context is None:
+            self.set_new_propagation_context()
+
+    def get_dynamic_sampling_context(self):
+        # type: () -> Optional[Dict[str, str]]
+        """
+        Returns the Dynamic Sampling Context from the Propagation Context,
+        deriving it from the current baggage if it does not exist yet.
+        """
+        if self._propagation_context is None:
+            return None
+
+        baggage = self.get_baggage()
+        if baggage is not None:
+            self._propagation_context["dynamic_sampling_context"] = (
+                baggage.dynamic_sampling_context()
+            )
+
+        return self._propagation_context["dynamic_sampling_context"]
+
+    def get_traceparent(self, *args, **kwargs):
+        # type: (Any, Any) -> Optional[str]
+        """
+        Returns the Sentry "sentry-trace" header (aka the traceparent) from the
+        currently active span or the scope's Propagation Context.
+        """
+        client = kwargs.pop("client", None)
+
+        # If we have an active span, return traceparent from there
+        if (
+            client is not None
+            and has_tracing_enabled(client.options)
+            and self.span is not None
+        ):
+            return self.span.to_traceparent()
+
+        if self._propagation_context is None:
+            return None
+
+        traceparent = "%s-%s" % (
+            self._propagation_context["trace_id"],
+            self._propagation_context["span_id"],
+        )
+        return traceparent
+
+    def get_baggage(self, *args, **kwargs):
+        # type: (Any, Any) -> Optional[Baggage]
+        client = kwargs.pop("client", None)
+
+        # If we have an active span, return baggage from there
+        if (
+            client is not None
+            and has_tracing_enabled(client.options)
+            and self.span is not None
+        ):
+            return self.span.to_baggage()
+
+        if self._propagation_context is None:
+            return None
+
+        dynamic_sampling_context = self._propagation_context.get(
+            "dynamic_sampling_context"
+        )
+        if dynamic_sampling_context is None:
+            return Baggage.from_options(self)
+        else:
+            return Baggage(dynamic_sampling_context)
+
+    def get_trace_context(self):
+        # type: () -> Any
+        """
+        Returns the Sentry "trace" context from the Propagation Context.
+        """
+        if self._propagation_context is None:
+            return None
+
+        trace_context = {
+            "trace_id": self._propagation_context["trace_id"],
+            "span_id": self._propagation_context["span_id"],
+            "parent_span_id": self._propagation_context["parent_span_id"],
+            "dynamic_sampling_context": self.get_dynamic_sampling_context(),
+        }  # type: Dict[str, Any]
+
+        return trace_context
+
+    def trace_propagation_meta(self, *args, **kwargs):
+        # type: (*Any, **Any) -> str
+        """
+        Return meta tags which should be injected into HTML templates
+        to allow propagation of trace information.
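+
+        The returned string looks like the following (hypothetical values):
+
+            <meta name="sentry-trace" content="<trace_id>-<span_id>-1">
+            <meta name="baggage" content="sentry-trace_id=<trace_id>,sentry-sample_rate=0.01">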
+        """
+        span = kwargs.pop("span", None)
+        if span is not None:
+            logger.warning(
+                "The parameter `span` in trace_propagation_meta() is deprecated and will be removed in the future."
+            )
+
+        client = kwargs.pop("client", None)
+
+        meta = ""
+
+        sentry_trace = self.get_traceparent(client=client)
+        if sentry_trace is not None:
+            meta += '<meta name="%s" content="%s">' % (
+                SENTRY_TRACE_HEADER_NAME,
+                sentry_trace,
+            )
+
+        baggage = self.get_baggage(client=client)
+        if baggage is not None:
+            meta += '<meta name="%s" content="%s">' % (
+                BAGGAGE_HEADER_NAME,
+                baggage.serialize(),
+            )
+
+        return meta
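+
+    # Editor's sketch (illustrative, not part of this change): inject the
+    # returned meta tags into a server-rendered page so a browser SDK can
+    # continue the trace; `render_template` and "index.html" are hypothetical.
+    #
+    #     meta = scope.trace_propagation_meta(client=client)
+    #     return render_template("index.html", sentry_meta=meta)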
+
+    def iter_headers(self):
+        # type: () -> Iterator[Tuple[str, str]]
+        """
+        Creates a generator which returns the `sentry-trace` and `baggage` headers from the Propagation Context.
+        """
+        if self._propagation_context is not None:
+            traceparent = self.get_traceparent()
+            if traceparent is not None:
+                yield SENTRY_TRACE_HEADER_NAME, traceparent
+
+            dsc = self.get_dynamic_sampling_context()
+            if dsc is not None:
+                baggage = Baggage(dsc).serialize()
+                yield BAGGAGE_HEADER_NAME, baggage
+
+    def iter_trace_propagation_headers(self, *args, **kwargs):
+        # type: (Any, Any) -> Generator[Tuple[str, str], None, None]
+        """
+        Return HTTP headers which allow propagation of trace data. Data taken
+        from the span representing the request, if available, or the current
+        span on the scope if not.
+        """
+        span = kwargs.pop("span", None)
+        client = kwargs.pop("client", None)
+
+        propagate_traces = client and client.options["propagate_traces"]
+        if not propagate_traces:
+            return
+
+        span = span or self.span
+
+        if client and has_tracing_enabled(client.options) and span is not None:
+            for header in span.iter_headers():
+                yield header
+        else:
+            for header in self.iter_headers():
+                yield header
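+
+    # Editor's sketch (illustrative, not part of this change): copy the
+    # propagation headers onto an outgoing HTTP request; `requests` is an
+    # assumption here, any HTTP client works the same way.
+    #
+    #     import requests
+    #     headers = dict(scope.iter_trace_propagation_headers(client=client))
+    #     requests.get("https://example.com/api", headers=headers)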
+
     def clear(self):
         # type: () -> None
         """Clears the entire scope."""
-        self._level = None  # type: Optional[str]
+        self._level = None  # type: Optional[LogLevelStr]
         self._fingerprint = None  # type: Optional[List[str]]
         self._transaction = None  # type: Optional[str]
+        self._transaction_info = {}  # type: MutableMapping[str, str]
         self._user = None  # type: Optional[Dict[str, Any]]
 
         self._tags = {}  # type: Dict[str, Any]
         self._contexts = {}  # type: Dict[str, Dict[str, Any]]
-        self._extras = {}  # type: Dict[str, Any]
+        self._extras = {}  # type: MutableMapping[str, Any]
         self._attachments = []  # type: List[Attachment]
 
         self.clear_breadcrumbs()
@@ -123,15 +435,34 @@ def clear(self):
         self._session = None  # type: Optional[Session]
         self._force_auto_session_tracking = None  # type: Optional[bool]
 
+        self._profile = None  # type: Optional[Profile]
+
+        self._propagation_context = None
+
     @_attr_setter
     def level(self, value):
-        # type: (Optional[str]) -> None
-        """When set this overrides the level. Deprecated in favor of set_level."""
+        # type: (LogLevelStr) -> None
+        """
+        When set, this overrides the level.
+
+        .. deprecated:: 1.0.0
+            Use :func:`set_level` instead.
+
+        :param value: The level to set.
+        """
+        logger.warning(
+            "Deprecated: use .set_level() instead. This will be removed in the future."
+        )
+
         self._level = value
 
     def set_level(self, value):
-        # type: (Optional[str]) -> None
-        """Sets the level for the scope."""
+        # type: (LogLevelStr) -> None
+        """
+        Sets the level for the scope.
+
+        :param value: The level to set.
+        """
         self._level = value
 
     @_attr_setter
@@ -162,7 +493,10 @@ def transaction(self):
     def transaction(self, value):
         # type: (Any) -> None
         # would be type: (Optional[str]) -> None, see https://github.com/python/mypy/issues/3004
-        """When set this forces a specific transaction name to be set."""
+        """When set this forces a specific transaction name to be set.
+
+        Deprecated: use set_transaction_name instead."""
+
         # XXX: the docstring above is misleading. The implementation of
         # apply_to_event prefers an existing value of event.transaction over
         # anything set in the scope.
@@ -172,10 +506,27 @@ def transaction(self, value):
         # Without breaking version compatibility, we could make the setter set a
         # transaction name or transaction (self._span) depending on the type of
         # the value argument.
+
+        logger.warning(
+            "Assigning to scope.transaction directly is deprecated: use scope.set_transaction_name() instead."
+        )
         self._transaction = value
         if self._span and self._span.containing_transaction:
             self._span.containing_transaction.name = value
 
+    def set_transaction_name(self, name, source=None):
+        # type: (str, Optional[str]) -> None
+        """Set the transaction name and optionally the transaction source."""
+        self._transaction = name
+
+        if self._span and self._span.containing_transaction:
+            self._span.containing_transaction.name = name
+            if source:
+                self._span.containing_transaction.source = source
+
+        if source:
+            self._transaction_info["source"] = source
+
     @_attr_setter
     def user(self, value):
         # type: (Optional[Dict[str, Any]]) -> None
@@ -205,21 +556,38 @@ def span(self, span):
             transaction = span
             if transaction.name:
                 self._transaction = transaction.name
+                if transaction.source:
+                    self._transaction_info["source"] = transaction.source
 
-    def set_tag(
-        self,
-        key,  # type: str
-        value,  # type: Any
-    ):
-        # type: (...) -> None
-        """Sets a tag for a key to a specific value."""
+    @property
+    def profile(self):
+        # type: () -> Optional[Profile]
+        return self._profile
+
+    @profile.setter
+    def profile(self, profile):
+        # type: (Optional[Profile]) -> None
+
+        self._profile = profile
+
+    def set_tag(self, key, value):
+        # type: (str, Any) -> None
+        """
+        Sets a tag for a key to a specific value.
+
+        :param key: Key of the tag to set.
+
+        :param value: Value of the tag to set.
+        """
         self._tags[key] = value
 
-    def remove_tag(
-        self, key  # type: str
-    ):
-        # type: (...) -> None
-        """Removes a specific tag."""
+    def remove_tag(self, key):
+        # type: (str) -> None
+        """
+        Removes a specific tag.
+
+        :param key: Key of the tag to remove.
+        """
         self._tags.pop(key, None)
 
     def set_context(
@@ -228,7 +596,9 @@ def set_context(
         value,  # type: Dict[str, Any]
     ):
         # type: (...) -> None
-        """Binds a context at a certain key to a specific value."""
+        """
+        Binds a context at a certain key to a specific value.
+        """
         self._contexts[key] = value
 
     def remove_context(
@@ -279,6 +649,358 @@ def add_attachment(
             )
         )
 
+    def add_breadcrumb(self, crumb=None, hint=None, **kwargs):
+        # type: (Optional[Breadcrumb], Optional[BreadcrumbHint], Any) -> None
+        """
+        Adds a breadcrumb.
+
+        :param crumb: Dictionary with the data as the sentry v7/v8 protocol expects.
+
+        :param hint: An optional value that can be used by `before_breadcrumb`
+            to customize the breadcrumbs that are emitted.
+        """
+        client = kwargs.pop("client", None)
+        if client is None:
+            return
+
+        before_breadcrumb = client.options.get("before_breadcrumb")
+        max_breadcrumbs = client.options.get("max_breadcrumbs")
+
+        crumb = dict(crumb or ())  # type: Breadcrumb
+        crumb.update(kwargs)
+        if not crumb:
+            return
+
+        hint = dict(hint or ())  # type: Hint
+
+        if crumb.get("timestamp") is None:
+            crumb["timestamp"] = datetime_utcnow()
+        if crumb.get("type") is None:
+            crumb["type"] = "default"
+
+        if before_breadcrumb is not None:
+            new_crumb = before_breadcrumb(crumb, hint)
+        else:
+            new_crumb = crumb
+
+        if new_crumb is not None:
+            self._breadcrumbs.append(new_crumb)
+        else:
+            logger.info("before breadcrumb dropped breadcrumb (%s)", crumb)
+
+        while len(self._breadcrumbs) > max_breadcrumbs:
+            self._breadcrumbs.popleft()
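+
+    # Editor's sketch (illustrative, not part of this change; the `client`
+    # kwarg is normally supplied by the Hub rather than by user code):
+    #
+    #     scope.add_breadcrumb(
+    #         {"category": "auth", "message": "user logged in", "level": "info"},
+    #         client=client,
+    #     )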
+
+    def start_transaction(
+        self, transaction=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs
+    ):
+        # type: (Optional[Transaction], str, Any) -> Union[Transaction, NoOpSpan]
+        """
+        Start and return a transaction.
+
+        Start an existing transaction if given, otherwise create and start a new
+        transaction with kwargs.
+
+        This is the entry point to manual tracing instrumentation.
+
+        A tree structure can be built by adding child spans to the transaction,
+        and child spans to other spans. To start a new child span within the
+        transaction or any span, call the respective `.start_child()` method.
+
+        Every child span must be finished before the transaction is finished,
+        otherwise the unfinished spans are discarded.
+
+        When used as context managers, spans and transactions are automatically
+        finished at the end of the `with` block. If not using context managers,
+        call the `.finish()` method.
+
+        When the transaction is finished, it will be sent to Sentry with all its
+        finished child spans.
+
+        For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Transaction`.
+        """
+        hub = kwargs.pop("hub", None)
+        client = kwargs.pop("client", None)
+
+        configuration_instrumenter = client and client.options["instrumenter"]
+
+        if instrumenter != configuration_instrumenter:
+            return NoOpSpan()
+
+        custom_sampling_context = kwargs.pop("custom_sampling_context", {})
+
+        # if we haven't been given a transaction, make one
+        if transaction is None:
+            kwargs.setdefault("hub", hub)
+            transaction = Transaction(**kwargs)
+
+        # use traces_sample_rate, traces_sampler, and/or inheritance to make a
+        # sampling decision
+        sampling_context = {
+            "transaction_context": transaction.to_json(),
+            "parent_sampled": transaction.parent_sampled,
+        }
+        sampling_context.update(custom_sampling_context)
+        transaction._set_initial_sampling_decision(sampling_context=sampling_context)
+
+        profile = Profile(transaction, hub=hub)
+        profile._set_initial_sampling_decision(sampling_context=sampling_context)
+
+        # we don't bother to keep spans if we already know we're not going to
+        # send the transaction
+        if transaction.sampled:
+            max_spans = (
+                client and client.options["_experiments"].get("max_spans")
+            ) or 1000
+            transaction.init_span_recorder(maxlen=max_spans)
+
+        return transaction
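+
+    # Editor's sketch (illustrative, not part of this change) of the
+    # context-manager pattern the docstring describes; `process_batch` is a
+    # hypothetical helper.
+    #
+    #     with sentry_sdk.start_transaction(op="task", name="process-batch") as txn:
+    #         with txn.start_child(op="db", description="SELECT * FROM items"):
+    #             process_batch()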
+
+    def start_span(self, span=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
+        # type: (Optional[Span], str, Any) -> Span
+        """
+        Start a span whose parent is the currently active span or transaction, if any.
+
+        The return value is a :py:class:`sentry_sdk.tracing.Span` instance,
+        typically used as a context manager to start and stop timing in a `with`
+        block.
+
+        Only spans contained in a transaction are sent to Sentry. Most
+        integrations start a transaction at the appropriate time, for example
+        for every incoming HTTP request. Use
+        :py:meth:`sentry_sdk.start_transaction` to start a new transaction when
+        one is not already in progress.
+
+        For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`.
+        """
+        client = kwargs.get("client", None)
+
+        configuration_instrumenter = client and client.options["instrumenter"]
+
+        if instrumenter != configuration_instrumenter:
+            return NoOpSpan()
+
+        # THIS BLOCK IS DEPRECATED
+        # TODO: consider removing this in a future release.
+        # This is for backwards compatibility with releases before
+        # start_transaction existed, to allow for a smoother transition.
+        if isinstance(span, Transaction) or "transaction" in kwargs:
+            deprecation_msg = (
+                "Deprecated: use start_transaction to start transactions and "
+                "Transaction.start_child to start spans."
+            )
+
+            if isinstance(span, Transaction):
+                logger.warning(deprecation_msg)
+                return self.start_transaction(span, **kwargs)
+
+            if "transaction" in kwargs:
+                logger.warning(deprecation_msg)
+                name = kwargs.pop("transaction")
+                return self.start_transaction(name=name, **kwargs)
+
+        # THIS BLOCK IS DEPRECATED
+        # We do not pass a span into start_span in our code base, so this is deprecated.
+        if span is not None:
+            deprecation_msg = "Deprecated: passing a span into `start_span` is deprecated and will be removed in the future."
+            logger.warning(deprecation_msg)
+            return span
+
+        kwargs.pop("client")
+
+        active_span = self.span
+        if active_span is not None:
+            new_child_span = active_span.start_child(**kwargs)
+            return new_child_span
+
+        # If there is already a trace_id in the propagation context, use it.
+        # This does not need to be done for `start_child` above because it takes
+        # the trace_id from the parent span.
+        if "trace_id" not in kwargs:
+            traceparent = self.get_traceparent()
+            trace_id = traceparent.split("-")[0] if traceparent else None
+            if trace_id is not None:
+                kwargs["trace_id"] = trace_id
+
+        return Span(**kwargs)
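+
+    # Editor's sketch (illustrative, not part of this change): time a unit of
+    # work inside the current transaction; `fetch` is a hypothetical helper.
+    #
+    #     with sentry_sdk.start_span(op="http.client", description="GET /api") as span:
+    #         span.set_tag("endpoint", "/api")
+    #         fetch()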
+
+    def continue_trace(self, environ_or_headers, op=None, name=None, source=None):
+        # type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction
+        """
+        Sets the propagation context from environment or headers and returns a transaction.
+        """
+        self.generate_propagation_context(environ_or_headers)
+
+        transaction = Transaction.continue_from_headers(
+            normalize_incoming_data(environ_or_headers),
+            op=op,
+            name=name,
+            source=source,
+        )
+
+        return transaction
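+
+    # Editor's sketch (illustrative, not part of this change): continue an
+    # incoming trace in a request handler; `request.headers` stands in for
+    # whatever header mapping the framework provides.
+    #
+    #     transaction = scope.continue_trace(
+    #         request.headers, op="http.server", name="GET /index"
+    #     )
+    #     with sentry_sdk.start_transaction(transaction):
+    #         ...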
+
+    def capture_event(self, event, hint=None, client=None, scope=None, **scope_kwargs):
+        # type: (Event, Optional[Hint], Optional[sentry_sdk.Client], Optional[Scope], Any) -> Optional[str]
+        """
+        Captures an event.
+
+        Merges given scope data and calls :py:meth:`sentry_sdk.Client.capture_event`.
+
+        :param event: A ready-made event that can be directly sent to Sentry.
+
+        :param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or an HTTP request object.
+
+        :param client: The client to use for sending the event to Sentry.
+
+        :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
+            The `scope` and `scope_kwargs` parameters are mutually exclusive.
+
+        :param scope_kwargs: Optional data to apply to event.
+            For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
+            The `scope` and `scope_kwargs` parameters are mutually exclusive.
+
+        :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
+        """
+        if client is None:
+            return None
+
+        scope = _merge_scopes(self, scope, scope_kwargs)
+
+        return client.capture_event(event=event, hint=hint, scope=scope)
+
+    def capture_message(
+        self, message, level=None, client=None, scope=None, **scope_kwargs
+    ):
+        # type: (str, Optional[LogLevelStr], Optional[sentry_sdk.Client], Optional[Scope], Any) -> Optional[str]
+        """
+        Captures a message.
+
+        :param message: The string to send as the message.
+
+        :param level: If no level is provided, the default level is `info`.
+
+        :param client: The client to use for sending the event to Sentry.
+
+        :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
+            The `scope` and `scope_kwargs` parameters are mutually exclusive.
+
+        :param scope_kwargs: Optional data to apply to event.
+            For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
+            The `scope` and `scope_kwargs` parameters are mutually exclusive.
+
+        :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
+        """
+        if client is None:
+            return None
+
+        if level is None:
+            level = "info"
+
+        event = {
+            "message": message,
+            "level": level,
+        }  # type: Event
+
+        return self.capture_event(event, client=client, scope=scope, **scope_kwargs)
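+
+    # Editor's sketch (illustrative, not part of this change; `client` is
+    # normally injected by the Hub):
+    #
+    #     event_id = scope.capture_message("cache miss", level="warning", client=client)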
+
+    def capture_exception(self, error=None, client=None, scope=None, **scope_kwargs):
+        # type: (Optional[Union[BaseException, ExcInfo]], Optional[sentry_sdk.Client], Optional[Scope], Any) -> Optional[str]
+        """Captures an exception.
+
+        :param error: An exception to capture. If `None`, `sys.exc_info()` will be used.
+
+        :param client: The client to use for sending the event to Sentry.
+
+        :param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
+            The `scope` and `scope_kwargs` parameters are mutually exclusive.
+
+        :param scope_kwargs: Optional data to apply to event.
+            For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
+            The `scope` and `scope_kwargs` parameters are mutually exclusive.
+
+        :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
+        """
+        if client is None:
+            return None
+
+        if error is not None:
+            exc_info = exc_info_from_error(error)
+        else:
+            exc_info = sys.exc_info()
+
+        event, hint = event_from_exception(exc_info, client_options=client.options)
+
+        try:
+            return self.capture_event(
+                event, hint=hint, client=client, scope=scope, **scope_kwargs
+            )
+        except Exception:
+            self._capture_internal_exception(sys.exc_info())
+
+        return None
+
+    def _capture_internal_exception(
+        self, exc_info  # type: Any
+    ):
+        # type: (...) -> Any
+        """
+        Capture an exception that is likely caused by a bug in the SDK
+        itself.
+
+        These exceptions do not end up in Sentry and are just logged instead.
+        """
+        logger.error("Internal error in sentry_sdk", exc_info=exc_info)
+
+    def start_session(self, *args, **kwargs):
+        # type: (*Any, **Any) -> None
+        """Starts a new session."""
+        client = kwargs.pop("client", None)
+        session_mode = kwargs.pop("session_mode", "application")
+
+        self.end_session(client=client)
+
+        self._session = Session(
+            release=client.options["release"] if client else None,
+            environment=client.options["environment"] if client else None,
+            user=self._user,
+            session_mode=session_mode,
+        )
+
+    def end_session(self, *args, **kwargs):
+        # type: (*Any, **Any) -> None
+        """Ends the current session if there is one."""
+        client = kwargs.pop("client", None)
+
+        session = self._session
+        self._session = None
+
+        if session is not None:
+            session.close()
+            if client is not None:
+                client.capture_session(session)
+
+    def stop_auto_session_tracking(self, *args, **kwargs):
+        # type: (*Any, **Any) -> None
+        """Stops automatic session tracking.
+
+        This temporarily disables session tracking for the current scope when called.
+        To resume session tracking, call `resume_auto_session_tracking`.
+        """
+        client = kwargs.pop("client", None)
+
+        self.end_session(client=client)
+
+        self._force_auto_session_tracking = False
+
+    def resume_auto_session_tracking(self):
+        # type: (...) -> None
+        """Resumes automatic session tracking for the current scope if
+        disabled earlier.  This requires that generally automatic session
+        tracking is enabled.
+        """
+        self._force_auto_session_tracking = None
+
     def add_event_processor(
         self, func  # type: EventProcessor
     ):
@@ -324,88 +1046,154 @@ def func(event, exc_info):
 
         self._error_processors.append(func)
 
-    @_disable_capture
-    def apply_to_event(
-        self,
-        event,  # type: Event
-        hint,  # type: Hint
-    ):
-        # type: (...) -> Optional[Event]
-        """Applies the information contained on the scope to the given event."""
-
-        def _drop(event, cause, ty):
-            # type: (Dict[str, Any], Any, str) -> Optional[Any]
-            logger.info("%s (%s) dropped event (%s)", ty, cause, event)
-            return None
-
-        is_transaction = event.get("type") == "transaction"
-
-        # put all attachments into the hint. This lets callbacks play around
-        # with attachments. We also later pull this out of the hint when we
-        # create the envelope.
-        attachments_to_send = hint.get("attachments") or []
-        for attachment in self._attachments:
-            if not is_transaction or attachment.add_to_transactions:
-                attachments_to_send.append(attachment)
-        hint["attachments"] = attachments_to_send
-
+    def _apply_level_to_event(self, event, hint, options):
+        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
         if self._level is not None:
             event["level"] = self._level
 
-        if not is_transaction:
-            event.setdefault("breadcrumbs", {}).setdefault("values", []).extend(
-                self._breadcrumbs
-            )
+    def _apply_breadcrumbs_to_event(self, event, hint, options):
+        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
+        event.setdefault("breadcrumbs", {}).setdefault("values", []).extend(
+            self._breadcrumbs
+        )
 
+    def _apply_user_to_event(self, event, hint, options):
+        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
         if event.get("user") is None and self._user is not None:
             event["user"] = self._user
 
+    def _apply_transaction_name_to_event(self, event, hint, options):
+        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
         if event.get("transaction") is None and self._transaction is not None:
             event["transaction"] = self._transaction
 
+    def _apply_transaction_info_to_event(self, event, hint, options):
+        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
+        if event.get("transaction_info") is None and self._transaction_info is not None:
+            event["transaction_info"] = self._transaction_info
+
+    def _apply_fingerprint_to_event(self, event, hint, options):
+        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
         if event.get("fingerprint") is None and self._fingerprint is not None:
             event["fingerprint"] = self._fingerprint
 
+    def _apply_extra_to_event(self, event, hint, options):
+        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
         if self._extras:
             event.setdefault("extra", {}).update(self._extras)
 
+    def _apply_tags_to_event(self, event, hint, options):
+        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
         if self._tags:
             event.setdefault("tags", {}).update(self._tags)
 
+    def _apply_contexts_to_event(self, event, hint, options):
+        # type: (Event, Hint, Optional[Dict[str, Any]]) -> None
         if self._contexts:
             event.setdefault("contexts", {}).update(self._contexts)
 
-        if self._span is not None:
-            contexts = event.setdefault("contexts", {})
-            if not contexts.get("trace"):
+        contexts = event.setdefault("contexts", {})
+
+        # Add "trace" context
+        if contexts.get("trace") is None:
+            if has_tracing_enabled(options) and self._span is not None:
                 contexts["trace"] = self._span.get_trace_context()
+            else:
+                contexts["trace"] = self.get_trace_context()
+
+        # Add "reply_id" context
+        try:
+            replay_id = contexts["trace"]["dynamic_sampling_context"]["replay_id"]  # type: ignore
+        except (KeyError, TypeError):
+            replay_id = None
+
+        if replay_id is not None:
+            contexts["replay"] = {
+                "replay_id": replay_id,
+            }
 
+    @_disable_capture
+    def apply_to_event(
+        self,
+        event,  # type: Event
+        hint,  # type: Hint
+        options=None,  # type: Optional[Dict[str, Any]]
+    ):
+        # type: (...) -> Optional[Event]
+        """Applies the information contained on the scope to the given event."""
+        ty = event.get("type")
+        is_transaction = ty == "transaction"
+        is_check_in = ty == "check_in"
+
+        # put all attachments into the hint. This lets callbacks play around
+        # with attachments. We also later pull this out of the hint when we
+        # create the envelope.
+        attachments_to_send = hint.get("attachments") or []
+        for attachment in self._attachments:
+            if not is_transaction or attachment.add_to_transactions:
+                attachments_to_send.append(attachment)
+        hint["attachments"] = attachments_to_send
+
+        self._apply_contexts_to_event(event, hint, options)
+
+        if is_check_in:
+            # Check-ins only support the trace context, strip all others
+            event["contexts"] = {
+                "trace": event.setdefault("contexts", {}).get("trace", {})
+            }
+
+        if not is_check_in:
+            self._apply_level_to_event(event, hint, options)
+            self._apply_fingerprint_to_event(event, hint, options)
+            self._apply_user_to_event(event, hint, options)
+            self._apply_transaction_name_to_event(event, hint, options)
+            self._apply_transaction_info_to_event(event, hint, options)
+            self._apply_tags_to_event(event, hint, options)
+            self._apply_extra_to_event(event, hint, options)
+
+        if not is_transaction and not is_check_in:
+            self._apply_breadcrumbs_to_event(event, hint, options)
+
+        def _drop(cause, ty):
+            # type: (Any, str) -> Optional[Any]
+            logger.info("%s (%s) dropped event", ty, cause)
+            return None
+
+        # run error processors
         exc_info = hint.get("exc_info")
         if exc_info is not None:
             for error_processor in self._error_processors:
                 new_event = error_processor(event, exc_info)
                 if new_event is None:
-                    return _drop(event, error_processor, "error processor")
+                    return _drop(error_processor, "error processor")
+
                 event = new_event
 
-        for event_processor in chain(global_event_processors, self._event_processors):
-            new_event = event
-            with capture_internal_exceptions():
-                new_event = event_processor(event, hint)
-            if new_event is None:
-                return _drop(event, event_processor, "event processor")
-            event = new_event
+        # run event processors
+        if not is_check_in:
+            for event_processor in chain(
+                global_event_processors, self._event_processors
+            ):
+                new_event = event
+                with capture_internal_exceptions():
+                    new_event = event_processor(event, hint)
+                if new_event is None:
+                    return _drop(event_processor, "event processor")
+                event = new_event
 
         return event
 
     def update_from_scope(self, scope):
         # type: (Scope) -> None
+        """Update the scope with another scope's data."""
         if scope._level is not None:
             self._level = scope._level
         if scope._fingerprint is not None:
             self._fingerprint = scope._fingerprint
         if scope._transaction is not None:
             self._transaction = scope._transaction
+        if scope._transaction_info is not None:
+            self._transaction_info.update(scope._transaction_info)
         if scope._user is not None:
             self._user = scope._user
         if scope._tags:
@@ -420,17 +1208,22 @@ def update_from_scope(self, scope):
             self._span = scope._span
         if scope._attachments:
             self._attachments.extend(scope._attachments)
+        if scope._profile:
+            self._profile = scope._profile
+        if scope._propagation_context:
+            self._propagation_context = scope._propagation_context
 
     def update_from_kwargs(
         self,
         user=None,  # type: Optional[Any]
-        level=None,  # type: Optional[str]
+        level=None,  # type: Optional[LogLevelStr]
         extras=None,  # type: Optional[Dict[str, Any]]
         contexts=None,  # type: Optional[Dict[str, Any]]
         tags=None,  # type: Optional[Dict[str, str]]
         fingerprint=None,  # type: Optional[List[str]]
     ):
         # type: (...) -> None
+        """Update the scope's attributes."""
         if level is not None:
             self._level = level
         if user is not None:
@@ -452,6 +1245,7 @@ def __copy__(self):
         rv._name = self._name
         rv._fingerprint = self._fingerprint
         rv._transaction = self._transaction
+        rv._transaction_info = dict(self._transaction_info)
         rv._user = self._user
 
         rv._tags = dict(self._tags)
@@ -461,6 +1255,7 @@ def __copy__(self):
         rv._breadcrumbs = copy(self._breadcrumbs)
         rv._event_processors = list(self._event_processors)
         rv._error_processors = list(self._error_processors)
+        rv._propagation_context = self._propagation_context
 
         rv._should_capture = self._should_capture
         rv._span = self._span
@@ -468,6 +1263,8 @@ def __copy__(self):
         rv._force_auto_session_tracking = self._force_auto_session_tracking
         rv._attachments = list(self._attachments)
 
+        rv._profile = self._profile
+
         return rv
 
     def __repr__(self):
diff --git a/sentry_sdk/scrubber.py b/sentry_sdk/scrubber.py
new file mode 100644
index 0000000000..3f089ab8f6
--- /dev/null
+++ b/sentry_sdk/scrubber.py
@@ -0,0 +1,160 @@
+try:
+    from typing import cast
+except ImportError:
+    cast = lambda _, obj: obj
+
+from sentry_sdk.utils import (
+    capture_internal_exceptions,
+    AnnotatedValue,
+    iter_event_frames,
+)
+from sentry_sdk._compat import string_types
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from sentry_sdk._types import Event
+    from typing import List
+    from typing import Optional
+
+
+DEFAULT_DENYLIST = [
+    # stolen from relay
+    "password",
+    "passwd",
+    "secret",
+    "api_key",
+    "apikey",
+    "auth",
+    "credentials",
+    "mysql_pwd",
+    "privatekey",
+    "private_key",
+    "token",
+    "ip_address",
+    "session",
+    # django
+    "csrftoken",
+    "sessionid",
+    # wsgi
+    "remote_addr",
+    "x_csrftoken",
+    "x_forwarded_for",
+    "set_cookie",
+    "cookie",
+    "authorization",
+    "x_api_key",
+    "x_forwarded_for",
+    "x_real_ip",
+    # other common names used in the wild
+    "aiohttp_session",  # aiohttp
+    "connect.sid",  # Express
+    "csrf_token",  # Pyramid
+    "csrf",  # (this is a cookie name used in accepted answers on stack overflow)
+    "_csrf",  # Express
+    "_csrf_token",  # Bottle
+    "PHPSESSID",  # PHP
+    "_session",  # Sanic
+    "symfony",  # Symfony
+    "user_session",  # Vue
+    "_xsrf",  # Tornado
+    "XSRF-TOKEN",  # Angular, Laravel
+]
+
+
+class EventScrubber(object):
+    def __init__(self, denylist=None, recursive=False):
+        # type: (Optional[List[str]], bool) -> None
+        self.denylist = DEFAULT_DENYLIST if denylist is None else denylist
+        self.denylist = [x.lower() for x in self.denylist]
+        self.recursive = recursive
+
+    def scrub_list(self, lst):
+        # type: (object) -> None
+        """
+        If a list is passed to this method, the method recursively searches the list and any
+        nested lists for any dictionaries. The method calls scrub_dict on all dictionaries
+        it finds.
+        If the parameter passed to this method is not a list, the method does nothing.
+        """
+        if not isinstance(lst, list):
+            return
+
+        for v in lst:
+            self.scrub_dict(v)  # no-op unless v is a dict
+            self.scrub_list(v)  # no-op unless v is a list
+
+    def scrub_dict(self, d):
+        # type: (object) -> None
+        """
+        If a dictionary is passed to this method, the method scrubs the dictionary of any
+        sensitive data. The method calls itself recursively on any nested dictionaries (
+        including dictionaries nested in lists) if self.recursive is True.
+        This method does nothing if the parameter passed to it is not a dictionary.
+        """
+        if not isinstance(d, dict):
+            return
+
+        for k, v in d.items():
+            # The cast is needed because mypy is not smart enough to figure out that k must be a
+            # string after the isinstance check.
+            if isinstance(k, string_types) and cast(str, k).lower() in self.denylist:
+                d[k] = AnnotatedValue.substituted_because_contains_sensitive_data()
+            elif self.recursive:
+                self.scrub_dict(v)  # no-op unless v is a dict
+                self.scrub_list(v)  # no-op unless v is a list
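+
+    # Editor's sketch (illustrative, not part of this change) of the
+    # recursive behaviour described above:
+    #
+    #     scrubber = EventScrubber(recursive=True)
+    #     data = {"user": {"password": "hunter2"}}
+    #     scrubber.scrub_dict(data)
+    #     # data["user"]["password"] is now a substituted AnnotatedValue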
+
+    def scrub_request(self, event):
+        # type: (Event) -> None
+        with capture_internal_exceptions():
+            if "request" in event:
+                if "headers" in event["request"]:
+                    self.scrub_dict(event["request"]["headers"])
+                if "cookies" in event["request"]:
+                    self.scrub_dict(event["request"]["cookies"])
+                if "data" in event["request"]:
+                    self.scrub_dict(event["request"]["data"])
+
+    def scrub_extra(self, event):
+        # type: (Event) -> None
+        with capture_internal_exceptions():
+            if "extra" in event:
+                self.scrub_dict(event["extra"])
+
+    def scrub_user(self, event):
+        # type: (Event) -> None
+        with capture_internal_exceptions():
+            if "user" in event:
+                self.scrub_dict(event["user"])
+
+    def scrub_breadcrumbs(self, event):
+        # type: (Event) -> None
+        with capture_internal_exceptions():
+            if "breadcrumbs" in event:
+                if "values" in event["breadcrumbs"]:
+                    for value in event["breadcrumbs"]["values"]:
+                        if "data" in value:
+                            self.scrub_dict(value["data"])
+
+    def scrub_frames(self, event):
+        # type: (Event) -> None
+        with capture_internal_exceptions():
+            for frame in iter_event_frames(event):
+                if "vars" in frame:
+                    self.scrub_dict(frame["vars"])
+
+    def scrub_spans(self, event):
+        # type: (Event) -> None
+        with capture_internal_exceptions():
+            if "spans" in event:
+                for span in event["spans"]:
+                    if "data" in span:
+                        self.scrub_dict(span["data"])
+
+    def scrub_event(self, event):
+        # type: (Event) -> None
+        self.scrub_request(event)
+        self.scrub_extra(event)
+        self.scrub_user(event)
+        self.scrub_breadcrumbs(event)
+        self.scrub_frames(event)
+        self.scrub_spans(event)
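+
+
+# Editor's sketch (illustrative, not part of this change): wire a custom
+# scrubber into the SDK, assuming the client exposes an `event_scrubber`
+# option; the extra denylist entry is made up for the example.
+#
+#     import sentry_sdk
+#     sentry_sdk.init(
+#         dsn="...",
+#         event_scrubber=EventScrubber(denylist=DEFAULT_DENYLIST + ["my_secret"]),
+#     )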
diff --git a/sentry_sdk/serializer.py b/sentry_sdk/serializer.py
index e657f6b2b8..51496f57ce 100644
--- a/sentry_sdk/serializer.py
+++ b/sentry_sdk/serializer.py
@@ -8,20 +8,20 @@
     capture_internal_exception,
     disable_capture_event,
     format_timestamp,
-    json_dumps,
     safe_repr,
     strip_string,
 )
+from sentry_sdk._compat import (
+    text_type,
+    PY2,
+    string_types,
+    number_types,
+    iteritems,
+    binary_sequence_types,
+)
+from sentry_sdk._types import TYPE_CHECKING
 
-import sentry_sdk.utils
-
-from sentry_sdk._compat import text_type, PY2, string_types, number_types, iteritems
-
-from sentry_sdk._types import MYPY
-
-if MYPY:
-    from datetime import timedelta
-
+if TYPE_CHECKING:
     from types import TracebackType
 
     from typing import Any
@@ -30,7 +30,6 @@
     from typing import Dict
     from typing import List
     from typing import Optional
-    from typing import Tuple
     from typing import Type
     from typing import Union
 
@@ -47,7 +46,7 @@
     # https://github.com/python/cpython/blob/master/Lib/collections/__init__.py#L49
     from collections import Mapping, Sequence, Set
 
-    serializable_str_types = string_types
+    serializable_str_types = string_types + binary_sequence_types
 
 else:
     # New in 3.3
@@ -55,7 +54,7 @@
     from collections.abc import Mapping, Sequence, Set
 
     # Bytes are technically not strings in Python 3, but we can serialize them
-    serializable_str_types = (str, bytes)
+    serializable_str_types = string_types + binary_sequence_types
 
 
 # Maximum length of JSON-serialized event payloads that can be safely sent
@@ -68,6 +67,8 @@
 # this value due to attached metadata, so keep the number conservative.
 MAX_EVENT_BYTES = 10**6
 
+# Maximum depth and breadth of databags. Excess data will be trimmed. If
+# max_request_body_size is "always", request bodies won't be trimmed.
 MAX_DATABAG_DEPTH = 5
 MAX_DATABAG_BREADTH = 10
 CYCLE_MARKER = "<cyclic>"
@@ -113,12 +114,16 @@ def __exit__(
         self._ids.pop(id(self._objs.pop()), None)
 
 
-def serialize(event, smart_transaction_trimming=False, **kwargs):
-    # type: (Event, bool, **Any) -> Event
+def serialize(event, **kwargs):
+    # type: (Event, **Any) -> Event
     memo = Memo()
     path = []  # type: List[Segment]
     meta_stack = []  # type: List[Dict[str, Any]]
-    span_description_bytes = []  # type: List[int]
+
+    keep_request_bodies = (
+        kwargs.pop("max_request_body_size", None) == "always"
+    )  # type: bool
+    max_value_length = kwargs.pop("max_value_length", None)  # type: Optional[int]
 
     def _annotate(**meta):
         # type: (**Any) -> None
@@ -184,10 +189,11 @@ def _is_databag():
             if rv in (True, None):
                 return rv
 
-            p0 = path[0]
-            if p0 == "request" and path[1] == "data":
-                return True
+            is_request_body = _is_request_body()
+            if is_request_body in (True, None):
+                return is_request_body
 
+            p0 = path[0]
             if p0 == "breadcrumbs" and path[1] == "values":
                 path[2]
                 return True
@@ -200,13 +206,24 @@ def _is_databag():
 
         return False
 
+    def _is_request_body():
+        # type: () -> Optional[bool]
+        try:
+            if path[0] == "request" and path[1] == "data":
+                return True
+        except IndexError:
+            return None
+
+        return False
+
     def _serialize_node(
         obj,  # type: Any
         is_databag=None,  # type: Optional[bool]
+        is_request_body=None,  # type: Optional[bool]
         should_repr_strings=None,  # type: Optional[bool]
         segment=None,  # type: Optional[Segment]
-        remaining_breadth=None,  # type: Optional[int]
-        remaining_depth=None,  # type: Optional[int]
+        remaining_breadth=None,  # type: Optional[Union[int, float]]
+        remaining_depth=None,  # type: Optional[Union[int, float]]
     ):
         # type: (...) -> Any
         if segment is not None:
@@ -220,6 +237,7 @@ def _serialize_node(
                 return _serialize_node_impl(
                     obj,
                     is_databag=is_databag,
+                    is_request_body=is_request_body,
                     should_repr_strings=should_repr_strings,
                     remaining_depth=remaining_depth,
                     remaining_breadth=remaining_breadth,
@@ -244,26 +262,43 @@ def _flatten_annotated(obj):
         return obj
 
     def _serialize_node_impl(
-        obj, is_databag, should_repr_strings, remaining_depth, remaining_breadth
+        obj,
+        is_databag,
+        is_request_body,
+        should_repr_strings,
+        remaining_depth,
+        remaining_breadth,
     ):
-        # type: (Any, Optional[bool], Optional[bool], Optional[int], Optional[int]) -> Any
+        # type: (Any, Optional[bool], Optional[bool], Optional[bool], Optional[Union[float, int]], Optional[Union[float, int]]) -> Any
+        if isinstance(obj, AnnotatedValue):
+            should_repr_strings = False
         if should_repr_strings is None:
             should_repr_strings = _should_repr_strings()
 
         if is_databag is None:
             is_databag = _is_databag()
 
-        if is_databag and remaining_depth is None:
-            remaining_depth = MAX_DATABAG_DEPTH
-        if is_databag and remaining_breadth is None:
-            remaining_breadth = MAX_DATABAG_BREADTH
+        if is_request_body is None:
+            is_request_body = _is_request_body()
+
+        if is_databag:
+            if is_request_body and keep_request_bodies:
+                remaining_depth = float("inf")
+                remaining_breadth = float("inf")
+            else:
+                if remaining_depth is None:
+                    remaining_depth = MAX_DATABAG_DEPTH
+                if remaining_breadth is None:
+                    remaining_breadth = MAX_DATABAG_BREADTH
 
         obj = _flatten_annotated(obj)
 
         if remaining_depth is not None and remaining_depth <= 0:
             _annotate(rem=[["!limit", "x"]])
             if is_databag:
-                return _flatten_annotated(strip_string(safe_repr(obj)))
+                return _flatten_annotated(
+                    strip_string(safe_repr(obj), max_length=max_value_length)
+                )
             return None
 
         if is_databag and global_repr_processors:
@@ -312,9 +347,10 @@ def _serialize_node_impl(
                     segment=str_k,
                     should_repr_strings=should_repr_strings,
                     is_databag=is_databag,
-                    remaining_depth=remaining_depth - 1
-                    if remaining_depth is not None
-                    else None,
+                    is_request_body=is_request_body,
+                    remaining_depth=(
+                        remaining_depth - 1 if remaining_depth is not None else None
+                    ),
                     remaining_breadth=remaining_breadth,
                 )
                 rv_dict[str_k] = v
@@ -338,9 +374,10 @@ def _serialize_node_impl(
                         segment=i,
                         should_repr_strings=should_repr_strings,
                         is_databag=is_databag,
-                        remaining_depth=remaining_depth - 1
-                        if remaining_depth is not None
-                        else None,
+                        is_request_body=is_request_body,
+                        remaining_depth=(
+                            remaining_depth - 1 if remaining_depth is not None else None
+                        ),
                         remaining_breadth=remaining_breadth,
                     )
                 )
@@ -350,119 +387,29 @@ def _serialize_node_impl(
         if should_repr_strings:
             obj = safe_repr(obj)
         else:
-            if isinstance(obj, bytes):
+            if isinstance(obj, bytes) or isinstance(obj, bytearray):
                 obj = obj.decode("utf-8", "replace")
 
             if not isinstance(obj, string_types):
                 obj = safe_repr(obj)
 
-        # Allow span descriptions to be longer than other strings.
-        #
-        # For database auto-instrumented spans, the description contains
-        # potentially long SQL queries that are most useful when not truncated.
-        # Because arbitrarily large events may be discarded by the server as a
-        # protection mechanism, we dynamically limit the description length
-        # later in _truncate_span_descriptions.
-        if (
-            smart_transaction_trimming
-            and len(path) == 3
-            and path[0] == "spans"
-            and path[-1] == "description"
-        ):
-            span_description_bytes.append(len(obj))
+        is_span_description = (
+            len(path) == 3 and path[0] == "spans" and path[-1] == "description"
+        )
+        if is_span_description:
             return obj
-        return _flatten_annotated(strip_string(obj))
 
-    def _truncate_span_descriptions(serialized_event, event, excess_bytes):
-        # type: (Event, Event, int) -> None
-        """
-        Modifies serialized_event in-place trying to remove excess_bytes from
-        span descriptions. The original event is used read-only to access the
-        span timestamps (represented as RFC3399-formatted strings in
-        serialized_event).
-
-        It uses heuristics to prioritize preserving the description of spans
-        that might be the most interesting ones in terms of understanding and
-        optimizing performance.
-        """
-        # When truncating a description, preserve a small prefix.
-        min_length = 10
-
-        def shortest_duration_longest_description_first(args):
-            # type: (Tuple[int, Span]) -> Tuple[timedelta, int]
-            i, serialized_span = args
-            span = event["spans"][i]
-            now = datetime.utcnow()
-            start = span.get("start_timestamp") or now
-            end = span.get("timestamp") or now
-            duration = end - start
-            description = serialized_span.get("description") or ""
-            return (duration, -len(description))
-
-        # Note: for simplicity we sort spans by exact duration and description
-        # length. If ever needed, we could have a more involved heuristic, e.g.
-        # replacing exact durations with "buckets" and/or looking at other span
-        # properties.
-        path.append("spans")
-        for i, span in sorted(
-            enumerate(serialized_event.get("spans") or []),
-            key=shortest_duration_longest_description_first,
-        ):
-            description = span.get("description") or ""
-            if len(description) <= min_length:
-                continue
-            excess_bytes -= len(description) - min_length
-            path.extend([i, "description"])
-            # Note: the last time we call strip_string we could preserve a few
-            # more bytes up to a total length of MAX_EVENT_BYTES. Since that's
-            # not strictly required, we leave it out for now for simplicity.
-            span["description"] = _flatten_annotated(
-                strip_string(description, max_length=min_length)
-            )
-            del path[-2:]
-            del meta_stack[len(path) + 1 :]
-
-            if excess_bytes <= 0:
-                break
-        path.pop()
-        del meta_stack[len(path) + 1 :]
+        return _flatten_annotated(strip_string(obj, max_length=max_value_length))
 
+    #
+    # Start of serialize() function
+    #
     disable_capture_event.set(True)
     try:
-        rv = _serialize_node(event, **kwargs)
-        if meta_stack and isinstance(rv, dict):
-            rv["_meta"] = meta_stack[0]
-
-        sum_span_description_bytes = sum(span_description_bytes)
-        if smart_transaction_trimming and sum_span_description_bytes > 0:
-            span_count = len(event.get("spans") or [])
-            # This is an upper bound of how many bytes all descriptions would
-            # consume if the usual string truncation in _serialize_node_impl
-            # would have taken place, not accounting for the metadata attached
-            # as event["_meta"].
-            descriptions_budget_bytes = span_count * sentry_sdk.utils.MAX_STRING_LENGTH
-
-            # If by not truncating descriptions we ended up with more bytes than
-            # per the usual string truncation, check if the event is too large
-            # and we need to truncate some descriptions.
-            #
-            # This is guarded with an if statement to avoid JSON-encoding the
-            # event unnecessarily.
-            if sum_span_description_bytes > descriptions_budget_bytes:
-                original_bytes = len(json_dumps(rv))
-                excess_bytes = original_bytes - MAX_EVENT_BYTES
-                if excess_bytes > 0:
-                    # Event is too large, will likely be discarded by the
-                    # server. Trim it down before sending.
-                    _truncate_span_descriptions(rv, event, excess_bytes)
-
-                    # Span descriptions truncated, set or reset _meta.
-                    #
-                    # We run the same code earlier because we want to account
-                    # for _meta when calculating original_bytes, the number of
-                    # bytes in the JSON-encoded event.
-                    if meta_stack and isinstance(rv, dict):
-                        rv["_meta"] = meta_stack[0]
-        return rv
+        serialized_event = _serialize_node(event, **kwargs)
+        if meta_stack and isinstance(serialized_event, dict):
+            serialized_event["_meta"] = meta_stack[0]
+
+        return serialized_event
     finally:
         disable_capture_event.set(False)
diff --git a/sentry_sdk/session.py b/sentry_sdk/session.py
index 98a8c72cbb..45e2236ec9 100644
--- a/sentry_sdk/session.py
+++ b/sentry_sdk/session.py
@@ -1,10 +1,11 @@
 import uuid
-from datetime import datetime
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._compat import datetime_utcnow
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.utils import format_timestamp
 
-if MYPY:
+if TYPE_CHECKING:
+    from datetime import datetime
     from typing import Optional
     from typing import Union
     from typing import Any
@@ -48,7 +49,7 @@ def __init__(
         if sid is None:
             sid = uuid.uuid4()
         if started is None:
-            started = datetime.utcnow()
+            started = datetime_utcnow()
         if status is None:
             status = "ok"
         self.status = status
@@ -108,7 +109,7 @@ def update(
         if did is not None:
             self.did = str(did)
         if timestamp is None:
-            timestamp = datetime.utcnow()
+            timestamp = datetime_utcnow()
         self.timestamp = timestamp
         if started is not None:
             self.started = started
diff --git a/sentry_sdk/sessions.py b/sentry_sdk/sessions.py
index 4e4d21b89c..68255184b7 100644
--- a/sentry_sdk/sessions.py
+++ b/sentry_sdk/sessions.py
@@ -6,10 +6,10 @@
 import sentry_sdk
 from sentry_sdk.envelope import Envelope
 from sentry_sdk.session import Session
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 from sentry_sdk.utils import format_timestamp
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Callable
     from typing import Dict
@@ -93,7 +93,7 @@ def flush(self):
 
             envelope.add_session(session)
 
-        for (attrs, states) in pending_aggregates.items():
+        for attrs, states in pending_aggregates.items():
             if len(envelope.items) == MAX_ENVELOPE_ITEMS:
                 self.capture_func(envelope)
                 envelope = Envelope()
@@ -105,6 +105,13 @@ def flush(self):
 
     def _ensure_running(self):
         # type: (...) -> None
+        """
+        Check that we have an active thread to run in, or create one if not.
+
+        Note that this might fail (e.g. in Python 3.12 it's not possible to
+        spawn new threads at interpreter shutdown). In that case self._running
+        will be False after running this function.
+        """
         if self._thread_for_pid == os.getpid() and self._thread is not None:
             return None
         with self._thread_lock:
@@ -120,9 +127,17 @@ def _thread():
 
             thread = Thread(target=_thread)
             thread.daemon = True
-            thread.start()
+            try:
+                thread.start()
+            except RuntimeError:
+                # Unfortunately at this point the interpreter is in a state that no
+                # longer allows us to spawn a thread and we have to bail.
+                self._running = False
+                return None
+
             self._thread = thread
             self._thread_for_pid = os.getpid()
+
         return None
 
     def add_aggregate_session(
diff --git a/sentry_sdk/spotlight.py b/sentry_sdk/spotlight.py
new file mode 100644
index 0000000000..3d02ee74f0
--- /dev/null
+++ b/sentry_sdk/spotlight.py
@@ -0,0 +1,58 @@
+import io
+import urllib3
+
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Any
+    from typing import Dict
+    from typing import Optional
+
+from sentry_sdk.utils import logger
+from sentry_sdk.envelope import Envelope
+
+
+class SpotlightClient(object):
+    def __init__(self, url):
+        # type: (str) -> None
+        self.url = url
+        self.http = urllib3.PoolManager()
+        self.tries = 0
+
+    def capture_envelope(self, envelope):
+        # type: (Envelope) -> None
+        if self.tries > 3:
+            logger.warning(
+                "Too many errors sending to Spotlight, stop sending events there."
+            )
+            return
+        body = io.BytesIO()
+        envelope.serialize_into(body)
+        try:
+            req = self.http.request(
+                url=self.url,
+                body=body.getvalue(),
+                method="POST",
+                headers={
+                    "Content-Type": "application/x-sentry-envelope",
+                },
+            )
+            req.close()
+        except Exception as e:
+            self.tries += 1
+            logger.warning(str(e))
+
+
+def setup_spotlight(options):
+    # type: (Dict[str, Any]) -> Optional[SpotlightClient]
+
+    url = options.get("spotlight")
+
+    if isinstance(url, str):
+        pass
+    elif url is True:
+        url = "http://localhost:8969/stream"
+    else:
+        return None
+
+    return SpotlightClient(url)
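+
+
+# Editor's sketch (illustrative, not part of this change): Spotlight accepts
+# either a boolean, which falls back to the default stream URL above, or an
+# explicit URL string.
+#
+#     sentry_sdk.init(dsn="...", spotlight=True)
+#     sentry_sdk.init(dsn="...", spotlight="http://localhost:8969/stream")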
diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py
index f6f625acc8..7afe7e0944 100644
--- a/sentry_sdk/tracing.py
+++ b/sentry_sdk/tracing.py
@@ -1,26 +1,72 @@
 import uuid
 import random
-import time
 
 from datetime import datetime, timedelta
 
 import sentry_sdk
-
-from sentry_sdk.utils import logger
-from sentry_sdk._types import MYPY
+from sentry_sdk.consts import INSTRUMENTER
+from sentry_sdk.utils import (
+    get_current_thread_meta,
+    is_valid_sample_rate,
+    logger,
+    nanosecond_time,
+)
+from sentry_sdk._compat import datetime_utcnow, utc_from_timestamp, PY2
+from sentry_sdk.consts import SPANDATA
+from sentry_sdk._types import TYPE_CHECKING
 
 
-if MYPY:
+if TYPE_CHECKING:
     import typing
 
-    from typing import Optional
+    from collections.abc import Callable, MutableMapping
     from typing import Any
     from typing import Dict
+    from typing import Iterator
     from typing import List
+    from typing import Optional
+    from typing import overload
+    from typing import ParamSpec
     from typing import Tuple
-    from typing import Iterator
+    from typing import Union
+    from typing import TypeVar
+
+    P = ParamSpec("P")
+    R = TypeVar("R")
+
+    import sentry_sdk.profiler
+    from sentry_sdk._types import Event, MeasurementUnit, SamplingContext
+
+
+BAGGAGE_HEADER_NAME = "baggage"
+SENTRY_TRACE_HEADER_NAME = "sentry-trace"
+
 
-    from sentry_sdk._types import SamplingContext, MeasurementUnit
+# Transaction source
+# see https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations
+TRANSACTION_SOURCE_CUSTOM = "custom"
+TRANSACTION_SOURCE_URL = "url"
+TRANSACTION_SOURCE_ROUTE = "route"
+TRANSACTION_SOURCE_VIEW = "view"
+TRANSACTION_SOURCE_COMPONENT = "component"
+TRANSACTION_SOURCE_TASK = "task"
+
+# These are typically high cardinality and the server hates them
+LOW_QUALITY_TRANSACTION_SOURCES = [
+    TRANSACTION_SOURCE_URL,
+]
+
+SOURCE_FOR_STYLE = {
+    "endpoint": TRANSACTION_SOURCE_COMPONENT,
+    "function_name": TRANSACTION_SOURCE_COMPONENT,
+    "handler_name": TRANSACTION_SOURCE_COMPONENT,
+    "method_and_path_pattern": TRANSACTION_SOURCE_ROUTE,
+    "path": TRANSACTION_SOURCE_URL,
+    "route_name": TRANSACTION_SOURCE_COMPONENT,
+    "route_pattern": TRANSACTION_SOURCE_ROUTE,
+    "uri_template": TRANSACTION_SOURCE_ROUTE,
+    "url": TRANSACTION_SOURCE_ROUTE,
+}
 
 
 class _SpanRecorder(object):
@@ -47,6 +93,9 @@ def add(self, span):
 
 
 class Span(object):
+    """A span holds timing information of a block of code.
+    Spans can have multiple child spans, thus forming a span tree."""
+
     __slots__ = (
         "trace_id",
         "span_id",
@@ -56,7 +105,7 @@ class Span(object):
         "op",
         "description",
         "start_timestamp",
-        "_start_timestamp_monotonic",
+        "_start_timestamp_monotonic_ns",
         "status",
         "timestamp",
         "_tags",
@@ -65,6 +114,7 @@ class Span(object):
         "hub",
         "_context_manager_state",
         "_containing_transaction",
+        "_local_aggregator",
     )
 
     def __new__(cls, **kwargs):
@@ -94,6 +144,7 @@ def __init__(
         status=None,  # type: Optional[str]
         transaction=None,  # type: Optional[str] # deprecated
         containing_transaction=None,  # type: Optional[Transaction]
+        start_timestamp=None,  # type: Optional[Union[datetime, float]]
     ):
         # type: (...) -> None
         self.trace_id = trace_id or uuid.uuid4().hex
@@ -105,16 +156,18 @@ def __init__(
         self.description = description
         self.status = status
         self.hub = hub
-        self._tags = {}  # type: Dict[str, str]
+        self._tags = {}  # type: MutableMapping[str, str]
         self._data = {}  # type: Dict[str, Any]
         self._containing_transaction = containing_transaction
-        self.start_timestamp = datetime.utcnow()
+        if start_timestamp is None:
+            start_timestamp = datetime_utcnow()
+        elif isinstance(start_timestamp, float):
+            start_timestamp = utc_from_timestamp(start_timestamp)
+        self.start_timestamp = start_timestamp
         try:
-            # TODO: For Python 3.7+, we could use a clock with ns resolution:
-            # self._start_timestamp_monotonic = time.perf_counter_ns()
-
-            # Python 3.3+
-            self._start_timestamp_monotonic = time.perf_counter()
+            # profiling depends on this value and requires that
+            # it is measured in nanoseconds
+            self._start_timestamp_monotonic_ns = nanosecond_time()
         except AttributeError:
             pass
 
@@ -122,6 +175,10 @@ def __init__(
         self.timestamp = None  # type: Optional[datetime]
 
         self._span_recorder = None  # type: Optional[_SpanRecorder]
+        self._local_aggregator = None  # type: Optional[LocalAggregator]
+
+        thread_id, thread_name = get_current_thread_meta()
+        self.set_thread(thread_id, thread_name)
 
     # TODO this should really live on the Transaction class rather than the Span
     # class
@@ -130,6 +187,13 @@ def init_span_recorder(self, maxlen):
         if self._span_recorder is None:
             self._span_recorder = _SpanRecorder(maxlen)
 
+    def _get_local_aggregator(self):
+        # type: (...) -> LocalAggregator
+        rv = self._local_aggregator
+        if rv is None:
+            rv = self._local_aggregator = LocalAggregator()
+        return rv
+
     def __repr__(self):
         # type: () -> str
         return (
@@ -169,14 +233,17 @@ def __exit__(self, ty, value, tb):
     @property
     def containing_transaction(self):
         # type: () -> Optional[Transaction]
+        """The ``Transaction`` that this span belongs to.
+        The ``Transaction`` is the root of the span tree,
+        so one could also think of this ``Transaction`` as the "root span"."""
 
         # this is a getter rather than a regular attribute so that transactions
         # can return `self` here instead (as a way to prevent them circularly
         # referencing themselves)
         return self._containing_transaction
 
-    def start_child(self, **kwargs):
-        # type: (**Any) -> Span
+    def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
+        # type: (str, **Any) -> Span
         """
         Start a sub-span from the current span or transaction.
 
@@ -184,6 +251,13 @@ def start_child(self, **kwargs):
         trace id, sampling decision, transaction pointer, and span recorder are
         inherited from the current span/transaction.
         """
+        hub = self.hub or sentry_sdk.Hub.current
+        client = hub.client
+        configuration_instrumenter = client and client.options["instrumenter"]
+
+        if instrumenter != configuration_instrumenter:
+            return NoOpSpan()
+
         kwargs.setdefault("sampled", self.sampled)
 
         child = Span(
@@ -198,12 +272,15 @@ def start_child(self, **kwargs):
         )
         if span_recorder:
             span_recorder.add(child)
+
         return child
 
     def new_span(self, **kwargs):
         # type: (**Any) -> Span
-        """Deprecated: use start_child instead."""
-        logger.warning("Deprecated: use Span.start_child instead of Span.new_span.")
+        """DEPRECATED: use :py:meth:`sentry_sdk.tracing.Span.start_child` instead."""
+        logger.warning(
+            "Deprecated: use Span.start_child instead of Span.new_span. This will be removed in the future."
+        )
         return self.start_child(**kwargs)
 
     @classmethod
@@ -215,12 +292,15 @@ def continue_from_environ(
         # type: (...) -> Transaction
         """
         Create a Transaction with the given params, then add in data pulled from
-        the 'sentry-trace' and 'tracestate' headers from the environ (if any)
+        the ``sentry-trace`` and ``baggage`` headers from the environ (if any)
         before returning the Transaction.
 
-        This is different from `continue_from_headers` in that it assumes header
-        names in the form "HTTP_HEADER_NAME" - such as you would get from a wsgi
-        environ - rather than the form "header-name".
+        This is different from :py:meth:`~sentry_sdk.tracing.Span.continue_from_headers`
+        in that it assumes header names in the form ``HTTP_HEADER_NAME`` -
+        such as you would get from a WSGI/ASGI environ -
+        rather than the form ``header-name``.
+
+        :param environ: The ASGI/WSGI environ to pull information from.
         """
         if cls is Span:
             logger.warning(
@@ -238,7 +318,9 @@ def continue_from_headers(
         # type: (...) -> Transaction
         """
         Create a transaction with the given params (including any data pulled from
-        the 'sentry-trace' and 'tracestate' headers).
+        the ``sentry-trace`` and ``baggage`` headers).
+
+        :param headers: The dictionary with the HTTP headers to pull information from.
         """
         # TODO move this to the Transaction class
         if cls is Span:
@@ -247,8 +329,22 @@ def continue_from_headers(
                 "instead of Span.continue_from_headers."
             )
 
-        kwargs.update(extract_sentrytrace_data(headers.get("sentry-trace")))
-        kwargs.update(extract_tracestate_data(headers.get("tracestate")))
+        # TODO-neel move away from this kwargs stuff, it's confusing and opaque
+        # make more explicit
+        baggage = Baggage.from_incoming_header(headers.get(BAGGAGE_HEADER_NAME))
+        kwargs.update({BAGGAGE_HEADER_NAME: baggage})
+
+        sentrytrace_kwargs = extract_sentrytrace_data(
+            headers.get(SENTRY_TRACE_HEADER_NAME)
+        )
+
+        if sentrytrace_kwargs is not None:
+            kwargs.update(sentrytrace_kwargs)
+
+            # If there's an incoming sentry-trace header but no incoming baggage
+            # header (for instance, in traces coming from older SDKs), the baggage
+            # stays empty and frozen, so this SDK won't populate it as the head SDK.
+            baggage.freeze()
 
         transaction = Transaction(**kwargs)
         transaction.same_process_as_parent = False
@@ -258,21 +354,16 @@ def continue_from_headers(
     def iter_headers(self):
         # type: () -> Iterator[Tuple[str, str]]
         """
-        Creates a generator which returns the span's `sentry-trace` and
-        `tracestate` headers.
-
-        If the span's containing transaction doesn't yet have a
-        `sentry_tracestate` value, this will cause one to be generated and
-        stored.
+        Creates a generator which returns the span's ``sentry-trace`` and ``baggage`` headers.
+        If the span's containing transaction doesn't yet have a ``baggage`` value,
+        this will cause one to be generated and stored.
         """
-        yield "sentry-trace", self.to_traceparent()
+        yield SENTRY_TRACE_HEADER_NAME, self.to_traceparent()
 
-        tracestate = self.to_tracestate() if has_tracestate_enabled(self) else None
-        # `tracestate` will only be `None` if there's no client or no DSN
-        # TODO (kmclb) the above will be true once the feature is no longer
-        # behind a flag
-        if tracestate:
-            yield "tracestate", tracestate
+        if self.containing_transaction:
+            baggage = self.containing_transaction.get_baggage().serialize()
+            if baggage:
+                yield BAGGAGE_HEADER_NAME, baggage
 
     @classmethod
     def from_traceparent(
@@ -282,11 +373,10 @@ def from_traceparent(
     ):
         # type: (...) -> Optional[Transaction]
         """
-        DEPRECATED: Use Transaction.continue_from_headers(headers, **kwargs)
-
-        Create a Transaction with the given params, then add in data pulled from
-        the given 'sentry-trace' header value before returning the Transaction.
+        DEPRECATED: Use :py:meth:`sentry_sdk.tracing.Span.continue_from_headers`.
 
+        Create a ``Transaction`` with the given params, then add in data pulled from
+        the given ``sentry-trace`` header value before returning the ``Transaction``.
         """
         logger.warning(
             "Deprecated: Use Transaction.continue_from_headers(headers, **kwargs) "
@@ -296,67 +386,33 @@ def from_traceparent(
         if not traceparent:
             return None
 
-        return cls.continue_from_headers({"sentry-trace": traceparent}, **kwargs)
+        return cls.continue_from_headers(
+            {SENTRY_TRACE_HEADER_NAME: traceparent}, **kwargs
+        )
 
     def to_traceparent(self):
         # type: () -> str
-        sampled = ""
         if self.sampled is True:
             sampled = "1"
-        if self.sampled is False:
+        elif self.sampled is False:
             sampled = "0"
-        return "%s-%s-%s" % (self.trace_id, self.span_id, sampled)
-
-    def to_tracestate(self):
-        # type: () -> Optional[str]
-        """
-        Computes the `tracestate` header value using data from the containing
-        transaction.
-
-        If the containing transaction doesn't yet have a `sentry_tracestate`
-        value, this will cause one to be generated and stored.
-
-        If there is no containing transaction, a value will be generated but not
-        stored.
-
-        Returns None if there's no client and/or no DSN.
-        """
-
-        sentry_tracestate = self.get_or_set_sentry_tracestate()
-        third_party_tracestate = (
-            self.containing_transaction._third_party_tracestate
-            if self.containing_transaction
-            else None
-        )
-
-        if not sentry_tracestate:
-            return None
-
-        header_value = sentry_tracestate
-
-        if third_party_tracestate:
-            header_value = header_value + "," + third_party_tracestate
+        else:
+            sampled = None
 
-        return header_value
+        traceparent = "%s-%s" % (self.trace_id, self.span_id)
+        if sampled is not None:
+            traceparent += "-%s" % (sampled,)
 
-    def get_or_set_sentry_tracestate(self):
-        # type: (Span) -> Optional[str]
-        """
-        Read sentry tracestate off of the span's containing transaction.
+        return traceparent
 
-        If the transaction doesn't yet have a `_sentry_tracestate` value,
-        compute one and store it.
+    def to_baggage(self):
+        # type: () -> Optional[Baggage]
+        """Returns the :py:class:`~sentry_sdk.tracing_utils.Baggage`
+        associated with this ``Span``, if any. (Taken from the root of the span tree.)
         """
-        transaction = self.containing_transaction
-
-        if transaction:
-            if not transaction._sentry_tracestate:
-                transaction._sentry_tracestate = compute_tracestate_entry(self)
-
-            return transaction._sentry_tracestate
-
-        # orphan span - nowhere to store the value, so just return it
-        return compute_tracestate_entry(self)
+        if self.containing_transaction:
+            return self.containing_transaction.get_baggage()
+        return None
 
     def set_tag(self, key, value):
         # type: (str, Any) -> None
@@ -370,9 +426,21 @@ def set_status(self, value):
         # type: (str) -> None
         self.status = value
 
+    def set_thread(self, thread_id, thread_name):
+        # type: (Optional[int], Optional[str]) -> None
+
+        if thread_id is not None:
+            self.set_data(SPANDATA.THREAD_ID, str(thread_id))
+
+            if thread_name is not None:
+                self.set_data(SPANDATA.THREAD_NAME, thread_name)
+
     def set_http_status(self, http_status):
         # type: (int) -> None
-        self.set_tag("http.status_code", str(http_status))
+        self.set_tag(
+            "http.status_code", str(http_status)
+        )  # we keep this for backwards compatibility
+        self.set_data(SPANDATA.HTTP_STATUS_CODE, http_status)
 
         if http_status < 400:
             self.set_status("ok")
@@ -407,10 +475,23 @@ def is_success(self):
         # type: () -> bool
         return self.status == "ok"
 
-    def finish(self, hub=None):
-        # type: (Optional[sentry_sdk.Hub]) -> Optional[str]
-        # XXX: would be type: (Optional[sentry_sdk.Hub]) -> None, but that leads
+    def finish(self, hub=None, end_timestamp=None):
+        # type: (Optional[sentry_sdk.Hub], Optional[Union[float, datetime]]) -> Optional[str]
+        # Note: would be type: (Optional[sentry_sdk.Hub]) -> None, but that leads
         # to incompatible return types for Span.finish and Transaction.finish.
+        """Sets the end timestamp of the span.
+        Additionally, it creates a breadcrumb from the span
+        if the span represents a database or HTTP request.
+
+        :param hub: The hub to use for this transaction.
+            If not provided, the current hub will be used.
+        :param end_timestamp: Optional timestamp to use as the
+            end timestamp instead of the current time.
+
+        :return: Always ``None``. The type is ``Optional[str]`` to match
+            the return value of :py:meth:`sentry_sdk.tracing.Transaction.finish`.
+        """
+
         if self.timestamp is not None:
             # This span is already finished, ignore.
             return None
@@ -418,16 +499,26 @@ def finish(self, hub=None):
         hub = hub or self.hub or sentry_sdk.Hub.current
 
         try:
-            duration_seconds = time.perf_counter() - self._start_timestamp_monotonic
-            self.timestamp = self.start_timestamp + timedelta(seconds=duration_seconds)
+            if end_timestamp:
+                if isinstance(end_timestamp, float):
+                    end_timestamp = utc_from_timestamp(end_timestamp)
+                self.timestamp = end_timestamp
+            else:
+                elapsed = nanosecond_time() - self._start_timestamp_monotonic_ns
+                self.timestamp = self.start_timestamp + timedelta(
+                    microseconds=elapsed / 1000
+                )
         except AttributeError:
-            self.timestamp = datetime.utcnow()
+            self.timestamp = datetime_utcnow()
 
         maybe_create_breadcrumbs_from_span(hub, self)
+
         return None
 
     def to_json(self):
         # type: () -> Dict[str, Any]
+        """Returns a JSON-compatible representation of the span."""
+
         rv = {
             "trace_id": self.trace_id,
             "span_id": self.span_id,
@@ -442,6 +533,11 @@ def to_json(self):
         if self.status:
             self._tags["status"] = self.status
 
+        if self._local_aggregator is not None:
+            metrics_summary = self._local_aggregator.to_json()
+            if metrics_summary:
+                rv["_metrics_summary"] = metrics_summary
+
         tags = self._tags
         if tags:
             rv["tags"] = tags
@@ -460,45 +556,56 @@ def get_trace_context(self):
             "parent_span_id": self.parent_span_id,
             "op": self.op,
             "description": self.description,
-        }
+        }  # type: Dict[str, Any]
         if self.status:
             rv["status"] = self.status
 
-        # if the transaction didn't inherit a tracestate value, and no outgoing
-        # requests - whose need for headers would have caused a tracestate value
-        # to be created - were made as part of the transaction, the transaction
-        # still won't have a tracestate value, so compute one now
-        sentry_tracestate = self.get_or_set_sentry_tracestate()
-
-        if sentry_tracestate:
-            rv["tracestate"] = sentry_tracestate
+        if self.containing_transaction:
+            rv["dynamic_sampling_context"] = (
+                self.containing_transaction.get_baggage().dynamic_sampling_context()
+            )
 
         return rv
 
 
 class Transaction(Span):
+    """The Transaction is the root element that holds all the spans
+    for Sentry performance instrumentation."""
+
     __slots__ = (
         "name",
+        "source",
         "parent_sampled",
-        # the sentry portion of the `tracestate` header used to transmit
-        # correlation context for server-side dynamic sampling, of the form
-        # `sentry=xxxxx`, where `xxxxx` is the base64-encoded json of the
-        # correlation context data, missing trailing any =
-        "_sentry_tracestate",
-        # tracestate data from other vendors, of the form `dogs=yes,cats=maybe`
-        "_third_party_tracestate",
+        # used to create baggage value for head SDKs in dynamic sampling
+        "sample_rate",
         "_measurements",
+        "_contexts",
+        "_profile",
+        "_baggage",
     )
 
     def __init__(
         self,
         name="",  # type: str
         parent_sampled=None,  # type: Optional[bool]
-        sentry_tracestate=None,  # type: Optional[str]
-        third_party_tracestate=None,  # type: Optional[str]
+        baggage=None,  # type: Optional[Baggage]
+        source=TRANSACTION_SOURCE_CUSTOM,  # type: str
         **kwargs  # type: Any
     ):
         # type: (...) -> None
+        """Constructs a new Transaction.
+
+        :param name: Identifier of the transaction.
+            Will show up in the Sentry UI.
+        :param parent_sampled: Whether the parent transaction was sampled.
+            If True, this transaction will be kept; if False, it will be discarded.
+        :param baggage: The W3C baggage header value.
+            (see https://www.w3.org/TR/baggage/)
+        :param source: A string describing the source of the transaction name.
+            This will be used to determine the transaction's type.
+            See https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations
+            for more information. Default "custom".
+        """
         # TODO: consider removing this in a future release.
         # This is for backwards compatibility with releases before Transaction
         # existed, to allow for a smoother transition.
@@ -508,20 +615,22 @@ def __init__(
                 "instead of Span(transaction=...)."
             )
             name = kwargs.pop("transaction")
-        Span.__init__(self, **kwargs)
+
+        super(Transaction, self).__init__(**kwargs)
+
         self.name = name
+        self.source = source
+        self.sample_rate = None  # type: Optional[float]
         self.parent_sampled = parent_sampled
-        # if tracestate isn't inherited and set here, it will get set lazily,
-        # either the first time an outgoing request needs it for a header or the
-        # first time an event needs it for inclusion in the captured data
-        self._sentry_tracestate = sentry_tracestate
-        self._third_party_tracestate = third_party_tracestate
         self._measurements = {}  # type: Dict[str, Any]
+        self._contexts = {}  # type: Dict[str, Any]
+        self._profile = None  # type: Optional[sentry_sdk.profiler.Profile]
+        self._baggage = baggage
 
     def __repr__(self):
         # type: () -> str
         return (
-            "<%s(name=%r, op=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r)>"
+            "<%s(name=%r, op=%r, trace_id=%r, span_id=%r, parent_span_id=%r, sampled=%r, source=%r)>"
             % (
                 self.__class__.__name__,
                 self.name,
@@ -530,20 +639,51 @@ def __repr__(self):
                 self.span_id,
                 self.parent_span_id,
                 self.sampled,
+                self.source,
             )
         )
 
+    def __enter__(self):
+        # type: () -> Transaction
+        super(Transaction, self).__enter__()
+
+        if self._profile is not None:
+            self._profile.__enter__()
+
+        return self
+
+    def __exit__(self, ty, value, tb):
+        # type: (Optional[Any], Optional[Any], Optional[Any]) -> None
+        if self._profile is not None:
+            self._profile.__exit__(ty, value, tb)
+
+        super(Transaction, self).__exit__(ty, value, tb)
+
     @property
     def containing_transaction(self):
         # type: () -> Transaction
+        """The root element of the span tree.
+        In the case of a transaction it is the transaction itself.
+        """
 
         # Transactions (as spans) belong to themselves (as transactions). This
         # is a getter rather than a regular attribute to avoid having a circular
         # reference.
         return self
 
-    def finish(self, hub=None):
-        # type: (Optional[sentry_sdk.Hub]) -> Optional[str]
+    def finish(self, hub=None, end_timestamp=None):
+        # type: (Optional[sentry_sdk.Hub], Optional[Union[float, datetime]]) -> Optional[str]
+        """Finishes the transaction and sends it to Sentry.
+        All finished spans in the transaction will also be sent to Sentry.
+
+        :param hub: The hub to use for this transaction.
+            If not provided, the current hub will be used.
+        :param end_timestamp: Optional timestamp to use as the
+            end timestamp instead of the current time.
+
+        :return: The event ID if the transaction was sent to Sentry,
+            otherwise None.
+        """
         if self.timestamp is not None:
             # This transaction is already finished, ignore.
             return None
@@ -563,9 +703,12 @@ def finish(self, hub=None):
             # exclusively based on sample rate but also traces sampler, but
             # we handle this the same here.
             if client.transport and has_tracing_enabled(client.options):
-                client.transport.record_lost_event(
-                    "sample_rate", data_category="transaction"
-                )
+                if client.monitor and client.monitor.downsample_factor > 0:
+                    reason = "backpressure"
+                else:
+                    reason = "sample_rate"
+
+                client.transport.record_lost_event(reason, data_category="transaction")
 
             return None
 
@@ -575,13 +718,14 @@ def finish(self, hub=None):
             )
             self.name = ""
 
-        Span.finish(self, hub)
+        super(Transaction, self).finish(hub, end_timestamp)
 
         if not self.sampled:
             # At this point a `sampled = None` should have already been resolved
             # to a concrete decision.
             if self.sampled is None:
                 logger.warning("Discarding transaction without sampling decision.")
+
             return None
 
         finished_spans = [
@@ -596,40 +740,83 @@ def finish(self, hub=None):
         # to be garbage collected
         self._span_recorder = None
 
+        contexts = {}
+        contexts.update(self._contexts)
+        contexts.update({"trace": self.get_trace_context()})
+
         event = {
             "type": "transaction",
             "transaction": self.name,
-            "contexts": {"trace": self.get_trace_context()},
+            "transaction_info": {"source": self.source},
+            "contexts": contexts,
             "tags": self._tags,
             "timestamp": self.timestamp,
             "start_timestamp": self.start_timestamp,
             "spans": finished_spans,
-        }
+        }  # type: Event
+
+        if self._profile is not None and self._profile.valid():
+            event["profile"] = self._profile
+            self._profile = None
 
-        if has_custom_measurements_enabled():
-            event["measurements"] = self._measurements
+        event["measurements"] = self._measurements
+
+        # This is here since `to_json` is not invoked.  This really should
+        # be gone when we switch to onlyspans.
+        if self._local_aggregator is not None:
+            metrics_summary = self._local_aggregator.to_json()
+            if metrics_summary:
+                event["_metrics_summary"] = metrics_summary
 
         return hub.capture_event(event)
 
     def set_measurement(self, name, value, unit=""):
         # type: (str, float, MeasurementUnit) -> None
-        if not has_custom_measurements_enabled():
-            logger.debug(
-                "[Tracing] Experimental custom_measurements feature is disabled"
-            )
-            return
-
         self._measurements[name] = {"value": value, "unit": unit}
 
+    def set_context(self, key, value):
+        # type: (str, Any) -> None
+        """Sets a context. Transactions can have multiple contexts
+        and they should follow the format described in the "Contexts Interface"
+        documentation.
+
+        :param key: The name of the context.
+        :param value: The information about the context.
+        """
+        self._contexts[key] = value
+
+    def set_http_status(self, http_status):
+        # type: (int) -> None
+        """Sets the status of the Transaction according to the given HTTP status.
+
+        :param http_status: The HTTP status code."""
+        super(Transaction, self).set_http_status(http_status)
+        self.set_context("response", {"status_code": http_status})
+
     def to_json(self):
         # type: () -> Dict[str, Any]
+        """Returns a JSON-compatible representation of the transaction."""
         rv = super(Transaction, self).to_json()
 
         rv["name"] = self.name
+        rv["source"] = self.source
         rv["sampled"] = self.sampled
 
         return rv
 
+    def get_baggage(self):
+        # type: () -> Baggage
+        """Returns the :py:class:`~sentry_sdk.tracing_utils.Baggage`
+        associated with the Transaction.
+
+        The first time a new baggage with Sentry items is created,
+        it is frozen."""
+
+        if not self._baggage or self._baggage.mutable:
+            self._baggage = Baggage.populate_from_transaction(self)
+
+        return self._baggage
+
     def _set_initial_sampling_decision(self, sampling_context):
         # type: (SamplingContext) -> None
         """
@@ -667,6 +854,7 @@ def _set_initial_sampling_decision(self, sampling_context):
         # if the user has forced a sampling decision by passing a `sampled`
         # value when starting the transaction, go with that
         if self.sampled is not None:
+            self.sample_rate = float(self.sampled)
             return
 
         # we would have bailed already if neither `traces_sampler` nor
@@ -686,7 +874,7 @@ def _set_initial_sampling_decision(self, sampling_context):
         # Since this is coming from the user (or from a function provided by the
         # user), who knows what we might get. (The only valid values are
         # booleans or numbers between 0 and 1.)
-        if not is_valid_sample_rate(sample_rate):
+        if not is_valid_sample_rate(sample_rate, source="Tracing"):
             logger.warning(
                 "[Tracing] Discarding {transaction_description} because of invalid sample rate.".format(
                     transaction_description=transaction_description,
@@ -695,9 +883,14 @@ def _set_initial_sampling_decision(self, sampling_context):
             self.sampled = False
             return
 
+        self.sample_rate = float(sample_rate)
+
+        if client.monitor:
+            self.sample_rate /= 2**client.monitor.downsample_factor
+
         # if the function returned 0 (or false), or if `traces_sample_rate` is
         # 0, it's a sign the transaction should be dropped
-        if not sample_rate:
+        if not self.sample_rate:
             logger.debug(
                 "[Tracing] Discarding {transaction_description} because {reason}".format(
                     transaction_description=transaction_description,
@@ -714,7 +907,7 @@ def _set_initial_sampling_decision(self, sampling_context):
         # Now we roll the dice. random.random is inclusive of 0, but not of 1,
         # so strict < is safe here. In case sample_rate is a boolean, cast it
         # to a float (True becomes 1.0 and False becomes 0.0)
-        self.sampled = random.random() < float(sample_rate)
+        self.sampled = random.random() < self.sample_rate
 
         if self.sampled:
             logger.debug(
@@ -726,21 +919,146 @@ def _set_initial_sampling_decision(self, sampling_context):
             logger.debug(
                 "[Tracing] Discarding {transaction_description} because it's not included in the random sample (sampling rate = {sample_rate})".format(
                     transaction_description=transaction_description,
-                    sample_rate=float(sample_rate),
+                    sample_rate=self.sample_rate,
                 )
             )
 
 
+class NoOpSpan(Span):
+    def __repr__(self):
+        # type: () -> str
+        return self.__class__.__name__
+
+    @property
+    def containing_transaction(self):
+        # type: () -> Optional[Transaction]
+        return None
+
+    def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
+        # type: (str, **Any) -> NoOpSpan
+        return NoOpSpan()
+
+    def new_span(self, **kwargs):
+        # type: (**Any) -> NoOpSpan
+        return self.start_child(**kwargs)
+
+    def to_traceparent(self):
+        # type: () -> str
+        return ""
+
+    def to_baggage(self):
+        # type: () -> Optional[Baggage]
+        return None
+
+    def get_baggage(self):
+        # type: () -> Optional[Baggage]
+        return None
+
+    def iter_headers(self):
+        # type: () -> Iterator[Tuple[str, str]]
+        return iter(())
+
+    def set_tag(self, key, value):
+        # type: (str, Any) -> None
+        pass
+
+    def set_data(self, key, value):
+        # type: (str, Any) -> None
+        pass
+
+    def set_status(self, value):
+        # type: (str) -> None
+        pass
+
+    def set_http_status(self, http_status):
+        # type: (int) -> None
+        pass
+
+    def is_success(self):
+        # type: () -> bool
+        return True
+
+    def to_json(self):
+        # type: () -> Dict[str, Any]
+        return {}
+
+    def get_trace_context(self):
+        # type: () -> Any
+        return {}
+
+    def finish(self, hub=None, end_timestamp=None):
+        # type: (Optional[sentry_sdk.Hub], Optional[Union[float, datetime]]) -> Optional[str]
+        pass
+
+    def set_measurement(self, name, value, unit=""):
+        # type: (str, float, MeasurementUnit) -> None
+        pass
+
+    def set_context(self, key, value):
+        # type: (str, Any) -> None
+        pass
+
+    def init_span_recorder(self, maxlen):
+        # type: (int) -> None
+        pass
+
+    def _set_initial_sampling_decision(self, sampling_context):
+        # type: (SamplingContext) -> None
+        pass
+
+
+if TYPE_CHECKING:
+
+    @overload
+    def trace(func=None):
+        # type: (None) -> Callable[[Callable[P, R]], Callable[P, R]]
+        pass
+
+    @overload
+    def trace(func):
+        # type: (Callable[P, R]) -> Callable[P, R]
+        pass
+
+
+def trace(func=None):
+    # type: (Optional[Callable[P, R]]) -> Union[Callable[P, R], Callable[[Callable[P, R]], Callable[P, R]]]
+    """
+    Decorator to start a child span under the existing current transaction.
+    If there is no current transaction, then nothing will be traced.
+
+    .. code-block::
+        :caption: Usage
+
+        import sentry_sdk
+
+        @sentry_sdk.trace
+        def my_function():
+            ...
+
+        @sentry_sdk.trace
+        async def my_async_function():
+            ...
+    """
+    if PY2:
+        from sentry_sdk.tracing_utils_py2 import start_child_span_decorator
+    else:
+        from sentry_sdk.tracing_utils_py3 import start_child_span_decorator
+
+    # This pattern allows usage of both @sentry_sdk.trace and @sentry_sdk.trace(...)
+    # See https://stackoverflow.com/questions/52126071/decorator-with-arguments-avoid-parenthesis-when-no-arguments/52126278
+    if func:
+        return start_child_span_decorator(func)
+    else:
+        return start_child_span_decorator
+
+
 # Circular imports
 
 from sentry_sdk.tracing_utils import (
+    Baggage,
     EnvironHeaders,
-    compute_tracestate_entry,
     extract_sentrytrace_data,
-    extract_tracestate_data,
-    has_tracestate_enabled,
     has_tracing_enabled,
-    is_valid_sample_rate,
     maybe_create_breadcrumbs_from_span,
-    has_custom_measurements_enabled,
 )
+from sentry_sdk.metrics import LocalAggregator
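
A short sketch of the explicit-timestamp API added above; ``do_work`` is a
hypothetical stand-in, and float timestamps are interpreted as UTC epoch
seconds via ``utc_from_timestamp``:

    import time
    import sentry_sdk

    def do_work():  # hypothetical workload
        time.sleep(0.1)

    with sentry_sdk.start_transaction(name="batch-import") as transaction:
        start = time.time()
        do_work()
        # Back-date a child span around the already-finished work.
        span = transaction.start_child(
            op="function", description="do_work", start_timestamp=start
        )
        span.finish(end_timestamp=time.time())
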
diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py
index 2d31b9903e..98cdec5e38 100644
--- a/sentry_sdk/tracing_utils.py
+++ b/sentry_sdk/tracing_utils.py
@@ -1,38 +1,41 @@
-import re
 import contextlib
-import json
-import math
-
-from numbers import Real
+import os
+import re
+import sys
 
 import sentry_sdk
-
+from sentry_sdk.consts import OP, SPANDATA
 from sentry_sdk.utils import (
     capture_internal_exceptions,
+    filename_for_module,
     Dsn,
-    logger,
-    safe_str,
-    to_base64,
+    match_regex_list,
     to_string,
-    from_base64,
+    is_sentry_url,
+    _is_external_source,
+    _module_in_list,
 )
-from sentry_sdk._compat import PY2
-from sentry_sdk._types import MYPY
+from sentry_sdk._compat import PY2, duration_in_milliseconds, iteritems
+from sentry_sdk._types import TYPE_CHECKING
 
 if PY2:
     from collections import Mapping
+    from urllib import quote, unquote
 else:
     from collections.abc import Mapping
+    from urllib.parse import quote, unquote
 
-if MYPY:
+if TYPE_CHECKING:
     import typing
 
-    from typing import Generator
-    from typing import Optional
     from typing import Any
     from typing import Dict
+    from typing import Generator
+    from typing import Optional
     from typing import Union
 
+    from types import FrameType
+
 
 SENTRY_TRACE_REGEX = re.compile(
     "^[ \t]*"  # whitespace
@@ -53,27 +56,6 @@
     "([a-zA-Z0-9+/]{2,3})?"
 )
 
-# comma-delimited list of entries of the form `xxx=yyy`
-tracestate_entry = "[^=]+=[^=]+"
-TRACESTATE_ENTRIES_REGEX = re.compile(
-    # one or more xxxxx=yyyy entries
-    "^({te})+"
-    # each entry except the last must be followed by a comma
-    "(,|$)".format(te=tracestate_entry)
-)
-
-# this doesn't check that the value is valid, just that there's something there
-# of the form `sentry=xxxx`
-SENTRY_TRACESTATE_ENTRY_REGEX = re.compile(
-    # either sentry is the first entry or there's stuff immediately before it,
-    # ending in a comma (this prevents matching something like `coolsentry=xxx`)
-    "(?:^|.+,)"
-    # sentry's part, not including the potential comma
-    "(sentry=[^,]*)"
-    # either there's a comma and another vendor's entry or we end
-    "(?:,.+|$)"
-)
-
 
 class EnvironHeaders(Mapping):  # type: ignore
     def __init__(
@@ -107,47 +89,21 @@ def __iter__(self):
 
 
 def has_tracing_enabled(options):
-    # type: (Dict[str, Any]) -> bool
+    # type: (Optional[Dict[str, Any]]) -> bool
     """
     Returns True if either traces_sample_rate or traces_sampler is
-    defined, False otherwise.
-    """
-
-    return bool(
-        options.get("traces_sample_rate") is not None
-        or options.get("traces_sampler") is not None
-    )
-
-
-def is_valid_sample_rate(rate):
-    # type: (Any) -> bool
+    defined and enable_tracing is not set to False.
     """
-    Checks the given sample rate to make sure it is valid type and value (a
-    boolean or a number between 0 and 1, inclusive).
-    """
-
-    # both booleans and NaN are instances of Real, so a) checking for Real
-    # checks for the possibility of a boolean also, and b) we have to check
-    # separately for NaN
-    if not isinstance(rate, Real) or math.isnan(rate):
-        logger.warning(
-            "[Tracing] Given sample rate is invalid. Sample rate must be a boolean or a number between 0 and 1. Got {rate} of type {type}.".format(
-                rate=rate, type=type(rate)
-            )
-        )
+    if options is None:
         return False
 
-    # in case rate is a boolean, it will get cast to 1 if it's True and 0 if it's False
-    rate = float(rate)
-    if rate < 0 or rate > 1:
-        logger.warning(
-            "[Tracing] Given sample rate is invalid. Sample rate must be between 0 and 1. Got {rate}.".format(
-                rate=rate
-            )
+    return bool(
+        options.get("enable_tracing") is not False
+        and (
+            options.get("traces_sample_rate") is not None
+            or options.get("traces_sampler") is not None
         )
-        return False
-
-    return True
+    )
 
 
 @contextlib.contextmanager
@@ -158,8 +114,9 @@ def record_sql_queries(
     params_list,  # type:  Any
     paramstyle,  # type: Optional[str]
     executemany,  # type: bool
+    record_cursor_repr=False,  # type: bool
 ):
-    # type: (...) -> Generator[Span, None, None]
+    # type: (...) -> Generator[sentry_sdk.tracing.Span, None, None]
 
     # TODO: Bring back capturing of params by default
     if hub.client and hub.client.options["_experiments"].get(
@@ -183,23 +140,25 @@ def record_sql_queries(
         data["db.paramstyle"] = paramstyle
     if executemany:
         data["db.executemany"] = True
+    if record_cursor_repr and cursor is not None:
+        data["db.cursor"] = cursor
 
     with capture_internal_exceptions():
         hub.add_breadcrumb(message=query, category="query", data=data)
 
-    with hub.start_span(op="db", description=query) as span:
+    with hub.start_span(op=OP.DB, description=query) as span:
         for k, v in data.items():
             span.set_data(k, v)
         yield span
 
 
 def maybe_create_breadcrumbs_from_span(hub, span):
-    # type: (sentry_sdk.Hub, Span) -> None
-    if span.op == "redis":
+    # type: (sentry_sdk.Hub, sentry_sdk.tracing.Span) -> None
+    if span.op == OP.DB_REDIS:
         hub.add_breadcrumb(
             message=span.description, type="redis", category="redis", data=span._tags
         )
-    elif span.op == "http":
+    elif span.op == OP.HTTP_CLIENT:
         hub.add_breadcrumb(type="http", category="httplib", data=span._data)
     elif span.op == "subprocess":
         hub.add_breadcrumb(
@@ -210,28 +169,134 @@ def maybe_create_breadcrumbs_from_span(hub, span):
         )
 
 
+def add_query_source(hub, span):
+    # type: (sentry_sdk.Hub, sentry_sdk.tracing.Span) -> None
+    """
+    Adds OTel compatible source code information to the span
+    """
+    client = hub.client
+    if client is None:
+        return
+
+    if span.timestamp is None or span.start_timestamp is None:
+        return
+
+    should_add_query_source = client.options.get("enable_db_query_source", True)
+    if not should_add_query_source:
+        return
+
+    duration = span.timestamp - span.start_timestamp
+    threshold = client.options.get("db_query_source_threshold_ms", 0)
+    slow_query = duration_in_milliseconds(duration) > threshold
+
+    if not slow_query:
+        return
+
+    project_root = client.options["project_root"]
+    in_app_include = client.options.get("in_app_include")
+    in_app_exclude = client.options.get("in_app_exclude")
+
+    # Find the correct frame
+    frame = sys._getframe()  # type: Union[FrameType, None]
+    while frame is not None:
+        try:
+            abs_path = frame.f_code.co_filename
+            if abs_path and PY2:
+                abs_path = os.path.abspath(abs_path)
+        except Exception:
+            abs_path = ""
+
+        try:
+            namespace = frame.f_globals.get("__name__")  # type: Optional[str]
+        except Exception:
+            namespace = None
+
+        is_sentry_sdk_frame = namespace is not None and namespace.startswith(
+            "sentry_sdk."
+        )
+
+        should_be_included = not _is_external_source(abs_path)
+        if namespace is not None:
+            if in_app_exclude and _module_in_list(namespace, in_app_exclude):
+                should_be_included = False
+            if in_app_include and _module_in_list(namespace, in_app_include):
+                # in_app_include takes precedence over in_app_exclude, so doing it
+                # at the end
+                should_be_included = True
+
+        if (
+            abs_path.startswith(project_root)
+            and should_be_included
+            and not is_sentry_sdk_frame
+        ):
+            break
+
+        frame = frame.f_back
+    else:
+        frame = None
+
+    # Set the data
+    if frame is not None:
+        try:
+            lineno = frame.f_lineno
+        except Exception:
+            lineno = None
+        if lineno is not None:
+            span.set_data(SPANDATA.CODE_LINENO, lineno)
+
+        try:
+            namespace = frame.f_globals.get("__name__")
+        except Exception:
+            namespace = None
+        if namespace is not None:
+            span.set_data(SPANDATA.CODE_NAMESPACE, namespace)
+
+        try:
+            filepath = frame.f_code.co_filename
+        except Exception:
+            filepath = None
+        if filepath is not None:
+            if namespace is not None and not PY2:
+                in_app_path = filename_for_module(namespace, filepath)
+            elif project_root is not None and filepath.startswith(project_root):
+                in_app_path = filepath.replace(project_root, "").lstrip(os.sep)
+            else:
+                in_app_path = filepath
+            span.set_data(SPANDATA.CODE_FILEPATH, in_app_path)
+
+        try:
+            code_function = frame.f_code.co_name
+        except Exception:
+            code_function = None
+
+        if code_function is not None:
+            span.set_data(SPANDATA.CODE_FUNCTION, code_function)
+
+
 def extract_sentrytrace_data(header):
-    # type: (Optional[str]) -> typing.Mapping[str, Union[str, bool, None]]
+    # type: (Optional[str]) -> Optional[Dict[str, Union[str, bool, None]]]
     """
     Given a `sentry-trace` header string, return a dictionary of data.
     """
-    trace_id = parent_span_id = parent_sampled = None
+    if not header:
+        return None
 
-    if header:
-        if header.startswith("00-") and header.endswith("-00"):
-            header = header[3:-3]
+    if header.startswith("00-") and header.endswith("-00"):
+        header = header[3:-3]
 
-        match = SENTRY_TRACE_REGEX.match(header)
+    match = SENTRY_TRACE_REGEX.match(header)
+    if not match:
+        return None
 
-        if match:
-            trace_id, parent_span_id, sampled_str = match.groups()
+    trace_id, parent_span_id, sampled_str = match.groups()
+    parent_sampled = None
 
-            if trace_id:
-                trace_id = "{:032x}".format(int(trace_id, 16))
-            if parent_span_id:
-                parent_span_id = "{:016x}".format(int(parent_span_id, 16))
-            if sampled_str:
-                parent_sampled = sampled_str != "0"
+    if trace_id:
+        trace_id = "{:032x}".format(int(trace_id, 16))
+    if parent_span_id:
+        parent_span_id = "{:016x}".format(int(parent_span_id, 16))
+    if sampled_str:
+        parent_sampled = sampled_str != "0"
 
     return {
         "trace_id": trace_id,
@@ -240,180 +305,218 @@ def extract_sentrytrace_data(header):
     }
 
 
-def extract_tracestate_data(header):
-    # type: (Optional[str]) -> typing.Mapping[str, Optional[str]]
+def _format_sql(cursor, sql):
+    # type: (Any, str) -> Optional[str]
+
+    real_sql = None
+
+    # If we're using psycopg2, it could be that we're
+    # looking at a query that uses Composed objects. Use psycopg2's mogrify
+    # function to format the query. We lose per-parameter trimming but gain
+    # accuracy in formatting.
+    try:
+        if hasattr(cursor, "mogrify"):
+            real_sql = cursor.mogrify(sql)
+            if isinstance(real_sql, bytes):
+                real_sql = real_sql.decode(cursor.connection.encoding)
+    except Exception:
+        real_sql = None
+
+    return real_sql or to_string(sql)
+
+
+class Baggage(object):
     """
-    Extracts the sentry tracestate value and any third-party data from the given
-    tracestate header, returning a dictionary of data.
+    The W3C Baggage header information (see https://www.w3.org/TR/baggage/).
     """
-    sentry_entry = third_party_entry = None
-    before = after = ""
-
-    if header:
-        # find sentry's entry, if any
-        sentry_match = SENTRY_TRACESTATE_ENTRY_REGEX.search(header)
-
-        if sentry_match:
-            sentry_entry = sentry_match.group(1)
-
-            # remove the commas after the split so we don't end up with
-            # `xxx=yyy,,zzz=qqq` (double commas) when we put them back together
-            before, after = map(lambda s: s.strip(","), header.split(sentry_entry))
-
-            # extract sentry's value from its entry and test to make sure it's
-            # valid; if it isn't, discard the entire entry so that a new one
-            # will be created
-            sentry_value = sentry_entry.replace("sentry=", "")
-            if not re.search("^{b64}$".format(b64=base64_stripped), sentry_value):
-                sentry_entry = None
-        else:
-            after = header
-
-        # if either part is invalid or empty, remove it before gluing them together
-        third_party_entry = (
-            ",".join(filter(TRACESTATE_ENTRIES_REGEX.search, [before, after])) or None
-        )
 
-    return {
-        "sentry_tracestate": sentry_entry,
-        "third_party_tracestate": third_party_entry,
-    }
+    __slots__ = ("sentry_items", "third_party_items", "mutable")
 
+    SENTRY_PREFIX = "sentry-"
+    SENTRY_PREFIX_REGEX = re.compile("^sentry-")
 
-def compute_tracestate_value(data):
-    # type: (typing.Mapping[str, str]) -> str
-    """
-    Computes a new tracestate value using the given data.
+    def __init__(
+        self,
+        sentry_items,  # type: Dict[str, str]
+        third_party_items="",  # type: str
+        mutable=True,  # type: bool
+    ):
+        self.sentry_items = sentry_items
+        self.third_party_items = third_party_items
+        self.mutable = mutable
+
+    @classmethod
+    def from_incoming_header(cls, header):
+        # type: (Optional[str]) -> Baggage
+        """
+        Create a Baggage object from an incoming header string.
+        The result is frozen if the header already has Sentry items.
+        """
+        sentry_items = {}
+        third_party_items = ""
+        mutable = True
+
+        if header:
+            for item in header.split(","):
+                if "=" not in item:
+                    continue
+
+                with capture_internal_exceptions():
+                    item = item.strip()
+                    key, val = item.split("=")
+                    if Baggage.SENTRY_PREFIX_REGEX.match(key):
+                        baggage_key = unquote(key.split("-")[1])
+                        sentry_items[baggage_key] = unquote(val)
+                        mutable = False
+                    else:
+                        third_party_items += ("," if third_party_items else "") + item
+
+        return Baggage(sentry_items, third_party_items, mutable)
+
+    @classmethod
+    def from_options(cls, scope):
+        # type: (sentry_sdk.scope.Scope) -> Optional[Baggage]
+
+        sentry_items = {}  # type: Dict[str, str]
+        third_party_items = ""
+        mutable = False
+
+        client = sentry_sdk.Hub.current.client
+
+        if client is None or scope._propagation_context is None:
+            return Baggage(sentry_items)
 
-    Note: Returns just the base64-encoded data, NOT the full `sentry=...`
-    tracestate entry.
-    """
+        options = client.options
+        propagation_context = scope._propagation_context
 
-    tracestate_json = json.dumps(data, default=safe_str)
+        if propagation_context is not None and "trace_id" in propagation_context:
+            sentry_items["trace_id"] = propagation_context["trace_id"]
 
-    # Base64-encoded strings always come out with a length which is a multiple
-    # of 4. In order to achieve this, the end is padded with one or more `=`
-    # signs. Because the tracestate standard calls for using `=` signs between
-    # vendor name and value (`sentry=xxx,dogsaregreat=yyy`), to avoid confusion
-    # we strip the `=`
-    return (to_base64(tracestate_json) or "").rstrip("=")
+        if options.get("environment"):
+            sentry_items["environment"] = options["environment"]
 
+        if options.get("release"):
+            sentry_items["release"] = options["release"]
 
-def compute_tracestate_entry(span):
-    # type: (Span) -> Optional[str]
-    """
-    Computes a new sentry tracestate for the span. Includes the `sentry=`.
+        if options.get("dsn"):
+            sentry_items["public_key"] = Dsn(options["dsn"]).public_key
 
-    Will return `None` if there's no client and/or no DSN.
-    """
-    data = {}
+        if options.get("traces_sample_rate"):
+            sentry_items["sample_rate"] = options["traces_sample_rate"]
 
-    hub = span.hub or sentry_sdk.Hub.current
+        user = (scope and scope._user) or {}
+        if user.get("segment"):
+            sentry_items["user_segment"] = user["segment"]
 
-    client = hub.client
-    scope = hub.scope
+        return Baggage(sentry_items, third_party_items, mutable)
 
-    if client and client.options.get("dsn"):
-        options = client.options
-        user = scope._user
+    @classmethod
+    def populate_from_transaction(cls, transaction):
+        # type: (sentry_sdk.tracing.Transaction) -> Baggage
+        """
+        Populate a fresh baggage entry with sentry_items and make it immutable
+        if this is the head SDK that originates the trace.
+        """
+        hub = transaction.hub or sentry_sdk.Hub.current
+        client = hub.client
+        sentry_items = {}  # type: Dict[str, str]
 
-        data = {
-            "trace_id": span.trace_id,
-            "environment": options["environment"],
-            "release": options.get("release"),
-            "public_key": Dsn(options["dsn"]).public_key,
-        }
+        if not client:
+            return Baggage(sentry_items)
 
-        if user and (user.get("id") or user.get("segment")):
-            user_data = {}
+        options = client.options or {}
+        user = (hub.scope and hub.scope._user) or {}
 
-            if user.get("id"):
-                user_data["id"] = user["id"]
+        sentry_items["trace_id"] = transaction.trace_id
 
-            if user.get("segment"):
-                user_data["segment"] = user["segment"]
+        if options.get("environment"):
+            sentry_items["environment"] = options["environment"]
 
-            data["user"] = user_data
+        if options.get("release"):
+            sentry_items["release"] = options["release"]
 
-        if span.containing_transaction:
-            data["transaction"] = span.containing_transaction.name
+        if options.get("dsn"):
+            sentry_items["public_key"] = Dsn(options["dsn"]).public_key
 
-        return "sentry=" + compute_tracestate_value(data)
+        if (
+            transaction.name
+            and transaction.source not in LOW_QUALITY_TRANSACTION_SOURCES
+        ):
+            sentry_items["transaction"] = transaction.name
 
-    return None
+        if user.get("segment"):
+            sentry_items["user_segment"] = user["segment"]
 
+        if transaction.sample_rate is not None:
+            sentry_items["sample_rate"] = str(transaction.sample_rate)
 
-def reinflate_tracestate(encoded_tracestate):
-    # type: (str) -> typing.Optional[Mapping[str, str]]
-    """
-    Given a sentry tracestate value in its encoded form, translate it back into
-    a dictionary of data.
-    """
-    inflated_tracestate = None
-
-    if encoded_tracestate:
-        # Base64-encoded strings always come out with a length which is a
-        # multiple of 4. In order to achieve this, the end is padded with one or
-        # more `=` signs. Because the tracestate standard calls for using `=`
-        # signs between vendor name and value (`sentry=xxx,dogsaregreat=yyy`),
-        # to avoid confusion we strip the `=` when the data is initially
-        # encoded. Python's decoding function requires they be put back.
-        # Fortunately, it doesn't complain if there are too many, so we just
-        # attach two `=` on spec (there will never be more than 2, see
-        # https://en.wikipedia.org/wiki/Base64#Decoding_Base64_without_padding).
-        tracestate_json = from_base64(encoded_tracestate + "==")
+        if transaction.sampled is not None:
+            sentry_items["sampled"] = "true" if transaction.sampled else "false"
 
-        try:
-            assert tracestate_json is not None
-            inflated_tracestate = json.loads(tracestate_json)
-        except Exception as err:
-            logger.warning(
-                (
-                    "Unable to attach tracestate data to envelope header: {err}"
-                    + "\nTracestate value is {encoded_tracestate}"
-                ).format(err=err, encoded_tracestate=encoded_tracestate),
-            )
+        # there's an existing baggage but it was mutable,
+        # which is why we are creating this new baggage.
+        # However, if by chance the user put some sentry items in there, give them precedence.
+        if transaction._baggage and transaction._baggage.sentry_items:
+            sentry_items.update(transaction._baggage.sentry_items)
 
-    return inflated_tracestate
+        return Baggage(sentry_items, mutable=False)
 
+    def freeze(self):
+        # type: () -> None
+        self.mutable = False
 
-def _format_sql(cursor, sql):
-    # type: (Any, str) -> Optional[str]
+    def dynamic_sampling_context(self):
+        # type: () -> Dict[str, str]
+        header = {}
 
-    real_sql = None
+        for key, item in iteritems(self.sentry_items):
+            header[key] = item
 
-    # If we're using psycopg2, it could be that we're
-    # looking at a query that uses Composed objects. Use psycopg2's mogrify
-    # function to format the query. We lose per-parameter trimming but gain
-    # accuracy in formatting.
-    try:
-        if hasattr(cursor, "mogrify"):
-            real_sql = cursor.mogrify(sql)
-            if isinstance(real_sql, bytes):
-                real_sql = real_sql.decode(cursor.connection.encoding)
-    except Exception:
-        real_sql = None
+        return header
 
-    return real_sql or to_string(sql)
+    def serialize(self, include_third_party=False):
+        # type: (bool) -> str
+        items = []
 
+        for key, val in iteritems(self.sentry_items):
+            with capture_internal_exceptions():
+                item = Baggage.SENTRY_PREFIX + quote(key) + "=" + quote(str(val))
+                items.append(item)
 
-def has_tracestate_enabled(span=None):
-    # type: (Optional[Span]) -> bool
+        if include_third_party:
+            items.append(self.third_party_items)
 
-    client = ((span and span.hub) or sentry_sdk.Hub.current).client
-    options = client and client.options
+        return ",".join(items)
 
-    return bool(options and options["_experiments"].get("propagate_tracestate"))
 
+def should_propagate_trace(hub, url):
+    # type: (sentry_sdk.Hub, str) -> bool
+    """
+    Returns True if url matches trace_propagation_targets configured in the given hub. Otherwise, returns False.
+    """
+    client = hub.client  # type: Any
+    trace_propagation_targets = client.options["trace_propagation_targets"]
 
-def has_custom_measurements_enabled():
-    # type: () -> bool
-    client = sentry_sdk.Hub.current.client
-    options = client and client.options
-    return bool(options and options["_experiments"].get("custom_measurements"))
+    if is_sentry_url(hub, url):
+        return False
 
+    return match_regex_list(url, trace_propagation_targets, substring_matching=True)
 
-# Circular imports
 
-if MYPY:
-    from sentry_sdk.tracing import Span
+def normalize_incoming_data(incoming_data):
+    # type: (Dict[str, Any]) -> Dict[str, Any]
+    """
+    Normalizes incoming data so that all keys are lowercase, use dashes instead of underscores, and are stripped of known prefixes.
+    """
+    data = {}
+    for key, value in incoming_data.items():
+        if key.startswith("HTTP_"):
+            key = key[5:]
+
+        key = key.replace("_", "-").lower()
+        data[key] = value
+
+    return data
+
+
+# Circular imports
+from sentry_sdk.tracing import LOW_QUALITY_TRANSACTION_SOURCES
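
As a quick illustration (not part of the patch): a frozen `Baggage` like the one returned above serializes each Sentry item with the `sentry-` prefix into a W3C-style baggage header. The constructor call below assumes the `Baggage(sentry_items, third_party_items, mutable)` signature from earlier in this file; the values are made up.

```python
from sentry_sdk.tracing_utils import Baggage

baggage = Baggage(
    sentry_items={
        "trace_id": "771a43a4192642f0b136d5159a501700",
        "sample_rate": "0.5",
    },
    third_party_items="other-vendor=foo",
    mutable=False,
)

serialized = baggage.serialize()
assert "sentry-trace_id=771a43a4192642f0b136d5159a501700" in serialized
assert "sentry-sample_rate=0.5" in serialized

# Third-party entries are appended verbatim when requested:
assert baggage.serialize(include_third_party=True).endswith("other-vendor=foo")
```
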
diff --git a/sentry_sdk/tracing_utils_py2.py b/sentry_sdk/tracing_utils_py2.py
new file mode 100644
index 0000000000..a251ab41be
--- /dev/null
+++ b/sentry_sdk/tracing_utils_py2.py
@@ -0,0 +1,45 @@
+from functools import wraps
+
+import sentry_sdk
+from sentry_sdk import get_current_span
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import OP
+from sentry_sdk.utils import logger, qualname_from_function
+
+
+if TYPE_CHECKING:
+    from typing import Any
+
+
+def start_child_span_decorator(func):
+    # type: (Any) -> Any
+    """
+    Decorator to add child spans for functions.
+
+    This is the Python 2 compatible version of the decorator.
+    Duplicated code from ``sentry_sdk.tracing_utils_py3.start_child_span_decorator``.
+
+    See also ``sentry_sdk.tracing.trace()``.
+    """
+
+    @wraps(func)
+    def func_with_tracing(*args, **kwargs):
+        # type: (*Any, **Any) -> Any
+
+        span = get_current_span(sentry_sdk.Hub.current)
+
+        if span is None:
+            logger.warning(
+                "Can not create a child span for %s. "
+                "Please start a Sentry transaction before calling this function.",
+                qualname_from_function(func),
+            )
+            return func(*args, **kwargs)
+
+        with span.start_child(
+            op=OP.FUNCTION,
+            description=qualname_from_function(func),
+        ):
+            return func(*args, **kwargs)
+
+    return func_with_tracing
diff --git a/sentry_sdk/tracing_utils_py3.py b/sentry_sdk/tracing_utils_py3.py
new file mode 100644
index 0000000000..d58d5f7cb4
--- /dev/null
+++ b/sentry_sdk/tracing_utils_py3.py
@@ -0,0 +1,72 @@
+import inspect
+from functools import wraps
+
+import sentry_sdk
+from sentry_sdk import get_current_span
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import OP
+from sentry_sdk.utils import logger, qualname_from_function
+
+
+if TYPE_CHECKING:
+    from typing import Any
+
+
+def start_child_span_decorator(func):
+    # type: (Any) -> Any
+    """
+    Decorator to add child spans for functions.
+
+    This is the Python 3 compatible version of the decorator.
+    For Python 2, this code is duplicated in ``sentry_sdk.tracing_utils_py2.start_child_span_decorator()``.
+
+    See also ``sentry_sdk.tracing.trace()``.
+    """
+
+    # Asynchronous case
+    if inspect.iscoroutinefunction(func):
+
+        @wraps(func)
+        async def func_with_tracing(*args, **kwargs):
+            # type: (*Any, **Any) -> Any
+
+            span = get_current_span(sentry_sdk.Hub.current)
+
+            if span is None:
+                logger.warning(
+                    "Can not create a child span for %s. "
+                    "Please start a Sentry transaction before calling this function.",
+                    qualname_from_function(func),
+                )
+                return await func(*args, **kwargs)
+
+            with span.start_child(
+                op=OP.FUNCTION,
+                description=qualname_from_function(func),
+            ):
+                return await func(*args, **kwargs)
+
+    # Synchronous case
+    else:
+
+        @wraps(func)
+        def func_with_tracing(*args, **kwargs):
+            # type: (*Any, **Any) -> Any
+
+            span = get_current_span(sentry_sdk.Hub.current)
+
+            if span is None:
+                logger.warning(
+                    "Can not create a child span for %s. "
+                    "Please start a Sentry transaction before calling this function.",
+                    qualname_from_function(func),
+                )
+                return func(*args, **kwargs)
+
+            with span.start_child(
+                op=OP.FUNCTION,
+                description=qualname_from_function(func),
+            ):
+                return func(*args, **kwargs)
+
+    return func_with_tracing
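
A usage sketch (not part of the patch; the transaction name and workload are made up): once a transaction is active, the decorator wraps the call in a child span.

```python
import sentry_sdk
from sentry_sdk.tracing_utils_py3 import start_child_span_decorator

@start_child_span_decorator
def expensive_computation():
    return sum(range(1_000_000))

with sentry_sdk.start_transaction(name="demo-transaction"):
    # Runs inside a child span with op "function" and description
    # "__main__.expensive_computation".
    expensive_computation()
```
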
diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py
index fca6fa8aec..d2fc734f7c 100644
--- a/sentry_sdk/transport.py
+++ b/sentry_sdk/transport.py
@@ -1,32 +1,35 @@
 from __future__ import print_function
 
 import io
-import urllib3  # type: ignore
-import certifi
 import gzip
+import socket
 import time
-
-from datetime import datetime, timedelta
+from datetime import timedelta
 from collections import defaultdict
 
+import urllib3
+import certifi
+
 from sentry_sdk.utils import Dsn, logger, capture_internal_exceptions, json_dumps
 from sentry_sdk.worker import BackgroundWorker
 from sentry_sdk.envelope import Envelope, Item, PayloadRef
+from sentry_sdk._compat import datetime_utcnow
+from sentry_sdk._types import TYPE_CHECKING
 
-from sentry_sdk._types import MYPY
-
-if MYPY:
+if TYPE_CHECKING:
+    from datetime import datetime
     from typing import Any
     from typing import Callable
     from typing import Dict
     from typing import Iterable
+    from typing import List
     from typing import Optional
     from typing import Tuple
     from typing import Type
     from typing import Union
     from typing import DefaultDict
 
-    from urllib3.poolmanager import PoolManager  # type: ignore
+    from urllib3.poolmanager import PoolManager
     from urllib3.poolmanager import ProxyManager
 
     from sentry_sdk._types import Event, EndpointType
@@ -39,6 +42,21 @@
     from urllib import getproxies  # type: ignore
 
 
+KEEP_ALIVE_SOCKET_OPTIONS = []
+for option in [
+    (socket.SOL_SOCKET, lambda: getattr(socket, "SO_KEEPALIVE"), 1),  # noqa: B009
+    (socket.SOL_TCP, lambda: getattr(socket, "TCP_KEEPIDLE"), 45),  # noqa: B009
+    (socket.SOL_TCP, lambda: getattr(socket, "TCP_KEEPINTVL"), 10),  # noqa: B009
+    (socket.SOL_TCP, lambda: getattr(socket, "TCP_KEEPCNT"), 6),  # noqa: B009
+]:
+    try:
+        KEEP_ALIVE_SOCKET_OPTIONS.append((option[0], option[1](), option[2]))
+    except AttributeError:
+        # a specific option might not be available on specific systems,
+        # e.g. TCP_KEEPIDLE doesn't exist on macOS
+        pass
+
+
 class Transport(object):
     """Baseclass for all transports.
 
@@ -107,6 +125,10 @@ def record_lost_event(
         """
         return None
 
+    def is_healthy(self):
+        # type: () -> bool
+        return True
+
     def __del__(self):
         # type: () -> None
         try:
@@ -118,14 +140,26 @@ def __del__(self):
 def _parse_rate_limits(header, now=None):
     # type: (Any, Optional[datetime]) -> Iterable[Tuple[DataCategory, datetime]]
     if now is None:
-        now = datetime.utcnow()
+        now = datetime_utcnow()
 
     for limit in header.split(","):
         try:
-            retry_after, categories, _ = limit.strip().split(":", 2)
+            parameters = limit.strip().split(":")
+            retry_after, categories = parameters[:2]
+
             retry_after = now + timedelta(seconds=int(retry_after))
             for category in categories and categories.split(";") or (None,):
-                yield category, retry_after
+                if category == "metric_bucket":
+                    try:
+                        namespaces = parameters[4].split(";")
+                    except IndexError:
+                        namespaces = []
+
+                    if not namespaces or "custom" in namespaces:
+                        yield category, retry_after
+
+                else:
+                    yield category, retry_after
         except (LookupError, ValueError):
             continue
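
To illustrate the `metric_bucket` branch above (header values are made up; not part of the patch): a `metric_bucket` limit is only honored when the namespace list names `custom`, or when no namespace is listed at all.

```python
from sentry_sdk.transport import _parse_rate_limits

# Namespace list (5th field) contains "custom" -> the limit is honored:
assert len(list(_parse_rate_limits("60:metric_bucket:organization:quota_exceeded:custom"))) == 1

# Only other namespaces are listed -> the limit is skipped:
assert list(_parse_rate_limits("60:metric_bucket:organization:quota_exceeded:spans")) == []

# No namespace field at all -> the limit applies:
assert len(list(_parse_rate_limits("60:metric_bucket:organization:quota_exceeded"))) == 1
```
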
 
@@ -151,11 +185,20 @@ def __init__(
         )  # type: DefaultDict[Tuple[str, str], int]
         self._last_client_report_sent = time.time()
 
+        compresslevel = options.get("_experiments", {}).get(
+            "transport_zlib_compression_level"
+        )
+        self._compresslevel = 9 if compresslevel is None else int(compresslevel)
+
+        num_pools = options.get("_experiments", {}).get("transport_num_pools")
+        self._num_pools = 2 if num_pools is None else int(num_pools)
+
         self._pool = self._make_pool(
             self.parsed_dsn,
             http_proxy=options["http_proxy"],
             https_proxy=options["https_proxy"],
             ca_certs=options["ca_certs"],
+            proxy_headers=options["proxy_headers"],
         )
 
         from sentry_sdk import Hub
@@ -179,13 +222,14 @@ def record_lost_event(
                 # quantity of 0 is actually 1 as we do not want to count
                 # empty attachments as actually empty.
                 quantity = len(item.get_bytes()) or 1
+
         elif data_category is None:
             raise TypeError("data category not provided")
 
         self._discarded_events[data_category, reason] += quantity
 
     def _update_rate_limits(self, response):
-        # type: (urllib3.HTTPResponse) -> None
+        # type: (urllib3.BaseHTTPResponse) -> None
 
         # new sentries with more rate limit insights.  We honor this header
         # no matter of the status code to update our internal rate limits.
@@ -199,7 +243,7 @@ def _update_rate_limits(self, response):
         # sentries if a proxy in front wants to globally slow things down.
         elif response.status == 429:
             logger.warning("Rate-limited via 429")
-            self._disabled_until[None] = datetime.utcnow() + timedelta(
+            self._disabled_until[None] = datetime_utcnow() + timedelta(
                 seconds=self._retry.get_retry_after(response) or 60
             )
 
@@ -305,11 +349,30 @@ def _check_disabled(self, category):
         # type: (str) -> bool
         def _disabled(bucket):
             # type: (Any) -> bool
+
+            # The envelope item type used for metrics is statsd
+            # whereas the rate limit category is metric_bucket
+            if bucket == "statsd":
+                bucket = "metric_bucket"
+
             ts = self._disabled_until.get(bucket)
-            return ts is not None and ts > datetime.utcnow()
+
+            return ts is not None and ts > datetime_utcnow()
 
         return _disabled(category) or _disabled(None)
 
+    def _is_rate_limited(self):
+        # type: () -> bool
+        return any(ts > datetime_utcnow() for ts in self._disabled_until.values())
+
+    def _is_worker_full(self):
+        # type: () -> bool
+        return self._worker.full()
+
+    def is_healthy(self):
+        # type: () -> bool
+        return not (self._is_worker_full() or self._is_rate_limited())
+
     def _send_event(
         self, event  # type: Event
     ):
@@ -321,8 +384,13 @@ def _send_event(
             return None
 
         body = io.BytesIO()
-        with gzip.GzipFile(fileobj=body, mode="w") as f:
-            f.write(json_dumps(event))
+        if self._compresslevel == 0:
+            body.write(json_dumps(event))
+        else:
+            with gzip.GzipFile(
+                fileobj=body, mode="w", compresslevel=self._compresslevel
+            ) as f:
+                f.write(json_dumps(event))
 
         assert self.parsed_dsn is not None
         logger.debug(
@@ -335,10 +403,14 @@ def _send_event(
                 self.parsed_dsn.host,
             )
         )
-        self._send_request(
-            body.getvalue(),
-            headers={"Content-Type": "application/json", "Content-Encoding": "gzip"},
-        )
+
+        headers = {
+            "Content-Type": "application/json",
+        }
+        if self._compresslevel > 0:
+            headers["Content-Encoding"] = "gzip"
+
+        self._send_request(body.getvalue(), headers=headers)
         return None
 
     def _send_envelope(
@@ -350,7 +422,7 @@ def _send_envelope(
         new_items = []
         for item in envelope.items:
             if self._check_disabled(item.data_category):
-                if item.data_category in ("transaction", "error", "default"):
+                if item.data_category in ("transaction", "error", "default", "statsd"):
                     self.on_dropped_event("self_rate_limits")
                 self.record_lost_event("ratelimit_backoff", item=item)
             else:
@@ -373,8 +445,13 @@ def _send_envelope(
             envelope.items.append(client_report_item)
 
         body = io.BytesIO()
-        with gzip.GzipFile(fileobj=body, mode="w") as f:
-            envelope.serialize_into(f)
+        if self._compresslevel == 0:
+            envelope.serialize_into(body)
+        else:
+            with gzip.GzipFile(
+                fileobj=body, mode="w", compresslevel=self._compresslevel
+            ) as f:
+                envelope.serialize_into(f)
 
         assert self.parsed_dsn is not None
         logger.debug(
@@ -384,12 +461,15 @@ def _send_envelope(
             self.parsed_dsn.host,
         )
 
+        headers = {
+            "Content-Type": "application/x-sentry-envelope",
+        }
+        if self._compresslevel > 0:
+            headers["Content-Encoding"] = "gzip"
+
         self._send_request(
             body.getvalue(),
-            headers={
-                "Content-Type": "application/x-sentry-envelope",
-                "Content-Encoding": "gzip",
-            },
+            headers=headers,
             endpoint_type="envelope",
             envelope=envelope,
         )
@@ -397,12 +477,31 @@ def _send_envelope(
 
     def _get_pool_options(self, ca_certs):
         # type: (Optional[Any]) -> Dict[str, Any]
-        return {
-            "num_pools": 2,
+        options = {
+            "num_pools": self._num_pools,
             "cert_reqs": "CERT_REQUIRED",
             "ca_certs": ca_certs or certifi.where(),
         }
 
+        socket_options = None  # type: Optional[List[Tuple[int, int, int | bytes]]]
+
+        if self.options["socket_options"] is not None:
+            socket_options = self.options["socket_options"]
+
+        if self.options["keep_alive"]:
+            if socket_options is None:
+                socket_options = []
+
+            used_options = {(o[0], o[1]) for o in socket_options}
+            for default_option in KEEP_ALIVE_SOCKET_OPTIONS:
+                if (default_option[0], default_option[1]) not in used_options:
+                    socket_options.append(default_option)
+
+        if socket_options is not None:
+            options["socket_options"] = socket_options
+
+        return options
+
     def _in_no_proxy(self, parsed_dsn):
         # type: (Dsn) -> bool
         no_proxy = getproxies().get("no")
@@ -420,6 +519,7 @@ def _make_pool(
         http_proxy,  # type: Optional[str]
         https_proxy,  # type: Optional[str]
         ca_certs,  # type: Optional[Any]
+        proxy_headers,  # type: Optional[Dict[str, str]]
     ):
         # type: (...) -> Union[PoolManager, ProxyManager]
         proxy = None
@@ -436,7 +536,27 @@ def _make_pool(
         opts = self._get_pool_options(ca_certs)
 
         if proxy:
-            return urllib3.ProxyManager(proxy, **opts)
+            if proxy_headers:
+                opts["proxy_headers"] = proxy_headers
+
+            if proxy.startswith("socks"):
+                use_socks_proxy = True
+                try:
+                    # Check if the PySocks dependency is available
+                    from urllib3.contrib.socks import SOCKSProxyManager
+                except ImportError:
+                    use_socks_proxy = False
+                    logger.warning(
+                        "You have configured a SOCKS proxy (%s) but support for SOCKS proxies is not installed. Disabling proxy support. Please add `PySocks` (or `urllib3` with the `[socks]` extra) to your dependencies.",
+                        proxy,
+                    )
+
+                if use_socks_proxy:
+                    return SOCKSProxyManager(proxy, **opts)
+                else:
+                    return urllib3.PoolManager(**opts)
+            else:
+                return urllib3.ProxyManager(proxy, **opts)
         else:
             return urllib3.PoolManager(**opts)
 
@@ -521,7 +641,7 @@ def make_transport(options):
     elif isinstance(ref_transport, type) and issubclass(ref_transport, Transport):
         transport_cls = ref_transport
     elif callable(ref_transport):
-        return _FunctionTransport(ref_transport)  # type: ignore
+        return _FunctionTransport(ref_transport)
 
     # if a transport class is given only instantiate it if the dsn is not
     # empty or None
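
Taken together, the new transport knobs surface through `sentry_sdk.init()`. A configuration sketch (not part of the diff; the DSN and proxy credentials are placeholders):

```python
import socket

import sentry_sdk

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",
    # Opt into the TCP keep-alive defaults assembled in KEEP_ALIVE_SOCKET_OPTIONS:
    keep_alive=True,
    # ...or set options explicitly; explicit options take precedence:
    socket_options=[(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)],
    # Extra headers forwarded to an HTTP(S) proxy:
    proxy_headers={"Proxy-Authorization": "Basic <credentials>"},
    _experiments={
        "transport_num_pools": 4,  # default: 2
        "transport_zlib_compression_level": 0,  # 0 disables gzip entirely
    },
)
```
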
diff --git a/sentry_sdk/types.py b/sentry_sdk/types.py
new file mode 100644
index 0000000000..16c57ceea4
--- /dev/null
+++ b/sentry_sdk/types.py
@@ -0,0 +1,23 @@
+"""
+This module contains type definitions for the Sentry SDK's public API.
+The types are re-exported from the internal module `sentry_sdk._types`.
+
+Disclaimer: Since types are a form of documentation, type definitions
+may change in minor releases. Removing a type would be considered a
+breaking change, and so we will only remove type definitions in major
+releases.
+"""
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from sentry_sdk._types import Event, Hint
+else:
+    from typing import Any
+
+    # The lines below allow the types to be imported from outside `if TYPE_CHECKING`
+    # guards. The types in this module are only intended to be used for type hints.
+    Event = Any
+    Hint = Any
+
+__all__ = ("Event", "Hint")
diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py
index 0a735a1e20..efacd6161b 100644
--- a/sentry_sdk/utils.py
+++ b/sentry_sdk/utils.py
@@ -2,46 +2,93 @@
 import json
 import linecache
 import logging
+import math
 import os
+import random
+import re
+import subprocess
 import sys
 import threading
-import subprocess
-import re
+import time
+from collections import namedtuple
+from copy import copy
+from decimal import Decimal
+from numbers import Real
+
+try:
+    # Python 3
+    from urllib.parse import parse_qs
+    from urllib.parse import unquote
+    from urllib.parse import urlencode
+    from urllib.parse import urlsplit
+    from urllib.parse import urlunsplit
+except ImportError:
+    # Python 2
+    from cgi import parse_qs  # type: ignore
+    from urllib import unquote  # type: ignore
+    from urllib import urlencode  # type: ignore
+    from urlparse import urlsplit  # type: ignore
+    from urlparse import urlunsplit  # type: ignore
+
+try:
+    # Python 3
+    FileNotFoundError
+except NameError:
+    # Python 2
+    FileNotFoundError = IOError
+
+try:
+    # Python 3.11
+    from builtins import BaseExceptionGroup
+except ImportError:
+    # Python 3.10 and below
+    BaseExceptionGroup = None  # type: ignore
 
 from datetime import datetime
+from functools import partial
 
-import sentry_sdk
-from sentry_sdk._compat import urlparse, text_type, implements_str, PY2
+try:
+    from functools import partialmethod
 
-from sentry_sdk._types import MYPY
+    _PARTIALMETHOD_AVAILABLE = True
+except ImportError:
+    _PARTIALMETHOD_AVAILABLE = False
 
-if MYPY:
-    from types import FrameType
-    from types import TracebackType
-    from typing import Any
-    from typing import Callable
-    from typing import Dict
-    from typing import ContextManager
-    from typing import Iterator
-    from typing import List
-    from typing import Optional
-    from typing import Set
-    from typing import Tuple
-    from typing import Union
-    from typing import Type
+import sentry_sdk
+from sentry_sdk._compat import PY2, PY33, PY37, implements_str, text_type, urlparse
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.consts import DEFAULT_MAX_VALUE_LENGTH
+
+if TYPE_CHECKING:
+    from types import FrameType, TracebackType
+    from typing import (
+        Any,
+        Callable,
+        ContextManager,
+        Dict,
+        Iterator,
+        List,
+        Optional,
+        Set,
+        Tuple,
+        Type,
+        Union,
+    )
 
-    from sentry_sdk._types import ExcInfo, EndpointType
+    from sentry_sdk._types import EndpointType, Event, ExcInfo
 
 
 epoch = datetime(1970, 1, 1)
 
-
 # The logger is created here but initialized in the debug support module
 logger = logging.getLogger("sentry_sdk.errors")
 
-MAX_STRING_LENGTH = 512
+_installed_modules = None
+
 BASE64_ALPHABET = re.compile(r"^[a-zA-Z0-9/+=]*$")
 
+SENSITIVE_DATA_SUBSTITUTE = "[Filtered]"
+
 
 def json_dumps(data):
     # type: (Any) -> bytes
@@ -55,18 +102,20 @@ def _get_debug_hub():
     pass
 
 
-def get_default_release():
+def get_git_revision():
     # type: () -> Optional[str]
-    """Try to guess a default release."""
-    release = os.environ.get("SENTRY_RELEASE")
-    if release:
-        return release
-
-    with open(os.path.devnull, "w+") as null:
-        try:
-            release = (
+    try:
+        with open(os.path.devnull, "w+") as null:
+            # prevent command prompt windows from popping up on Windows
+            startupinfo = None
+            if sys.platform == "win32" or sys.platform == "cygwin":
+                startupinfo = subprocess.STARTUPINFO()
+                startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+
+            revision = (
                 subprocess.Popen(
                     ["git", "rev-parse", "HEAD"],
+                    startupinfo=startupinfo,
                     stdout=subprocess.PIPE,
                     stderr=null,
                     stdin=null,
@@ -75,11 +124,22 @@ def get_default_release():
                 .strip()
                 .decode("utf-8")
             )
-        except (OSError, IOError):
-            pass
+    except (OSError, IOError, FileNotFoundError):
+        return None
 
-        if release:
-            return release
+    return revision
+
+
+def get_default_release():
+    # type: () -> Optional[str]
+    """Try to guess a default release."""
+    release = os.environ.get("SENTRY_RELEASE")
+    if release:
+        return release
+
+    release = get_git_revision()
+    if release:
+        return release
 
     for var in (
         "HEROKU_SLUG_COMMIT",
@@ -94,6 +154,40 @@ def get_default_release():
     return None
 
 
+def get_sdk_name(installed_integrations):
+    # type: (List[str]) -> str
+    """Return the SDK name including the name of the used web framework."""
+
+    # Note: We cannot use, for example, sentry_sdk.integrations.django.DjangoIntegration.identifier
+    # here, because the integration is not importable when Django is not installed.
+    framework_integrations = [
+        "django",
+        "flask",
+        "fastapi",
+        "bottle",
+        "falcon",
+        "quart",
+        "sanic",
+        "starlette",
+        "chalice",
+        "serverless",
+        "pyramid",
+        "tornado",
+        "aiohttp",
+        "aws_lambda",
+        "gcp",
+        "beam",
+        "asgi",
+        "wsgi",
+    ]
+
+    for integration in framework_integrations:
+        if integration in installed_integrations:
+            return "sentry.python.{}".format(integration)
+
+    return "sentry.python"
+
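
A quick illustration (not part of the patch): the first framework from the priority list above that appears in the installed integrations wins.

```python
from sentry_sdk.utils import get_sdk_name

assert get_sdk_name(["celery", "django", "redis"]) == "sentry.python.django"
assert get_sdk_name(["celery", "redis"]) == "sentry.python"
```
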
+
 class CaptureInternalException(object):
     __slots__ = ()
 
@@ -270,12 +364,10 @@ def get_api_url(
             type,
         )
 
-    def to_header(self, timestamp=None):
-        # type: (Optional[datetime]) -> str
+    def to_header(self):
+        # type: () -> str
         """Returns the auth header a string."""
         rv = [("sentry_key", self.public_key), ("sentry_version", self.version)]
-        if timestamp is not None:
-            rv.append(("sentry_timestamp", str(to_timestamp(timestamp))))
         if self.client is not None:
             rv.append(("sentry_client", self.client))
         if self.secret_key is not None:
@@ -284,6 +376,13 @@ def to_header(self, timestamp=None):
 
 
 class AnnotatedValue(object):
+    """
+    Meta information for a data field in the event payload.
+    This is to tell Relay that we have tampered with the fields value.
+    See:
+    https://github.com/getsentry/relay/blob/be12cd49a0f06ea932ed9b9f93a655de5d6ad6d1/relay-general/src/types/meta.rs#L407-L423
+    """
+
     __slots__ = ("value", "metadata")
 
     def __init__(self, value, metadata):
@@ -291,8 +390,63 @@ def __init__(self, value, metadata):
         self.value = value
         self.metadata = metadata
 
+    def __eq__(self, other):
+        # type: (Any) -> bool
+        if not isinstance(other, AnnotatedValue):
+            return False
+
+        return self.value == other.value and self.metadata == other.metadata
+
+    @classmethod
+    def removed_because_raw_data(cls):
+        # type: () -> AnnotatedValue
+        """The value was removed because it could not be parsed. This is done for request body values that are not json nor a form."""
+        return AnnotatedValue(
+            value="",
+            metadata={
+                "rem": [  # Remark
+                    [
+                        "!raw",  # Unparsable raw data
+                        "x",  # The fields original value was removed
+                    ]
+                ]
+            },
+        )
+
+    @classmethod
+    def removed_because_over_size_limit(cls):
+        # type: () -> AnnotatedValue
+        """The actual value was removed because the size of the field exceeded the configured maximum size (specified with the max_request_body_size sdk option)"""
+        return AnnotatedValue(
+            value="",
+            metadata={
+                "rem": [  # Remark
+                    [
+                        "!config",  # Because of configured maximum size
+                        "x",  # The fields original value was removed
+                    ]
+                ]
+            },
+        )
+
+    @classmethod
+    def substituted_because_contains_sensitive_data(cls):
+        # type: () -> AnnotatedValue
+        """The actual value was removed because it contained sensitive information."""
+        return AnnotatedValue(
+            value=SENSITIVE_DATA_SUBSTITUTE,
+            metadata={
+                "rem": [  # Remark
+                    [
+                        "!config",  # Because of SDK configuration (in this case the config is the hard coded removal of certain django cookies)
+                        "s",  # The fields original value was substituted
+                    ]
+                ]
+            },
+        )
+
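
For illustration (not part of the patch), this is the annotation Relay receives for an oversized request body:

```python
from sentry_sdk.utils import AnnotatedValue

annotated = AnnotatedValue.removed_because_over_size_limit()
assert annotated.value == ""
# "!config" = removed due to configuration, "x" = value removed entirely
assert annotated.metadata == {"rem": [["!config", "x"]]}
```
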
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import TypeVar
 
     T = TypeVar("T")
@@ -343,6 +497,7 @@ def iter_stacks(tb):
 def get_lines_from_file(
     filename,  # type: str
     lineno,  # type: int
+    max_length=None,  # type: Optional[int]
     loader=None,  # type: Optional[Any]
     module=None,  # type: Optional[str]
 ):
@@ -371,11 +526,12 @@ def get_lines_from_file(
 
     try:
         pre_context = [
-            strip_string(line.strip("\r\n")) for line in source[lower_bound:lineno]
+            strip_string(line.strip("\r\n"), max_length=max_length)
+            for line in source[lower_bound:lineno]
         ]
-        context_line = strip_string(source[lineno].strip("\r\n"))
+        context_line = strip_string(source[lineno].strip("\r\n"), max_length=max_length)
         post_context = [
-            strip_string(line.strip("\r\n"))
+            strip_string(line.strip("\r\n"), max_length=max_length)
             for line in source[(lineno + 1) : upper_bound]
         ]
         return pre_context, context_line, post_context
@@ -387,6 +543,7 @@ def get_lines_from_file(
 def get_source_context(
     frame,  # type: FrameType
     tb_lineno,  # type: int
+    max_value_length=None,  # type: Optional[int]
 ):
     # type: (...) -> Tuple[List[Annotated[str]], Optional[Annotated[str]], List[Annotated[str]]]
     try:
@@ -403,7 +560,9 @@ def get_source_context(
         loader = None
     lineno = tb_lineno - 1
     if lineno is not None and abs_path:
-        return get_lines_from_file(abs_path, lineno, loader, module)
+        return get_lines_from_file(
+            abs_path, lineno, max_value_length, loader=loader, module=module
+        )
     return [], None, []
 
 
@@ -476,8 +635,14 @@ def filename_for_module(module, abs_path):
         return abs_path
 
 
-def serialize_frame(frame, tb_lineno=None, with_locals=True):
-    # type: (FrameType, Optional[int], bool) -> Dict[str, Any]
+def serialize_frame(
+    frame,
+    tb_lineno=None,
+    include_local_variables=True,
+    include_source_context=True,
+    max_value_length=None,
+):
+    # type: (FrameType, Optional[int], bool, bool, Optional[int]) -> Dict[str, Any]
     f_code = getattr(frame, "f_code", None)
     if not f_code:
         abs_path = None
@@ -493,33 +658,45 @@ def serialize_frame(frame, tb_lineno=None, with_locals=True):
     if tb_lineno is None:
         tb_lineno = frame.f_lineno
 
-    pre_context, context_line, post_context = get_source_context(frame, tb_lineno)
-
     rv = {
         "filename": filename_for_module(module, abs_path) or None,
         "abs_path": os.path.abspath(abs_path) if abs_path else None,
         "function": function or "",
         "module": module,
         "lineno": tb_lineno,
-        "pre_context": pre_context,
-        "context_line": context_line,
-        "post_context": post_context,
     }  # type: Dict[str, Any]
-    if with_locals:
-        rv["vars"] = frame.f_locals
+
+    if include_source_context:
+        rv["pre_context"], rv["context_line"], rv["post_context"] = get_source_context(
+            frame, tb_lineno, max_value_length
+        )
+
+    if include_local_variables:
+        rv["vars"] = copy(frame.f_locals)
 
     return rv
 
 
-def current_stacktrace(with_locals=True):
-    # type: (bool) -> Any
+def current_stacktrace(
+    include_local_variables=True,  # type: bool
+    include_source_context=True,  # type: bool
+    max_value_length=None,  # type: Optional[int]
+):
+    # type: (...) -> Dict[str, Any]
     __tracebackhide__ = True
     frames = []
 
     f = sys._getframe()  # type: Optional[FrameType]
     while f is not None:
         if not should_hide_frame(f):
-            frames.append(serialize_frame(f, with_locals=with_locals))
+            frames.append(
+                serialize_frame(
+                    f,
+                    include_local_variables=include_local_variables,
+                    include_source_context=include_source_context,
+                    max_value_length=max_value_length,
+                )
+            )
         f = f.f_back
 
     frames.reverse()
@@ -532,46 +709,94 @@ def get_errno(exc_value):
     return getattr(exc_value, "errno", None)
 
 
+def get_error_message(exc_value):
+    # type: (Optional[BaseException]) -> str
+    return (
+        getattr(exc_value, "message", "")
+        or getattr(exc_value, "detail", "")
+        or safe_str(exc_value)
+    )
+
+
 def single_exception_from_error_tuple(
     exc_type,  # type: Optional[type]
     exc_value,  # type: Optional[BaseException]
     tb,  # type: Optional[TracebackType]
     client_options=None,  # type: Optional[Dict[str, Any]]
     mechanism=None,  # type: Optional[Dict[str, Any]]
+    exception_id=None,  # type: Optional[int]
+    parent_id=None,  # type: Optional[int]
+    source=None,  # type: Optional[str]
 ):
     # type: (...) -> Dict[str, Any]
+    """
+    Creates a dict that goes into the event's `exception.values` list and is ingestible by Sentry.
+
+    See the Exception Interface documentation for more details:
+    https://develop.sentry.dev/sdk/event-payloads/exception/
+    """
+    exception_value = {}  # type: Dict[str, Any]
+    exception_value["mechanism"] = (
+        mechanism.copy() if mechanism else {"type": "generic", "handled": True}
+    )
+    if exception_id is not None:
+        exception_value["mechanism"]["exception_id"] = exception_id
+
     if exc_value is not None:
         errno = get_errno(exc_value)
     else:
         errno = None
 
     if errno is not None:
-        mechanism = mechanism or {"type": "generic"}
-        mechanism.setdefault("meta", {}).setdefault("errno", {}).setdefault(
-            "number", errno
-        )
+        exception_value["mechanism"].setdefault("meta", {}).setdefault(
+            "errno", {}
+        ).setdefault("number", errno)
+
+    if source is not None:
+        exception_value["mechanism"]["source"] = source
+
+    is_root_exception = exception_id == 0
+    if not is_root_exception and parent_id is not None:
+        exception_value["mechanism"]["parent_id"] = parent_id
+        exception_value["mechanism"]["type"] = "chained"
+
+    if is_root_exception and "type" not in exception_value["mechanism"]:
+        exception_value["mechanism"]["type"] = "generic"
+
+    is_exception_group = BaseExceptionGroup is not None and isinstance(
+        exc_value, BaseExceptionGroup
+    )
+    if is_exception_group:
+        exception_value["mechanism"]["is_exception_group"] = True
+
+    exception_value["module"] = get_type_module(exc_type)
+    exception_value["type"] = get_type_name(exc_type)
+    exception_value["value"] = get_error_message(exc_value)
 
     if client_options is None:
-        with_locals = True
+        include_local_variables = True
+        include_source_context = True
+        max_value_length = DEFAULT_MAX_VALUE_LENGTH  # fallback
     else:
-        with_locals = client_options["with_locals"]
+        include_local_variables = client_options["include_local_variables"]
+        include_source_context = client_options["include_source_context"]
+        max_value_length = client_options["max_value_length"]
 
     frames = [
-        serialize_frame(tb.tb_frame, tb_lineno=tb.tb_lineno, with_locals=with_locals)
+        serialize_frame(
+            tb.tb_frame,
+            tb_lineno=tb.tb_lineno,
+            include_local_variables=include_local_variables,
+            include_source_context=include_source_context,
+            max_value_length=max_value_length,
+        )
         for tb in iter_stacks(tb)
     ]
 
-    rv = {
-        "module": get_type_module(exc_type),
-        "type": get_type_name(exc_type),
-        "value": safe_str(exc_value),
-        "mechanism": mechanism,
-    }
-
     if frames:
-        rv["stacktrace"] = {"frames": frames}
+        exception_value["stacktrace"] = {"frames": frames}
 
-    return rv
+    return exception_value
 
 
 HAS_CHAINED_EXCEPTIONS = hasattr(Exception, "__suppress_context__")
@@ -615,6 +840,102 @@ def walk_exception_chain(exc_info):
         yield exc_info
 
 
+def exceptions_from_error(
+    exc_type,  # type: Optional[type]
+    exc_value,  # type: Optional[BaseException]
+    tb,  # type: Optional[TracebackType]
+    client_options=None,  # type: Optional[Dict[str, Any]]
+    mechanism=None,  # type: Optional[Dict[str, Any]]
+    exception_id=0,  # type: int
+    parent_id=0,  # type: int
+    source=None,  # type: Optional[str]
+):
+    # type: (...) -> Tuple[int, List[Dict[str, Any]]]
+    """
+    Creates the list of exceptions.
+    This can include chained exceptions and exceptions from an ExceptionGroup.
+
+    See the Exception Interface documentation for more details:
+    https://develop.sentry.dev/sdk/event-payloads/exception/
+    """
+
+    parent = single_exception_from_error_tuple(
+        exc_type=exc_type,
+        exc_value=exc_value,
+        tb=tb,
+        client_options=client_options,
+        mechanism=mechanism,
+        exception_id=exception_id,
+        parent_id=parent_id,
+        source=source,
+    )
+    exceptions = [parent]
+
+    parent_id = exception_id
+    exception_id += 1
+
+    should_suppress_context = hasattr(exc_value, "__suppress_context__") and exc_value.__suppress_context__  # type: ignore
+    if should_suppress_context:
+        # Add direct cause.
+        # The field `__cause__` is set when raised with the exception (using the `from` keyword).
+        exception_has_cause = (
+            exc_value
+            and hasattr(exc_value, "__cause__")
+            and exc_value.__cause__ is not None
+        )
+        if exception_has_cause:
+            cause = exc_value.__cause__  # type: ignore
+            (exception_id, child_exceptions) = exceptions_from_error(
+                exc_type=type(cause),
+                exc_value=cause,
+                tb=getattr(cause, "__traceback__", None),
+                client_options=client_options,
+                mechanism=mechanism,
+                exception_id=exception_id,
+                source="__cause__",
+            )
+            exceptions.extend(child_exceptions)
+
+    else:
+        # Add indirect cause.
+        # The field `__context__` is assigned if another exception occurs while handling the exception.
+        exception_has_context = (
+            exc_value
+            and hasattr(exc_value, "__context__")
+            and exc_value.__context__ is not None
+        )
+        if exception_has_context:
+            context = exc_value.__context__  # type: ignore
+            (exception_id, child_exceptions) = exceptions_from_error(
+                exc_type=type(context),
+                exc_value=context,
+                tb=getattr(context, "__traceback__", None),
+                client_options=client_options,
+                mechanism=mechanism,
+                exception_id=exception_id,
+                source="__context__",
+            )
+            exceptions.extend(child_exceptions)
+
+    # Add exceptions from an ExceptionGroup.
+    is_exception_group = exc_value and hasattr(exc_value, "exceptions")
+    if is_exception_group:
+        for idx, e in enumerate(exc_value.exceptions):  # type: ignore
+            (exception_id, child_exceptions) = exceptions_from_error(
+                exc_type=type(e),
+                exc_value=e,
+                tb=getattr(e, "__traceback__", None),
+                client_options=client_options,
+                mechanism=mechanism,
+                exception_id=exception_id,
+                parent_id=parent_id,
+                source="exceptions[%s]" % idx,
+            )
+            exceptions.extend(child_exceptions)
+
+    return (exception_id, exceptions)
+
+
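
A sketch (not part of the patch) of how a `raise ... from ...` chain is flattened by this function:

```python
import sys

from sentry_sdk.utils import exceptions_from_error

try:
    try:
        raise ValueError("original problem")
    except ValueError as e:
        raise RuntimeError("wrapper") from e
except RuntimeError:
    exc_type, exc_value, tb = sys.exc_info()

_, exceptions = exceptions_from_error(exc_type, exc_value, tb)

assert exceptions[0]["mechanism"]["exception_id"] == 0  # the RuntimeError
assert exceptions[1]["mechanism"]["source"] == "__cause__"  # the ValueError
assert exceptions[1]["mechanism"]["parent_id"] == 0
assert exceptions[1]["mechanism"]["type"] == "chained"
```
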
 def exceptions_from_error_tuple(
     exc_info,  # type: ExcInfo
     client_options=None,  # type: Optional[Dict[str, Any]]
@@ -622,17 +943,34 @@ def exceptions_from_error_tuple(
 ):
     # type: (...) -> List[Dict[str, Any]]
     exc_type, exc_value, tb = exc_info
-    rv = []
-    for exc_type, exc_value, tb in walk_exception_chain(exc_info):
-        rv.append(
-            single_exception_from_error_tuple(
-                exc_type, exc_value, tb, client_options, mechanism
-            )
+
+    is_exception_group = BaseExceptionGroup is not None and isinstance(
+        exc_value, BaseExceptionGroup
+    )
+
+    if is_exception_group:
+        (_, exceptions) = exceptions_from_error(
+            exc_type=exc_type,
+            exc_value=exc_value,
+            tb=tb,
+            client_options=client_options,
+            mechanism=mechanism,
+            exception_id=0,
+            parent_id=0,
         )
 
-    rv.reverse()
+    else:
+        exceptions = []
+        for exc_type, exc_value, tb in walk_exception_chain(exc_info):
+            exceptions.append(
+                single_exception_from_error_tuple(
+                    exc_type, exc_value, tb, client_options, mechanism
+                )
+            )
 
-    return rv
+    exceptions.reverse()
+
+    return exceptions
 
 
 def to_string(value):
@@ -644,7 +982,7 @@ def to_string(value):
 
 
 def iter_event_stacktraces(event):
-    # type: (Dict[str, Any]) -> Iterator[Dict[str, Any]]
+    # type: (Event) -> Iterator[Dict[str, Any]]
     if "stacktrace" in event:
         yield event["stacktrace"]
     if "threads" in event:
@@ -658,50 +996,60 @@ def iter_event_stacktraces(event):
 
 
 def iter_event_frames(event):
-    # type: (Dict[str, Any]) -> Iterator[Dict[str, Any]]
+    # type: (Event) -> Iterator[Dict[str, Any]]
     for stacktrace in iter_event_stacktraces(event):
         for frame in stacktrace.get("frames") or ():
             yield frame
 
 
-def handle_in_app(event, in_app_exclude=None, in_app_include=None):
-    # type: (Dict[str, Any], Optional[List[str]], Optional[List[str]]) -> Dict[str, Any]
+def handle_in_app(event, in_app_exclude=None, in_app_include=None, project_root=None):
+    # type: (Event, Optional[List[str]], Optional[List[str]], Optional[str]) -> Event
     for stacktrace in iter_event_stacktraces(event):
-        handle_in_app_impl(
+        set_in_app_in_frames(
             stacktrace.get("frames"),
             in_app_exclude=in_app_exclude,
             in_app_include=in_app_include,
+            project_root=project_root,
         )
 
     return event
 
 
-def handle_in_app_impl(frames, in_app_exclude, in_app_include):
-    # type: (Any, Optional[List[str]], Optional[List[str]]) -> Optional[Any]
+def set_in_app_in_frames(frames, in_app_exclude, in_app_include, project_root=None):
+    # type: (Any, Optional[List[str]], Optional[List[str]], Optional[str]) -> Optional[Any]
     if not frames:
         return None
 
-    any_in_app = False
     for frame in frames:
-        in_app = frame.get("in_app")
-        if in_app is not None:
-            if in_app:
-                any_in_app = True
+        # if frame has already been marked as in_app, skip it
+        current_in_app = frame.get("in_app")
+        if current_in_app is not None:
             continue
 
         module = frame.get("module")
-        if not module:
-            continue
-        elif _module_in_set(module, in_app_include):
+
+        # check if module in frame is in the list of modules to include
+        if _module_in_list(module, in_app_include):
             frame["in_app"] = True
-            any_in_app = True
-        elif _module_in_set(module, in_app_exclude):
+            continue
+
+        # check if module in frame is in the list of modules to exclude
+        if _module_in_list(module, in_app_exclude):
+            frame["in_app"] = False
+            continue
+
+        # if frame has no abs_path, skip further checks
+        abs_path = frame.get("abs_path")
+        if abs_path is None:
+            continue
+
+        if _is_external_source(abs_path):
             frame["in_app"] = False
+            continue
 
-    if not any_in_app:
-        for frame in frames:
-            if frame.get("in_app") is None:
-                frame["in_app"] = True
+        if _is_in_project_root(abs_path, project_root):
+            frame["in_app"] = True
+            continue
 
     return frames
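
To illustrate the rules above (paths and module names are made up; not part of the patch): frames under the project root become `in_app`, frames from `site-packages` do not.

```python
from sentry_sdk.utils import set_in_app_in_frames

frames = [
    {"module": "myapp.views", "abs_path": "/srv/myapp/views.py"},
    {
        "module": "django.core.handlers.base",
        "abs_path": "/usr/lib/python3.11/site-packages/django/core/handlers/base.py",
    },
]

set_in_app_in_frames(frames, in_app_exclude=None, in_app_include=None, project_root="/srv/myapp")

assert frames[0]["in_app"] is True   # inside the project root
assert frames[1]["in_app"] is False  # external source (site-packages)
```
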
 
@@ -733,7 +1081,7 @@ def event_from_exception(
     client_options=None,  # type: Optional[Dict[str, Any]]
     mechanism=None,  # type: Optional[Dict[str, Any]]
 ):
-    # type: (...) -> Tuple[Dict[str, Any], Dict[str, Any]]
+    # type: (...) -> Tuple[Event, Dict[str, Any]]
     exc_info = exc_info_from_error(exc_info)
     hint = event_hint_with_exc_info(exc_info)
     return (
@@ -749,37 +1097,156 @@ def event_from_exception(
     )
 
 
-def _module_in_set(name, set):
+def _module_in_list(name, items):
     # type: (str, Optional[List[str]]) -> bool
-    if not set:
+    if name is None:
+        return False
+
+    if not items:
         return False
-    for item in set or ():
+
+    for item in items:
         if item == name or name.startswith(item + "."):
             return True
+
     return False
 
 
+def _is_external_source(abs_path):
+    # type: (str) -> bool
+    # check if frame is in 'site-packages' or 'dist-packages'
+    external_source = (
+        re.search(r"[\\/](?:dist|site)-packages[\\/]", abs_path) is not None
+    )
+    return external_source
+
+
+def _is_in_project_root(abs_path, project_root):
+    # type: (str, Optional[str]) -> bool
+    if project_root is None:
+        return False
+
+    # check if path is in the project root
+    if abs_path.startswith(project_root):
+        return True
+
+    return False
+
+
+def _truncate_by_bytes(string, max_bytes):
+    # type: (str, int) -> str
+    """
+    Truncate a UTF-8-encodable string to the last full codepoint so that it fits in max_bytes.
+    """
+    # This function technically supports bytes, but only for Python 2 compat.
+    # XXX remove support for bytes when we drop Python 2
+    if isinstance(string, bytes):
+        truncated = string[: max_bytes - 3]
+    else:
+        truncated = string.encode("utf-8")[: max_bytes - 3].decode(
+            "utf-8", errors="ignore"
+        )
+
+    return truncated + "..."
+
+
+def _get_size_in_bytes(value):
+    # type: (str) -> Optional[int]
+    # This function technically supports bytes, but only for Python 2 compat.
+    # XXX remove support for bytes when we drop Python 2
+    if not isinstance(value, (bytes, text_type)):
+        return None
+
+    if isinstance(value, bytes):
+        return len(value)
+
+    try:
+        return len(value.encode("utf-8"))
+    except (UnicodeEncodeError, UnicodeDecodeError):
+        return None
+
+
 def strip_string(value, max_length=None):
     # type: (str, Optional[int]) -> Union[AnnotatedValue, str]
-    # TODO: read max_length from config
     if not value:
         return value
 
     if max_length is None:
-        # This is intentionally not just the default such that one can patch `MAX_STRING_LENGTH` and affect `strip_string`.
-        max_length = MAX_STRING_LENGTH
+        max_length = DEFAULT_MAX_VALUE_LENGTH
+
+    byte_size = _get_size_in_bytes(value)
+    text_size = None
+    if isinstance(value, text_type):
+        text_size = len(value)
+
+    if byte_size is not None and byte_size > max_length:
+        # truncate to max_length bytes, preserving code points
+        truncated_value = _truncate_by_bytes(value, max_length)
+    elif text_size is not None and text_size > max_length:
+        # fallback to truncating by string length
+        truncated_value = value[: max_length - 3] + "..."
+    else:
+        return value
+
+    return AnnotatedValue(
+        value=truncated_value,
+        metadata={
+            "len": byte_size or text_size,
+            "rem": [["!limit", "x", max_length - 3, max_length]],
+        },
+    )
 
-    length = len(value)
 
-    if length > max_length:
-        return AnnotatedValue(
-            value=value[: max_length - 3] + "...",
-            metadata={
-                "len": length,
-                "rem": [["!limit", "x", max_length - 3, max_length]],
-            },
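
An example of the byte-aware truncation (not part of the patch): multi-byte characters are never cut in half, and the remark records the original byte length.

```python
from sentry_sdk.utils import AnnotatedValue, strip_string

result = strip_string(u"é" * 10, max_length=10)  # 20 bytes in UTF-8

assert isinstance(result, AnnotatedValue)
assert result.value == u"ééé..."  # truncated at a codepoint boundary
assert result.metadata["len"] == 20
assert result.metadata["rem"] == [["!limit", "x", 7, 10]]
```
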
+def parse_version(version):
+    # type: (str) -> Optional[Tuple[int, ...]]
+    """
+    Parses a version string into a tuple of integers.
+    This uses the parsing logic from PEP 440:
+    https://peps.python.org/pep-0440/#appendix-b-parsing-version-strings-with-regular-expressions
+    """
+    VERSION_PATTERN = r"""  # noqa: N806
+        v?
+        (?:
+            (?:(?P[0-9]+)!)?                           # epoch
+            (?P[0-9]+(?:\.[0-9]+)*)                  # release segment
+            (?P
                                          # pre-release
+                [-_\.]?
+                (?P(a|b|c|rc|alpha|beta|pre|preview))
+                [-_\.]?
+                (?P[0-9]+)?
+            )?
+            (?P                                         # post release
+                (?:-(?P[0-9]+))
+                |
+                (?:
+                    [-_\.]?
+                    (?Ppost|rev|r)
+                    [-_\.]?
+                    (?P[0-9]+)?
+                )
+            )?
+            (?P                                          # dev release
+                [-_\.]?
+                (?Pdev)
+                [-_\.]?
+                (?P[0-9]+)?
+            )?
         )
-    return value
+        (?:\+(?P[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+    """
+
+    pattern = re.compile(
+        r"^\s*" + VERSION_PATTERN + r"\s*$",
+        re.VERBOSE | re.IGNORECASE,
+    )
+
+    try:
+        release = pattern.match(version).groupdict()["release"]  # type: ignore
+        release_tuple = tuple(map(int, release.split(".")[:3]))  # type: Tuple[int, ...]
+    except (TypeError, ValueError, AttributeError):
+        return None
+
+    return release_tuple
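
Behavior sketch (not part of the patch): only the release segment survives, capped at three components, and unparsable input yields `None`.

```python
from sentry_sdk.utils import parse_version

assert parse_version("1.2.3") == (1, 2, 3)
assert parse_version("2.0.0rc3") == (2, 0, 0)
assert parse_version("22.10.0.dev1+g1234567") == (22, 10, 0)
assert parse_version("not-a-version") is None
```
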
 
 
 def _is_contextvars_broken():
@@ -815,9 +1282,18 @@ def _is_contextvars_broken():
         pass
 
     try:
+        import greenlet  # type: ignore
         from eventlet.patcher import is_monkey_patched  # type: ignore
 
-        if is_monkey_patched("thread"):
+        greenlet_version = parse_version(greenlet.__version__)
+
+        if greenlet_version is None:
+            logger.error(
+                "Internal error in Sentry SDK: Could not parse Greenlet version from greenlet.__version__."
+            )
+            return False
+
+        if is_monkey_patched("thread") and greenlet_version < (0, 5):
             return True
     except ImportError:
         pass
@@ -830,24 +1306,49 @@ def _make_threadlocal_contextvars(local):
     class ContextVar(object):
         # Super-limited impl of ContextVar
 
-        def __init__(self, name):
-            # type: (str) -> None
+        def __init__(self, name, default=None):
+            # type: (str, Any) -> None
             self._name = name
+            self._default = default
             self._local = local()
+            self._original_local = local()
 
-        def get(self, default):
+        def get(self, default=None):
             # type: (Any) -> Any
-            return getattr(self._local, "value", default)
+            return getattr(self._local, "value", default or self._default)
 
         def set(self, value):
-            # type: (Any) -> None
+            # type: (Any) -> Any
+            token = str(random.getrandbits(64))
+            original_value = self.get()
+            setattr(self._original_local, token, original_value)
             self._local.value = value
+            return token
+
+        def reset(self, token):
+            # type: (Any) -> None
+            self._local.value = getattr(self._original_local, token)
+            delattr(self._original_local, token)
 
     return ContextVar
 
 
+def _make_noop_copy_context():
+    # type: () -> Callable[[], Any]
+    class NoOpContext:
+        def run(self, func, *args, **kwargs):
+            # type: (Callable[..., Any], *Any, **Any) -> Any
+            return func(*args, **kwargs)
+
+    def copy_context():
+        # type: () -> NoOpContext
+        return NoOpContext()
+
+    return copy_context
+
+
 def _get_contextvars():
-    # type: () -> Tuple[bool, type]
+    # type: () -> Tuple[bool, type, Callable[[], Any]]
     """
     Figure out the "right" contextvars installation to use. Returns a
     `contextvars.ContextVar`-like class with a limited API.
@@ -863,17 +1364,17 @@ def _get_contextvars():
             # `aiocontextvars` is absolutely required for functional
             # contextvars on Python 3.6.
             try:
-                from aiocontextvars import ContextVar  # noqa
+                from aiocontextvars import ContextVar, copy_context
 
-                return True, ContextVar
+                return True, ContextVar, copy_context
             except ImportError:
                 pass
         else:
             # On Python 3.7 contextvars are functional.
             try:
-                from contextvars import ContextVar
+                from contextvars import ContextVar, copy_context
 
-                return True, ContextVar
+                return True, ContextVar, copy_context
             except ImportError:
                 pass
 
@@ -881,10 +1382,10 @@ def _get_contextvars():
 
     from threading import local
 
-    return False, _make_threadlocal_contextvars(local)
+    return False, _make_threadlocal_contextvars(local), _make_noop_copy_context()
 
 
-HAS_REAL_CONTEXTVARS, ContextVar = _get_contextvars()
+HAS_REAL_CONTEXTVARS, ContextVar, copy_context = _get_contextvars()
 
 CONTEXTVARS_ERROR_MESSAGE = """
 
@@ -896,9 +1397,12 @@ def _get_contextvars():
 """
 
 
-def transaction_from_function(func):
+def qualname_from_function(func):
     # type: (Callable[..., Any]) -> Optional[str]
-    # Methods in Python 2
+    """Return the qualified name of func. Works with regular function, lambda, partial and partialmethod."""
+    func_qualname = None  # type: Optional[str]
+
+    # Python 2
     try:
         return "%s.%s.%s" % (
             func.im_class.__module__,  # type: ignore
@@ -908,30 +1412,42 @@ def transaction_from_function(func):
     except Exception:
         pass
 
-    func_qualname = (
-        getattr(func, "__qualname__", None) or getattr(func, "__name__", None) or None
-    )  # type: Optional[str]
-
-    if not func_qualname:
-        # No idea what it is
-        return None
+    prefix, suffix = "", ""
 
-    # Methods in Python 3
-    # Functions
-    # Classes
-    try:
-        return "%s.%s" % (func.__module__, func_qualname)
-    except Exception:
-        pass
+    if (
+        _PARTIALMETHOD_AVAILABLE
+        and hasattr(func, "_partialmethod")
+        and isinstance(func._partialmethod, partialmethod)
+    ):
+        prefix, suffix = "partialmethod()"
+        func = func._partialmethod.func
+    elif isinstance(func, partial) and hasattr(func.func, "__name__"):
+        prefix, suffix = "partial()"
+        func = func.func
+
+    if hasattr(func, "__qualname__"):
+        func_qualname = func.__qualname__
+    elif hasattr(func, "__name__"):  # Python 2.7 has no __qualname__
+        func_qualname = func.__name__
+
+    # Python 3: methods, functions, classes
+    if func_qualname is not None:
+        if hasattr(func, "__module__"):
+            func_qualname = func.__module__ + "." + func_qualname
+        func_qualname = prefix + func_qualname + suffix
 
-    # Possibly a lambda
     return func_qualname
 
 
+def transaction_from_function(func):
+    # type: (Callable[..., Any]) -> Optional[str]
+    return qualname_from_function(func)
+
+
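
How partials render (not part of the patch; `json.dumps` is just a convenient stand-in):

```python
import json
from functools import partial

from sentry_sdk.utils import qualname_from_function

assert qualname_from_function(partial(json.dumps, indent=2)) == (
    "partial(<function json.dumps>)"
)
```
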
 disable_capture_event = ContextVar("disable_capture_event")
 
 
-class ServerlessTimeoutWarning(Exception):
+class ServerlessTimeoutWarning(Exception):  # noqa: N818
     """Raised when a serverless method is about to reach its timeout."""
 
     pass
@@ -1012,3 +1528,296 @@ def from_base64(base64_string):
         )
 
     return utf8_string
+
+
+Components = namedtuple("Components", ["scheme", "netloc", "path", "query", "fragment"])
+
+
+def sanitize_url(url, remove_authority=True, remove_query_values=True, split=False):
+    # type: (str, bool, bool, bool) -> Union[str, Components]
+    """
+    Removes the authority and query parameter values from a given URL.
+    """
+    parsed_url = urlsplit(url)
+    query_params = parse_qs(parsed_url.query, keep_blank_values=True)
+
+    # strip username:password (netloc can be usr:pwd@example.com)
+    if remove_authority:
+        netloc_parts = parsed_url.netloc.split("@")
+        if len(netloc_parts) > 1:
+            netloc = "%s:%s@%s" % (
+                SENSITIVE_DATA_SUBSTITUTE,
+                SENSITIVE_DATA_SUBSTITUTE,
+                netloc_parts[-1],
+            )
+        else:
+            netloc = parsed_url.netloc
+    else:
+        netloc = parsed_url.netloc
+
+    # strip values from query string
+    if remove_query_values:
+        query_string = unquote(
+            urlencode({key: SENSITIVE_DATA_SUBSTITUTE for key in query_params})
+        )
+    else:
+        query_string = parsed_url.query
+
+    components = Components(
+        scheme=parsed_url.scheme,
+        netloc=netloc,
+        query=query_string,
+        path=parsed_url.path,
+        fragment=parsed_url.fragment,
+    )
+
+    if split:
+        return components
+    else:
+        return urlunsplit(components)
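# Usage sketch for sanitize_url(); assumes SENSITIVE_DATA_SUBSTITUTE is the
# SDK's usual "[Filtered]" placeholder (it is defined elsewhere in this module).
from sentry_sdk.utils import sanitize_url

print(sanitize_url("https://user:pwd@example.com/search?token=123#top"))
# e.g. "https://[Filtered]:[Filtered]@example.com/search?token=[Filtered]#top"
print(sanitize_url("https://example.com/search?token=123", remove_query_values=False))
# "https://example.com/search?token=123"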
+
+
+ParsedUrl = namedtuple("ParsedUrl", ["url", "query", "fragment"])
+
+
+def parse_url(url, sanitize=True):
+    # type: (str, bool) -> ParsedUrl
+    """
+    Splits a URL into a base URL (including the path), a query string and a fragment. If sanitize
+    is True, the query parameter values are scrubbed of sensitive data. The authority (username
+    and password) in the URL is always removed.
+    """
+    parsed_url = sanitize_url(
+        url, remove_authority=True, remove_query_values=sanitize, split=True
+    )
+
+    base_url = urlunsplit(
+        Components(
+            scheme=parsed_url.scheme,  # type: ignore
+            netloc=parsed_url.netloc,  # type: ignore
+            query="",
+            path=parsed_url.path,  # type: ignore
+            fragment="",
+        )
+    )
+
+    return ParsedUrl(
+        url=base_url,
+        query=parsed_url.query,  # type: ignore
+        fragment=parsed_url.fragment,  # type: ignore
+    )
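# parse_url() in action; a sketch. With sanitize=True (the default) the query
# values come back scrubbed, per sanitize_url() above.
from sentry_sdk.utils import parse_url

parsed = parse_url("https://example.com/api/users?id=42#section")
print(parsed.url)       # "https://example.com/api/users"
print(parsed.query)     # e.g. "id=[Filtered]"
print(parsed.fragment)  # "section"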
+
+
+def is_valid_sample_rate(rate, source):
+    # type: (Any, str) -> bool
+    """
+    Checks the given sample rate to make sure it is a valid type and value (a
+    boolean or a number between 0 and 1, inclusive).
+    """
+
+    # both booleans and NaN are instances of Real, so a) checking for Real
+    # also covers booleans, and b) we still have to check for NaN separately.
+    # Decimal does not derive from Real, so it needs its own check as well.
+    if not isinstance(rate, (Real, Decimal)) or math.isnan(rate):
+        logger.warning(
+            "{source} Given sample rate is invalid. Sample rate must be a boolean or a number between 0 and 1. Got {rate} of type {type}.".format(
+                source=source, rate=rate, type=type(rate)
+            )
+        )
+        return False
+
+    # in case rate is a boolean, it will get cast to 1 if it's True and 0 if it's False
+    rate = float(rate)
+    if rate < 0 or rate > 1:
+        logger.warning(
+            "{source} Given sample rate is invalid. Sample rate must be between 0 and 1. Got {rate}.".format(
+                source=source, rate=rate
+            )
+        )
+        return False
+
+    return True
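# is_valid_sample_rate() accepts bools and numbers in [0, 1]; everything else
# is rejected with a logged warning. A few illustrative calls:
from sentry_sdk.utils import is_valid_sample_rate

assert is_valid_sample_rate(0.25, source="[example]")
assert is_valid_sample_rate(True, source="[example]")       # bools are cast to 1/0
assert not is_valid_sample_rate(1.5, source="[example]")    # out of range
assert not is_valid_sample_rate("0.5", source="[example]")  # wrong type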
+
+
+def match_regex_list(item, regex_list=None, substring_matching=False):
+    # type: (str, Optional[List[str]], bool) -> bool
+    if regex_list is None:
+        return False
+
+    for item_matcher in regex_list:
+        if not substring_matching and item_matcher[-1] != "$":
+            item_matcher += "$"
+
+        matched = re.search(item_matcher, item)
+        if matched:
+            return True
+
+    return False
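# match_regex_list() appends "$" to each pattern unless substring_matching is
# set, so a pattern has to match through the end of the item. A sketch:
from sentry_sdk.utils import match_regex_list

assert match_regex_list("payments", ["payments"])
assert not match_regex_list("payments-worker", ["payments"])
assert match_regex_list("payments-worker", ["payments"], substring_matching=True)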
+
+
+def is_sentry_url(hub, url):
+    # type: (sentry_sdk.Hub, str) -> bool
+    """
+    Determines whether the given URL matches the Sentry DSN.
+    """
+    return (
+        hub.client is not None
+        and hub.client.transport is not None
+        and hub.client.transport.parsed_dsn is not None
+        and hub.client.transport.parsed_dsn.netloc in url
+    )
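# is_sentry_url() guards against the SDK instrumenting its own ingest traffic;
# a sketch (the DSN host below is made up for illustration):
import sentry_sdk
from sentry_sdk.utils import is_sentry_url

hub = sentry_sdk.Hub.current
print(is_sentry_url(hub, "https://o12345.ingest.sentry.io/api/1/envelope/"))  # True if this host is in the DSN
print(is_sentry_url(hub, "https://example.com/"))  # False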
+
+
+def _generate_installed_modules():
+    # type: () -> Iterator[Tuple[str, str]]
+    try:
+        from importlib import metadata
+
+        yielded = set()
+        for dist in metadata.distributions():
+            name = dist.metadata["Name"]
+            # `metadata` values may be `None`, see:
+            # https://github.com/python/cpython/issues/91216
+            # and
+            # https://github.com/python/importlib_metadata/issues/371
+            if name is not None:
+                normalized_name = _normalize_module_name(name)
+                if dist.version is not None and normalized_name not in yielded:
+                    yield normalized_name, dist.version
+                    yielded.add(normalized_name)
+
+    except ImportError:
+        # < py3.8
+        try:
+            import pkg_resources
+        except ImportError:
+            return
+
+        for info in pkg_resources.working_set:
+            yield _normalize_module_name(info.key), info.version
+
+
+def _normalize_module_name(name):
+    # type: (str) -> str
+    return name.lower()
+
+
+def _get_installed_modules():
+    # type: () -> Dict[str, str]
+    global _installed_modules
+    if _installed_modules is None:
+        _installed_modules = dict(_generate_installed_modules())
+    return _installed_modules
+
+
+def package_version(package):
+    # type: (str) -> Optional[Tuple[int, ...]]
+    installed_packages = _get_installed_modules()
+    version = installed_packages.get(package)
+    if version is None:
+        return None
+
+    return parse_version(version)
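# package_version() combines the lookups above; per the type hint it yields a
# tuple of ints via parse_version() (defined elsewhere in this module), or None:
from sentry_sdk.utils import package_version

print(package_version("urllib3"))          # e.g. (1, 26, 11)
print(package_version("no-such-package"))  # None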
+
+
+if PY37:
+
+    def nanosecond_time():
+        # type: () -> int
+        return time.perf_counter_ns()
+
+elif PY33:
+
+    def nanosecond_time():
+        # type: () -> int
+        return int(time.perf_counter() * 1e9)
+
+else:
+
+    def nanosecond_time():
+        # type: () -> int
+        return int(time.time() * 1e9)
+
+
+if PY2:
+
+    def now():
+        # type: () -> float
+        return time.time()
+
+else:
+
+    def now():
+        # type: () -> float
+        return time.perf_counter()
+
+
+try:
+    from gevent import get_hub as get_gevent_hub
+    from gevent.monkey import is_module_patched
+except ImportError:
+
+    def get_gevent_hub():
+        # type: () -> Any
+        return None
+
+    def is_module_patched(*args, **kwargs):
+        # type: (*Any, **Any) -> bool
+        # if gevent cannot be imported, no modules have been patched
+        return False
+
+
+def is_gevent():
+    # type: () -> bool
+    return is_module_patched("threading") or is_module_patched("_thread")
+
+
+def get_current_thread_meta(thread=None):
+    # type: (Optional[threading.Thread]) -> Tuple[Optional[int], Optional[str]]
+    """
+    Try to get the id of the current thread, with various fallbacks.
+    """
+
+    # if a thread is specified, that takes priority
+    if thread is not None:
+        try:
+            thread_id = thread.ident
+            thread_name = thread.name
+            if thread_id is not None:
+                return thread_id, thread_name
+        except AttributeError:
+            pass
+
+    # if the app is using gevent, we should look at the gevent hub first
+    # as the id there differs from what the threading module reports
+    if is_gevent():
+        gevent_hub = get_gevent_hub()
+        if gevent_hub is not None:
+            try:
+                # this is undocumented, so wrap it in try except to be safe
+                return gevent_hub.thread_ident, None
+            except AttributeError:
+                pass
+
+    # use the current thread's id if possible
+    try:
+        thread = threading.current_thread()
+        thread_id = thread.ident
+        thread_name = thread.name
+        if thread_id is not None:
+            return thread_id, thread_name
+    except AttributeError:
+        pass
+
+    # if we can't get the current thread id, fall back to the main thread id
+    try:
+        thread = threading.main_thread()
+        thread_id = thread.ident
+        thread_name = thread.name
+        if thread_id is not None:
+            return thread_id, thread_name
+    except AttributeError:
+        pass
+
+    # we've tried everything, time to give up
+    return None, None
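# get_current_thread_meta() resolution order, sketched: explicit thread, then
# the gevent hub, then the current thread, then the main thread.
import threading
from sentry_sdk.utils import get_current_thread_meta

print(get_current_thread_meta())  # e.g. (140230438123264, "MainThread")

worker = threading.Thread(target=lambda: None, name="my-worker")
worker.start()
print(get_current_thread_meta(worker))  # (worker.ident, "my-worker")
worker.join()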
diff --git a/sentry_sdk/worker.py b/sentry_sdk/worker.py
index a06fb8f0d1..27b2f2f69c 100644
--- a/sentry_sdk/worker.py
+++ b/sentry_sdk/worker.py
@@ -2,14 +2,13 @@
 import threading
 
 from time import sleep, time
-from sentry_sdk._compat import check_thread_support
-from sentry_sdk._queue import Queue, Full
+from sentry_sdk._queue import Queue, FullError
 from sentry_sdk.utils import logger
 from sentry_sdk.consts import DEFAULT_QUEUE_SIZE
 
-from sentry_sdk._types import MYPY
+from sentry_sdk._types import TYPE_CHECKING
 
-if MYPY:
+if TYPE_CHECKING:
     from typing import Any
     from typing import Optional
     from typing import Callable
@@ -21,7 +20,6 @@
 class BackgroundWorker(object):
     def __init__(self, queue_size=DEFAULT_QUEUE_SIZE):
         # type: (int) -> None
-        check_thread_support()
         self._queue = Queue(queue_size)  # type: Queue
         self._lock = threading.Lock()
         self._thread = None  # type: Optional[threading.Thread]
@@ -67,8 +65,14 @@ def start(self):
                     target=self._target, name="raven-sentry.BackgroundWorker"
                 )
                 self._thread.daemon = True
-                self._thread.start()
-                self._thread_for_pid = os.getpid()
+                try:
+                    self._thread.start()
+                    self._thread_for_pid = os.getpid()
+                except RuntimeError:
+                    # The thread can no longer be started because the
+                    # interpreter is already shutting down. Sadly this also
+                    # means we can no longer send out events.
+                    self._thread = None
 
     def kill(self):
         # type: () -> None
@@ -81,7 +85,7 @@ def kill(self):
             if self._thread:
                 try:
                     self._queue.put_nowait(_TERMINATOR)
-                except Full:
+                except FullError:
                     logger.debug("background worker queue full, kill failed")
 
                 self._thread = None
@@ -95,6 +99,10 @@ def flush(self, timeout, callback=None):
                 self._wait_flush(timeout, callback)
         logger.debug("background worker flushed")
 
+    def full(self):
+        # type: () -> bool
+        return self._queue.full()
+
     def _wait_flush(self, timeout, callback):
         # type: (float, Optional[Any]) -> None
         initial_timeout = min(0.1, timeout)
@@ -114,7 +122,7 @@ def submit(self, callback):
         try:
             self._queue.put_nowait(callback)
             return True
-        except Full:
+        except FullError:
             return False
 
     def _target(self):
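# BackgroundWorker round trip, sketched from the surface shown in this hunk;
# submit() returns False instead of blocking when the queue is full:
from sentry_sdk.worker import BackgroundWorker

worker = BackgroundWorker(queue_size=100)
worker.start()
if not worker.submit(lambda: print("callback ran on the worker thread")):
    print("queue full, callback dropped")
worker.flush(timeout=2.0)  # wait up to ~2s for the queue to drain
worker.kill()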
diff --git a/setup.py b/setup.py
index e7aeef2398..ad227bfc46 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
 
 setup(
     name="sentry-sdk",
-    version="1.5.12",
+    version="1.45.1",
     author="Sentry Team and Contributors",
     author_email="hello@sentry.io",
     url="https://github.com/getsentry/sentry-python",
@@ -36,25 +36,53 @@ def get_file_text(file_name):
     # PEP 561
     package_data={"sentry_sdk": ["py.typed"]},
     zip_safe=False,
-    license="BSD",
-    install_requires=["urllib3>=1.10.0", "certifi"],
+    license="MIT",
+    install_requires=[
+        'urllib3>=1.25.7; python_version<="3.4"',
+        'urllib3>=1.26.9; python_version=="3.5"',
+        'urllib3>=1.26.11; python_version>="3.6"',
+        "certifi",
+    ],
     extras_require={
-        "flask": ["flask>=0.11", "blinker>=1.1"],
-        "quart": ["quart>=0.16.1", "blinker>=1.1"],
+        "aiohttp": ["aiohttp>=3.5"],
+        "arq": ["arq>=0.23"],
+        "asyncpg": ["asyncpg>=0.23"],
+        "beam": ["apache-beam>=2.12"],
         "bottle": ["bottle>=0.12.13"],
-        "falcon": ["falcon>=1.4"],
-        "django": ["django>=1.8"],
-        "sanic": ["sanic>=0.8"],
         "celery": ["celery>=3"],
-        "beam": ["apache-beam>=2.12"],
-        "rq": ["rq>=0.6"],
-        "aiohttp": ["aiohttp>=3.5"],
-        "tornado": ["tornado>=5"],
-        "sqlalchemy": ["sqlalchemy>=1.2"],
-        "pyspark": ["pyspark>=2.4.4"],
-        "pure_eval": ["pure_eval", "executing", "asttokens"],
+        "celery-redbeat": ["celery-redbeat>=2"],
         "chalice": ["chalice>=1.16.0"],
+        "clickhouse-driver": ["clickhouse-driver>=0.2.0"],
+        "django": ["django>=1.8"],
+        "falcon": ["falcon>=1.4"],
+        "fastapi": ["fastapi>=0.79.0"],
+        "flask": ["flask>=0.11", "blinker>=1.1", "markupsafe"],
+        "grpcio": ["grpcio>=1.21.1"],
         "httpx": ["httpx>=0.16.0"],
+        "huey": ["huey>=2"],
+        "loguru": ["loguru>=0.5"],
+        "openai": ["openai>=1.0.0", "tiktoken>=0.3.0"],
+        "opentelemetry": ["opentelemetry-distro>=0.35b0"],
+        "opentelemetry-experimental": [
+            "opentelemetry-distro~=0.40b0",
+            "opentelemetry-instrumentation-aiohttp-client~=0.40b0",
+            "opentelemetry-instrumentation-django~=0.40b0",
+            "opentelemetry-instrumentation-fastapi~=0.40b0",
+            "opentelemetry-instrumentation-flask~=0.40b0",
+            "opentelemetry-instrumentation-requests~=0.40b0",
+            "opentelemetry-instrumentation-sqlite3~=0.40b0",
+            "opentelemetry-instrumentation-urllib~=0.40b0",
+        ],
+        "pure_eval": ["pure_eval", "executing", "asttokens"],
+        "pymongo": ["pymongo>=3.1"],
+        "pyspark": ["pyspark>=2.4.4"],
+        "quart": ["quart>=0.16.1", "blinker>=1.1"],
+        "rq": ["rq>=0.6"],
+        "sanic": ["sanic>=0.8"],
+        "sqlalchemy": ["sqlalchemy>=1.2"],
+        "starlette": ["starlette>=0.19.1"],
+        "starlite": ["starlite>=1.48"],
+        "tornado": ["tornado>=5"],
     },
     classifiers=[
         "Development Status :: 5 - Production/Stable",
@@ -73,6 +101,8 @@ def get_file_text(file_name):
         "Programming Language :: Python :: 3.8",
         "Programming Language :: Python :: 3.9",
         "Programming Language :: Python :: 3.10",
+        "Programming Language :: Python :: 3.11",
+        "Programming Language :: Python :: 3.12",
         "Topic :: Software Development :: Libraries :: Python Modules",
     ],
     options={"bdist_wheel": {"universal": "1"}},
diff --git a/test-requirements.txt b/test-requirements.txt
index 746b10b9b4..c9324e753b 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,15 +1,15 @@
-pytest<7
+pip  # always use newest pip
+mock ; python_version<'3.3'
+pytest
+pytest-cov==2.8.1
 pytest-forked<=1.4.0
+pytest-localserver==0.5.1  # TODO(py3): 0.6.0 drops 2.7 support: https://github.com/pytest-dev/pytest-localserver/releases/tag/v0.6.0
 pytest-watch==4.2.0
 tox==3.7.0
-Werkzeug<2.1.0
-pytest-localserver==0.5.0
-pytest-cov==2.8.1
 jsonschema==3.2.0
 pyrsistent==0.16.0 # TODO(py3): 0.17.0 requires python3, see https://github.com/tobgu/pyrsistent/issues/205
-mock # for testing under python < 3.3
-
-gevent
-
-executing
+executing<2.0.0  # TODO(py3): 2.0.0 requires python3
 asttokens
+responses
+pysocks
+ipdb
diff --git a/tests/__init__.py b/tests/__init__.py
index cac15f9333..2e4df719d5 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -1,6 +1,5 @@
 import sys
-
-import pytest
+import warnings
 
 # This is used in _capture_internal_warnings. We need to run this at import
 # time because that's where many deprecation warnings might get thrown.
@@ -9,5 +8,5 @@
 # gets loaded too late.
 assert "sentry_sdk" not in sys.modules
 
-_warning_recorder_mgr = pytest.warns(None)
+_warning_recorder_mgr = warnings.catch_warnings(record=True)
 _warning_recorder = _warning_recorder_mgr.__enter__()
diff --git a/tests/conftest.py b/tests/conftest.py
index 61f25d98ee..c87111cbf7 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,5 +1,8 @@
-import os
 import json
+import os
+import socket
+from threading import Thread
+from contextlib import contextmanager
 
 import pytest
 import jsonschema
@@ -14,14 +17,38 @@
 except ImportError:
     eventlet = None
 
+try:
+    # Python 2
+    import BaseHTTPServer
+
+    HTTPServer = BaseHTTPServer.HTTPServer
+    BaseHTTPRequestHandler = BaseHTTPServer.BaseHTTPRequestHandler
+except Exception:
+    # Python 3
+    from http.server import BaseHTTPRequestHandler, HTTPServer
+
+
+try:
+    from unittest import mock
+except ImportError:
+    import mock
+
 import sentry_sdk
-from sentry_sdk._compat import reraise, string_types, iteritems
-from sentry_sdk.transport import Transport
+from sentry_sdk._compat import iteritems, reraise, string_types, PY2
 from sentry_sdk.envelope import Envelope
+from sentry_sdk.integrations import _processed_integrations  # noqa: F401
+from sentry_sdk.profiler import teardown_profiler
+from sentry_sdk.transport import Transport
 from sentry_sdk.utils import capture_internal_exceptions
 
 from tests import _warning_recorder, _warning_recorder_mgr
 
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from typing import Optional
+    from collections.abc import Iterator
+
 
 SENTRY_EVENT_SCHEMA = "./checkouts/data-schemas/relay/event.schema.json"
 
@@ -54,7 +81,7 @@ def _capture_internal_exception(self, exc_info):
 
     @request.addfinalizer
     def _():
-        # rerasise the errors so that this just acts as a pass-through (that
+        # reraise the errors so that this just acts as a pass-through (that
         # happens to keep track of the errors which pass through it)
         for e in errors:
             reraise(*e)
@@ -142,11 +169,11 @@ def check_string_keys(map):
 
     def check_envelope(envelope):
         with capture_internal_exceptions():
-            # Assert error events are sent without envelope to server, for compat.
-            # This does not apply if any item in the envelope is an attachment.
-            if not any(x.type == "attachment" for x in envelope.items):
-                assert not any(item.data_category == "error" for item in envelope.items)
-                assert not any(item.get_event() is not None for item in envelope.items)
+            # There used to be a check here that errors are not sent in envelopes.
+            # We changed the behaviour to send errors in envelopes when tracing is enabled.
+            # This is checked in test_client.py::test_sending_events_with_tracing
+            # and test_client.py::test_sending_events_with_no_tracing
+            pass
 
     def inner(client):
         monkeypatch.setattr(
@@ -165,6 +192,17 @@ def inner(event):
     return inner
 
 
+@pytest.fixture
+def reset_integrations():
+    """
+    Use with caution: sometimes we really need to start
+    with a clean slate to ensure monkeypatching works well,
+    but this also means some other stuff will be monkeypatched twice.
+    """
+    global _processed_integrations
+    _processed_integrations.clear()
+
+
 @pytest.fixture
 def sentry_init(monkeypatch_test_transport, request):
     def inner(*a, **kw):
@@ -285,20 +323,21 @@ def flush(timeout=None, callback=None):
         monkeypatch.setattr(test_client.transport, "capture_event", append)
         monkeypatch.setattr(test_client, "flush", flush)
 
-        return EventStreamReader(events_r)
+        return EventStreamReader(events_r, events_w)
 
     return inner
 
 
 class EventStreamReader(object):
-    def __init__(self, file):
-        self.file = file
+    def __init__(self, read_file, write_file):
+        self.read_file = read_file
+        self.write_file = write_file
 
     def read_event(self):
-        return json.loads(self.file.readline().decode("utf-8"))
+        return json.loads(self.read_file.readline().decode("utf-8"))
 
     def read_flush(self):
-        assert self.file.readline() == b"flush\n"
+        assert self.read_file.readline() == b"flush\n"
 
 
 # scope=session ensures that fixture is run earlier
@@ -388,7 +427,7 @@ def __init__(self, substring):
             try:
                 # the `unicode` type only exists in python 2, so if this blows up,
                 # we must be in py3 and have the `bytes` type
-                self.valid_types = (str, unicode)  # noqa
+                self.valid_types = (str, unicode)
             except NameError:
                 self.valid_types = (str, bytes)
 
@@ -542,3 +581,86 @@ def __ne__(self, test_obj):
             return not self.__eq__(test_obj)
 
     return ObjectDescribedBy
+
+
+@pytest.fixture
+def teardown_profiling():
+    yield
+    teardown_profiler()
+
+
+class MockServerRequestHandler(BaseHTTPRequestHandler):
+    def do_GET(self):  # noqa: N802
+        # Process an HTTP GET request and return a response with an HTTP 200 status.
+        self.send_response(200)
+        self.end_headers()
+        return
+
+
+def get_free_port():
+    s = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM)
+    s.bind(("localhost", 0))
+    _, port = s.getsockname()
+    s.close()
+    return port
+
+
+def create_mock_http_server():
+    # Start a mock server to test outgoing http requests
+    mock_server_port = get_free_port()
+    mock_server = HTTPServer(("localhost", mock_server_port), MockServerRequestHandler)
+    mock_server_thread = Thread(target=mock_server.serve_forever)
+    mock_server_thread.setDaemon(True)
+    mock_server_thread.start()
+
+    return mock_server_port
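# How a test uses the mock server, sketched: start it once, then point an
# outgoing request at the returned port; the daemonized thread dies with the
# test process.
try:
    from urllib.request import urlopen  # Python 3
except ImportError:
    from urllib2 import urlopen  # Python 2

PORT = create_mock_http_server()
assert urlopen("http://localhost:{}/some/path".format(PORT)).getcode() == 200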
+
+
+def unpack_werkzeug_response(response):
+    # werkzeug < 2.1 returns a tuple as client response, newer versions return
+    # an object
+    try:
+        return response.get_data(), response.status, response.headers
+    except AttributeError:
+        content, status, headers = response
+        return b"".join(content), status, headers
+
+
+def werkzeug_set_cookie(client, servername, key, value):
+    # client.set_cookie has a different signature in different werkzeug versions
+    try:
+        client.set_cookie(servername, key, value)
+    except TypeError:
+        client.set_cookie(key, value)
+
+
+@contextmanager
+def patch_start_tracing_child(fake_transaction_is_none=False):
+    # type: (bool) -> Iterator[Optional[mock.MagicMock]]
+    if not fake_transaction_is_none:
+        fake_transaction = mock.MagicMock()
+        fake_start_child = mock.MagicMock()
+        fake_transaction.start_child = fake_start_child
+    else:
+        fake_transaction = None
+        fake_start_child = None
+
+    version = "2" if PY2 else "3"
+
+    with mock.patch(
+        "sentry_sdk.tracing_utils_py%s.get_current_span" % version,
+        return_value=fake_transaction,
+    ):
+        yield fake_start_child
+
+
+class ApproxDict(dict):
+    def __eq__(self, other):
+        # For an ApproxDict to equal another dict, the other dict just needs to contain
+        # all the keys from the ApproxDict with the same values.
+        #
+        # The other dict may contain additional keys with any value.
+        return all(key in other and other[key] == value for key, value in self.items())
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
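# ApproxDict in an assertion, sketched: only the listed keys are checked, so
# tests stay stable when integrations add new data fields.
crumb_data = {"url": "http://example.com", "http.method": "GET", "reason": "OK"}
assert crumb_data == ApproxDict({"url": "http://example.com"})
assert crumb_data != ApproxDict({"http.method": "POST"})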
diff --git a/tests/crons/__init__.py b/tests/crons/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/crons/test_crons.py b/tests/crons/test_crons.py
new file mode 100644
index 0000000000..1f50a33751
--- /dev/null
+++ b/tests/crons/test_crons.py
@@ -0,0 +1,343 @@
+import pytest
+import uuid
+
+import sentry_sdk
+from sentry_sdk import Hub, configure_scope, set_level
+from sentry_sdk.crons import capture_checkin
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+
+@sentry_sdk.monitor(monitor_slug="abc123")
+def _hello_world(name):
+    return "Hello, {}".format(name)
+
+
+@sentry_sdk.monitor(monitor_slug="def456")
+def _break_world(name):
+    1 / 0
+    return "Hello, {}".format(name)
+
+
+def _hello_world_contextmanager(name):
+    with sentry_sdk.monitor(monitor_slug="abc123"):
+        return "Hello, {}".format(name)
+
+
+def _break_world_contextmanager(name):
+    with sentry_sdk.monitor(monitor_slug="def456"):
+        1 / 0
+        return "Hello, {}".format(name)
+
+
+@sentry_sdk.monitor(monitor_slug="ghi789", monitor_config=None)
+def _no_monitor_config():
+    return
+
+
+@sentry_sdk.monitor(
+    monitor_slug="ghi789",
+    monitor_config={
+        "schedule": {"type": "crontab", "value": "0 0 * * *"},
+        "failure_issue_threshold": 5,
+    },
+)
+def _with_monitor_config():
+    return
+
+
+def test_decorator(sentry_init):
+    sentry_init()
+
+    with mock.patch(
+        "sentry_sdk.crons.decorator.capture_checkin"
+    ) as fake_capture_checkin:
+        result = _hello_world("Grace")
+        assert result == "Hello, Grace"
+
+        # Check for initial checkin
+        fake_capture_checkin.assert_has_calls(
+            [
+                mock.call(
+                    monitor_slug="abc123", status="in_progress", monitor_config=None
+                ),
+            ]
+        )
+
+        # Check for final checkin
+        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "abc123"
+        assert fake_capture_checkin.call_args[1]["status"] == "ok"
+        assert fake_capture_checkin.call_args[1]["duration"]
+        assert fake_capture_checkin.call_args[1]["check_in_id"]
+
+
+def test_decorator_error(sentry_init):
+    sentry_init()
+
+    with mock.patch(
+        "sentry_sdk.crons.decorator.capture_checkin"
+    ) as fake_capture_checkin:
+        with pytest.raises(ZeroDivisionError):
+            result = _break_world("Grace")
+
+        assert "result" not in locals()
+
+        # Check for initial checkin
+        fake_capture_checkin.assert_has_calls(
+            [
+                mock.call(
+                    monitor_slug="def456", status="in_progress", monitor_config=None
+                ),
+            ]
+        )
+
+        # Check for final checkin
+        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "def456"
+        assert fake_capture_checkin.call_args[1]["status"] == "error"
+        assert fake_capture_checkin.call_args[1]["duration"]
+        assert fake_capture_checkin.call_args[1]["check_in_id"]
+
+
+def test_contextmanager(sentry_init):
+    sentry_init()
+
+    with mock.patch(
+        "sentry_sdk.crons.decorator.capture_checkin"
+    ) as fake_capture_checkin:
+        result = _hello_world_contextmanager("Grace")
+        assert result == "Hello, Grace"
+
+        # Check for initial checkin
+        fake_capture_checkin.assert_has_calls(
+            [
+                mock.call(
+                    monitor_slug="abc123", status="in_progress", monitor_config=None
+                ),
+            ]
+        )
+
+        # Check for final checkin
+        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "abc123"
+        assert fake_capture_checkin.call_args[1]["status"] == "ok"
+        assert fake_capture_checkin.call_args[1]["duration"]
+        assert fake_capture_checkin.call_args[1]["check_in_id"]
+
+
+def test_contextmanager_error(sentry_init):
+    sentry_init()
+
+    with mock.patch(
+        "sentry_sdk.crons.decorator.capture_checkin"
+    ) as fake_capture_checkin:
+        with pytest.raises(ZeroDivisionError):
+            result = _break_world_contextmanager("Grace")
+
+        assert "result" not in locals()
+
+        # Check for initial checkin
+        fake_capture_checkin.assert_has_calls(
+            [
+                mock.call(
+                    monitor_slug="def456", status="in_progress", monitor_config=None
+                ),
+            ]
+        )
+
+        # Check for final checkin
+        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "def456"
+        assert fake_capture_checkin.call_args[1]["status"] == "error"
+        assert fake_capture_checkin.call_args[1]["duration"]
+        assert fake_capture_checkin.call_args[1]["check_in_id"]
+
+
+def test_capture_checkin_simple(sentry_init):
+    sentry_init()
+
+    check_in_id = capture_checkin(
+        monitor_slug="abc123",
+        check_in_id="112233",
+        status=None,
+        duration=None,
+    )
+    assert check_in_id == "112233"
+
+
+def test_sample_rate_doesnt_affect_crons(sentry_init, capture_envelopes):
+    sentry_init(sample_rate=0)
+    envelopes = capture_envelopes()
+
+    capture_checkin(check_in_id="112233")
+
+    assert len(envelopes) == 1
+
+    check_in = envelopes[0].items[0].payload.json
+    assert check_in["check_in_id"] == "112233"
+
+
+def test_capture_checkin_new_id(sentry_init):
+    sentry_init()
+
+    with mock.patch("uuid.uuid4") as mock_uuid:
+        mock_uuid.return_value = uuid.UUID("a8098c1a-f86e-11da-bd1a-00112444be1e")
+        check_in_id = capture_checkin(
+            monitor_slug="abc123",
+            check_in_id=None,
+            status=None,
+            duration=None,
+        )
+
+        assert check_in_id == "a8098c1af86e11dabd1a00112444be1e"
+
+
+def test_end_to_end(sentry_init, capture_envelopes):
+    sentry_init()
+    envelopes = capture_envelopes()
+
+    capture_checkin(
+        monitor_slug="abc123",
+        check_in_id="112233",
+        duration=123,
+        status="ok",
+    )
+
+    check_in = envelopes[0].items[0].payload.json
+
+    # Check for final checkin
+    assert check_in["check_in_id"] == "112233"
+    assert check_in["monitor_slug"] == "abc123"
+    assert check_in["status"] == "ok"
+    assert check_in["duration"] == 123
+
+
+def test_monitor_config(sentry_init, capture_envelopes):
+    sentry_init()
+    envelopes = capture_envelopes()
+
+    monitor_config = {
+        "schedule": {"type": "crontab", "value": "0 0 * * *"},
+        "failure_issue_threshold": 5,
+        "recovery_threshold": 5,
+    }
+
+    capture_checkin(monitor_slug="abc123", monitor_config=monitor_config)
+    check_in = envelopes[0].items[0].payload.json
+
+    # Check for final checkin
+    assert check_in["monitor_slug"] == "abc123"
+    assert check_in["monitor_config"] == monitor_config
+
+    # Without passing a monitor_config the field is not in the checkin
+    capture_checkin(monitor_slug="abc123")
+    check_in = envelopes[1].items[0].payload.json
+
+    assert check_in["monitor_slug"] == "abc123"
+    assert "monitor_config" not in check_in
+
+
+def test_decorator_monitor_config(sentry_init, capture_envelopes):
+    sentry_init()
+    envelopes = capture_envelopes()
+
+    _with_monitor_config()
+
+    assert len(envelopes) == 2
+
+    for check_in_envelope in envelopes:
+        assert len(check_in_envelope.items) == 1
+        check_in = check_in_envelope.items[0].payload.json
+
+        assert check_in["monitor_slug"] == "ghi789"
+        assert check_in["monitor_config"] == {
+            "schedule": {"type": "crontab", "value": "0 0 * * *"},
+            "failure_issue_threshold": 5,
+        }
+
+
+def test_decorator_no_monitor_config(sentry_init, capture_envelopes):
+    sentry_init()
+    envelopes = capture_envelopes()
+
+    _no_monitor_config()
+
+    assert len(envelopes) == 2
+
+    for check_in_envelope in envelopes:
+        assert len(check_in_envelope.items) == 1
+        check_in = check_in_envelope.items[0].payload.json
+
+        assert check_in["monitor_slug"] == "ghi789"
+        assert "monitor_config" not in check_in
+
+
+def test_capture_checkin_sdk_not_initialized():
+    # Tests that capture_checkin does not raise an error when the Sentry SDK is not initialized.
+    # sentry_init() is intentionally omitted.
+    check_in_id = capture_checkin(
+        monitor_slug="abc123",
+        check_in_id="112233",
+        status=None,
+        duration=None,
+    )
+    assert check_in_id == "112233"
+
+
+def test_scope_data_in_checkin(sentry_init, capture_envelopes):
+    sentry_init()
+    envelopes = capture_envelopes()
+
+    valid_keys = [
+        # Mandatory event keys
+        "type",
+        "event_id",
+        "timestamp",
+        "platform",
+        # Optional event keys
+        "release",
+        "environment",
+        # Mandatory check-in specific keys
+        "check_in_id",
+        "monitor_slug",
+        "status",
+        # Optional check-in specific keys
+        "duration",
+        "monitor_config",
+        "contexts",  # an event processor adds this
+        # TODO: Check whether these fields are valid for check-ins:
+        "_meta",
+        "tags",
+        "extra",  # an event processor adds this
+        "modules",
+        "server_name",
+        "sdk",
+    ]
+
+    hub = Hub.current
+    with configure_scope() as scope:
+        # Add some data to the scope
+        set_level("warning")
+        hub.add_breadcrumb(message="test breadcrumb")
+        scope.set_tag("test_tag", "test_value")
+        scope.set_extra("test_extra", "test_value")
+        scope.set_context("test_context", {"test_key": "test_value"})
+
+        capture_checkin(
+            monitor_slug="abc123",
+            check_in_id="112233",
+            status="ok",
+            duration=123,
+        )
+
+        (envelope,) = envelopes
+        check_in_event = envelope.items[0].payload.json
+
+        invalid_keys = []
+        for key in check_in_event.keys():
+            if key not in valid_keys:
+                invalid_keys.append(key)
+
+        assert len(invalid_keys) == 0, "Unexpected keys found in checkin: {}".format(
+            invalid_keys
+        )
diff --git a/tests/crons/test_crons_async_py3.py b/tests/crons/test_crons_async_py3.py
new file mode 100644
index 0000000000..53ec96d713
--- /dev/null
+++ b/tests/crons/test_crons_async_py3.py
@@ -0,0 +1,144 @@
+import pytest
+
+import sentry_sdk
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+
+@sentry_sdk.monitor(monitor_slug="abc123")
+async def _hello_world(name):
+    return "Hello, {}".format(name)
+
+
+@sentry_sdk.monitor(monitor_slug="def456")
+async def _break_world(name):
+    1 / 0
+    return "Hello, {}".format(name)
+
+
+async def my_coroutine():
+    return
+
+
+async def _hello_world_contextmanager(name):
+    with sentry_sdk.monitor(monitor_slug="abc123"):
+        await my_coroutine()
+        return "Hello, {}".format(name)
+
+
+async def _break_world_contextmanager(name):
+    with sentry_sdk.monitor(monitor_slug="def456"):
+        await my_coroutine()
+        1 / 0
+        return "Hello, {}".format(name)
+
+
+@pytest.mark.asyncio
+async def test_decorator(sentry_init):
+    sentry_init()
+
+    with mock.patch(
+        "sentry_sdk.crons.decorator.capture_checkin"
+    ) as fake_capture_checkin:
+        result = await _hello_world("Grace")
+        assert result == "Hello, Grace"
+
+        # Check for initial checkin
+        fake_capture_checkin.assert_has_calls(
+            [
+                mock.call(
+                    monitor_slug="abc123", status="in_progress", monitor_config=None
+                ),
+            ]
+        )
+
+        # Check for final checkin
+        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "abc123"
+        assert fake_capture_checkin.call_args[1]["status"] == "ok"
+        assert fake_capture_checkin.call_args[1]["duration"]
+        assert fake_capture_checkin.call_args[1]["check_in_id"]
+
+
+@pytest.mark.asyncio
+async def test_decorator_error(sentry_init):
+    sentry_init()
+
+    with mock.patch(
+        "sentry_sdk.crons.decorator.capture_checkin"
+    ) as fake_capture_checkin:
+        with pytest.raises(ZeroDivisionError):
+            result = await _break_world("Grace")
+
+        assert "result" not in locals()
+
+        # Check for initial checkin
+        fake_capture_checkin.assert_has_calls(
+            [
+                mock.call(
+                    monitor_slug="def456", status="in_progress", monitor_config=None
+                ),
+            ]
+        )
+
+        # Check for final checkin
+        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "def456"
+        assert fake_capture_checkin.call_args[1]["status"] == "error"
+        assert fake_capture_checkin.call_args[1]["duration"]
+        assert fake_capture_checkin.call_args[1]["check_in_id"]
+
+
+@pytest.mark.asyncio
+async def test_contextmanager(sentry_init):
+    sentry_init()
+
+    with mock.patch(
+        "sentry_sdk.crons.decorator.capture_checkin"
+    ) as fake_capture_checkin:
+        result = await _hello_world_contextmanager("Grace")
+        assert result == "Hello, Grace"
+
+        # Check for initial checkin
+        fake_capture_checkin.assert_has_calls(
+            [
+                mock.call(
+                    monitor_slug="abc123", status="in_progress", monitor_config=None
+                ),
+            ]
+        )
+
+        # Check for final checkin
+        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "abc123"
+        assert fake_capture_checkin.call_args[1]["status"] == "ok"
+        assert fake_capture_checkin.call_args[1]["duration"]
+        assert fake_capture_checkin.call_args[1]["check_in_id"]
+
+
+@pytest.mark.asyncio
+async def test_contextmanager_error(sentry_init):
+    sentry_init()
+
+    with mock.patch(
+        "sentry_sdk.crons.decorator.capture_checkin"
+    ) as fake_capture_checkin:
+        with pytest.raises(ZeroDivisionError):
+            result = await _break_world_contextmanager("Grace")
+
+        assert "result" not in locals()
+
+        # Check for initial checkin
+        fake_capture_checkin.assert_has_calls(
+            [
+                mock.call(
+                    monitor_slug="def456", status="in_progress", monitor_config=None
+                ),
+            ]
+        )
+
+        # Check for final checkin
+        assert fake_capture_checkin.call_args[1]["monitor_slug"] == "def456"
+        assert fake_capture_checkin.call_args[1]["status"] == "error"
+        assert fake_capture_checkin.call_args[1]["duration"]
+        assert fake_capture_checkin.call_args[1]["check_in_id"]
diff --git a/tests/integrations/aiohttp/__init__.py b/tests/integrations/aiohttp/__init__.py
index b4711aadba..0e1409fda0 100644
--- a/tests/integrations/aiohttp/__init__.py
+++ b/tests/integrations/aiohttp/__init__.py
@@ -1,3 +1,3 @@
 import pytest
 
-aiohttp = pytest.importorskip("aiohttp")
+pytest.importorskip("aiohttp")
diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py
index 5c590bcdfa..c9c7b67805 100644
--- a/tests/integrations/aiohttp/test_aiohttp.py
+++ b/tests/integrations/aiohttp/test_aiohttp.py
@@ -7,7 +7,9 @@
 from aiohttp.client import ServerDisconnectedError
 from aiohttp.web_request import Request
 
+from sentry_sdk import capture_message, start_transaction
 from sentry_sdk.integrations.aiohttp import AioHttpIntegration
+from tests.conftest import ApproxDict
 
 try:
     from unittest import mock  # python 3.3 and above
@@ -15,7 +17,8 @@
     import mock  # python < 3.3
 
 
-async def test_basic(sentry_init, aiohttp_client, loop, capture_events):
+@pytest.mark.asyncio
+async def test_basic(sentry_init, aiohttp_client, capture_events):
     sentry_init(integrations=[AioHttpIntegration()])
 
     async def hello(request):
@@ -52,10 +55,13 @@ async def hello(request):
         "Accept-Encoding": "gzip, deflate",
         "Host": host,
         "User-Agent": request["headers"]["User-Agent"],
+        "baggage": mock.ANY,
+        "sentry-trace": mock.ANY,
     }
 
 
-async def test_post_body_not_read(sentry_init, aiohttp_client, loop, capture_events):
+@pytest.mark.asyncio
+async def test_post_body_not_read(sentry_init, aiohttp_client, capture_events):
     from sentry_sdk.integrations.aiohttp import BODY_NOT_READ_MESSAGE
 
     sentry_init(integrations=[AioHttpIntegration()])
@@ -84,7 +90,8 @@ async def hello(request):
     assert request["data"] == BODY_NOT_READ_MESSAGE
 
 
-async def test_post_body_read(sentry_init, aiohttp_client, loop, capture_events):
+@pytest.mark.asyncio
+async def test_post_body_read(sentry_init, aiohttp_client, capture_events):
     sentry_init(integrations=[AioHttpIntegration()])
 
     body = {"some": "value"}
@@ -112,7 +119,8 @@ async def hello(request):
     assert request["data"] == json.dumps(body)
 
 
-async def test_403_not_captured(sentry_init, aiohttp_client, loop, capture_events):
+@pytest.mark.asyncio
+async def test_403_not_captured(sentry_init, aiohttp_client, capture_events):
     sentry_init(integrations=[AioHttpIntegration()])
 
     async def hello(request):
@@ -130,8 +138,9 @@ async def hello(request):
     assert not events
 
 
+@pytest.mark.asyncio
 async def test_cancelled_error_not_captured(
-    sentry_init, aiohttp_client, loop, capture_events
+    sentry_init, aiohttp_client, capture_events
 ):
     sentry_init(integrations=[AioHttpIntegration()])
 
@@ -152,7 +161,8 @@ async def hello(request):
     assert not events
 
 
-async def test_half_initialized(sentry_init, aiohttp_client, loop, capture_events):
+@pytest.mark.asyncio
+async def test_half_initialized(sentry_init, aiohttp_client, capture_events):
     sentry_init(integrations=[AioHttpIntegration()])
     sentry_init()
 
@@ -171,7 +181,8 @@ async def hello(request):
     assert events == []
 
 
-async def test_tracing(sentry_init, aiohttp_client, loop, capture_events):
+@pytest.mark.asyncio
+async def test_tracing(sentry_init, aiohttp_client, capture_events):
     sentry_init(integrations=[AioHttpIntegration()], traces_sample_rate=1.0)
 
     async def hello(request):
@@ -195,18 +206,32 @@ async def hello(request):
     )
 
 
+@pytest.mark.asyncio
 @pytest.mark.parametrize(
-    "transaction_style,expected_transaction",
+    "url,transaction_style,expected_transaction,expected_source",
     [
         (
+            "/message",
             "handler_name",
             "tests.integrations.aiohttp.test_aiohttp.test_transaction_style..hello",
+            "component",
+        ),
+        (
+            "/message",
+            "method_and_path_pattern",
+            "GET /{var}",
+            "route",
         ),
-        ("method_and_path_pattern", "GET /{var}"),
     ],
 )
 async def test_transaction_style(
-    sentry_init, aiohttp_client, capture_events, transaction_style, expected_transaction
+    sentry_init,
+    aiohttp_client,
+    capture_events,
+    url,
+    transaction_style,
+    expected_transaction,
+    expected_source,
 ):
     sentry_init(
         integrations=[AioHttpIntegration(transaction_style=transaction_style)],
@@ -222,15 +247,47 @@ async def hello(request):
     events = capture_events()
 
     client = await aiohttp_client(app)
-    resp = await client.get("/1")
+    resp = await client.get(url)
     assert resp.status == 200
 
     (event,) = events
 
     assert event["type"] == "transaction"
     assert event["transaction"] == expected_transaction
+    assert event["transaction_info"] == {"source": expected_source}
+
+
+@pytest.mark.tests_internal_exceptions
+@pytest.mark.asyncio
+async def test_tracing_unparseable_url(sentry_init, aiohttp_client, capture_events):
+    sentry_init(integrations=[AioHttpIntegration()], traces_sample_rate=1.0)
+
+    async def hello(request):
+        return web.Response(text="hello")
+
+    app = web.Application()
+    app.router.add_get("/", hello)
+
+    events = capture_events()
+
+    client = await aiohttp_client(app)
+    with mock.patch(
+        "sentry_sdk.integrations.aiohttp.parse_url", side_effect=ValueError
+    ):
+        resp = await client.get("/")
+
+    assert resp.status == 200
+
+    (event,) = events
+
+    assert event["type"] == "transaction"
+    assert (
+        event["transaction"]
+        == "tests.integrations.aiohttp.test_aiohttp.test_tracing_unparseable_url..hello"
+    )
 
 
+@pytest.mark.asyncio
 async def test_traces_sampler_gets_request_object_in_sampling_context(
     sentry_init,
     aiohttp_client,
@@ -261,3 +318,252 @@ async def kangaroo_handler(request):
             }
         )
     )
+
+
+@pytest.mark.asyncio
+async def test_has_trace_if_performance_enabled(
+    sentry_init, aiohttp_client, capture_events
+):
+    sentry_init(integrations=[AioHttpIntegration()], traces_sample_rate=1.0)
+
+    async def hello(request):
+        capture_message("It's a good day to try dividing by 0")
+        1 / 0
+
+    app = web.Application()
+    app.router.add_get("/", hello)
+
+    events = capture_events()
+
+    client = await aiohttp_client(app)
+    resp = await client.get("/")
+    assert resp.status == 500
+
+    msg_event, error_event, transaction_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert transaction_event["contexts"]["trace"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert (
+        error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+        == msg_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+@pytest.mark.asyncio
+async def test_has_trace_if_performance_disabled(
+    sentry_init, aiohttp_client, capture_events
+):
+    sentry_init(integrations=[AioHttpIntegration()])
+
+    async def hello(request):
+        capture_message("It's a good day to try dividing by 0")
+        1 / 0
+
+    app = web.Application()
+    app.router.add_get("/", hello)
+
+    events = capture_events()
+
+    client = await aiohttp_client(app)
+    resp = await client.get("/")
+    assert resp.status == 500
+
+    msg_event, error_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert (
+        error_event["contexts"]["trace"]["trace_id"]
+        == msg_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+@pytest.mark.asyncio
+async def test_trace_from_headers_if_performance_enabled(
+    sentry_init, aiohttp_client, capture_events
+):
+    sentry_init(integrations=[AioHttpIntegration()], traces_sample_rate=1.0)
+
+    async def hello(request):
+        capture_message("It's a good day to try dividing by 0")
+        1 / 0
+
+    app = web.Application()
+    app.router.add_get("/", hello)
+
+    events = capture_events()
+
+    # The aiohttp_client is instrumented, so it will generate the sentry-trace header and add it to the request.
+    # Get the sentry-trace header from the request so we can later compare it with the transaction events.
+    client = await aiohttp_client(app)
+    resp = await client.get("/")
+    sentry_trace_header = resp.request_info.headers.get("sentry-trace")
+    trace_id = sentry_trace_header.split("-")[0]
+
+    assert resp.status == 500
+
+    msg_event, error_event, transaction_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert transaction_event["contexts"]["trace"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id
+
+
+@pytest.mark.asyncio
+async def test_trace_from_headers_if_performance_disabled(
+    sentry_init, aiohttp_client, capture_events
+):
+    sentry_init(integrations=[AioHttpIntegration()])
+
+    async def hello(request):
+        capture_message("It's a good day to try dividing by 0")
+        1 / 0
+
+    app = web.Application()
+    app.router.add_get("/", hello)
+
+    events = capture_events()
+
+    # The aiohttp_client is instrumented, so it will generate the sentry-trace header and add it to the request.
+    # Get the sentry-trace header from the request so we can later compare it with the transaction events.
+    client = await aiohttp_client(app)
+    resp = await client.get("/")
+    sentry_trace_header = resp.request_info.headers.get("sentry-trace")
+    trace_id = sentry_trace_header.split("-")[0]
+
+    assert resp.status == 500
+
+    msg_event, error_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
+
+
+@pytest.mark.asyncio
+async def test_crumb_capture(
+    sentry_init, aiohttp_raw_server, aiohttp_client, loop, capture_events
+):
+    def before_breadcrumb(crumb, hint):
+        crumb["data"]["extra"] = "foo"
+        return crumb
+
+    sentry_init(
+        integrations=[AioHttpIntegration()], before_breadcrumb=before_breadcrumb
+    )
+
+    async def handler(request):
+        return web.Response(text="OK")
+
+    raw_server = await aiohttp_raw_server(handler)
+
+    with start_transaction():
+        events = capture_events()
+
+        client = await aiohttp_client(raw_server)
+        resp = await client.get("/")
+        assert resp.status == 200
+        capture_message("Testing!")
+
+        (event,) = events
+
+        crumb = event["breadcrumbs"]["values"][0]
+        assert crumb["type"] == "http"
+        assert crumb["category"] == "httplib"
+        assert crumb["data"] == ApproxDict(
+            {
+                "url": "http://127.0.0.1:{}/".format(raw_server.port),
+                "http.fragment": "",
+                "http.method": "GET",
+                "http.query": "",
+                "http.response.status_code": 200,
+                "reason": "OK",
+                "extra": "foo",
+            }
+        )
+
+
+@pytest.mark.asyncio
+async def test_outgoing_trace_headers(sentry_init, aiohttp_raw_server, aiohttp_client):
+    sentry_init(
+        integrations=[AioHttpIntegration()],
+        traces_sample_rate=1.0,
+    )
+
+    async def handler(request):
+        return web.Response(text="OK")
+
+    raw_server = await aiohttp_raw_server(handler)
+
+    with start_transaction(
+        name="/interactions/other-dogs/new-dog",
+        op="greeting.sniff",
+        # make the trace_id differ between transactions
+        trace_id="0123456789012345678901234567890",
+    ) as transaction:
+        client = await aiohttp_client(raw_server)
+        resp = await client.get("/")
+        request_span = transaction._span_recorder.spans[-1]
+
+        assert resp.request_info.headers[
+            "sentry-trace"
+        ] == "{trace_id}-{parent_span_id}-{sampled}".format(
+            trace_id=transaction.trace_id,
+            parent_span_id=request_span.span_id,
+            sampled=1,
+        )
+
+
+@pytest.mark.asyncio
+async def test_outgoing_trace_headers_append_to_baggage(
+    sentry_init, aiohttp_raw_server, aiohttp_client
+):
+    sentry_init(
+        integrations=[AioHttpIntegration()],
+        traces_sample_rate=1.0,
+        release="d08ebdb9309e1b004c6f52202de58a09c2268e42",
+    )
+
+    async def handler(request):
+        return web.Response(text="OK")
+
+    raw_server = await aiohttp_raw_server(handler)
+
+    with start_transaction(
+        name="/interactions/other-dogs/new-dog",
+        op="greeting.sniff",
+        trace_id="0123456789012345678901234567890",
+    ):
+        client = await aiohttp_client(raw_server)
+        resp = await client.get("/", headers={"bagGage": "custom=value"})
+
+        assert (
+            resp.request_info.headers["baggage"]
+            == "custom=value,sentry-trace_id=0123456789012345678901234567890,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true"
+        )
diff --git a/tests/integrations/ariadne/__init__.py b/tests/integrations/ariadne/__init__.py
new file mode 100644
index 0000000000..6d592b7a41
--- /dev/null
+++ b/tests/integrations/ariadne/__init__.py
@@ -0,0 +1,5 @@
+import pytest
+
+pytest.importorskip("ariadne")
+pytest.importorskip("fastapi")
+pytest.importorskip("flask")
diff --git a/tests/integrations/ariadne/test_ariadne.py b/tests/integrations/ariadne/test_ariadne.py
new file mode 100644
index 0000000000..2c3b086aa5
--- /dev/null
+++ b/tests/integrations/ariadne/test_ariadne.py
@@ -0,0 +1,276 @@
+from ariadne import gql, graphql_sync, ObjectType, QueryType, make_executable_schema
+from ariadne.asgi import GraphQL
+from fastapi import FastAPI
+from fastapi.testclient import TestClient
+from flask import Flask, request, jsonify
+
+from sentry_sdk.integrations.ariadne import AriadneIntegration
+from sentry_sdk.integrations.fastapi import FastApiIntegration
+from sentry_sdk.integrations.flask import FlaskIntegration
+from sentry_sdk.integrations.starlette import StarletteIntegration
+
+
+def schema_factory():
+    type_defs = gql(
+        """
+        type Query {
+            greeting(name: String): Greeting
+            error: String
+        }
+
+        type Greeting {
+            name: String
+        }
+    """
+    )
+
+    query = QueryType()
+    greeting = ObjectType("Greeting")
+
+    @query.field("greeting")
+    def resolve_greeting(*_, **kwargs):
+        name = kwargs.pop("name")
+        return {"name": name}
+
+    @query.field("error")
+    def resolve_error(obj, *_):
+        raise RuntimeError("resolver failed")
+
+    @greeting.field("name")
+    def resolve_name(obj, *_):
+        return "Hello, {}!".format(obj["name"])
+
+    return make_executable_schema(type_defs, query)
+
+
+def test_capture_request_and_response_if_send_pii_is_on_async(
+    sentry_init, capture_events
+):
+    sentry_init(
+        send_default_pii=True,
+        integrations=[
+            AriadneIntegration(),
+            FastApiIntegration(),
+            StarletteIntegration(),
+        ],
+    )
+    events = capture_events()
+
+    schema = schema_factory()
+
+    async_app = FastAPI()
+    async_app.mount("/graphql/", GraphQL(schema))
+
+    query = {"query": "query ErrorQuery {error}"}
+    client = TestClient(async_app)
+    client.post("/graphql", json=query)
+
+    assert len(events) == 1
+
+    (event,) = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
+    assert event["contexts"]["response"] == {
+        "data": {
+            "data": {"error": None},
+            "errors": [
+                {
+                    "locations": [{"column": 19, "line": 1}],
+                    "message": "resolver failed",
+                    "path": ["error"],
+                }
+            ],
+        }
+    }
+    assert event["request"]["api_target"] == "graphql"
+    assert event["request"]["data"] == query
+
+
+def test_capture_request_and_response_if_send_pii_is_on_sync(
+    sentry_init, capture_events
+):
+    sentry_init(
+        send_default_pii=True,
+        integrations=[AriadneIntegration(), FlaskIntegration()],
+    )
+    events = capture_events()
+
+    schema = schema_factory()
+
+    sync_app = Flask(__name__)
+
+    @sync_app.route("/graphql", methods=["POST"])
+    def graphql_server():
+        data = request.get_json()
+        success, result = graphql_sync(schema, data)
+        return jsonify(result), 200
+
+    query = {"query": "query ErrorQuery {error}"}
+    client = sync_app.test_client()
+    client.post("/graphql", json=query)
+
+    assert len(events) == 1
+
+    (event,) = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
+    assert event["contexts"]["response"] == {
+        "data": {
+            "data": {"error": None},
+            "errors": [
+                {
+                    "locations": [{"column": 19, "line": 1}],
+                    "message": "resolver failed",
+                    "path": ["error"],
+                }
+            ],
+        }
+    }
+    assert event["request"]["api_target"] == "graphql"
+    assert event["request"]["data"] == query
+
+
+def test_do_not_capture_request_and_response_if_send_pii_is_off_async(
+    sentry_init, capture_events
+):
+    sentry_init(
+        integrations=[
+            AriadneIntegration(),
+            FastApiIntegration(),
+            StarletteIntegration(),
+        ],
+    )
+    events = capture_events()
+
+    schema = schema_factory()
+
+    async_app = FastAPI()
+    async_app.mount("/graphql/", GraphQL(schema))
+
+    query = {"query": "query ErrorQuery {error}"}
+    client = TestClient(async_app)
+    client.post("/graphql", json=query)
+
+    assert len(events) == 1
+
+    (event,) = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
+    assert "data" not in event["request"]
+    assert "response" not in event["contexts"]
+
+
+def test_do_not_capture_request_and_response_if_send_pii_is_off_sync(
+    sentry_init, capture_events
+):
+    sentry_init(
+        integrations=[AriadneIntegration(), FlaskIntegration()],
+    )
+    events = capture_events()
+
+    schema = schema_factory()
+
+    sync_app = Flask(__name__)
+
+    @sync_app.route("/graphql", methods=["POST"])
+    def graphql_server():
+        data = request.get_json()
+        success, result = graphql_sync(schema, data)
+        return jsonify(result), 200
+
+    query = {"query": "query ErrorQuery {error}"}
+    client = sync_app.test_client()
+    client.post("/graphql", json=query)
+
+    assert len(events) == 1
+
+    (event,) = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
+    assert "data" not in event["request"]
+    assert "response" not in event["contexts"]
+
+
+def test_capture_validation_error(sentry_init, capture_events):
+    sentry_init(
+        send_default_pii=True,
+        integrations=[
+            AriadneIntegration(),
+            FastApiIntegration(),
+            StarletteIntegration(),
+        ],
+    )
+    events = capture_events()
+
+    schema = schema_factory()
+
+    async_app = FastAPI()
+    async_app.mount("/graphql/", GraphQL(schema))
+
+    query = {"query": "query ErrorQuery {doesnt_exist}"}
+    client = TestClient(async_app)
+    client.post("/graphql", json=query)
+
+    assert len(events) == 1
+
+    (event,) = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "ariadne"
+    assert event["contexts"]["response"] == {
+        "data": {
+            "errors": [
+                {
+                    "locations": [{"column": 19, "line": 1}],
+                    "message": "Cannot query field 'doesnt_exist' on type 'Query'.",
+                }
+            ]
+        }
+    }
+    assert event["request"]["api_target"] == "graphql"
+    assert event["request"]["data"] == query
+
+
+def test_no_event_if_no_errors_async(sentry_init, capture_events):
+    sentry_init(
+        integrations=[
+            AriadneIntegration(),
+            FastApiIntegration(),
+            StarletteIntegration(),
+        ],
+    )
+    events = capture_events()
+
+    schema = schema_factory()
+
+    async_app = FastAPI()
+    async_app.mount("/graphql/", GraphQL(schema))
+
+    query = {
+        "query": "query GreetingQuery($name: String) { greeting(name: $name) {name} }",
+        "variables": {"name": "some name"},
+    }
+    client = TestClient(async_app)
+    client.post("/graphql", json=query)
+
+    assert len(events) == 0
+
+
+def test_no_event_if_no_errors_sync(sentry_init, capture_events):
+    sentry_init(
+        integrations=[AriadneIntegration(), FlaskIntegration()],
+    )
+    events = capture_events()
+
+    schema = schema_factory()
+
+    sync_app = Flask(__name__)
+
+    @sync_app.route("/graphql", methods=["POST"])
+    def graphql_server():
+        data = request.get_json()
+        success, result = graphql_sync(schema, data)
+        return jsonify(result), 200
+
+    query = {
+        "query": "query GreetingQuery($name: String) { greeting(name: $name) {name} }",
+        "variables": {"name": "some name"},
+    }
+    client = sync_app.test_client()
+    client.post("/graphql", json=query)
+
+    assert len(events) == 0
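
A note on the behavior exercised above: the GraphQL request and response bodies only reach the event when PII capture is on. A minimal setup sketch for that mode (the DSN is a placeholder, and the integration's import path is assumed to match the imports at the top of this test file):

```python
# Minimal sketch, not the tests' fixture code: with send_default_pii=True
# the Ariadne integration attaches the GraphQL request body and the error
# response to events; without it, both are omitted.
import sentry_sdk
from sentry_sdk.integrations.ariadne import AriadneIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    send_default_pii=True,
    integrations=[AriadneIntegration()],
)
```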
diff --git a/tests/integrations/arq/__init__.py b/tests/integrations/arq/__init__.py
new file mode 100644
index 0000000000..f0b4712255
--- /dev/null
+++ b/tests/integrations/arq/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("arq")
diff --git a/tests/integrations/arq/test_arq.py b/tests/integrations/arq/test_arq.py
new file mode 100644
index 0000000000..4c4bc95163
--- /dev/null
+++ b/tests/integrations/arq/test_arq.py
@@ -0,0 +1,254 @@
+import asyncio
+import pytest
+
+from sentry_sdk import start_transaction, Hub
+from sentry_sdk.integrations.arq import ArqIntegration
+
+import arq.worker
+from arq import cron
+from arq.connections import ArqRedis
+from arq.jobs import Job
+from arq.utils import timestamp_ms
+
+from fakeredis.aioredis import FakeRedis
+
+
+def async_partial(async_fn, *args, **kwargs):
+    # asyncio.iscoroutinefunction (used in the integration code) does not
+    # detect async functions wrapped in functools.partial objects on
+    # Python < 3.8. This partial implementation returns a coroutine instead.
+    async def wrapped(ctx):
+        return await async_fn(ctx, *args, **kwargs)
+
+    return wrapped
+
+
+@pytest.fixture(autouse=True)
+def patch_fakeredis_info_command():
+    from fakeredis._fakesocket import FakeSocket
+
+    if not hasattr(FakeSocket, "info"):
+        from fakeredis._commands import command
+        from fakeredis._helpers import SimpleString
+
+        @command((SimpleString,), name="info")
+        def info(self, section):
+            return section
+
+        FakeSocket.info = info
+
+
+@pytest.fixture
+def init_arq(sentry_init):
+    def inner(
+        cls_functions=None,
+        cls_cron_jobs=None,
+        kw_functions=None,
+        kw_cron_jobs=None,
+        allow_abort_jobs_=False,
+    ):
+        cls_functions = cls_functions or []
+        cls_cron_jobs = cls_cron_jobs or []
+
+        kwargs = {}
+        if kw_functions is not None:
+            kwargs["functions"] = kw_functions
+        if kw_cron_jobs is not None:
+            kwargs["cron_jobs"] = kw_cron_jobs
+
+        sentry_init(
+            integrations=[ArqIntegration()],
+            traces_sample_rate=1.0,
+            send_default_pii=True,
+            debug=True,
+        )
+
+        server = FakeRedis()
+        pool = ArqRedis(pool_or_conn=server.connection_pool)
+
+        class WorkerSettings:
+            functions = cls_functions
+            cron_jobs = cls_cron_jobs
+            redis_pool = pool
+            allow_abort_jobs = allow_abort_jobs_
+
+        if not WorkerSettings.functions:
+            del WorkerSettings.functions
+        if not WorkerSettings.cron_jobs:
+            del WorkerSettings.cron_jobs
+
+        worker = arq.worker.create_worker(WorkerSettings, **kwargs)
+
+        return pool, worker
+
+    return inner
+
+
+@pytest.mark.asyncio
+async def test_job_result(init_arq):
+    async def increase(ctx, num):
+        return num + 1
+
+    increase.__qualname__ = increase.__name__
+
+    pool, worker = init_arq([increase])
+
+    job = await pool.enqueue_job("increase", 3)
+
+    assert isinstance(job, Job)
+
+    await worker.run_job(job.job_id, timestamp_ms())
+    result = await job.result()
+    job_result = await job.result_info()
+
+    assert result == 4
+    assert job_result.result == 4
+
+
+@pytest.mark.asyncio
+async def test_job_retry(capture_events, init_arq):
+    async def retry_job(ctx):
+        if ctx["job_try"] < 2:
+            raise arq.worker.Retry
+
+    retry_job.__qualname__ = retry_job.__name__
+
+    pool, worker = init_arq([retry_job])
+
+    job = await pool.enqueue_job("retry_job")
+
+    events = capture_events()
+
+    await worker.run_job(job.job_id, timestamp_ms())
+
+    event = events.pop(0)
+    assert event["contexts"]["trace"]["status"] == "aborted"
+    assert event["transaction"] == "retry_job"
+    assert event["tags"]["arq_task_id"] == job.job_id
+    assert event["extra"]["arq-job"]["retry"] == 1
+
+    await worker.run_job(job.job_id, timestamp_ms())
+
+    event = events.pop(0)
+    assert event["contexts"]["trace"]["status"] == "ok"
+    assert event["transaction"] == "retry_job"
+    assert event["tags"]["arq_task_id"] == job.job_id
+    assert event["extra"]["arq-job"]["retry"] == 2
+
+
+@pytest.mark.parametrize(
+    "source", [("cls_functions", "cls_cron_jobs"), ("kw_functions", "kw_cron_jobs")]
+)
+@pytest.mark.parametrize("job_fails", [True, False], ids=["error", "success"])
+@pytest.mark.asyncio
+async def test_job_transaction(capture_events, init_arq, source, job_fails):
+    async def division(_, a, b=0):
+        return a / b
+
+    division.__qualname__ = division.__name__
+
+    cron_func = async_partial(division, a=1, b=int(not job_fails))
+    cron_func.__qualname__ = division.__name__
+
+    cron_job = cron(cron_func, minute=0, run_at_startup=True)
+
+    functions_key, cron_jobs_key = source
+    pool, worker = init_arq(**{functions_key: [division], cron_jobs_key: [cron_job]})
+
+    events = capture_events()
+
+    job = await pool.enqueue_job("division", 1, b=int(not job_fails))
+    await worker.run_job(job.job_id, timestamp_ms())
+
+    loop = asyncio.get_event_loop()
+    task = loop.create_task(worker.async_run())
+    await asyncio.sleep(1)
+
+    task.cancel()
+
+    await worker.close()
+
+    if job_fails:
+        error_func_event = events.pop(0)
+        error_cron_event = events.pop(1)
+
+        assert error_func_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
+        assert error_func_event["exception"]["values"][0]["mechanism"]["type"] == "arq"
+
+        func_extra = error_func_event["extra"]["arq-job"]
+        assert func_extra["task"] == "division"
+
+        assert error_cron_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
+        assert error_cron_event["exception"]["values"][0]["mechanism"]["type"] == "arq"
+
+        cron_extra = error_cron_event["extra"]["arq-job"]
+        assert cron_extra["task"] == "cron:division"
+
+    [func_event, cron_event] = events
+
+    assert func_event["type"] == "transaction"
+    assert func_event["transaction"] == "division"
+    assert func_event["transaction_info"] == {"source": "task"}
+
+    assert "arq_task_id" in func_event["tags"]
+    assert "arq_task_retry" in func_event["tags"]
+
+    func_extra = func_event["extra"]["arq-job"]
+
+    assert func_extra["task"] == "division"
+    assert func_extra["kwargs"] == {"b": int(not job_fails)}
+    assert func_extra["retry"] == 1
+
+    assert cron_event["type"] == "transaction"
+    assert cron_event["transaction"] == "cron:division"
+    assert cron_event["transaction_info"] == {"source": "task"}
+
+    assert "arq_task_id" in cron_event["tags"]
+    assert "arq_task_retry" in cron_event["tags"]
+
+    cron_extra = cron_event["extra"]["arq-job"]
+
+    assert cron_extra["task"] == "cron:division"
+    assert cron_extra["kwargs"] == {}
+    assert cron_extra["retry"] == 1
+
+
+@pytest.mark.parametrize("source", ["cls_functions", "kw_functions"])
+@pytest.mark.asyncio
+async def test_enqueue_job(capture_events, init_arq, source):
+    async def dummy_job(_):
+        pass
+
+    pool, _ = init_arq(**{source: [dummy_job]})
+
+    events = capture_events()
+
+    with start_transaction() as transaction:
+        await pool.enqueue_job("dummy_job")
+
+    (event,) = events
+
+    assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
+    assert event["contexts"]["trace"]["span_id"] == transaction.span_id
+
+    assert len(event["spans"])
+    assert event["spans"][0]["op"] == "queue.submit.arq"
+    assert event["spans"][0]["description"] == "dummy_job"
+
+
+@pytest.mark.asyncio
+async def test_execute_job_without_integration(init_arq):
+    async def dummy_job(_ctx):
+        pass
+
+    dummy_job.__qualname__ = dummy_job.__name__
+
+    pool, worker = init_arq([dummy_job])
+    # remove the integration to trigger the edge case
+    Hub.current.client.integrations.pop("arq")
+
+    job = await pool.enqueue_job("dummy_job")
+
+    await worker.run_job(job.job_id, timestamp_ms())
+
+    assert await job.result() is None
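
As an aside, the stdlib quirk that motivates the `async_partial` helper at the top of this file can be shown without arq at all; a small standalone sketch:

```python
# Demonstrates the limitation noted in the async_partial comment above:
# before Python 3.8, asyncio.iscoroutinefunction did not look through
# functools.partial, so a partially-applied coroutine function was not
# recognized as a coroutine function.
import asyncio
import functools

async def divide(ctx, a, b=1):
    return a / b

partial_job = functools.partial(divide, a=1, b=1)

# False on Python < 3.8, True on 3.8 and later.
print(asyncio.iscoroutinefunction(partial_job))
```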
diff --git a/tests/integrations/asgi/__init__.py b/tests/integrations/asgi/__init__.py
index c89ddf99a8..ecc2bcfe95 100644
--- a/tests/integrations/asgi/__init__.py
+++ b/tests/integrations/asgi/__init__.py
@@ -1,3 +1,5 @@
 import pytest
 
-pytest.importorskip("starlette")
+pytest.importorskip("asyncio")
+pytest.importorskip("pytest_asyncio")
+pytest.importorskip("async_asgi_testclient")
diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py
index 5383b1a308..d60991e99e 100644
--- a/tests/integrations/asgi/test_asgi.py
+++ b/tests/integrations/asgi/test_asgi.py
@@ -1,321 +1,393 @@
-from collections import Counter
 import sys
 
+from collections import Counter
+
 import pytest
-from sentry_sdk import Hub, capture_message, last_event_id
 import sentry_sdk
-from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
-from starlette.applications import Starlette
-from starlette.responses import PlainTextResponse
-from starlette.testclient import TestClient
-from starlette.websockets import WebSocket
+from sentry_sdk import capture_message
+from sentry_sdk.integrations._asgi_common import _get_ip, _get_headers
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware, _looks_like_asgi3
 
-try:
-    from unittest import mock  # python 3.3 and above
-except ImportError:
-    import mock  # python < 3.3
+from async_asgi_testclient import TestClient
 
 
-@pytest.fixture
-def app():
-    app = Starlette()
-
-    @app.route("/sync-message")
-    def hi(request):
-        capture_message("hi", level="error")
-        return PlainTextResponse("ok")
+minimum_python_36 = pytest.mark.skipif(
+    sys.version_info < (3, 6), reason="ASGI is only supported in Python >= 3.6"
+)
 
-    @app.route("/async-message")
-    async def hi2(request):
-        capture_message("hi", level="error")
-        return PlainTextResponse("ok")
 
-    app.add_middleware(SentryAsgiMiddleware)
+@pytest.fixture
+def asgi3_app():
+    async def app(scope, receive, send):
+        if scope["type"] == "lifespan":
+            while True:
+                message = await receive()
+                if message["type"] == "lifespan.startup":
+                    await send({"type": "lifespan.startup.complete"})
+                elif message["type"] == "lifespan.shutdown":
+                    await send({"type": "lifespan.shutdown.complete"})
+                    return
+        elif (
+            scope["type"] == "http"
+            and "route" in scope
+            and scope["route"] == "/trigger/error"
+        ):
+            1 / 0
 
-    return app
+        await send(
+            {
+                "type": "http.response.start",
+                "status": 200,
+                "headers": [
+                    [b"content-type", b"text/plain"],
+                ],
+            }
+        )
 
+        await send(
+            {
+                "type": "http.response.body",
+                "body": b"Hello, world!",
+            }
+        )
 
-@pytest.mark.skipif(sys.version_info < (3, 7), reason="requires python3.7 or higher")
-def test_sync_request_data(sentry_init, app, capture_events):
-    sentry_init(send_default_pii=True)
-    events = capture_events()
+    return app
 
-    client = TestClient(app)
-    response = client.get("/sync-message?foo=bar", headers={"Foo": "ä"})
-
-    assert response.status_code == 200
-
-    (event,) = events
-    assert event["transaction"] == "tests.integrations.asgi.test_asgi.app..hi"
-    assert event["request"]["env"] == {"REMOTE_ADDR": "testclient"}
-    assert set(event["request"]["headers"]) == {
-        "accept",
-        "accept-encoding",
-        "connection",
-        "host",
-        "user-agent",
-        "foo",
-    }
-    assert event["request"]["query_string"] == "foo=bar"
-    assert event["request"]["url"].endswith("/sync-message")
-    assert event["request"]["method"] == "GET"
 
-    # Assert that state is not leaked
-    events.clear()
-    capture_message("foo")
-    (event,) = events
+@pytest.fixture
+def asgi3_app_with_error():
+    async def send_with_error(event):
+        1 / 0
 
-    assert "request" not in event
-    assert "transaction" not in event
+    async def app(scope, receive, send):
+        if scope["type"] == "lifespan":
+            while True:
+                message = await receive()
+                if message["type"] == "lifespan.startup":
+                    ...  # Do some startup here!
+                    await send({"type": "lifespan.startup.complete"})
+                elif message["type"] == "lifespan.shutdown":
+                    ...  # Do some shutdown here!
+                    await send({"type": "lifespan.shutdown.complete"})
+                    return
+        else:
+            await send_with_error(
+                {
+                    "type": "http.response.start",
+                    "status": 200,
+                    "headers": [
+                        [b"content-type", b"text/plain"],
+                    ],
+                }
+            )
+            await send_with_error(
+                {
+                    "type": "http.response.body",
+                    "body": b"Hello, world!",
+                }
+            )
 
+    return app
 
-def test_async_request_data(sentry_init, app, capture_events):
-    sentry_init(send_default_pii=True)
-    events = capture_events()
 
-    client = TestClient(app)
-    response = client.get("/async-message?foo=bar")
+@pytest.fixture
+def asgi3_app_with_error_and_msg():
+    async def app(scope, receive, send):
+        await send(
+            {
+                "type": "http.response.start",
+                "status": 200,
+                "headers": [
+                    [b"content-type", b"text/plain"],
+                ],
+            }
+        )
 
-    assert response.status_code == 200
+        capture_message("Let's try dividing by 0")
+        1 / 0
 
-    (event,) = events
-    assert event["transaction"] == "tests.integrations.asgi.test_asgi.app..hi2"
-    assert event["request"]["env"] == {"REMOTE_ADDR": "testclient"}
-    assert set(event["request"]["headers"]) == {
-        "accept",
-        "accept-encoding",
-        "connection",
-        "host",
-        "user-agent",
-    }
-    assert event["request"]["query_string"] == "foo=bar"
-    assert event["request"]["url"].endswith("/async-message")
-    assert event["request"]["method"] == "GET"
+        await send(
+            {
+                "type": "http.response.body",
+                "body": b"Hello, world!",
+            }
+        )
 
-    # Assert that state is not leaked
-    events.clear()
-    capture_message("foo")
-    (event,) = events
+    return app
 
-    assert "request" not in event
-    assert "transaction" not in event
 
+@pytest.fixture
+def asgi3_ws_app():
+    def message():
+        capture_message("Some message to the world!")
+        raise ValueError("Oh no")
 
-def test_errors(sentry_init, app, capture_events):
-    sentry_init(send_default_pii=True)
-    events = capture_events()
+    async def app(scope, receive, send):
+        await send(
+            {
+                "type": "websocket.send",
+                "text": message(),
+            }
+        )
 
-    @app.route("/error")
-    def myerror(request):
-        raise ValueError("oh no")
+    return app
 
-    client = TestClient(app, raise_server_exceptions=False)
-    response = client.get("/error")
 
-    assert response.status_code == 500
+@minimum_python_36
+def test_invalid_transaction_style(asgi3_app):
+    with pytest.raises(ValueError) as exp:
+        SentryAsgiMiddleware(asgi3_app, transaction_style="URL")
 
-    (event,) = events
     assert (
-        event["transaction"]
-        == "tests.integrations.asgi.test_asgi.test_errors..myerror"
+        str(exp.value)
+        == "Invalid value for transaction_style: URL (must be in ('endpoint', 'url'))"
     )
-    (exception,) = event["exception"]["values"]
 
-    assert exception["type"] == "ValueError"
-    assert exception["value"] == "oh no"
-    assert any(
-        frame["filename"].endswith("tests/integrations/asgi/test_asgi.py")
-        for frame in exception["stacktrace"]["frames"]
-    )
 
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_capture_transaction(
+    sentry_init,
+    asgi3_app,
+    capture_events,
+):
+    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
+    app = SentryAsgiMiddleware(asgi3_app)
+
+    async with TestClient(app) as client:
+        events = capture_events()
+        await client.get("/some_url?somevalue=123")
+
+    (transaction_event,) = events
+
+    assert transaction_event["type"] == "transaction"
+    assert transaction_event["transaction"] == "/some_url"
+    assert transaction_event["transaction_info"] == {"source": "url"}
+    assert transaction_event["contexts"]["trace"]["op"] == "http.server"
+    assert transaction_event["request"] == {
+        "headers": {
+            "host": "localhost",
+            "remote-addr": "127.0.0.1",
+            "user-agent": "ASGI-Test-Client",
+        },
+        "method": "GET",
+        "query_string": "somevalue=123",
+        "url": "http://localhost/some_url",
+    }
 
-def test_websocket(sentry_init, capture_events, request):
-    sentry_init(debug=True, send_default_pii=True)
 
-    # Bind client to main thread because context propagation for the websocket
-    # client does not work.
-    Hub.main.bind_client(Hub.current.client)
-    request.addfinalizer(lambda: Hub.main.bind_client(None))
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_capture_transaction_with_error(
+    sentry_init,
+    asgi3_app_with_error,
+    capture_events,
+    DictionaryContaining,  # noqa: N803
+):
+    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
+    app = SentryAsgiMiddleware(asgi3_app_with_error)
 
     events = capture_events()
+    with pytest.raises(ZeroDivisionError):
+        async with TestClient(app) as client:
+            await client.get("/some_url")
+
+    (
+        error_event,
+        transaction_event,
+    ) = events
+
+    assert error_event["transaction"] == "/some_url"
+    assert error_event["transaction_info"] == {"source": "url"}
+    assert error_event["contexts"]["trace"]["op"] == "http.server"
+    assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
+    assert error_event["exception"]["values"][0]["value"] == "division by zero"
+    assert error_event["exception"]["values"][0]["mechanism"]["handled"] is False
+    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "asgi"
+
+    assert transaction_event["type"] == "transaction"
+    assert transaction_event["contexts"]["trace"] == DictionaryContaining(
+        error_event["contexts"]["trace"]
+    )
+    assert transaction_event["contexts"]["trace"]["status"] == "internal_error"
+    assert transaction_event["transaction"] == error_event["transaction"]
+    assert transaction_event["request"] == error_event["request"]
 
-    from starlette.testclient import TestClient
-
-    def message():
-        capture_message("hi")
-        raise ValueError("oh no")
 
-    async def app(scope, receive, send):
-        assert scope["type"] == "websocket"
-        websocket = WebSocket(scope, receive=receive, send=send)
-        await websocket.accept()
-        await websocket.send_text(message())
-        await websocket.close()
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_has_trace_if_performance_enabled(
+    sentry_init,
+    asgi3_app_with_error_and_msg,
+    capture_events,
+):
+    sentry_init(traces_sample_rate=1.0)
+    app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)
 
-    app = SentryAsgiMiddleware(app)
+    with pytest.raises(ZeroDivisionError):
+        async with TestClient(app) as client:
+            events = capture_events()
+            await client.get("/")
 
-    client = TestClient(app)
-    with client.websocket_connect("/") as websocket:
-        with pytest.raises(ValueError):
-            websocket.receive_text()
+    msg_event, error_event, transaction_event = events
 
-    msg_event, error_event = events
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
 
-    assert msg_event["message"] == "hi"
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
 
-    (exc,) = error_event["exception"]["values"]
-    assert exc["type"] == "ValueError"
-    assert exc["value"] == "oh no"
+    assert transaction_event["contexts"]["trace"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
 
     assert (
-        msg_event["request"]
-        == error_event["request"]
-        == {
-            "env": {"REMOTE_ADDR": "testclient"},
-            "headers": {
-                "accept": "*/*",
-                "accept-encoding": "gzip, deflate",
-                "connection": "upgrade",
-                "host": "testserver",
-                "sec-websocket-key": "testserver==",
-                "sec-websocket-version": "13",
-                "user-agent": "testclient",
-            },
-            "method": None,
-            "query_string": None,
-            "url": "ws://testserver/",
-        }
+        error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+        == msg_event["contexts"]["trace"]["trace_id"]
     )
 
 
-def test_starlette_last_event_id(app, sentry_init, capture_events, request):
-    sentry_init(send_default_pii=True)
-    events = capture_events()
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_has_trace_if_performance_disabled(
+    sentry_init,
+    asgi3_app_with_error_and_msg,
+    capture_events,
+):
+    sentry_init()
+    app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)
 
-    @app.route("/handlederror")
-    def handlederror(request):
-        raise ValueError("oh no")
+    with pytest.raises(ZeroDivisionError):
+        async with TestClient(app) as client:
+            events = capture_events()
+            await client.get("/")
 
-    @app.exception_handler(500)
-    def handler(*args, **kwargs):
-        return PlainTextResponse(last_event_id(), status_code=500)
+    msg_event, error_event = events
 
-    client = TestClient(SentryAsgiMiddleware(app), raise_server_exceptions=False)
-    response = client.get("/handlederror")
-    assert response.status_code == 500
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
 
-    (event,) = events
-    assert response.content.strip().decode("ascii") == event["event_id"]
-    (exception,) = event["exception"]["values"]
-    assert exception["type"] == "ValueError"
-    assert exception["value"] == "oh no"
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
 
 
-def test_transaction(app, sentry_init, capture_events):
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_trace_from_headers_if_performance_enabled(
+    sentry_init,
+    asgi3_app_with_error_and_msg,
+    capture_events,
+):
     sentry_init(traces_sample_rate=1.0)
-    events = capture_events()
-
-    @app.route("/tricks/kangaroo")
-    def kangaroo_handler(request):
-        return PlainTextResponse("dogs are great")
-
-    client = TestClient(app)
-    client.get("/tricks/kangaroo")
-
-    event = events[0]
-    assert event["type"] == "transaction"
-    assert (
-        event["transaction"]
-        == "tests.integrations.asgi.test_asgi.test_transaction..kangaroo_handler"
-    )
+    app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)
 
+    trace_id = "582b43a4192642f0b136d5159a501701"
+    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
 
-def test_traces_sampler_gets_scope_in_sampling_context(
-    app, sentry_init, DictionaryContaining  # noqa: N803
-):
-    traces_sampler = mock.Mock()
-    sentry_init(traces_sampler=traces_sampler)
+    with pytest.raises(ZeroDivisionError):
+        async with TestClient(app) as client:
+            events = capture_events()
+            await client.get("/", headers={"sentry-trace": sentry_trace_header})
 
-    @app.route("/tricks/kangaroo")
-    def kangaroo_handler(request):
-        return PlainTextResponse("dogs are great")
+    msg_event, error_event, transaction_event = events
 
-    client = TestClient(app)
-    client.get("/tricks/kangaroo")
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
 
-    traces_sampler.assert_any_call(
-        DictionaryContaining(
-            {
-                # starlette just uses a dictionary to hold the scope
-                "asgi_scope": DictionaryContaining(
-                    {"method": "GET", "path": "/tricks/kangaroo"}
-                )
-            }
-        )
-    )
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
 
+    assert transaction_event["contexts"]["trace"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
 
-def test_x_forwarded_for(sentry_init, app, capture_events):
-    sentry_init(send_default_pii=True)
-    events = capture_events()
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id
 
-    client = TestClient(app)
-    response = client.get("/sync-message", headers={"X-Forwarded-For": "testproxy"})
 
-    assert response.status_code == 200
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_trace_from_headers_if_performance_disabled(
+    sentry_init,
+    asgi3_app_with_error_and_msg,
+    capture_events,
+):
+    sentry_init()
+    app = SentryAsgiMiddleware(asgi3_app_with_error_and_msg)
 
-    (event,) = events
-    assert event["request"]["env"] == {"REMOTE_ADDR": "testproxy"}
+    trace_id = "582b43a4192642f0b136d5159a501701"
+    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
 
+    with pytest.raises(ZeroDivisionError):
+        async with TestClient(app) as client:
+            events = capture_events()
+            await client.get("/", headers={"sentry-trace": sentry_trace_header})
 
-def test_x_forwarded_for_multiple_entries(sentry_init, app, capture_events):
-    sentry_init(send_default_pii=True)
-    events = capture_events()
+    msg_event, error_event = events
 
-    client = TestClient(app)
-    response = client.get(
-        "/sync-message", headers={"X-Forwarded-For": "testproxy1,testproxy2,testproxy3"}
-    )
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
 
-    assert response.status_code == 200
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
 
-    (event,) = events
-    assert event["request"]["env"] == {"REMOTE_ADDR": "testproxy1"}
 
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_websocket(sentry_init, asgi3_ws_app, capture_events, request):
+    sentry_init(debug=True, send_default_pii=True)
 
-def test_x_real_ip(sentry_init, app, capture_events):
-    sentry_init(send_default_pii=True)
     events = capture_events()
 
-    client = TestClient(app)
-    response = client.get("/sync-message", headers={"X-Real-IP": "1.2.3.4"})
+    asgi3_ws_app = SentryAsgiMiddleware(asgi3_ws_app)
 
-    assert response.status_code == 200
+    scope = {
+        "type": "websocket",
+        "endpoint": asgi3_app,
+        "client": ("127.0.0.1", 60457),
+        "route": "some_url",
+        "headers": [
+            ("accept", "*/*"),
+        ],
+    }
 
-    (event,) = events
-    assert event["request"]["env"] == {"REMOTE_ADDR": "1.2.3.4"}
+    with pytest.raises(ValueError):
+        async with TestClient(asgi3_ws_app, scope=scope) as client:
+            async with client.websocket_connect("/ws") as ws:
+                await ws.receive_text()
 
+    msg_event, error_event = events
 
-def test_auto_session_tracking_with_aggregates(app, sentry_init, capture_envelopes):
-    """
-    Test for correct session aggregates in auto session tracking.
-    """
+    assert msg_event["message"] == "Some message to the world!"
 
-    @app.route("/dogs/are/great/")
-    @app.route("/trigger/an/error/")
-    def great_dogs_handler(request):
-        if request["path"] != "/dogs/are/great/":
-            1 / 0
-        return PlainTextResponse("dogs are great")
+    (exc,) = error_event["exception"]["values"]
+    assert exc["type"] == "ValueError"
+    assert exc["value"] == "Oh no"
 
-    sentry_init(traces_sample_rate=1.0)
-    envelopes = capture_envelopes()
 
-    app = SentryAsgiMiddleware(app)
-    client = TestClient(app, raise_server_exceptions=False)
-    client.get("/dogs/are/great/")
-    client.get("/dogs/are/great/")
-    client.get("/trigger/an/error/")
+@minimum_python_36
+@pytest.mark.asyncio
+async def test_auto_session_tracking_with_aggregates(
+    sentry_init, asgi3_app, capture_envelopes
+):
+    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
+    app = SentryAsgiMiddleware(asgi3_app)
+
+    scope = {
+        "endpoint": asgi3_app,
+        "client": ("127.0.0.1", 60457),
+    }
+    with pytest.raises(ZeroDivisionError):
+        envelopes = capture_envelopes()
+        async with TestClient(app, scope=scope) as client:
+            scope["route"] = "/some/fine/url"
+            await client.get("/some/fine/url")
+            scope["route"] = "/some/fine/url"
+            await client.get("/some/fine/url")
+            scope["route"] = "/trigger/error"
+            await client.get("/trigger/error")
 
     sentry_sdk.flush()
 
@@ -332,3 +404,304 @@ def great_dogs_handler(request):
     assert session_aggregates[0]["exited"] == 2
     assert session_aggregates[0]["crashed"] == 1
     assert len(session_aggregates) == 1
+
+
+@minimum_python_36
+@pytest.mark.parametrize(
+    "url,transaction_style,expected_transaction,expected_source",
+    [
+        (
+            "/message",
+            "url",
+            "generic ASGI request",
+            "route",
+        ),
+        (
+            "/message",
+            "endpoint",
+            "tests.integrations.asgi.test_asgi.asgi3_app..app",
+            "component",
+        ),
+    ],
+)
+@pytest.mark.asyncio
+async def test_transaction_style(
+    sentry_init,
+    asgi3_app,
+    capture_events,
+    url,
+    transaction_style,
+    expected_transaction,
+    expected_source,
+):
+    sentry_init(send_default_pii=True, traces_sample_rate=1.0)
+    app = SentryAsgiMiddleware(asgi3_app, transaction_style=transaction_style)
+
+    scope = {
+        "endpoint": asgi3_app,
+        "route": url,
+        "client": ("127.0.0.1", 60457),
+    }
+
+    async with TestClient(app, scope=scope) as client:
+        events = capture_events()
+        await client.get(url)
+
+    (transaction_event,) = events
+
+    assert transaction_event["transaction"] == expected_transaction
+    assert transaction_event["transaction_info"] == {"source": expected_source}
+
+
+def mock_asgi2_app():
+    pass
+
+
+class MockAsgi2App:
+    def __call__():
+        pass
+
+
+class MockAsgi3App(MockAsgi2App):
+    def __await__():
+        pass
+
+    async def __call__():
+        pass
+
+
+@minimum_python_36
+def test_looks_like_asgi3(asgi3_app):
+    # branch: inspect.isclass(app)
+    assert _looks_like_asgi3(MockAsgi3App)
+    assert not _looks_like_asgi3(MockAsgi2App)
+
+    # branch: inspect.isfunction(app)
+    assert _looks_like_asgi3(asgi3_app)
+    assert not _looks_like_asgi3(mock_asgi2_app)
+
+    # branch: else
+    asgi3 = MockAsgi3App()
+    assert _looks_like_asgi3(asgi3)
+    asgi2 = MockAsgi2App()
+    assert not _looks_like_asgi3(asgi2)
+
+
+@minimum_python_36
+def test_get_ip_x_forwarded_for():
+    headers = [
+        (b"x-forwarded-for", b"8.8.8.8"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    ip = _get_ip(scope)
+    assert ip == "8.8.8.8"
+
+    # x-forwarded-for overrides x-real-ip
+    headers = [
+        (b"x-forwarded-for", b"8.8.8.8"),
+        (b"x-real-ip", b"10.10.10.10"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    ip = _get_ip(scope)
+    assert ip == "8.8.8.8"
+
+    # when multiple x-forwarded-for headers are present, the first one is taken
+    headers = [
+        (b"x-forwarded-for", b"5.5.5.5"),
+        (b"x-forwarded-for", b"6.6.6.6"),
+        (b"x-forwarded-for", b"7.7.7.7"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    ip = _get_ip(scope)
+    assert ip == "5.5.5.5"
+
+
+@minimum_python_36
+def test_get_ip_x_real_ip():
+    headers = [
+        (b"x-real-ip", b"10.10.10.10"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    ip = _get_ip(scope)
+    assert ip == "10.10.10.10"
+
+    # x-forwarded-for overrides x-real-ip
+    headers = [
+        (b"x-forwarded-for", b"8.8.8.8"),
+        (b"x-real-ip", b"10.10.10.10"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    ip = _get_ip(scope)
+    assert ip == "8.8.8.8"
+
+
+@minimum_python_36
+def test_get_ip():
+    # if no headers are provided, the IP is taken from the client.
+    headers = []
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    ip = _get_ip(scope)
+    assert ip == "127.0.0.1"
+
+    # the x-forwarded-for header overrides the IP from the client
+    headers = [
+        (b"x-forwarded-for", b"8.8.8.8"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    ip = _get_ip(scope)
+    assert ip == "8.8.8.8"
+
+    # the x-real-ip header overrides the IP from the client
+    headers = [
+        (b"x-real-ip", b"10.10.10.10"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    ip = _get_ip(scope)
+    assert ip == "10.10.10.10"
+
+
+@minimum_python_36
+def test_get_headers():
+    headers = [
+        (b"x-real-ip", b"10.10.10.10"),
+        (b"some_header", b"123"),
+        (b"some_header", b"abc"),
+    ]
+    scope = {
+        "client": ("127.0.0.1", 60457),
+        "headers": headers,
+    }
+    headers = _get_headers(scope)
+    assert headers == {
+        "x-real-ip": "10.10.10.10",
+        "some_header": "123, abc",
+    }
+
+
+@minimum_python_36
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
+    [
+        (
+            "/message/123456",
+            "endpoint",
+            "/message/123456",
+            "url",
+        ),
+        (
+            "/message/123456",
+            "url",
+            "/message/123456",
+            "url",
+        ),
+    ],
+)
+async def test_transaction_name(
+    sentry_init,
+    request_url,
+    transaction_style,
+    expected_transaction_name,
+    expected_transaction_source,
+    asgi3_app,
+    capture_envelopes,
+):
+    """
+    Tests that the transaction name is something meaningful.
+    """
+    sentry_init(
+        traces_sample_rate=1.0,
+        debug=True,
+    )
+
+    envelopes = capture_envelopes()
+
+    app = SentryAsgiMiddleware(asgi3_app, transaction_style=transaction_style)
+
+    async with TestClient(app) as client:
+        await client.get(request_url)
+
+    (transaction_envelope,) = envelopes
+    transaction_event = transaction_envelope.get_transaction_event()
+
+    assert transaction_event["transaction"] == expected_transaction_name
+    assert (
+        transaction_event["transaction_info"]["source"] == expected_transaction_source
+    )
+
+
+@minimum_python_36
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+    "request_url, transaction_style,expected_transaction_name,expected_transaction_source",
+    [
+        (
+            "/message/123456",
+            "endpoint",
+            "/message/123456",
+            "url",
+        ),
+        (
+            "/message/123456",
+            "url",
+            "/message/123456",
+            "url",
+        ),
+    ],
+)
+async def test_transaction_name_in_traces_sampler(
+    sentry_init,
+    request_url,
+    transaction_style,
+    expected_transaction_name,
+    expected_transaction_source,
+    asgi3_app,
+):
+    """
+    Tests that a custom traces_sampler has a meaningful transaction name.
+    In this case it is the URL, because the route is not known yet at sampling time.
+    """
+
+    def dummy_traces_sampler(sampling_context):
+        assert (
+            sampling_context["transaction_context"]["name"] == expected_transaction_name
+        )
+        assert (
+            sampling_context["transaction_context"]["source"]
+            == expected_transaction_source
+        )
+
+    sentry_init(
+        traces_sampler=dummy_traces_sampler,
+        traces_sample_rate=1.0,
+        debug=True,
+    )
+
+    app = SentryAsgiMiddleware(asgi3_app, transaction_style=transaction_style)
+
+    async with TestClient(app) as client:
+        await client.get(request_url)
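
The MockAsgi2App/MockAsgi3App classes above encode the structural difference that `_looks_like_asgi3` has to detect. Stripped of the mocks, the two calling conventions look like this (a plain-ASGI sketch, no Sentry involved):

```python
# ASGI3: a single async callable handles the request directly.
async def asgi3_style(scope, receive, send):
    pass  # read events from receive(), write responses via send()

# ASGI2: a sync callable takes only the scope and returns the
# actual async handler as a second step.
def asgi2_style(scope):
    async def handler(receive, send):
        pass
    return handler
```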
diff --git a/tests/integrations/asgi/test_fastapi.py b/tests/integrations/asgi/test_fastapi.py
deleted file mode 100644
index 518b8544b2..0000000000
--- a/tests/integrations/asgi/test_fastapi.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import sys
-
-import pytest
-from fastapi import FastAPI
-from fastapi.testclient import TestClient
-from sentry_sdk import capture_message
-from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
-
-
-@pytest.fixture
-def app():
-    app = FastAPI()
-
-    @app.get("/users/{user_id}")
-    async def get_user(user_id: str):
-        capture_message("hi", level="error")
-        return {"user_id": user_id}
-
-    app.add_middleware(SentryAsgiMiddleware, transaction_style="url")
-
-    return app
-
-
-@pytest.mark.skipif(sys.version_info < (3, 7), reason="requires python3.7 or higher")
-def test_fastapi_transaction_style(sentry_init, app, capture_events):
-    sentry_init(send_default_pii=True)
-    events = capture_events()
-
-    client = TestClient(app)
-    response = client.get("/users/rick")
-
-    assert response.status_code == 200
-
-    (event,) = events
-    assert event["transaction"] == "/users/{user_id}"
-    assert event["request"]["env"] == {"REMOTE_ADDR": "testclient"}
-    assert event["request"]["url"].endswith("/users/rick")
-    assert event["request"]["method"] == "GET"
-
-    # Assert that state is not leaked
-    events.clear()
-    capture_message("foo")
-    (event,) = events
-
-    assert "request" not in event
-    assert "transaction" not in event
diff --git a/tests/integrations/asyncio/__init__.py b/tests/integrations/asyncio/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/integrations/asyncio/test_asyncio_py3.py b/tests/integrations/asyncio/test_asyncio_py3.py
new file mode 100644
index 0000000000..c563f37b7d
--- /dev/null
+++ b/tests/integrations/asyncio/test_asyncio_py3.py
@@ -0,0 +1,370 @@
+import asyncio
+import inspect
+import sys
+
+import pytest
+
+import sentry_sdk
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations.asyncio import AsyncioIntegration, patch_asyncio
+
+try:
+    from unittest.mock import MagicMock, patch
+except ImportError:
+    from mock import MagicMock, patch
+
+try:
+    from contextvars import Context, ContextVar
+except ImportError:
+    pass  # All tests will be skipped with incompatible versions
+
+
+minimum_python_37 = pytest.mark.skipif(
+    sys.version_info < (3, 7), reason="Asyncio tests need Python >= 3.7"
+)
+
+
+minimum_python_311 = pytest.mark.skipif(
+    sys.version_info < (3, 11),
+    reason="Asyncio task context parameter was introduced in Python 3.11",
+)
+
+
+async def foo():
+    await asyncio.sleep(0.01)
+
+
+async def bar():
+    await asyncio.sleep(0.01)
+
+
+async def boom():
+    1 / 0
+
+
+@pytest.fixture(scope="session")
+def event_loop(request):
+    """Create an instance of the default event loop for each test case."""
+    loop = asyncio.get_event_loop_policy().new_event_loop()
+    yield loop
+    loop.close()
+
+
+def get_sentry_task_factory(mock_get_running_loop):
+    """
+    Patches the (mocked) asyncio loop and returns the sentry_task_factory it installed.
+    """
+    mock_loop = mock_get_running_loop.return_value
+    patch_asyncio()
+    patched_factory = mock_loop.set_task_factory.call_args[0][0]
+
+    return patched_factory
+
+
+@minimum_python_37
+@pytest.mark.asyncio
+async def test_create_task(
+    sentry_init,
+    capture_events,
+    event_loop,
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+        debug=True,
+        integrations=[
+            AsyncioIntegration(),
+        ],
+    )
+
+    events = capture_events()
+
+    with sentry_sdk.start_transaction(name="test_transaction_for_create_task"):
+        with sentry_sdk.start_span(op="root", description="not so important"):
+            tasks = [event_loop.create_task(foo()), event_loop.create_task(bar())]
+            await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION)
+
+            sentry_sdk.flush()
+
+    (transaction_event,) = events
+
+    assert transaction_event["spans"][0]["op"] == "root"
+    assert transaction_event["spans"][0]["description"] == "not so important"
+
+    assert transaction_event["spans"][1]["op"] == OP.FUNCTION
+    assert transaction_event["spans"][1]["description"] == "foo"
+    assert (
+        transaction_event["spans"][1]["parent_span_id"]
+        == transaction_event["spans"][0]["span_id"]
+    )
+
+    assert transaction_event["spans"][2]["op"] == OP.FUNCTION
+    assert transaction_event["spans"][2]["description"] == "bar"
+    assert (
+        transaction_event["spans"][2]["parent_span_id"]
+        == transaction_event["spans"][0]["span_id"]
+    )
+
+
+@minimum_python_37
+@pytest.mark.asyncio
+async def test_gather(
+    sentry_init,
+    capture_events,
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+        debug=True,
+        integrations=[
+            AsyncioIntegration(),
+        ],
+    )
+
+    events = capture_events()
+
+    with sentry_sdk.start_transaction(name="test_transaction_for_gather"):
+        with sentry_sdk.start_span(op="root", description="not so important"):
+            await asyncio.gather(foo(), bar(), return_exceptions=True)
+
+        sentry_sdk.flush()
+
+    (transaction_event,) = events
+
+    assert transaction_event["spans"][0]["op"] == "root"
+    assert transaction_event["spans"][0]["description"] == "not so important"
+
+    assert transaction_event["spans"][1]["op"] == OP.FUNCTION
+    assert transaction_event["spans"][1]["description"] == "foo"
+    assert (
+        transaction_event["spans"][1]["parent_span_id"]
+        == transaction_event["spans"][0]["span_id"]
+    )
+
+    assert transaction_event["spans"][2]["op"] == OP.FUNCTION
+    assert transaction_event["spans"][2]["description"] == "bar"
+    assert (
+        transaction_event["spans"][2]["parent_span_id"]
+        == transaction_event["spans"][0]["span_id"]
+    )
+
+
+@minimum_python_37
+@pytest.mark.asyncio
+async def test_exception(
+    sentry_init,
+    capture_events,
+    event_loop,
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+        debug=True,
+        integrations=[
+            AsyncioIntegration(),
+        ],
+    )
+
+    events = capture_events()
+
+    with sentry_sdk.start_transaction(name="test_exception"):
+        with sentry_sdk.start_span(op="root", description="not so important"):
+            tasks = [event_loop.create_task(boom()), event_loop.create_task(bar())]
+            await asyncio.wait(tasks, return_when=asyncio.FIRST_EXCEPTION)
+
+            sentry_sdk.flush()
+
+    (error_event, _) = events
+
+    assert error_event["transaction"] == "test_exception"
+    assert error_event["contexts"]["trace"]["op"] == "function"
+    assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
+    assert error_event["exception"]["values"][0]["value"] == "division by zero"
+    assert error_event["exception"]["values"][0]["mechanism"]["handled"] is False
+    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "asyncio"
+
+
+@minimum_python_37
+@pytest.mark.asyncio
+async def test_task_result(sentry_init):
+    sentry_init(
+        integrations=[
+            AsyncioIntegration(),
+        ],
+    )
+
+    async def add(a, b):
+        return a + b
+
+    result = await asyncio.create_task(add(1, 2))
+    assert result == 3, result
+
+
+@minimum_python_311
+@pytest.mark.asyncio
+async def test_task_with_context(sentry_init):
+    """
+    Integration test ensuring the task context parameter works on Python 3.11+.
+    """
+    sentry_init(
+        integrations=[
+            AsyncioIntegration(),
+        ],
+    )
+
+    var = ContextVar("var")
+    var.set("original value")
+
+    async def change_value():
+        var.set("changed value")
+
+    async def retrieve_value():
+        return var.get()
+
+    # Create a context and run both tasks within the context
+    ctx = Context()
+    async with asyncio.TaskGroup() as tg:
+        tg.create_task(change_value(), context=ctx)
+        retrieve_task = tg.create_task(retrieve_value(), context=ctx)
+
+    assert retrieve_task.result() == "changed value"
+
+
+@minimum_python_37
+@patch("asyncio.get_running_loop")
+def test_patch_asyncio(mock_get_running_loop):
+    """
+    Test that the patch_asyncio function will patch the task factory.
+    """
+    mock_loop = mock_get_running_loop.return_value
+
+    patch_asyncio()
+
+    assert mock_loop.set_task_factory.called
+
+    set_task_factory_args, _ = mock_loop.set_task_factory.call_args
+    assert len(set_task_factory_args) == 1
+
+    sentry_task_factory, *_ = set_task_factory_args
+    assert callable(sentry_task_factory)
+
+
+@minimum_python_37
+@pytest.mark.forked
+@patch("asyncio.get_running_loop")
+@patch("sentry_sdk.integrations.asyncio.Task")
+def test_sentry_task_factory_no_factory(MockTask, mock_get_running_loop):  # noqa: N803
+    mock_loop = mock_get_running_loop.return_value
+    mock_coro = MagicMock()
+
+    # Set the original task factory to None
+    mock_loop.get_task_factory.return_value = None
+
+    # Retrieve the sentry task factory (it is an inner function within patch_asyncio)
+    sentry_task_factory = get_sentry_task_factory(mock_get_running_loop)
+
+    # The call we are testing
+    ret_val = sentry_task_factory(mock_loop, mock_coro)
+
+    assert MockTask.called
+    assert ret_val == MockTask.return_value
+
+    task_args, task_kwargs = MockTask.call_args
+    assert len(task_args) == 1
+
+    coro_param, *_ = task_args
+    assert inspect.iscoroutine(coro_param)
+
+    assert "loop" in task_kwargs
+    assert task_kwargs["loop"] == mock_loop
+
+
+@minimum_python_37
+@pytest.mark.forked
+@patch("asyncio.get_running_loop")
+def test_sentry_task_factory_with_factory(mock_get_running_loop):
+    mock_loop = mock_get_running_loop.return_value
+    mock_coro = MagicMock()
+
+    # The original task factory will be mocked out here; retrieve the value for later
+    orig_task_factory = mock_loop.get_task_factory.return_value
+
+    # Retrieve the sentry task factory (it is an inner function within patch_asyncio)
+    sentry_task_factory = get_sentry_task_factory(mock_get_running_loop)
+
+    # The call we are testing
+    ret_val = sentry_task_factory(mock_loop, mock_coro)
+
+    assert orig_task_factory.called
+    assert ret_val == orig_task_factory.return_value
+
+    task_factory_args, _ = orig_task_factory.call_args
+    assert len(task_factory_args) == 2
+
+    loop_arg, coro_arg = task_factory_args
+    assert loop_arg == mock_loop
+    assert inspect.iscoroutine(coro_arg)
+
+
+@minimum_python_311
+@patch("asyncio.get_running_loop")
+@patch("sentry_sdk.integrations.asyncio.Task")
+def test_sentry_task_factory_context_no_factory(
+    MockTask, mock_get_running_loop  # noqa: N803
+):
+    mock_loop = mock_get_running_loop.return_value
+    mock_coro = MagicMock()
+    mock_context = MagicMock()
+
+    # Set the original task factory to None
+    mock_loop.get_task_factory.return_value = None
+
+    # Retrieve the sentry task factory (it is an inner function within patch_asyncio)
+    sentry_task_factory = get_sentry_task_factory(mock_get_running_loop)
+
+    # The call we are testing
+    ret_val = sentry_task_factory(mock_loop, mock_coro, context=mock_context)
+
+    assert MockTask.called
+    assert ret_val == MockTask.return_value
+
+    task_args, task_kwargs = MockTask.call_args
+    assert len(task_args) == 1
+
+    coro_param, *_ = task_args
+    assert inspect.iscoroutine(coro_param)
+
+    assert "loop" in task_kwargs
+    assert task_kwargs["loop"] == mock_loop
+    assert "context" in task_kwargs
+    assert task_kwargs["context"] == mock_context
+
+
+@minimum_python_311
+@patch("asyncio.get_running_loop")
+def test_sentry_task_factory_context_with_factory(mock_get_running_loop):
+    mock_loop = mock_get_running_loop.return_value
+    mock_coro = MagicMock()
+    mock_context = MagicMock()
+
+    # The original task factory will be mocked out here; retrieve the value for later
+    orig_task_factory = mock_loop.get_task_factory.return_value
+
+    # Retrieve the sentry task factory (it is an inner function within patch_asyncio)
+    sentry_task_factory = get_sentry_task_factory(mock_get_running_loop)
+
+    # The call we are testing
+    ret_val = sentry_task_factory(mock_loop, mock_coro, context=mock_context)
+
+    assert orig_task_factory.called
+    assert ret_val == orig_task_factory.return_value
+
+    task_factory_args, task_factory_kwargs = orig_task_factory.call_args
+    assert len(task_factory_args) == 2
+
+    loop_arg, coro_arg = task_factory_args
+    assert loop_arg == mock_loop
+    assert inspect.iscoroutine(coro_arg)
+
+    assert "context" in task_factory_kwargs
+    assert task_factory_kwargs["context"] == mock_context
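
The factory tests above lean on the plain asyncio contract: `loop.set_task_factory` installs a callable `(loop, coro, **kwargs)` that the loop invokes instead of constructing `Task` itself, which is exactly the hook `patch_asyncio` wraps. A Sentry-free sketch of that contract:

```python
# Sketch of the task-factory hook the tests above mock out: once
# installed, the loop calls the factory instead of creating asyncio.Task
# directly. On Python 3.11+ the loop may also pass a context keyword.
import asyncio

def logging_task_factory(loop, coro, **kwargs):
    print("creating a task for", coro)
    return asyncio.Task(coro, loop=loop, **kwargs)

async def main():
    asyncio.get_running_loop().set_task_factory(logging_task_factory)
    await asyncio.create_task(asyncio.sleep(0))

asyncio.run(main())
```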
diff --git a/tests/integrations/asyncpg/__init__.py b/tests/integrations/asyncpg/__init__.py
new file mode 100644
index 0000000000..d988407a2d
--- /dev/null
+++ b/tests/integrations/asyncpg/__init__.py
@@ -0,0 +1,10 @@
+import os
+import sys
+import pytest
+
+pytest.importorskip("asyncpg")
+pytest.importorskip("pytest_asyncio")
+
+# Load `asyncpg_helpers` into the module search path to test query source path names relative to the module. See
+# `test_query_source_with_module_in_search_path`
+sys.path.insert(0, os.path.join(os.path.dirname(__file__)))
diff --git a/tests/integrations/asyncpg/asyncpg_helpers/__init__.py b/tests/integrations/asyncpg/asyncpg_helpers/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/integrations/asyncpg/asyncpg_helpers/helpers.py b/tests/integrations/asyncpg/asyncpg_helpers/helpers.py
new file mode 100644
index 0000000000..8de809ba1b
--- /dev/null
+++ b/tests/integrations/asyncpg/asyncpg_helpers/helpers.py
@@ -0,0 +1,2 @@
+async def execute_query_in_connection(query, connection):
+    await connection.execute(query)
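
With the `sys.path` tweak from the package `__init__.py` above, the helper imports as a top-level module. A hypothetical use inside a test (credentials mirror the defaults in `test_asyncpg.py` below):

```python
# Hypothetical usage of the helper above: routing a query through a module
# outside the test file, so a recorded query source points at
# asyncpg_helpers/helpers.py rather than at the test itself.
import asyncio

from asyncpg import connect
from asyncpg_helpers.helpers import execute_query_in_connection

async def main():
    conn = await connect("postgresql://foo:bar@localhost/postgres")
    await execute_query_in_connection("SELECT 1", conn)
    await conn.close()

asyncio.run(main())
```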
diff --git a/tests/integrations/asyncpg/test_asyncpg.py b/tests/integrations/asyncpg/test_asyncpg.py
new file mode 100644
index 0000000000..611d8ea9d9
--- /dev/null
+++ b/tests/integrations/asyncpg/test_asyncpg.py
@@ -0,0 +1,753 @@
+"""
+Tests need pytest-asyncio installed.
+
+Tests need a local PostgreSQL instance running; the easiest way to provide one is
+```sh
+docker run --rm --name some-postgres -e POSTGRES_USER=foo -e POSTGRES_PASSWORD=bar -d -p 5432:5432 postgres
+```
+
+The tests use the following credentials to establish a database connection.
+"""
+
+import os
+
+
+PG_NAME = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_NAME", "postgres")
+PG_USER = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_USER", "foo")
+PG_PASSWORD = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PASSWORD", "bar")
+PG_HOST = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost")
+PG_PORT = 5432
+
+
+from sentry_sdk._compat import PY2
+import datetime
+
+import asyncpg
+import pytest
+
+import pytest_asyncio
+
+from asyncpg import connect, Connection
+
+from sentry_sdk import capture_message, start_transaction
+from sentry_sdk.integrations.asyncpg import AsyncPGIntegration
+from sentry_sdk.consts import SPANDATA
+from sentry_sdk.tracing_utils import record_sql_queries
+from sentry_sdk._compat import contextmanager
+from tests.conftest import ApproxDict
+
+try:
+    from unittest import mock
+except ImportError:
+    import mock
+
+
+PG_CONNECTION_URI = "postgresql://{}:{}@{}/{}".format(
+    PG_USER, PG_PASSWORD, PG_HOST, PG_NAME
+)
+CRUMBS_CONNECT = {
+    "category": "query",
+    "data": ApproxDict(
+        {
+            "db.name": PG_NAME,
+            "db.system": "postgresql",
+            "db.user": PG_USER,
+            "server.address": PG_HOST,
+            "server.port": PG_PORT,
+        }
+    ),
+    "message": "connect",
+    "type": "default",
+}
+
+
+@pytest_asyncio.fixture(autouse=True)
+async def _clean_pg():
+    conn = await connect(PG_CONNECTION_URI)
+    await conn.execute("DROP TABLE IF EXISTS users")
+    await conn.execute(
+        """
+            CREATE TABLE users(
+                id serial PRIMARY KEY,
+                name text,
+                password text,
+                dob date
+            )
+        """
+    )
+    await conn.close()
+
+
+@pytest.mark.asyncio
+async def test_connect(sentry_init, capture_events) -> None:
+    sentry_init(
+        integrations=[AsyncPGIntegration()],
+        _experiments={"record_sql_params": True},
+    )
+    events = capture_events()
+
+    conn: Connection = await connect(PG_CONNECTION_URI)
+
+    await conn.close()
+
+    capture_message("hi")
+
+    (event,) = events
+
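+    # Strip the non-deterministic timestamps so breadcrumbs can be compared verbatim.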
+    for crumb in event["breadcrumbs"]["values"]:
+        del crumb["timestamp"]
+
+    assert event["breadcrumbs"]["values"] == [CRUMBS_CONNECT]
+
+
+@pytest.mark.asyncio
+async def test_execute(sentry_init, capture_events) -> None:
+    sentry_init(
+        integrations=[AsyncPGIntegration()],
+        _experiments={"record_sql_params": True},
+    )
+    events = capture_events()
+
+    conn: Connection = await connect(PG_CONNECTION_URI)
+
+    await conn.execute(
+        "INSERT INTO users(name, password, dob) VALUES ('Alice', 'pw', '1990-12-25')",
+    )
+
+    await conn.execute(
+        "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
+        "Bob",
+        "secret_pw",
+        datetime.date(1984, 3, 1),
+    )
+
+    row = await conn.fetchrow("SELECT * FROM users WHERE name = $1", "Bob")
+    assert row == (2, "Bob", "secret_pw", datetime.date(1984, 3, 1))
+
+    row = await conn.fetchrow("SELECT * FROM users WHERE name = 'Bob'")
+    assert row == (2, "Bob", "secret_pw", datetime.date(1984, 3, 1))
+
+    await conn.close()
+
+    capture_message("hi")
+
+    (event,) = events
+
+    for crumb in event["breadcrumbs"]["values"]:
+        del crumb["timestamp"]
+
+    assert event["breadcrumbs"]["values"] == [
+        CRUMBS_CONNECT,
+        {
+            "category": "query",
+            "data": {},
+            "message": "INSERT INTO users(name, password, dob) VALUES ('Alice', 'pw', '1990-12-25')",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {},
+            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {},
+            "message": "SELECT * FROM users WHERE name = $1",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {},
+            "message": "SELECT * FROM users WHERE name = 'Bob'",
+            "type": "default",
+        },
+    ]
+
+
+@pytest.mark.asyncio
+async def test_execute_many(sentry_init, capture_events) -> None:
+    sentry_init(
+        integrations=[AsyncPGIntegration()],
+        _experiments={"record_sql_params": True},
+    )
+    events = capture_events()
+
+    conn: Connection = await connect(PG_CONNECTION_URI)
+
+    await conn.executemany(
+        "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
+        [
+            ("Bob", "secret_pw", datetime.date(1984, 3, 1)),
+            ("Alice", "pw", datetime.date(1990, 12, 25)),
+        ],
+    )
+
+    await conn.close()
+
+    capture_message("hi")
+
+    (event,) = events
+
+    for crumb in event["breadcrumbs"]["values"]:
+        del crumb["timestamp"]
+
+    assert event["breadcrumbs"]["values"] == [
+        CRUMBS_CONNECT,
+        {
+            "category": "query",
+            "data": {"db.executemany": True},
+            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
+            "type": "default",
+        },
+    ]
+
+
+@pytest.mark.asyncio
+async def test_record_params(sentry_init, capture_events) -> None:
+    sentry_init(
+        integrations=[AsyncPGIntegration(record_params=True)],
+        _experiments={"record_sql_params": True},
+    )
+    events = capture_events()
+
+    conn: Connection = await connect(PG_CONNECTION_URI)
+
+    await conn.execute(
+        "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
+        "Bob",
+        "secret_pw",
+        datetime.date(1984, 3, 1),
+    )
+
+    await conn.close()
+
+    capture_message("hi")
+
+    (event,) = events
+
+    for crumb in event["breadcrumbs"]["values"]:
+        del crumb["timestamp"]
+
+    assert event["breadcrumbs"]["values"] == [
+        CRUMBS_CONNECT,
+        {
+            "category": "query",
+            "data": {
+                "db.params": ["Bob", "secret_pw", "datetime.date(1984, 3, 1)"],
+                "db.paramstyle": "format",
+            },
+            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
+            "type": "default",
+        },
+    ]
+
+
+@pytest.mark.asyncio
+async def test_cursor(sentry_init, capture_events) -> None:
+    sentry_init(
+        integrations=[AsyncPGIntegration()],
+        _experiments={"record_sql_params": True},
+    )
+    events = capture_events()
+
+    conn: Connection = await connect(PG_CONNECTION_URI)
+
+    await conn.executemany(
+        "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
+        [
+            ("Bob", "secret_pw", datetime.date(1984, 3, 1)),
+            ("Alice", "pw", datetime.date(1990, 12, 25)),
+        ],
+    )
+
+    async with conn.transaction():
+        # Postgres requires non-scrollable cursors to be created
+        # and used in a transaction.
+        async for record in conn.cursor(
+            "SELECT * FROM users WHERE dob > $1", datetime.date(1970, 1, 1)
+        ):
+            print(record)
+
+    await conn.close()
+
+    capture_message("hi")
+
+    (event,) = events
+
+    for crumb in event["breadcrumbs"]["values"]:
+        del crumb["timestamp"]
+
+    assert event["breadcrumbs"]["values"] == [
+        CRUMBS_CONNECT,
+        {
+            "category": "query",
+            "data": {"db.executemany": True},
+            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
+            "type": "default",
+        },
+        {"category": "query", "data": {}, "message": "BEGIN;", "type": "default"},
+        {
+            "category": "query",
+            "data": {},
+            "message": "SELECT * FROM users WHERE dob > $1",
+            "type": "default",
+        },
+        {"category": "query", "data": {}, "message": "COMMIT;", "type": "default"},
+    ]
+
+
+@pytest.mark.asyncio
+async def test_cursor_manual(sentry_init, capture_events) -> None:
+    sentry_init(
+        integrations=[AsyncPGIntegration()],
+        _experiments={"record_sql_params": True},
+    )
+    events = capture_events()
+
+    conn: Connection = await connect(PG_CONNECTION_URI)
+
+    await conn.executemany(
+        "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
+        [
+            ("Bob", "secret_pw", datetime.date(1984, 3, 1)),
+            ("Alice", "pw", datetime.date(1990, 12, 25)),
+        ],
+    )
+    async with conn.transaction():
+        # Postgres requires non-scrollable cursors to be created
+        # and used in a transaction.
+        cur = await conn.cursor(
+            "SELECT * FROM users WHERE dob > $1", datetime.date(1970, 1, 1)
+        )
+        record = await cur.fetchrow()
+        print(record)
+        while await cur.forward(1):
+            record = await cur.fetchrow()
+            print(record)
+
+    await conn.close()
+
+    capture_message("hi")
+
+    (event,) = events
+
+    for crumb in event["breadcrumbs"]["values"]:
+        del crumb["timestamp"]
+
+    assert event["breadcrumbs"]["values"] == [
+        CRUMBS_CONNECT,
+        {
+            "category": "query",
+            "data": {"db.executemany": True},
+            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
+            "type": "default",
+        },
+        {"category": "query", "data": {}, "message": "BEGIN;", "type": "default"},
+        {
+            "category": "query",
+            "data": {},
+            "message": "SELECT * FROM users WHERE dob > $1",
+            "type": "default",
+        },
+        {"category": "query", "data": {}, "message": "COMMIT;", "type": "default"},
+    ]
+
+
+@pytest.mark.asyncio
+async def test_prepared_stmt(sentry_init, capture_events) -> None:
+    sentry_init(
+        integrations=[AsyncPGIntegration()],
+        _experiments={"record_sql_params": True},
+    )
+    events = capture_events()
+
+    conn: Connection = await connect(PG_CONNECTION_URI)
+
+    await conn.executemany(
+        "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
+        [
+            ("Bob", "secret_pw", datetime.date(1984, 3, 1)),
+            ("Alice", "pw", datetime.date(1990, 12, 25)),
+        ],
+    )
+
+    stmt = await conn.prepare("SELECT * FROM users WHERE name = $1")
+
+    print(await stmt.fetchval("Bob"))
+    print(await stmt.fetchval("Alice"))
+
+    await conn.close()
+
+    capture_message("hi")
+
+    (event,) = events
+
+    for crumb in event["breadcrumbs"]["values"]:
+        del crumb["timestamp"]
+
+    assert event["breadcrumbs"]["values"] == [
+        CRUMBS_CONNECT,
+        {
+            "category": "query",
+            "data": {"db.executemany": True},
+            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {},
+            "message": "SELECT * FROM users WHERE name = $1",
+            "type": "default",
+        },
+    ]
+
+
+@pytest.mark.asyncio
+async def test_connection_pool(sentry_init, capture_events) -> None:
+    sentry_init(
+        integrations=[AsyncPGIntegration()],
+        _experiments={"record_sql_params": True},
+    )
+    events = capture_events()
+
+    pool_size = 2
+
+    pool = await asyncpg.create_pool(
+        PG_CONNECTION_URI, min_size=pool_size, max_size=pool_size
+    )
+
+    async with pool.acquire() as conn:
+        await conn.execute(
+            "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
+            "Bob",
+            "secret_pw",
+            datetime.date(1984, 3, 1),
+        )
+
+    async with pool.acquire() as conn:
+        row = await conn.fetchrow("SELECT * FROM users WHERE name = $1", "Bob")
+        assert row == (1, "Bob", "secret_pw", datetime.date(1984, 3, 1))
+
+    await pool.close()
+
+    capture_message("hi")
+
+    (event,) = events
+
+    for crumb in event["breadcrumbs"]["values"]:
+        del crumb["timestamp"]
+
+    assert event["breadcrumbs"]["values"] == [
+        # The connection pool opens pool_size connections, so the connect breadcrumb appears pool_size times
+        *[CRUMBS_CONNECT] * pool_size,
+        {
+            "category": "query",
+            "data": {},
+            "message": "INSERT INTO users(name, password, dob) VALUES($1, $2, $3)",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {},
+            "message": "SELECT pg_advisory_unlock_all();\n"
+            "CLOSE ALL;\n"
+            "UNLISTEN *;\n"
+            "RESET ALL;",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {},
+            "message": "SELECT * FROM users WHERE name = $1",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {},
+            "message": "SELECT pg_advisory_unlock_all();\n"
+            "CLOSE ALL;\n"
+            "UNLISTEN *;\n"
+            "RESET ALL;",
+            "type": "default",
+        },
+    ]
+
+
+@pytest.mark.asyncio
+async def test_query_source_disabled(sentry_init, capture_events):
+    sentry_options = {
+        "integrations": [AsyncPGIntegration()],
+        "enable_tracing": True,
+        "enable_db_query_source": False,
+        "db_query_source_threshold_ms": 0,
+    }
+
+    sentry_init(**sentry_options)
+
+    events = capture_events()
+
+    with start_transaction(name="test_transaction", sampled=True):
+        conn: Connection = await connect(PG_CONNECTION_URI)
+
+        await conn.execute(
+            "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')",
+        )
+
+        await conn.close()
+
+    (event,) = events
+
+    span = event["spans"][-1]
+    assert span["description"].startswith("INSERT INTO")
+
+    data = span.get("data", {})
+
+    assert SPANDATA.CODE_LINENO not in data
+    assert SPANDATA.CODE_NAMESPACE not in data
+    assert SPANDATA.CODE_FILEPATH not in data
+    assert SPANDATA.CODE_FUNCTION not in data
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("enable_db_query_source", [None, True])
+async def test_query_source_enabled(
+    sentry_init, capture_events, enable_db_query_source
+):
+    sentry_options = {
+        "integrations": [AsyncPGIntegration()],
+        "enable_tracing": True,
+        "db_query_source_threshold_ms": 0,
+    }
+    if enable_db_query_source is not None:
+        sentry_options["enable_db_query_source"] = enable_db_query_source
+
+    sentry_init(**sentry_options)
+
+    events = capture_events()
+
+    with start_transaction(name="test_transaction", sampled=True):
+        conn: Connection = await connect(PG_CONNECTION_URI)
+
+        await conn.execute(
+            "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')",
+        )
+
+        await conn.close()
+
+    (event,) = events
+
+    span = event["spans"][-1]
+    assert span["description"].startswith("INSERT INTO")
+
+    data = span.get("data", {})
+
+    assert SPANDATA.CODE_LINENO in data
+    assert SPANDATA.CODE_NAMESPACE in data
+    assert SPANDATA.CODE_FILEPATH in data
+    assert SPANDATA.CODE_FUNCTION in data
+
+
+@pytest.mark.asyncio
+async def test_query_source(sentry_init, capture_events):
+    sentry_init(
+        integrations=[AsyncPGIntegration()],
+        enable_tracing=True,
+        enable_db_query_source=True,
+        db_query_source_threshold_ms=0,
+    )
+
+    events = capture_events()
+
+    with start_transaction(name="test_transaction", sampled=True):
+        conn: Connection = await connect(PG_CONNECTION_URI)
+
+        await conn.execute(
+            "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')",
+        )
+
+        await conn.close()
+
+    (event,) = events
+
+    span = event["spans"][-1]
+    assert span["description"].startswith("INSERT INTO")
+
+    data = span.get("data", {})
+
+    assert SPANDATA.CODE_LINENO in data
+    assert SPANDATA.CODE_NAMESPACE in data
+    assert SPANDATA.CODE_FILEPATH in data
+    assert SPANDATA.CODE_FUNCTION in data
+
+    assert type(data.get(SPANDATA.CODE_LINENO)) == int
+    assert data.get(SPANDATA.CODE_LINENO) > 0
+    assert (
+        data.get(SPANDATA.CODE_NAMESPACE) == "tests.integrations.asyncpg.test_asyncpg"
+    )
+    assert data.get(SPANDATA.CODE_FILEPATH).endswith(
+        "tests/integrations/asyncpg/test_asyncpg.py"
+    )
+
+    is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
+    assert is_relative_path
+
+    assert data.get(SPANDATA.CODE_FUNCTION) == "test_query_source"
+
+
+@pytest.mark.asyncio
+async def test_query_source_with_module_in_search_path(sentry_init, capture_events):
+    """
+    Test that the query source is reported relative to the path of the module it ran in.
+    """
+    sentry_init(
+        integrations=[AsyncPGIntegration()],
+        enable_tracing=True,
+        enable_db_query_source=True,
+        db_query_source_threshold_ms=0,
+    )
+
+    events = capture_events()
+
+    from asyncpg_helpers.helpers import execute_query_in_connection
+
+    with start_transaction(name="test_transaction", sampled=True):
+        conn: Connection = await connect(PG_CONNECTION_URI)
+
+        await execute_query_in_connection(
+            "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')",
+            conn,
+        )
+
+        await conn.close()
+
+    (event,) = events
+
+    span = event["spans"][-1]
+    assert span["description"].startswith("INSERT INTO")
+
+    data = span.get("data", {})
+
+    assert SPANDATA.CODE_LINENO in data
+    assert SPANDATA.CODE_NAMESPACE in data
+    assert SPANDATA.CODE_FILEPATH in data
+    assert SPANDATA.CODE_FUNCTION in data
+
+    assert type(data.get(SPANDATA.CODE_LINENO)) == int
+    assert data.get(SPANDATA.CODE_LINENO) > 0
+    if not PY2:
+        assert data.get(SPANDATA.CODE_NAMESPACE) == "asyncpg_helpers.helpers"
+        assert data.get(SPANDATA.CODE_FILEPATH) == "asyncpg_helpers/helpers.py"
+
+    is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
+    assert is_relative_path
+
+    assert data.get(SPANDATA.CODE_FUNCTION) == "execute_query_in_connection"
+
+
+@pytest.mark.asyncio
+async def test_no_query_source_if_duration_too_short(sentry_init, capture_events):
+    sentry_init(
+        integrations=[AsyncPGIntegration()],
+        enable_tracing=True,
+        enable_db_query_source=True,
+        db_query_source_threshold_ms=100,
+    )
+
+    events = capture_events()
+
+    with start_transaction(name="test_transaction", sampled=True):
+        conn: Connection = await connect(PG_CONNECTION_URI)
+
+        @contextmanager
+        def fake_record_sql_queries(*args, **kwargs):
+            with record_sql_queries(*args, **kwargs) as span:
+                pass
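+            # The fake timestamps below give a duration of 99.999 ms, just under the
+            # 100 ms threshold, so no query source data should be attached to the span.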
+            span.start_timestamp = datetime.datetime(2024, 1, 1, microsecond=0)
+            span.timestamp = datetime.datetime(2024, 1, 1, microsecond=99999)
+            yield span
+
+        with mock.patch(
+            "sentry_sdk.integrations.asyncpg.record_sql_queries",
+            fake_record_sql_queries,
+        ):
+            await conn.execute(
+                "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')",
+            )
+
+        await conn.close()
+
+    (event,) = events
+
+    span = event["spans"][-1]
+    assert span["description"].startswith("INSERT INTO")
+
+    data = span.get("data", {})
+
+    assert SPANDATA.CODE_LINENO not in data
+    assert SPANDATA.CODE_NAMESPACE not in data
+    assert SPANDATA.CODE_FILEPATH not in data
+    assert SPANDATA.CODE_FUNCTION not in data
+
+
+@pytest.mark.asyncio
+async def test_query_source_if_duration_over_threshold(sentry_init, capture_events):
+    sentry_init(
+        integrations=[AsyncPGIntegration()],
+        enable_tracing=True,
+        enable_db_query_source=True,
+        db_query_source_threshold_ms=100,
+    )
+
+    events = capture_events()
+
+    with start_transaction(name="test_transaction", sampled=True):
+        conn: Connection = await connect(PG_CONNECTION_URI)
+
+        @contextmanager
+        def fake_record_sql_queries(*args, **kwargs):
+            with record_sql_queries(*args, **kwargs) as span:
+                pass
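+            # The fake timestamps below give a duration of 100.001 ms, just over the
+            # 100 ms threshold, so query source data should be attached to the span.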
+            span.start_timestamp = datetime.datetime(2024, 1, 1, microsecond=0)
+            span.timestamp = datetime.datetime(2024, 1, 1, microsecond=100001)
+            yield span
+
+        with mock.patch(
+            "sentry_sdk.integrations.asyncpg.record_sql_queries",
+            fake_record_sql_queries,
+        ):
+            await conn.execute(
+                "INSERT INTO users(name, password, dob) VALUES ('Alice', 'secret', '1990-12-25')",
+            )
+
+        await conn.close()
+
+    (event,) = events
+
+    span = event["spans"][-1]
+    assert span["description"].startswith("INSERT INTO")
+
+    data = span.get("data", {})
+
+    assert SPANDATA.CODE_LINENO in data
+    assert SPANDATA.CODE_NAMESPACE in data
+    assert SPANDATA.CODE_FILEPATH in data
+    assert SPANDATA.CODE_FUNCTION in data
+
+    assert type(data.get(SPANDATA.CODE_LINENO)) == int
+    assert data.get(SPANDATA.CODE_LINENO) > 0
+    assert (
+        data.get(SPANDATA.CODE_NAMESPACE) == "tests.integrations.asyncpg.test_asyncpg"
+    )
+    assert data.get(SPANDATA.CODE_FILEPATH).endswith(
+        "tests/integrations/asyncpg/test_asyncpg.py"
+    )
+
+    is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
+    assert is_relative_path
+
+    assert (
+        data.get(SPANDATA.CODE_FUNCTION)
+        == "test_query_source_if_duration_over_threshold"
+    )
diff --git a/tests/integrations/aws_lambda/__init__.py b/tests/integrations/aws_lambda/__init__.py
new file mode 100644
index 0000000000..71eb245353
--- /dev/null
+++ b/tests/integrations/aws_lambda/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("boto3")
diff --git a/tests/integrations/aws_lambda/client.py b/tests/integrations/aws_lambda/client.py
index 784a4a9006..298ebd920d 100644
--- a/tests/integrations/aws_lambda/client.py
+++ b/tests/integrations/aws_lambda/client.py
@@ -1,61 +1,207 @@
-import sys
+import base64
+import boto3
+import glob
+import hashlib
 import os
-import shutil
-import tempfile
 import subprocess
-import boto3
-import uuid
-import base64
+import sys
+import tempfile
 
+from sentry_sdk.consts import VERSION as SDK_VERSION
+from sentry_sdk.utils import get_git_revision
 
-def get_boto_client():
-    return boto3.client(
-        "lambda",
-        aws_access_key_id=os.environ["SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID"],
-        aws_secret_access_key=os.environ["SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY"],
-        region_name="us-east-1",
+AWS_REGION_NAME = "us-east-1"
+AWS_CREDENTIALS = {
+    "aws_access_key_id": os.environ["SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID"],
+    "aws_secret_access_key": os.environ["SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY"],
+}
+AWS_LAMBDA_EXECUTION_ROLE_NAME = "lambda-ex"
+AWS_LAMBDA_EXECUTION_ROLE_ARN = None
+
+
+def _install_dependencies(base_dir, subprocess_kwargs):
+    """
+    Installs dependencies for the AWS Lambda function
+    """
+    setup_cfg = os.path.join(base_dir, "setup.cfg")
+    with open(setup_cfg, "w") as f:
+        f.write("[install]\nprefix=")
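+    # Presumably this empty install prefix works around pip/distutils prefix conflicts
+    # when installing with --target on some distros.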
+
+    # Install requirements for Lambda Layer (these are more limited than the SDK requirements,
+    # because Lambda does not support the newest versions of some packages)
+    subprocess.check_call(
+        [
+            sys.executable,
+            "-m",
+            "pip",
+            "install",
+            "-r",
+            "aws-lambda-layer-requirements.txt",
+            "--target",
+            base_dir,
+        ],
+        **subprocess_kwargs,
+    )
+    # Install requirements used for testing
+    subprocess.check_call(
+        [
+            sys.executable,
+            "-m",
+            "pip",
+            "install",
+            "mock==3.0.0",
+            "funcsigs",
+            "--target",
+            base_dir,
+        ],
+        **subprocess_kwargs,
+    )
+    # Create a source distribution of the Sentry SDK (in parent directory of base_dir)
+    subprocess.check_call(
+        [
+            sys.executable,
+            "setup.py",
+            "sdist",
+            "--dist-dir",
+            os.path.dirname(base_dir),
+        ],
+        **subprocess_kwargs,
+    )
+    # Install the created Sentry SDK source distribution into the target directory
+    # Do not install the dependencies of the SDK, because they were installed by aws-lambda-layer-requirements.txt above
+    source_distribution_archive = glob.glob(
+        "{}/*.tar.gz".format(os.path.dirname(base_dir))
+    )[0]
+    subprocess.check_call(
+        [
+            sys.executable,
+            "-m",
+            "pip",
+            "install",
+            source_distribution_archive,
+            "--no-deps",
+            "--target",
+            base_dir,
+        ],
+        **subprocess_kwargs,
     )
 
 
-def build_no_code_serverless_function_and_layer(
-    client, tmpdir, fn_name, runtime, timeout, initial_handler
-):
+def _create_lambda_function_zip(base_dir):
     """
-    Util function that auto instruments the no code implementation of the python
-    sdk by creating a layer containing the Python-sdk, and then creating a func
-    that uses that layer
+    Zips the given base_dir, omitting Python cache files
     """
-    from scripts.build_awslambda_layer import (
-        build_packaged_zip,
+    subprocess.run(
+        [
+            "zip",
+            "-q",
+            "-x",
+            "**/__pycache__/*",
+            "-r",
+            "lambda-function-package.zip",
+            "./",
+        ],
+        cwd=base_dir,
+        check=True,
     )
 
-    build_packaged_zip(dest_abs_path=tmpdir, dest_zip_filename="serverless-ball.zip")
 
-    with open(os.path.join(tmpdir, "serverless-ball.zip"), "rb") as serverless_zip:
-        response = client.publish_layer_version(
-            LayerName="python-serverless-sdk-test",
-            Description="Created as part of testsuite for getsentry/sentry-python",
-            Content={"ZipFile": serverless_zip.read()},
+def _create_lambda_package(
+    base_dir, code, initial_handler, layer, syntax_check, subprocess_kwargs
+):
+    """
+    Creates deployable packages (as zip files) for the AWS Lambda function
+    and, optionally, the accompanying Sentry Lambda layer
+    """
+    if initial_handler:
+        # If an initial handler value is provided, i.e. it is not the default
+        # `test_lambda.test_handler`, create another directory level so that our path is
+        # test_dir.test_lambda.test_handler
+        test_dir_path = os.path.join(base_dir, "test_dir")
+        python_init_file = os.path.join(test_dir_path, "__init__.py")
+        os.makedirs(test_dir_path)
+        with open(python_init_file, "w"):
+            # Create __init__ file to make it a python package
+            pass
+
+        test_lambda_py = os.path.join(base_dir, "test_dir", "test_lambda.py")
+    else:
+        test_lambda_py = os.path.join(base_dir, "test_lambda.py")
+
+    with open(test_lambda_py, "w") as f:
+        f.write(code)
+
+    if syntax_check:
+        # Check the file for valid syntax first, and verify that the integration
+        # does not crash when not running in Lambda but rather under a local
+        # deployment tool (such as chalice's)
+        subprocess.check_call([sys.executable, test_lambda_py])
+
+    if layer is None:
+        _install_dependencies(base_dir, subprocess_kwargs)
+        _create_lambda_function_zip(base_dir)
+
+    else:
+        _create_lambda_function_zip(base_dir)
+
+        # Create Lambda layer zip package
+        from scripts.build_aws_lambda_layer import build_packaged_zip
+
+        build_packaged_zip(
+            base_dir=base_dir,
+            make_dist=True,
+            out_zip_filename="lambda-layer-package.zip",
         )
 
-    with open(os.path.join(tmpdir, "ball.zip"), "rb") as zip:
-        client.create_function(
-            FunctionName=fn_name,
-            Runtime=runtime,
-            Timeout=timeout,
-            Environment={
-                "Variables": {
-                    "SENTRY_INITIAL_HANDLER": initial_handler,
-                    "SENTRY_DSN": "https://123abc@example.com/123",
-                    "SENTRY_TRACES_SAMPLE_RATE": "1.0",
-                }
-            },
-            Role=os.environ["SENTRY_PYTHON_TEST_AWS_IAM_ROLE"],
-            Handler="sentry_sdk.integrations.init_serverless_sdk.sentry_lambda_handler",
-            Layers=[response["LayerVersionArn"]],
-            Code={"ZipFile": zip.read()},
-            Description="Created as part of testsuite for getsentry/sentry-python",
+
+def _get_or_create_lambda_execution_role():
+    global AWS_LAMBDA_EXECUTION_ROLE_ARN
+
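+    # Standard trust policy allowing the AWS Lambda service (lambda.amazonaws.com) to assume this execution role.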
+    policy = """{
+        "Version": "2012-10-17",
+        "Statement": [
+            {
+                "Effect": "Allow",
+                "Principal": {
+                    "Service": "lambda.amazonaws.com"
+                },
+                "Action": "sts:AssumeRole"
+            }
+        ]
+    }
+    """
+    iam_client = boto3.client(
+        "iam",
+        region_name=AWS_REGION_NAME,
+        **AWS_CREDENTIALS,
+    )
+
+    try:
+        response = iam_client.get_role(RoleName=AWS_LAMBDA_EXECUTION_ROLE_NAME)
+        AWS_LAMBDA_EXECUTION_ROLE_ARN = response["Role"]["Arn"]
+    except iam_client.exceptions.NoSuchEntityException:
+        # create role for lambda execution
+        response = iam_client.create_role(
+            RoleName=AWS_LAMBDA_EXECUTION_ROLE_NAME,
+            AssumeRolePolicyDocument=policy,
         )
+        AWS_LAMBDA_EXECUTION_ROLE_ARN = response["Role"]["Arn"]
+
+        # attach policy to role
+        iam_client.attach_role_policy(
+            RoleName=AWS_LAMBDA_EXECUTION_ROLE_NAME,
+            PolicyArn="arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole",
+        )
+
+
+def get_boto_client():
+    _get_or_create_lambda_execution_role()
+
+    return boto3.client(
+        "lambda",
+        region_name=AWS_REGION_NAME,
+        **AWS_CREDENTIALS,
+    )
 
 
 def run_lambda_function(
@@ -70,110 +216,129 @@ def run_lambda_function(
     initial_handler=None,
     subprocess_kwargs=(),
 ):
+    """
+    Creates a Lambda function with the given code, and invokes it.
+
+    If the same code is run multiple times, the function will NOT be
+    created anew each time; instead, the existing function is reused.
+    """
     subprocess_kwargs = dict(subprocess_kwargs)
 
-    with tempfile.TemporaryDirectory() as tmpdir:
-        if initial_handler:
-            # If Initial handler value is provided i.e. it is not the default
-            # `test_lambda.test_handler`, then create another dir level so that our path is
-            # test_dir.test_lambda.test_handler
-            test_dir_path = os.path.join(tmpdir, "test_dir")
-            python_init_file = os.path.join(test_dir_path, "__init__.py")
-            os.makedirs(test_dir_path)
-            with open(python_init_file, "w"):
-                # Create __init__ file to make it a python package
-                pass
-
-            test_lambda_py = os.path.join(tmpdir, "test_dir", "test_lambda.py")
-        else:
-            test_lambda_py = os.path.join(tmpdir, "test_lambda.py")
-
-        with open(test_lambda_py, "w") as f:
-            f.write(code)
-
-        if syntax_check:
-            # Check file for valid syntax first, and that the integration does not
-            # crash when not running in Lambda (but rather a local deployment tool
-            # such as chalice's)
-            subprocess.check_call([sys.executable, test_lambda_py])
-
-        fn_name = "test_function_{}".format(uuid.uuid4())
-
-        if layer is None:
-            setup_cfg = os.path.join(tmpdir, "setup.cfg")
-            with open(setup_cfg, "w") as f:
-                f.write("[install]\nprefix=")
-
-            subprocess.check_call(
-                [sys.executable, "setup.py", "sdist", "-d", os.path.join(tmpdir, "..")],
-                **subprocess_kwargs
-            )
+    # Make a unique function name that depends on all the code run in it (function code plus SDK version).
+    # The name needs to be short so the generated event/envelope JSON blobs are small enough to be output
+    # in the log result of the Lambda function.
+    rev = get_git_revision() or SDK_VERSION
+    function_hash = hashlib.shake_256((code + rev).encode("utf-8")).hexdigest(6)
+    fn_name = "test_{}".format(function_hash)
+    full_fn_name = "{}_{}".format(
+        fn_name, runtime.replace(".", "").replace("python", "py")
+    )
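+    # For illustration (hypothetical hash value): shake_256(...).hexdigest(6) yields
+    # 12 hex chars, e.g. "3f9c0a1b2d4e", so for runtime "python3.11" this gives
+    # fn_name "test_3f9c0a1b2d4e" and full_fn_name "test_3f9c0a1b2d4e_py311".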
 
-            subprocess.check_call(
-                "pip install mock==3.0.0 funcsigs -t .",
-                cwd=tmpdir,
-                shell=True,
-                **subprocess_kwargs
-            )
+    function_exists_in_aws = True
+    try:
+        client.get_function(
+            FunctionName=full_fn_name,
+        )
+        print(
+            "Lambda function already exists in AWS, reusing it (and not creating a local one)"
+        )
+    except client.exceptions.ResourceNotFoundException:
+        function_exists_in_aws = False
+
+    if not function_exists_in_aws:
+        tmp_base_dir = tempfile.gettempdir()
+        base_dir = os.path.join(tmp_base_dir, fn_name)
+        dir_already_existing = os.path.isdir(base_dir)
+
+        if dir_already_existing:
+            print("Local Lambda function directory already exists, skipping creation")
 
-            # https://docs.aws.amazon.com/lambda/latest/dg/lambda-python-how-to-create-deployment-package.html
-            subprocess.check_call(
-                "pip install ../*.tar.gz -t .",
-                cwd=tmpdir,
-                shell=True,
-                **subprocess_kwargs
+        if not dir_already_existing:
+            os.mkdir(base_dir)
+            _create_lambda_package(
+                base_dir, code, initial_handler, layer, syntax_check, subprocess_kwargs
             )
 
-            shutil.make_archive(os.path.join(tmpdir, "ball"), "zip", tmpdir)
+            @add_finalizer
+            def clean_up():
+                # this closes the web socket so we don't get a
+                #   ResourceWarning: unclosed <ssl.SSLSocket ...>
+                # warning on every test
+                # based on https://github.com/boto/botocore/pull/1810
+                # (if that's ever merged, this can just become client.close())
+                session = client._endpoint.http_session
+                managers = [session._manager] + list(session._proxy_managers.values())
+                for manager in managers:
+                    manager.clear()
+
+        layers = []
+        environment = {}
+        handler = initial_handler or "test_lambda.test_handler"
+
+        if layer is not None:
+            with open(
+                os.path.join(base_dir, "lambda-layer-package.zip"), "rb"
+            ) as lambda_layer_zip:
+                response = client.publish_layer_version(
+                    LayerName="python-serverless-sdk-test",
+                    Description="Created as part of testsuite for getsentry/sentry-python",
+                    Content={"ZipFile": lambda_layer_zip.read()},
+                )
+
+            layers = [response["LayerVersionArn"]]
+            handler = (
+                "sentry_sdk.integrations.init_serverless_sdk.sentry_lambda_handler"
+            )
+            environment = {
+                "Variables": {
+                    "SENTRY_INITIAL_HANDLER": initial_handler
+                    or "test_lambda.test_handler",
+                    "SENTRY_DSN": "https://123abc@example.com/123",
+                    "SENTRY_TRACES_SAMPLE_RATE": "1.0",
+                }
+            }
 
-            with open(os.path.join(tmpdir, "ball.zip"), "rb") as zip:
+        try:
+            with open(
+                os.path.join(base_dir, "lambda-function-package.zip"), "rb"
+            ) as lambda_function_zip:
                 client.create_function(
-                    FunctionName=fn_name,
+                    Description="Created as part of testsuite for getsentry/sentry-python",
+                    FunctionName=full_fn_name,
                     Runtime=runtime,
                     Timeout=timeout,
-                    Role=os.environ["SENTRY_PYTHON_TEST_AWS_IAM_ROLE"],
-                    Handler="test_lambda.test_handler",
-                    Code={"ZipFile": zip.read()},
-                    Description="Created as part of testsuite for getsentry/sentry-python",
+                    Role=AWS_LAMBDA_EXECUTION_ROLE_ARN,
+                    Handler=handler,
+                    Code={"ZipFile": lambda_function_zip.read()},
+                    Environment=environment,
+                    Layers=layers,
                 )
-        else:
-            subprocess.run(
-                ["zip", "-q", "-x", "**/__pycache__/*", "-r", "ball.zip", "./"],
-                cwd=tmpdir,
-                check=True,
+
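+                # Block until AWS reports the newly created function as active before invoking it.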
+                waiter = client.get_waiter("function_active_v2")
+                waiter.wait(FunctionName=full_fn_name)
+        except client.exceptions.ResourceConflictException:
+            print(
+                "Lambda function already exists. This is fine, we will just invoke it."
             )
 
-            # Default initial handler
-            if not initial_handler:
-                initial_handler = "test_lambda.test_handler"
+    response = client.invoke(
+        FunctionName=full_fn_name,
+        InvocationType="RequestResponse",
+        LogType="Tail",
+        Payload=payload,
+    )
 
-            build_no_code_serverless_function_and_layer(
-                client, tmpdir, fn_name, runtime, timeout, initial_handler
-            )
+    assert 200 <= response["StatusCode"] < 300, response
+    return response
 
-        @add_finalizer
-        def clean_up():
-            client.delete_function(FunctionName=fn_name)
-
-            # this closes the web socket so we don't get a
-            #   ResourceWarning: unclosed <ssl.SSLSocket ...>
-            # warning on every test
-            # based on https://github.com/boto/botocore/pull/1810
-            # (if that's ever merged, this can just become client.close())
-            session = client._endpoint.http_session
-            managers = [session._manager] + list(session._proxy_managers.values())
-            for manager in managers:
-                manager.clear()
-
-        response = client.invoke(
-            FunctionName=fn_name,
-            InvocationType="RequestResponse",
-            LogType="Tail",
-            Payload=payload,
-        )
 
-        assert 200 <= response["StatusCode"] < 300, response
-        return response
+# This is for inspecting new Python runtime environments in AWS Lambda
+# If you need to debug a new runtime, use this REPL to run arbitrary Python or bash commands
+# in that runtime in a Lambda function:
+#
+#    pip3 install click
+#    python3 tests/integrations/aws_lambda/client.py --runtime=python4.0
+#
 
 
 _REPL_CODE = """
@@ -199,7 +364,7 @@ def test_handler(event, context):
 
     @click.command()
     @click.option(
-        "--runtime", required=True, help="name of the runtime to use, eg python3.8"
+        "--runtime", required=True, help="name of the runtime to use, eg python3.11"
     )
     @click.option("--verbose", is_flag=True, default=False)
     def repl(runtime, verbose):
@@ -221,12 +386,14 @@ def repl(runtime, verbose):
                 _REPL_CODE.format(line=line),
                 b"",
                 cleanup.append,
-                subprocess_kwargs={
-                    "stdout": subprocess.DEVNULL,
-                    "stderr": subprocess.DEVNULL,
-                }
-                if not verbose
-                else {},
+                subprocess_kwargs=(
+                    {
+                        "stdout": subprocess.DEVNULL,
+                        "stderr": subprocess.DEVNULL,
+                    }
+                    if not verbose
+                    else {}
+                ),
             )
 
             for line in base64.b64decode(response["LogResult"]).splitlines():
diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py
index c9084beb14..d0879f7fca 100644
--- a/tests/integrations/aws_lambda/test_aws.py
+++ b/tests/integrations/aws_lambda/test_aws.py
@@ -1,32 +1,43 @@
 """
-# AWS Lambda system tests
+# AWS Lambda System Tests
 
-This testsuite uses boto3 to upload actual lambda functions to AWS, execute
-them and assert some things about the externally observed behavior. What that
-means for you is that those tests won't run without AWS access keys:
+This test suite uses boto3 to upload actual Lambda functions to AWS Lambda and invoke them.
 
-    export SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID=..
-    export SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY=...
-    export SENTRY_PYTHON_TEST_AWS_IAM_ROLE="arn:aws:iam::920901907255:role/service-role/lambda"
+To run the tests locally, you need to set these env vars
+(you can find the values in the Sentry password manager by searching for "AWS Lambda for Python SDK Tests"):
 
-If you need to debug a new runtime, use this REPL to figure things out:
+    export SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID="..."
+    export SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY="..."
+
+
+You can use `scripts/aws-cleanup.sh` to delete all files generated by this test suite.
+
+
+If you need to debug a new runtime, use this REPL to run arbitrary Python or bash commands
+in that runtime in a Lambda function (see the bottom of client.py for more information):
 
     pip3 install click
     python3 tests/integrations/aws_lambda/client.py --runtime=python4.0
+
+IMPORTANT:
+
+While this test suite runs, temporary folders are created for compiling the Lambda functions.
+These temporary folders will not be cleaned up, because in CI the generated files have to be
+shared between tests and thus cannot be deleted right after use.
+
+If you run your tests locally, you need to clean up the temporary folders manually. The location of
+the temporary folders is printed when running a test.
 """
+
 import base64
 import json
-import os
 import re
 from textwrap import dedent
 
 import pytest
 
-boto3 = pytest.importorskip("boto3")
 
 LAMBDA_PRELUDE = """
-from __future__ import print_function
-
 from sentry_sdk.integrations.aws_lambda import AwsLambdaIntegration, get_lambda_bootstrap
 import sentry_sdk
 import json
@@ -34,56 +45,84 @@
 
 from sentry_sdk.transport import HttpTransport
 
-def event_processor(event):
+def truncate_data(data):
     # AWS Lambda truncates the log output to 4kb, which is small enough to miss
     # parts of even a single error-event/transaction-envelope pair if considered
     # in full, so only grab the data we need.
 
-    event_data = {}
-    event_data["contexts"] = {}
-    event_data["contexts"]["trace"] = event.get("contexts", {}).get("trace")
-    event_data["exception"] = event.get("exception")
-    event_data["extra"] = event.get("extra")
-    event_data["level"] = event.get("level")
-    event_data["request"] = event.get("request")
-    event_data["tags"] = event.get("tags")
-    event_data["transaction"] = event.get("transaction")
+    cleaned_data = {}
 
-    return event_data
+    if data.get("type") is not None:
+        cleaned_data["type"] = data["type"]
 
-def envelope_processor(envelope):
-    # AWS Lambda truncates the log output to 4kb, which is small enough to miss
-    # parts of even a single error-event/transaction-envelope pair if considered
-    # in full, so only grab the data we need.
+    if data.get("contexts") is not None:
+        cleaned_data["contexts"] = {}
 
-    (item,) = envelope.items
-    envelope_json = json.loads(item.get_bytes())
+        if data["contexts"].get("trace") is not None:
+            cleaned_data["contexts"]["trace"] = data["contexts"].get("trace")
+
+    if data.get("transaction") is not None:
+        cleaned_data["transaction"] = data.get("transaction")
+
+    if data.get("request") is not None:
+        cleaned_data["request"] = data.get("request")
+
+    if data.get("tags") is not None:
+        cleaned_data["tags"] = data.get("tags")
+
+    if data.get("exception") is not None:
+        cleaned_data["exception"] = data.get("exception")
 
-    envelope_data = {}
-    envelope_data["contexts"] = {}
-    envelope_data["type"] = envelope_json["type"]
-    envelope_data["transaction"] = envelope_json["transaction"]
-    envelope_data["contexts"]["trace"] = envelope_json["contexts"]["trace"]
-    envelope_data["request"] = envelope_json["request"]
-    envelope_data["tags"] = envelope_json["tags"]
+        for value in cleaned_data["exception"]["values"]:
+            for frame in value.get("stacktrace", {}).get("frames", []):
+                del frame["vars"]
+                del frame["pre_context"]
+                del frame["context_line"]
+                del frame["post_context"]
 
-    return envelope_data
+    if data.get("extra") is not None:
+        cleaned_data["extra"] = {}
+
+        for key in data["extra"].keys():
+            if key == "lambda":
+                for lambda_key in data["extra"]["lambda"].keys():
+                    if lambda_key in ["function_name"]:
+                        cleaned_data["extra"].setdefault("lambda", {})[lambda_key] = data["extra"]["lambda"][lambda_key]
+            elif key == "cloudwatch logs":
+                for cloudwatch_key in data["extra"]["cloudwatch logs"].keys():
+                    if cloudwatch_key in ["url", "log_group", "log_stream"]:
+                        cleaned_data["extra"].setdefault("cloudwatch logs", {})[cloudwatch_key] = data["extra"]["cloudwatch logs"][cloudwatch_key]
+
+    if data.get("level") is not None:
+        cleaned_data["level"] = data.get("level")
+
+    if data.get("message") is not None:
+        cleaned_data["message"] = data.get("message")
+
+    if "contexts" not in cleaned_data:
+        raise Exception(json.dumps(data))
+
+    return cleaned_data
+
+def event_processor(event):
+    return truncate_data(event)
+
+def envelope_processor(envelope):
+    (item,) = envelope.items
+    item_json = json.loads(item.get_bytes())
+
+    return truncate_data(item_json)
 
 
 class TestTransport(HttpTransport):
     def _send_event(self, event):
         event = event_processor(event)
-        # Writing a single string to stdout holds the GIL (seems like) and
-        # therefore cannot be interleaved with other threads. This is why we
-        # explicitly add a newline at the end even though `print` would provide
-        # us one.
         print("\\nEVENT: {}\\n".format(json.dumps(event)))
 
     def _send_envelope(self, envelope):
         envelope = envelope_processor(envelope)
         print("\\nENVELOPE: {}\\n".format(json.dumps(envelope)))
 
-
 def init_sdk(timeout_warning=False, **extra_init_args):
     sentry_sdk.init(
         dsn="https://123abc@example.com/123",
@@ -97,16 +136,19 @@ def init_sdk(timeout_warning=False, **extra_init_args):
 
 @pytest.fixture
 def lambda_client():
-    if "SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID" not in os.environ:
-        pytest.skip("AWS environ vars not set")
-
     from tests.integrations.aws_lambda.client import get_boto_client
 
     return get_boto_client()
 
 
 @pytest.fixture(
-    params=["python3.6", "python3.7", "python3.8", "python3.9", "python2.7"]
+    params=[
+        "python3.8",
+        "python3.9",
+        "python3.10",
+        "python3.11",
+        "python3.12",
+    ]
 )
 def lambda_runtime(request):
     return request.param
@@ -131,8 +173,13 @@ def inner(
             initial_handler=initial_handler,
         )
 
-        # for better debugging
-        response["LogResult"] = base64.b64decode(response["LogResult"]).splitlines()
+        # Make sure the "ENVELOPE:" and "EVENT:" log entries always start on a new line. (Sometimes they don't.)
+        response["LogResult"] = (
+            base64.b64decode(response["LogResult"])
+            .replace(b"EVENT:", b"\nEVENT:")
+            .replace(b"ENVELOPE:", b"\nENVELOPE:")
+            .splitlines()
+        )
         response["Payload"] = json.loads(response["Payload"].read().decode("utf-8"))
         del response["ResponseMetadata"]
 
@@ -156,19 +203,14 @@ def inner(
 
 
 def test_basic(run_lambda_function):
-    envelopes, events, response = run_lambda_function(
+    _, events, response = run_lambda_function(
         LAMBDA_PRELUDE
         + dedent(
             """
         init_sdk()
 
-        def event_processor(event):
-            # Delay event output like this to test proper shutdown
-            time.sleep(1)
-            return event
-
         def test_handler(event, context):
-            raise Exception("something went wrong")
+            raise Exception("Oh!")
         """
         ),
         b'{"foo": "bar"}',
@@ -180,7 +222,7 @@ def test_handler(event, context):
     assert event["level"] == "error"
     (exception,) = event["exception"]["values"]
     assert exception["type"] == "Exception"
-    assert exception["value"] == "something went wrong"
+    assert exception["value"] == "Oh!"
 
     (frame1,) = exception["stacktrace"]["frames"]
     assert frame1["filename"] == "test_lambda.py"
@@ -189,15 +231,16 @@ def test_handler(event, context):
 
     assert frame1["in_app"] is True
 
-    assert exception["mechanism"] == {"type": "aws_lambda", "handled": False}
+    assert exception["mechanism"]["type"] == "aws_lambda"
+    assert not exception["mechanism"]["handled"]
 
-    assert event["extra"]["lambda"]["function_name"].startswith("test_function_")
+    assert event["extra"]["lambda"]["function_name"].startswith("test_")
 
     logs_url = event["extra"]["cloudwatch logs"]["url"]
     assert logs_url.startswith("https://console.aws.amazon.com/cloudwatch/home?region=")
     assert not re.search("(=;|=$)", logs_url)
     assert event["extra"]["cloudwatch logs"]["log_group"].startswith(
-        "/aws/lambda/test_function_"
+        "/aws/lambda/test_"
     )
 
     log_stream_re = "^[0-9]{4}/[0-9]{2}/[0-9]{2}/\\[[^\\]]+][a-f0-9]+$"
@@ -211,27 +254,28 @@ def test_initialization_order(run_lambda_function):
     as seen by AWS already runs. At this point at least draining the queue
     should work."""
 
-    envelopes, events, _response = run_lambda_function(
+    _, events, _ = run_lambda_function(
         LAMBDA_PRELUDE
         + dedent(
             """
             def test_handler(event, context):
                 init_sdk()
-                sentry_sdk.capture_exception(Exception("something went wrong"))
+                sentry_sdk.capture_exception(Exception("Oh!"))
         """
         ),
         b'{"foo": "bar"}',
     )
 
     (event,) = events
+
     assert event["level"] == "error"
     (exception,) = event["exception"]["values"]
     assert exception["type"] == "Exception"
-    assert exception["value"] == "something went wrong"
+    assert exception["value"] == "Oh!"
 
 
 def test_request_data(run_lambda_function):
-    envelopes, events, _response = run_lambda_function(
+    _, events, _ = run_lambda_function(
         LAMBDA_PRELUDE
         + dedent(
             """
@@ -248,7 +292,7 @@ def test_handler(event, context):
           "httpMethod": "GET",
           "headers": {
             "Host": "iwsz2c7uwi.execute-api.us-east-1.amazonaws.com",
-            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.13; rv:62.0) Gecko/20100101 Firefox/62.0",
+            "User-Agent": "custom",
             "X-Forwarded-Proto": "https"
           },
           "queryStringParameters": {
@@ -273,7 +317,7 @@ def test_handler(event, context):
     assert event["request"] == {
         "headers": {
             "Host": "iwsz2c7uwi.execute-api.us-east-1.amazonaws.com",
-            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.13; rv:62.0) Gecko/20100101 Firefox/62.0",
+            "User-Agent": "custom",
             "X-Forwarded-Proto": "https",
         },
         "method": "GET",
@@ -283,27 +327,24 @@ def test_handler(event, context):
 
 
 def test_init_error(run_lambda_function, lambda_runtime):
-    if lambda_runtime == "python2.7":
-        pytest.skip("initialization error not supported on Python 2.7")
-
-    envelopes, events, response = run_lambda_function(
+    _, events, _ = run_lambda_function(
         LAMBDA_PRELUDE
-        + (
-            "def event_processor(event):\n"
-            '    return event["exception"]["values"][0]["value"]\n'
-            "init_sdk()\n"
-            "func()"
+        + dedent(
+            """
+        init_sdk()
+        func()
+        """
         ),
         b'{"foo": "bar"}',
         syntax_check=False,
     )
 
     (event,) = events
-    assert "name 'func' is not defined" in event
+    assert event["exception"]["values"][0]["value"] == "name 'func' is not defined"
 
 
 def test_timeout_error(run_lambda_function):
-    envelopes, events, response = run_lambda_function(
+    _, events, _ = run_lambda_function(
         LAMBDA_PRELUDE
         + dedent(
             """
@@ -315,7 +356,7 @@ def test_handler(event, context):
         """
         ),
         b'{"foo": "bar"}',
-        timeout=3,
+        timeout=2,
     )
 
     (event,) = events
@@ -323,19 +364,20 @@ def test_handler(event, context):
     (exception,) = event["exception"]["values"]
     assert exception["type"] == "ServerlessTimeoutWarning"
     assert exception["value"] in (
-        "WARNING : Function is expected to get timed out. Configured timeout duration = 4 seconds.",
         "WARNING : Function is expected to get timed out. Configured timeout duration = 3 seconds.",
+        "WARNING : Function is expected to get timed out. Configured timeout duration = 2 seconds.",
     )
 
-    assert exception["mechanism"] == {"type": "threading", "handled": False}
+    assert exception["mechanism"]["type"] == "threading"
+    assert not exception["mechanism"]["handled"]
 
-    assert event["extra"]["lambda"]["function_name"].startswith("test_function_")
+    assert event["extra"]["lambda"]["function_name"].startswith("test_")
 
     logs_url = event["extra"]["cloudwatch logs"]["url"]
     assert logs_url.startswith("https://console.aws.amazon.com/cloudwatch/home?region=")
     assert not re.search("(=;|=$)", logs_url)
     assert event["extra"]["cloudwatch logs"]["log_group"].startswith(
-        "/aws/lambda/test_function_"
+        "/aws/lambda/test_"
     )
 
     log_stream_re = "^[0-9]{4}/[0-9]{2}/[0-9]{2}/\\[[^\\]]+][a-f0-9]+$"
@@ -345,7 +387,7 @@ def test_handler(event, context):
 
 
 def test_performance_no_error(run_lambda_function):
-    envelopes, events, response = run_lambda_function(
+    envelopes, _, _ = run_lambda_function(
         LAMBDA_PRELUDE
         + dedent(
             """
@@ -359,38 +401,41 @@ def test_handler(event, context):
     )
 
     (envelope,) = envelopes
+
     assert envelope["type"] == "transaction"
-    assert envelope["contexts"]["trace"]["op"] == "serverless.function"
-    assert envelope["transaction"].startswith("test_function_")
+    assert envelope["contexts"]["trace"]["op"] == "function.aws"
+    assert envelope["transaction"].startswith("test_")
     assert envelope["transaction"] in envelope["request"]["url"]
 
 
 def test_performance_error(run_lambda_function):
-    envelopes, events, response = run_lambda_function(
+    envelopes, _, _ = run_lambda_function(
         LAMBDA_PRELUDE
         + dedent(
             """
         init_sdk(traces_sample_rate=1.0)
 
         def test_handler(event, context):
-            raise Exception("something went wrong")
+            raise Exception("Oh!")
         """
         ),
         b'{"foo": "bar"}',
     )
 
-    (event,) = events
-    assert event["level"] == "error"
-    (exception,) = event["exception"]["values"]
-    assert exception["type"] == "Exception"
-    assert exception["value"] == "something went wrong"
+    (
+        error_event,
+        transaction_event,
+    ) = envelopes
 
-    (envelope,) = envelopes
+    assert error_event["level"] == "error"
+    (exception,) = error_event["exception"]["values"]
+    assert exception["type"] == "Exception"
+    assert exception["value"] == "Oh!"
 
-    assert envelope["type"] == "transaction"
-    assert envelope["contexts"]["trace"]["op"] == "serverless.function"
-    assert envelope["transaction"].startswith("test_function_")
-    assert envelope["transaction"] in envelope["request"]["url"]
+    assert transaction_event["type"] == "transaction"
+    assert transaction_event["contexts"]["trace"]["op"] == "function.aws"
+    assert transaction_event["transaction"].startswith("test_")
+    assert transaction_event["transaction"] in transaction_event["request"]["url"]
 
 
 @pytest.mark.parametrize(
@@ -417,29 +462,25 @@ def test_handler(event, context):
             [
                 {
                     "headers": {
-                        "Host": "dogs.are.great",
-                        "X-Forwarded-Proto": "http"
+                        "Host": "x1.io",
+                        "X-Forwarded-Proto": "https"
                     },
                     "httpMethod": "GET",
-                    "path": "/tricks/kangaroo",
+                    "path": "/path1",
                     "queryStringParameters": {
-                        "completed_successfully": "true",
-                        "treat_provided": "true",
-                        "treat_type": "cheese"
+                        "done": "false"
                     },
                     "dog": "Maisey"
                 },
                 {
                     "headers": {
-                        "Host": "dogs.are.great",
+                        "Host": "x2.io",
                         "X-Forwarded-Proto": "http"
                     },
-                    "httpMethod": "GET",
-                    "path": "/tricks/kangaroo",
+                    "httpMethod": "POST",
+                    "path": "/path2",
                     "queryStringParameters": {
-                        "completed_successfully": "true",
-                        "treat_provided": "true",
-                        "treat_type": "cheese"
+                        "done": "true"
                     },
                     "dog": "Charlie"
                 }
@@ -448,6 +489,7 @@ def test_handler(event, context):
             True,
             2,
         ),
+        (b"[]", False, 1),
     ],
 )
 def test_non_dict_event(
@@ -457,14 +499,14 @@ def test_non_dict_event(
     batch_size,
     DictionaryContaining,  # noqa:N803
 ):
-    envelopes, events, response = run_lambda_function(
+    envelopes, _, response = run_lambda_function(
         LAMBDA_PRELUDE
         + dedent(
             """
         init_sdk(traces_sample_rate=1.0)
 
         def test_handler(event, context):
-            raise Exception("More treats, please!")
+            raise Exception("Oh?")
         """
         ),
         aws_event,
@@ -472,50 +514,50 @@ def test_handler(event, context):
 
     assert response["FunctionError"] == "Unhandled"
 
-    error_event = events[0]
+    (
+        error_event,
+        transaction_event,
+    ) = envelopes
     assert error_event["level"] == "error"
-    assert error_event["contexts"]["trace"]["op"] == "serverless.function"
+    assert error_event["contexts"]["trace"]["op"] == "function.aws"
 
     function_name = error_event["extra"]["lambda"]["function_name"]
-    assert function_name.startswith("test_function_")
+    assert function_name.startswith("test_")
     assert error_event["transaction"] == function_name
 
     exception = error_event["exception"]["values"][0]
     assert exception["type"] == "Exception"
-    assert exception["value"] == "More treats, please!"
+    assert exception["value"] == "Oh?"
     assert exception["mechanism"]["type"] == "aws_lambda"
 
-    envelope = envelopes[0]
-    assert envelope["type"] == "transaction"
-    assert envelope["contexts"]["trace"] == DictionaryContaining(
+    assert transaction_event["type"] == "transaction"
+    assert transaction_event["contexts"]["trace"] == DictionaryContaining(
         error_event["contexts"]["trace"]
     )
-    assert envelope["contexts"]["trace"]["status"] == "internal_error"
-    assert envelope["transaction"] == error_event["transaction"]
-    assert envelope["request"]["url"] == error_event["request"]["url"]
+    assert transaction_event["contexts"]["trace"]["status"] == "internal_error"
+    assert transaction_event["transaction"] == error_event["transaction"]
+    assert transaction_event["request"]["url"] == error_event["request"]["url"]
 
     if has_request_data:
         request_data = {
-            "headers": {"Host": "dogs.are.great", "X-Forwarded-Proto": "http"},
+            "headers": {"Host": "x1.io", "X-Forwarded-Proto": "https"},
             "method": "GET",
-            "url": "http://dogs.are.great/tricks/kangaroo",
+            "url": "https://x1.io/path1",
             "query_string": {
-                "completed_successfully": "true",
-                "treat_provided": "true",
-                "treat_type": "cheese",
+                "done": "false",
             },
         }
     else:
         request_data = {"url": "awslambda:///{}".format(function_name)}
 
     assert error_event["request"] == request_data
-    assert envelope["request"] == request_data
+    assert transaction_event["request"] == request_data
 
     if batch_size > 1:
         assert error_event["tags"]["batch_size"] == batch_size
         assert error_event["tags"]["batch_request"] is True
-        assert envelope["tags"]["batch_size"] == batch_size
-        assert envelope["tags"]["batch_request"] is True
+        assert transaction_event["tags"]["batch_size"] == batch_size
+        assert transaction_event["tags"]["batch_request"] is True
 
 
 def test_traces_sampler_gets_correct_values_in_sampling_context(
@@ -552,7 +594,7 @@ def test_traces_sampler_gets_correct_values_in_sampling_context(
 
     import inspect
 
-    envelopes, events, response = run_lambda_function(
+    _, _, response = run_lambda_function(
         LAMBDA_PRELUDE
         + dedent(inspect.getsource(StringContaining))
         + dedent(inspect.getsource(DictionaryContaining))
@@ -587,12 +629,12 @@ def test_handler(event, context):
                                 "aws_event": DictionaryContaining({
                                     "httpMethod": "GET",
                                     "path": "/sit/stay/rollover",
-                                    "headers": {"Host": "dogs.are.great", "X-Forwarded-Proto": "http"},
+                                    "headers": {"Host": "x.io", "X-Forwarded-Proto": "http"},
                                 }),
                                 "aws_context": ObjectDescribedBy(
                                     type=get_lambda_bootstrap().LambdaContext,
                                     attrs={
-                                        'function_name': StringContaining("test_function"),
+                                        'function_name': StringContaining("test_"),
                                         'function_version': '$LATEST',
                                     }
                                 )
@@ -614,12 +656,15 @@ def test_handler(event, context):
             )
         """
         ),
-        b'{"httpMethod": "GET", "path": "/sit/stay/rollover", "headers": {"Host": "dogs.are.great", "X-Forwarded-Proto": "http"}}',
+        b'{"httpMethod": "GET", "path": "/sit/stay/rollover", "headers": {"Host": "x.io", "X-Forwarded-Proto": "http"}}',
     )
 
     assert response["Payload"]["AssertionError raised"] is False
 
 
+@pytest.mark.xfail(
+    reason="The limited log output we depend on is being clogged by a new warning"
+)
 def test_serverless_no_code_instrumentation(run_lambda_function):
     """
     Test that ensures that just by adding a lambda layer containing the
@@ -646,7 +691,7 @@ def test_handler(event, context):
                 assert isinstance(current_client.options['integrations'][0],
                                   sentry_sdk.integrations.aws_lambda.AwsLambdaIntegration)
 
-                raise Exception("something went wrong")
+                raise Exception("Oh!")
             """
             ),
             b'{"foo": "bar"}',
@@ -659,6 +704,187 @@ def test_handler(event, context):
         assert response["Payload"]["errorType"] != "AssertionError"
 
         assert response["Payload"]["errorType"] == "Exception"
-        assert response["Payload"]["errorMessage"] == "something went wrong"
+        assert response["Payload"]["errorMessage"] == "Oh!"
 
         assert "sentry_handler" in response["LogResult"][3].decode("utf-8")
+
+
+@pytest.mark.xfail(
+    reason="The limited log output we depend on is being clogged by a new warning"
+)
+def test_error_has_new_trace_context_performance_enabled(run_lambda_function):
+    envelopes, _, _ = run_lambda_function(
+        LAMBDA_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=1.0)
+
+        def test_handler(event, context):
+            sentry_sdk.capture_message("hi")
+            raise Exception("Oh!")
+        """
+        ),
+        payload=b'{"foo": "bar"}',
+    )
+
+    (msg_event, error_event, transaction_event) = envelopes
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert "trace" in transaction_event["contexts"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+def test_error_has_new_trace_context_performance_disabled(run_lambda_function):
+    _, events, _ = run_lambda_function(
+        LAMBDA_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=None)  # this is the default, just added for clarity
+
+        def test_handler(event, context):
+            sentry_sdk.capture_message("hi")
+            raise Exception("Oh!")
+        """
+        ),
+        payload=b'{"foo": "bar"}',
+    )
+
+    (msg_event, error_event) = events
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+@pytest.mark.xfail(
+    reason="The limited log output we depend on is being clogged by a new warning"
+)
+def test_error_has_existing_trace_context_performance_enabled(run_lambda_function):
+    trace_id = "471a43a4192642f0b136d5159a501701"
+    parent_span_id = "6e8f22c393e68f19"
+    parent_sampled = 1
+    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
+
+    # Here we simulate AWS API Gateway's behavior of passing HTTP headers
+    # as the `headers` dict in the event passed to the Lambda function.
+    payload = {
+        "headers": {
+            "sentry-trace": sentry_trace_header,
+        }
+    }
+
+    envelopes, _, _ = run_lambda_function(
+        LAMBDA_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=1.0)
+
+        def test_handler(event, context):
+            sentry_sdk.capture_message("hi")
+            raise Exception("Oh!")
+        """
+        ),
+        payload=json.dumps(payload).encode(),
+    )
+
+    (msg_event, error_event, transaction_event) = envelopes
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert "trace" in transaction_event["contexts"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+        == "471a43a4192642f0b136d5159a501701"
+    )
+
+
+def test_error_has_existing_trace_context_performance_disabled(run_lambda_function):
+    trace_id = "471a43a4192642f0b136d5159a501701"
+    parent_span_id = "6e8f22c393e68f19"
+    parent_sampled = 1
+    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
+
+    # Here we simulate AWS API Gateway's behavior of passing HTTP headers
+    # as the `headers` dict in the event passed to the Lambda function.
+    payload = {
+        "headers": {
+            "sentry-trace": sentry_trace_header,
+        }
+    }
+
+    _, events, _ = run_lambda_function(
+        LAMBDA_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=None)  # this is the default, just added for clarity
+
+        def test_handler(event, context):
+            sentry_sdk.capture_message("hi")
+            raise Exception("Oh!")
+        """
+        ),
+        payload=json.dumps(payload).encode(),
+    )
+
+    (msg_event, error_event) = events
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == "471a43a4192642f0b136d5159a501701"
+    )
+
+
+def test_basic_with_eventbridge_source(run_lambda_function):
+    _, events, response = run_lambda_function(
+        LAMBDA_PRELUDE
+        + dedent(
+            """
+        init_sdk()
+
+        def test_handler(event, context):
+            raise Exception("Oh!")
+        """
+        ),
+        b'[{"topic":"lps-ranges","partition":1,"offset":0,"timestamp":1701268939207,"timestampType":"CREATE_TIME","key":"REDACTED","value":"REDACTED","headers":[],"eventSourceArn":"REDACTED","bootstrapServers":"REDACTED","eventSource":"aws:kafka","eventSourceKey":"lps-ranges-1"}]',
+    )
+
+    assert response["FunctionError"] == "Unhandled"
+
+    (event,) = events
+    assert event["level"] == "error"
+    (exception,) = event["exception"]["values"]
+    assert exception["type"] == "Exception"
+    assert exception["value"] == "Oh!"
diff --git a/tests/integrations/beam/__init__.py b/tests/integrations/beam/__init__.py
new file mode 100644
index 0000000000..f4fe442d63
--- /dev/null
+++ b/tests/integrations/beam/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("apache_beam")
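
The new per-package `__init__.py` above moves `pytest.importorskip` out of the test module and into the package itself, so the whole directory is skipped at collection time when the integration's dependency is missing; the corresponding call is then removed from the test module, as in the `test_beam.py` hunk that follows. A sketch of the pattern for a hypothetical integration:

```python
# tests/integrations/<integration>/__init__.py  (hypothetical path)
import pytest

# Skips collection of every test module in this package when the
# dependency cannot be imported, instead of erroring at import time.
pytest.importorskip("some_dependency")  # placeholder module name
```
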
diff --git a/tests/integrations/beam/test_beam.py b/tests/integrations/beam/test_beam.py
index 7aeb617e3c..7926521ca6 100644
--- a/tests/integrations/beam/test_beam.py
+++ b/tests/integrations/beam/test_beam.py
@@ -1,8 +1,6 @@
 import pytest
 import inspect
 
-pytest.importorskip("apache_beam")
-
 import dill
 
 from sentry_sdk.integrations.beam import (
@@ -14,9 +12,14 @@
 from apache_beam.typehints.trivial_inference import instance_to_type
 from apache_beam.typehints.decorators import getcallargs_forhints
 from apache_beam.transforms.core import DoFn, ParDo, _DoFnParam, CallableWrapperDoFn
-from apache_beam.runners.common import DoFnInvoker, OutputProcessor, DoFnContext
+from apache_beam.runners.common import DoFnInvoker, DoFnContext
 from apache_beam.utils.windowed_value import WindowedValue
 
+try:
+    from apache_beam.runners.common import OutputHandler
+except ImportError:
+    from apache_beam.runners.common import OutputProcessor as OutputHandler
+
 
 def foo():
     return True
@@ -151,9 +154,16 @@ def test_monkey_patch_signature(f, args, kwargs):
         pass
 
 
-class _OutputProcessor(OutputProcessor):
+class _OutputHandler(OutputHandler):
     def process_outputs(
         self, windowed_input_element, results, watermark_estimator=None
+    ):
+        self.handle_process_outputs(
+            windowed_input_element, results, watermark_estimator
+        )
+
+    def handle_process_outputs(
+        self, windowed_input_element, results, watermark_estimator=None
     ):
         print(windowed_input_element)
         try:
@@ -170,7 +180,7 @@ def inner(fn):
         # Little hack to avoid having to run the whole pipeline.
         pardo = ParDo(fn)
         signature = pardo._signature
-        output_processor = _OutputProcessor()
+        output_processor = _OutputHandler()
         return DoFnInvoker.create_invoker(
             signature, output_processor, DoFnContext("test")
         )
diff --git a/tests/integrations/boto3/test_s3.py b/tests/integrations/boto3/test_s3.py
index 67376b55d4..8c05b72a3e 100644
--- a/tests/integrations/boto3/test_s3.py
+++ b/tests/integrations/boto3/test_s3.py
@@ -1,9 +1,18 @@
+import pytest
+
+import boto3
+
 from sentry_sdk import Hub
 from sentry_sdk.integrations.boto3 import Boto3Integration
+from tests.conftest import ApproxDict
 from tests.integrations.boto3.aws_mock import MockResponse
 from tests.integrations.boto3 import read_fixture
 
-import boto3
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
 
 session = boto3.Session(
     aws_access_key_id="-",
@@ -30,7 +39,7 @@ def test_basic(sentry_init, capture_events):
     assert event["type"] == "transaction"
     assert len(event["spans"]) == 1
     (span,) = event["spans"]
-    assert span["op"] == "aws.request"
+    assert span["op"] == "http.client"
     assert span["description"] == "aws.s3.ListObjects"
 
 
@@ -53,11 +62,21 @@ def test_streaming(sentry_init, capture_events):
     (event,) = events
     assert event["type"] == "transaction"
     assert len(event["spans"]) == 2
+
     span1 = event["spans"][0]
-    assert span1["op"] == "aws.request"
+    assert span1["op"] == "http.client"
     assert span1["description"] == "aws.s3.GetObject"
+    assert span1["data"] == ApproxDict(
+        {
+            "http.method": "GET",
+            "aws.request.url": "https://bucket.s3.amazonaws.com/foo.pdf",
+            "http.fragment": "",
+            "http.query": "",
+        }
+    )
+
     span2 = event["spans"][1]
-    assert span2["op"] == "aws.request.stream"
+    assert span2["op"] == "http.client.stream"
     assert span2["description"] == "aws.s3.GetObject"
     assert span2["parent_span_id"] == span1["span_id"]
 
@@ -80,6 +99,40 @@ def test_streaming_close(sentry_init, capture_events):
     assert event["type"] == "transaction"
     assert len(event["spans"]) == 2
     span1 = event["spans"][0]
-    assert span1["op"] == "aws.request"
+    assert span1["op"] == "http.client"
     span2 = event["spans"][1]
-    assert span2["op"] == "aws.request.stream"
+    assert span2["op"] == "http.client.stream"
+
+
+@pytest.mark.tests_internal_exceptions
+def test_omit_url_data_if_parsing_fails(sentry_init, capture_events):
+    sentry_init(traces_sample_rate=1.0, integrations=[Boto3Integration()])
+    events = capture_events()
+
+    s3 = session.resource("s3")
+
+    with mock.patch(
+        "sentry_sdk.integrations.boto3.parse_url",
+        side_effect=ValueError,
+    ):
+        with Hub.current.start_transaction() as transaction, MockResponse(
+            s3.meta.client, 200, {}, read_fixture("s3_list.xml")
+        ):
+            bucket = s3.Bucket("bucket")
+            items = [obj for obj in bucket.objects.all()]
+            assert len(items) == 2
+            assert items[0].key == "foo.txt"
+            assert items[1].key == "bar.txt"
+            transaction.finish()
+
+    (event,) = events
+    assert event["spans"][0]["data"] == ApproxDict(
+        {
+            "http.method": "GET",
+            # no url data
+        }
+    )
+
+    assert "aws.request.url" not in event["spans"][0]["data"]
+    assert "http.fragment" not in event["spans"][0]["data"]
+    assert "http.query" not in event["spans"][0]["data"]
diff --git a/tests/integrations/bottle/__init__.py b/tests/integrations/bottle/__init__.py
new file mode 100644
index 0000000000..39015ee6f2
--- /dev/null
+++ b/tests/integrations/bottle/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("bottle")
diff --git a/tests/integrations/bottle/test_bottle.py b/tests/integrations/bottle/test_bottle.py
index ec133e4d75..660acb3902 100644
--- a/tests/integrations/bottle/test_bottle.py
+++ b/tests/integrations/bottle/test_bottle.py
@@ -2,12 +2,10 @@
 import pytest
 import logging
 
-
-pytest.importorskip("bottle")
-
 from io import BytesIO
 from bottle import Bottle, debug as set_debug, abort, redirect
 from sentry_sdk import capture_message
+from sentry_sdk.serializer import MAX_DATABAG_BREADTH
 
 from sentry_sdk.integrations.logging import LoggingIntegration
 from werkzeug.test import Client
@@ -24,6 +22,11 @@ def hi():
         capture_message("hi")
         return "ok"
 
+    @app.route("/message/")
+    def hi_with_id(message_id):
+        capture_message("hi")
+        return "ok"
+
     @app.route("/message-named-route", name="hi")
     def named_hi():
         capture_message("hi")
@@ -55,20 +58,21 @@ def test_has_context(sentry_init, app, capture_events, get_client):
 
 
 @pytest.mark.parametrize(
-    "url,transaction_style,expected_transaction",
+    "url,transaction_style,expected_transaction,expected_source",
     [
-        ("/message", "endpoint", "hi"),
-        ("/message", "url", "/message"),
-        ("/message-named-route", "endpoint", "hi"),
+        ("/message", "endpoint", "hi", "component"),
+        ("/message", "url", "/message", "route"),
+        ("/message/123456", "url", "/message/", "route"),
+        ("/message-named-route", "endpoint", "hi", "component"),
     ],
 )
 def test_transaction_style(
     sentry_init,
-    app,
-    capture_events,
+    url,
     transaction_style,
     expected_transaction,
-    url,
+    expected_source,
+    capture_events,
     get_client,
 ):
     sentry_init(
@@ -79,11 +83,14 @@ def test_transaction_style(
     events = capture_events()
 
     client = get_client()
-    response = client.get("/message")
+    response = client.get(url)
     assert response[1] == "200 OK"
 
     (event,) = events
+    # We use endswith() because in Python 2.7 the transaction is "test_bottle.hi"
+    # and in later Pythons it is "test_bottle.app.<locals>.hi"
     assert event["transaction"].endswith(expected_transaction)
+    assert event["transaction_info"] == {"source": expected_source}
 
 
 @pytest.mark.parametrize("debug", (True, False), ids=["debug", "nodebug"])
@@ -141,9 +148,9 @@ def index():
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]["bar"]) == 512
+    assert len(event["request"]["data"]["foo"]["bar"]) == 1024
 
 
 @pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
@@ -191,9 +198,9 @@ def index():
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]) == 512
+    assert len(event["request"]["data"]["foo"]) == 1024
 
 
 @pytest.mark.parametrize("input_char", ["a", b"a"])
@@ -201,7 +208,7 @@ def test_too_large_raw_request(
     sentry_init, input_char, capture_events, app, get_client
 ):
     sentry_init(
-        integrations=[bottle_sentry.BottleIntegration()], request_bodies="small"
+        integrations=[bottle_sentry.BottleIntegration()], max_request_body_size="small"
     )
 
     data = input_char * 2000
@@ -225,15 +232,13 @@ def index():
     assert response[1] == "200 OK"
 
     (event,) = events
-    assert event["_meta"]["request"]["data"] == {
-        "": {"len": 2000, "rem": [["!config", "x", 0, 2000]]}
-    }
+    assert event["_meta"]["request"]["data"] == {"": {"rem": [["!config", "x"]]}}
     assert not event["request"]["data"]
 
 
 def test_files_and_form(sentry_init, capture_events, app, get_client):
     sentry_init(
-        integrations=[bottle_sentry.BottleIntegration()], request_bodies="always"
+        integrations=[bottle_sentry.BottleIntegration()], max_request_body_size="always"
     )
 
     data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}
@@ -256,19 +261,49 @@ def index():
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]) == 512
+    assert len(event["request"]["data"]["foo"]) == 1024
 
     assert event["_meta"]["request"]["data"]["file"] == {
         "": {
-            "len": -1,
-            "rem": [["!raw", "x", 0, -1]],
-        }  # bottle default content-length is -1
+            "rem": [["!raw", "x"]],
+        }
     }
     assert not event["request"]["data"]["file"]
 
 
+def test_json_not_truncated_if_max_request_body_size_is_always(
+    sentry_init, capture_events, app, get_client
+):
+    sentry_init(
+        integrations=[bottle_sentry.BottleIntegration()], max_request_body_size="always"
+    )
+
+    data = {
+        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
+    }
+
+    @app.route("/", method="POST")
+    def index():
+        import bottle
+
+        assert bottle.request.json == data
+        assert bottle.request.body.read() == json.dumps(data).encode("ascii")
+        capture_message("hi")
+        return "ok"
+
+    events = capture_events()
+
+    client = get_client()
+
+    response = client.post("/", content_type="application/json", data=json.dumps(data))
+    assert response[1] == "200 OK"
+
+    (event,) = events
+    assert event["request"]["data"] == data
+
+
 @pytest.mark.parametrize(
     "integrations",
     [
@@ -348,10 +383,8 @@ def crashing_app(environ, start_response):
     assert error is exc.value
 
     (event,) = events
-    assert event["exception"]["values"][0]["mechanism"] == {
-        "type": "bottle",
-        "handled": False,
-    }
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "bottle"
+    assert event["exception"]["values"][0]["mechanism"]["handled"] is False
 
 
 def test_500(sentry_init, capture_events, app, get_client):
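
Alongside the larger truncation limit (1024 characters instead of 512, annotated in `_meta`), these Bottle tests pick up the rename of the `request_bodies` option to `max_request_body_size`. A sketch of the renamed option in `sentry_sdk.init`; the DSN is a placeholder:

```python
import sentry_sdk
from sentry_sdk.integrations.bottle import BottleIntegration

sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    integrations=[BottleIntegration()],
    # Replaces the old `request_bodies` option; "small" and "always" are
    # the values exercised by the tests above.
    max_request_body_size="always",
)
```
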
diff --git a/tests/integrations/celery/__init__.py b/tests/integrations/celery/__init__.py
new file mode 100644
index 0000000000..e37dfbf00e
--- /dev/null
+++ b/tests/integrations/celery/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("celery")
diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py
index a77ac1adb1..9ada8640ad 100644
--- a/tests/integrations/celery/test_celery.py
+++ b/tests/integrations/celery/test_celery.py
@@ -2,11 +2,15 @@
 
 import pytest
 
-pytest.importorskip("celery")
+from sentry_sdk import Hub, configure_scope, start_transaction, get_current_span
+from sentry_sdk.integrations.celery import (
+    CeleryIntegration,
+    _get_headers,
+    _wrap_apply_async,
+)
 
-from sentry_sdk import Hub, configure_scope, start_transaction
-from sentry_sdk.integrations.celery import CeleryIntegration
 from sentry_sdk._compat import text_type
+from tests.conftest import ApproxDict
 
 from celery import Celery, VERSION
 from celery.bin import worker
@@ -84,8 +88,14 @@ def celery(init_celery):
 
 @pytest.fixture(
     params=[
-        lambda task, x, y: (task.delay(x, y), {"args": [x, y], "kwargs": {}}),
-        lambda task, x, y: (task.apply_async((x, y)), {"args": [x, y], "kwargs": {}}),
+        lambda task, x, y: (
+            task.delay(x, y),
+            {"args": [x, y], "kwargs": {}},
+        ),
+        lambda task, x, y: (
+            task.apply_async((x, y)),
+            {"args": [x, y], "kwargs": {}},
+        ),
         lambda task, x, y: (
             task.apply_async(args=(x, y)),
             {"args": [x, y], "kwargs": {}},
@@ -105,7 +115,8 @@ def celery_invocation(request):
     return request.param
 
 
-def test_simple(capture_events, celery, celery_invocation):
+def test_simple_with_performance(capture_events, init_celery, celery_invocation):
+    celery = init_celery(traces_sample_rate=1.0)
     events = capture_events()
 
     @celery.task(name="dummy_task")
@@ -113,26 +124,61 @@ def dummy_task(x, y):
         foo = 42  # noqa
         return x / y
 
-    with start_transaction() as transaction:
+    with start_transaction(op="unit test transaction") as transaction:
         celery_invocation(dummy_task, 1, 2)
         _, expected_context = celery_invocation(dummy_task, 1, 0)
 
-    (event,) = events
+    (_, error_event, _, _) = events
 
-    assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
-    assert event["contexts"]["trace"]["span_id"] != transaction.span_id
-    assert event["transaction"] == "dummy_task"
-    assert "celery_task_id" in event["tags"]
-    assert event["extra"]["celery-job"] == dict(
+    assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
+    assert error_event["contexts"]["trace"]["span_id"] != transaction.span_id
+    assert error_event["transaction"] == "dummy_task"
+    assert "celery_task_id" in error_event["tags"]
+    assert error_event["extra"]["celery-job"] == dict(
         task_name="dummy_task", **expected_context
     )
 
-    (exception,) = event["exception"]["values"]
+    (exception,) = error_event["exception"]["values"]
     assert exception["type"] == "ZeroDivisionError"
     assert exception["mechanism"]["type"] == "celery"
     assert exception["stacktrace"]["frames"][0]["vars"]["foo"] == "42"
 
 
+def test_simple_without_performance(capture_events, init_celery, celery_invocation):
+    celery = init_celery(traces_sample_rate=None)
+    events = capture_events()
+
+    @celery.task(name="dummy_task")
+    def dummy_task(x, y):
+        foo = 42  # noqa
+        return x / y
+
+    with configure_scope() as scope:
+        celery_invocation(dummy_task, 1, 2)
+        _, expected_context = celery_invocation(dummy_task, 1, 0)
+
+        (error_event,) = events
+
+        assert (
+            error_event["contexts"]["trace"]["trace_id"]
+            == scope._propagation_context["trace_id"]
+        )
+        assert (
+            error_event["contexts"]["trace"]["span_id"]
+            != scope._propagation_context["span_id"]
+        )
+        assert error_event["transaction"] == "dummy_task"
+        assert "celery_task_id" in error_event["tags"]
+        assert error_event["extra"]["celery-job"] == dict(
+            task_name="dummy_task", **expected_context
+        )
+
+        (exception,) = error_event["exception"]["values"]
+        assert exception["type"] == "ZeroDivisionError"
+        assert exception["mechanism"]["type"] == "celery"
+        assert exception["stacktrace"]["frames"][0]["vars"]["foo"] == "42"
+
+
 @pytest.mark.parametrize("task_fails", [True, False], ids=["error", "success"])
 def test_transaction_events(capture_events, init_celery, celery_invocation, task_fails):
     celery = init_celery(traces_sample_rate=1.0)
@@ -155,9 +201,11 @@ def dummy_task(x, y):
         assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
 
     execution_event, submission_event = events
-
     assert execution_event["transaction"] == "dummy_task"
+    assert execution_event["transaction_info"] == {"source": "task"}
+
     assert submission_event["transaction"] == "submission"
+    assert submission_event["transaction_info"] == {"source": "custom"}
 
     assert execution_event["type"] == submission_event["type"] == "transaction"
     assert execution_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
@@ -171,8 +219,9 @@ def dummy_task(x, y):
     assert execution_event["spans"] == []
     assert submission_event["spans"] == [
         {
+            "data": ApproxDict(),
             "description": "dummy_task",
-            "op": "celery.submit",
+            "op": "queue.submit.celery",
             "parent_span_id": submission_event["contexts"]["trace"]["span_id"],
             "same_process_as_parent": True,
             "span_id": submission_event["spans"][0]["span_id"],
@@ -311,8 +360,10 @@ def dummy_task(self):
         assert e["type"] == "ZeroDivisionError"
 
 
+# TODO: This test hangs when running the tests with `tox --parallel auto`. Find out why and fix it!
+@pytest.mark.skip
 @pytest.mark.forked
-def test_redis_backend_trace_propagation(init_celery, capture_events_forksafe, tmpdir):
+def test_redis_backend_trace_propagation(init_celery, capture_events_forksafe):
     celery = init_celery(traces_sample_rate=1.0, backend="redis", debug=True)
 
     events = capture_events_forksafe()
@@ -328,7 +379,7 @@ def dummy_task(self):
         # Curious: Cannot use delay() here or py2.7-celery-4.2 crashes
         res = dummy_task.apply_async()
 
-    with pytest.raises(Exception):
+    with pytest.raises(Exception):  # noqa: B017
         # Celery 4.1 raises a gibberish exception
         res.wait()
 
@@ -343,7 +394,7 @@ def dummy_task(self):
         submit_transaction["spans"]
     ), 4  # Because redis integration was auto enabled
     span = submit_transaction["spans"][0]
-    assert span["op"] == "celery.submit"
+    assert span["op"] == "queue.submit.celery"
     assert span["description"] == "dummy_task"
 
     event = events.read_event()
@@ -367,11 +418,24 @@ def dummy_task(self):
 @pytest.mark.parametrize("newrelic_order", ["sentry_first", "sentry_last"])
 def test_newrelic_interference(init_celery, newrelic_order, celery_invocation):
     def instrument_newrelic():
-        import celery.app.trace as celery_mod
-        from newrelic.hooks.application_celery import instrument_celery_execute_trace
+        try:
+            # older newrelic versions
+            from newrelic.hooks.application_celery import (
+                instrument_celery_execute_trace,
+            )
+            import celery.app.trace as celery_trace_module
+
+            assert hasattr(celery_trace_module, "build_tracer")
+            instrument_celery_execute_trace(celery_trace_module)
+
+        except ImportError:
+            # newer newrelic versions
+            from newrelic.hooks.application_celery import instrument_celery_app_base
+            import celery.app as celery_app_module
 
-        assert hasattr(celery_mod, "build_tracer")
-        instrument_celery_execute_trace(celery_mod)
+            assert hasattr(celery_app_module, "Celery")
+            assert hasattr(celery_app_module.Celery, "send_task")
+            instrument_celery_app_base(celery_app_module)
 
     if newrelic_order == "sentry_first":
         celery = init_celery()
@@ -433,3 +497,129 @@ def dummy_task(x, y):
         celery_invocation(dummy_task, 1, 0)
 
     assert not events
+
+
+def test_task_headers(celery):
+    """
+    Test that the headers set by the Celery Beat auto-instrumentation are passed to the Celery signal handlers.
+    """
+    sentry_crons_setup = {
+        "sentry-monitor-slug": "some-slug",
+        "sentry-monitor-config": {"some": "config"},
+        "sentry-monitor-check-in-id": "123abc",
+    }
+
+    @celery.task(name="dummy_task", bind=True)
+    def dummy_task(self, x, y):
+        return _get_headers(self)
+
+    # This is how the Celery Beat auto-instrumentation starts a task
+    # in the monkey patched version of `apply_async`
+    # in `sentry_sdk/integrations/celery.py::_wrap_apply_async()`
+    result = dummy_task.apply_async(args=(1, 0), headers=sentry_crons_setup)
+    assert result.get() == sentry_crons_setup
+
+
+def test_baggage_propagation(init_celery):
+    celery = init_celery(traces_sample_rate=1.0, release="abcdef")
+
+    @celery.task(name="dummy_task", bind=True)
+    def dummy_task(self, x, y):
+        return _get_headers(self)
+
+    with start_transaction() as transaction:
+        result = dummy_task.apply_async(
+            args=(1, 0),
+            headers={"baggage": "custom=value"},
+        ).get()
+
+        assert sorted(result["baggage"].split(",")) == sorted(
+            [
+                "sentry-release=abcdef",
+                "sentry-trace_id={}".format(transaction.trace_id),
+                "sentry-environment=production",
+                "sentry-sample_rate=1.0",
+                "sentry-sampled=true",
+                "custom=value",
+            ]
+        )
+
+
+def test_sentry_propagate_traces_override(init_celery):
+    """
+    Test that the `sentry-propagate-traces` header passed to `apply_async`
+    overrides the `propagate_traces` parameter in the integration constructor.
+    """
+    celery = init_celery(
+        propagate_traces=True, traces_sample_rate=1.0, release="abcdef"
+    )
+
+    @celery.task(name="dummy_task", bind=True)
+    def dummy_task(self, message):
+        trace_id = get_current_span().trace_id
+        return trace_id
+
+    with start_transaction() as transaction:
+        transaction_trace_id = transaction.trace_id
+
+        # should propagate trace
+        task_transaction_id = dummy_task.apply_async(
+            args=("some message",),
+        ).get()
+        assert transaction_trace_id == task_transaction_id
+
+        # should NOT propagate trace (overrides `propagate_traces` parameter in integration constructor)
+        task_transaction_id = dummy_task.apply_async(
+            args=("another message",),
+            headers={"sentry-propagate-traces": False},
+        ).get()
+        assert transaction_trace_id != task_transaction_id
+
+
+def test_apply_async_manually_span(sentry_init):
+    sentry_init(
+        integrations=[CeleryIntegration()],
+    )
+
+    def dummy_function(*args, **kwargs):
+        headers = kwargs.get("headers")
+        assert "sentry-trace" in headers
+        assert "baggage" in headers
+
+    wrapped = _wrap_apply_async(dummy_function)
+    wrapped(mock.MagicMock(), (), headers={})
+
+
+def test_apply_async_from_beat_no_span(sentry_init):
+    sentry_init(
+        integrations=[CeleryIntegration()],
+    )
+
+    def dummy_function(*args, **kwargs):
+        headers = kwargs.get("headers")
+        assert "sentry-trace" not in headers
+        assert "baggage" not in headers
+
+    wrapped = _wrap_apply_async(dummy_function)
+    wrapped(
+        mock.MagicMock(),
+        [
+            "BEAT",
+        ],
+        headers={},
+    )
+
+
+def test_apply_async_no_args(init_celery):
+    celery = init_celery()
+
+    @celery.task
+    def example_task():
+        return "success"
+
+    try:
+        result = example_task.apply_async(None, {})
+    except TypeError:
+        pytest.fail("Calling `apply_async` without arguments raised a TypeError")
+
+    assert result.get() == "success"
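
`test_sentry_propagate_traces_override` above documents a per-call escape hatch: even with `propagate_traces=True` on the integration, a single `apply_async` invocation can opt out via the `sentry-propagate-traces` header. A usage sketch, assuming `my_task` is an existing Celery task:

```python
# Assumes the Celery integration is enabled with propagate_traces=True
# (the default) and `my_task` is a registered Celery task.

# Trace headers (sentry-trace / baggage) are attached to this call:
my_task.apply_async(args=("some message",))

# This call opts out of trace propagation for just this invocation:
my_task.apply_async(
    args=("another message",),
    headers={"sentry-propagate-traces": False},
)
```
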
diff --git a/tests/integrations/celery/test_celery_beat_crons.py b/tests/integrations/celery/test_celery_beat_crons.py
new file mode 100644
index 0000000000..9ffa59b00d
--- /dev/null
+++ b/tests/integrations/celery/test_celery_beat_crons.py
@@ -0,0 +1,503 @@
+import datetime
+import sys
+
+import pytest
+
+from sentry_sdk.integrations.celery import (
+    _get_headers,
+    _get_humanized_interval,
+    _get_monitor_config,
+    _patch_beat_apply_entry,
+    _patch_redbeat_maybe_due,
+    crons_task_success,
+    crons_task_failure,
+    crons_task_retry,
+)
+from sentry_sdk.crons import MonitorStatus
+from celery.schedules import crontab, schedule
+
+try:
+    from unittest import mock  # python 3.3 and above
+    from unittest.mock import MagicMock
+except ImportError:
+    import mock  # python < 3.3
+    from mock import MagicMock
+
+
+def test_get_headers():
+    fake_task = MagicMock()
+    fake_task.request = {
+        "bla": "blub",
+        "foo": "bar",
+    }
+
+    assert _get_headers(fake_task) == {}
+
+    fake_task.request.update(
+        {
+            "headers": {
+                "bla": "blub",
+            },
+        }
+    )
+
+    assert _get_headers(fake_task) == {"bla": "blub"}
+
+    fake_task.request.update(
+        {
+            "headers": {
+                "headers": {
+                    "tri": "blub",
+                    "bar": "baz",
+                },
+                "bla": "blub",
+            },
+        }
+    )
+
+    assert _get_headers(fake_task) == {"bla": "blub", "tri": "blub", "bar": "baz"}
+
+
+@pytest.mark.parametrize(
+    "seconds, expected_tuple",
+    [
+        (0, (0, "second")),
+        (1, (1, "second")),
+        (0.00001, (0, "second")),
+        (59, (59, "second")),
+        (60, (1, "minute")),
+        (100, (1, "minute")),
+        (1000, (16, "minute")),
+        (10000, (2, "hour")),
+        (100000, (1, "day")),
+        (100000000, (1157, "day")),
+    ],
+)
+def test_get_humanized_interval(seconds, expected_tuple):
+    assert _get_humanized_interval(seconds) == expected_tuple
+
+
+def test_crons_task_success():
+    fake_task = MagicMock()
+    fake_task.request = {
+        "headers": {
+            "sentry-monitor-slug": "test123",
+            "sentry-monitor-check-in-id": "1234567890",
+            "sentry-monitor-start-timestamp-s": 200.1,
+            "sentry-monitor-config": {
+                "schedule": {
+                    "type": "interval",
+                    "value": 3,
+                    "unit": "day",
+                },
+                "timezone": "Europe/Vienna",
+            },
+            "sentry-monitor-some-future-key": "some-future-value",
+        },
+    }
+
+    with mock.patch(
+        "sentry_sdk.integrations.celery.capture_checkin"
+    ) as mock_capture_checkin:
+        with mock.patch(
+            "sentry_sdk.integrations.celery._now_seconds_since_epoch",
+            return_value=500.5,
+        ):
+            crons_task_success(fake_task)
+
+            mock_capture_checkin.assert_called_once_with(
+                monitor_slug="test123",
+                monitor_config={
+                    "schedule": {
+                        "type": "interval",
+                        "value": 3,
+                        "unit": "day",
+                    },
+                    "timezone": "Europe/Vienna",
+                },
+                duration=300.4,
+                check_in_id="1234567890",
+                status=MonitorStatus.OK,
+            )
+
+
+def test_crons_task_failure():
+    fake_task = MagicMock()
+    fake_task.request = {
+        "headers": {
+            "sentry-monitor-slug": "test123",
+            "sentry-monitor-check-in-id": "1234567890",
+            "sentry-monitor-start-timestamp-s": 200.1,
+            "sentry-monitor-config": {
+                "schedule": {
+                    "type": "interval",
+                    "value": 3,
+                    "unit": "day",
+                },
+                "timezone": "Europe/Vienna",
+            },
+            "sentry-monitor-some-future-key": "some-future-value",
+        },
+    }
+
+    with mock.patch(
+        "sentry_sdk.integrations.celery.capture_checkin"
+    ) as mock_capture_checkin:
+        with mock.patch(
+            "sentry_sdk.integrations.celery._now_seconds_since_epoch",
+            return_value=500.5,
+        ):
+            crons_task_failure(fake_task)
+
+            mock_capture_checkin.assert_called_once_with(
+                monitor_slug="test123",
+                monitor_config={
+                    "schedule": {
+                        "type": "interval",
+                        "value": 3,
+                        "unit": "day",
+                    },
+                    "timezone": "Europe/Vienna",
+                },
+                duration=300.4,
+                check_in_id="1234567890",
+                status=MonitorStatus.ERROR,
+            )
+
+
+def test_crons_task_retry():
+    fake_task = MagicMock()
+    fake_task.request = {
+        "headers": {
+            "sentry-monitor-slug": "test123",
+            "sentry-monitor-check-in-id": "1234567890",
+            "sentry-monitor-start-timestamp-s": 200.1,
+            "sentry-monitor-config": {
+                "schedule": {
+                    "type": "interval",
+                    "value": 3,
+                    "unit": "day",
+                },
+                "timezone": "Europe/Vienna",
+            },
+            "sentry-monitor-some-future-key": "some-future-value",
+        },
+    }
+
+    with mock.patch(
+        "sentry_sdk.integrations.celery.capture_checkin"
+    ) as mock_capture_checkin:
+        with mock.patch(
+            "sentry_sdk.integrations.celery._now_seconds_since_epoch",
+            return_value=500.5,
+        ):
+            crons_task_retry(fake_task)
+
+            mock_capture_checkin.assert_called_once_with(
+                monitor_slug="test123",
+                monitor_config={
+                    "schedule": {
+                        "type": "interval",
+                        "value": 3,
+                        "unit": "day",
+                    },
+                    "timezone": "Europe/Vienna",
+                },
+                duration=300.4,
+                check_in_id="1234567890",
+                status=MonitorStatus.ERROR,
+            )
+
+
+def test_get_monitor_config_crontab():
+    app = MagicMock()
+    app.timezone = "Europe/Vienna"
+
+    # schedule with the default timezone
+    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
+
+    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
+    assert monitor_config == {
+        "schedule": {
+            "type": "crontab",
+            "value": "*/10 12 3 * *",
+        },
+        "timezone": "UTC",  # the default because `crontab` does not know about the app
+    }
+    assert "unit" not in monitor_config["schedule"]
+
+    # schedule with the timezone from the app
+    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10", app=app)
+
+    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
+    assert monitor_config == {
+        "schedule": {
+            "type": "crontab",
+            "value": "*/10 12 3 * *",
+        },
+        "timezone": "Europe/Vienna",  # the timezone from the app
+    }
+
+    # schedule without a timezone, the celery integration will read the config from the app
+    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
+    celery_schedule.tz = None
+
+    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
+    assert monitor_config == {
+        "schedule": {
+            "type": "crontab",
+            "value": "*/10 12 3 * *",
+        },
+        "timezone": "Europe/Vienna",  # the timezone from the app
+    }
+
+    # schedule without a timezone, and an app without timezone, the celery integration will fall back to UTC
+    app = MagicMock()
+    app.timezone = None
+
+    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
+    celery_schedule.tz = None
+    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
+    assert monitor_config == {
+        "schedule": {
+            "type": "crontab",
+            "value": "*/10 12 3 * *",
+        },
+        "timezone": "UTC",  # default timezone from celery integration
+    }
+
+
+def test_get_monitor_config_seconds():
+    app = MagicMock()
+    app.timezone = "Europe/Vienna"
+
+    celery_schedule = schedule(run_every=3)  # seconds
+
+    with mock.patch(
+        "sentry_sdk.integrations.celery.logger.warning"
+    ) as mock_logger_warning:
+        monitor_config = _get_monitor_config(celery_schedule, app, "foo")
+        mock_logger_warning.assert_called_with(
+            "Intervals shorter than one minute are not supported by Sentry Crons. Monitor '%s' has an interval of %s seconds. Use the `exclude_beat_tasks` option in the celery integration to exclude it.",
+            "foo",
+            3,
+        )
+        assert monitor_config == {}
+
+
+def test_get_monitor_config_minutes():
+    app = MagicMock()
+    app.timezone = "Europe/Vienna"
+
+    # schedule with the default timezone
+    celery_schedule = schedule(run_every=60)  # seconds
+
+    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
+    assert monitor_config == {
+        "schedule": {
+            "type": "interval",
+            "value": 1,
+            "unit": "minute",
+        },
+        "timezone": "UTC",
+    }
+
+    # schedule with the timezone from the app
+    celery_schedule = schedule(run_every=60, app=app)  # seconds
+
+    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
+    assert monitor_config == {
+        "schedule": {
+            "type": "interval",
+            "value": 1,
+            "unit": "minute",
+        },
+        "timezone": "Europe/Vienna",  # the timezone from the app
+    }
+
+    # schedule without a timezone, the celery integration will read the config from the app
+    celery_schedule = schedule(run_every=60)  # seconds
+    celery_schedule.tz = None
+
+    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
+    assert monitor_config == {
+        "schedule": {
+            "type": "interval",
+            "value": 1,
+            "unit": "minute",
+        },
+        "timezone": "Europe/Vienna",  # the timezone from the app
+    }
+
+    # schedule without a timezone, and an app without timezone, the celery integration will fall back to UTC
+    app = MagicMock()
+    app.timezone = None
+
+    celery_schedule = schedule(run_every=60)  # seconds
+    celery_schedule.tz = None
+
+    monitor_config = _get_monitor_config(celery_schedule, app, "foo")
+    assert monitor_config == {
+        "schedule": {
+            "type": "interval",
+            "value": 1,
+            "unit": "minute",
+        },
+        "timezone": "UTC",  # default timezone from celery integration
+    }
+
+
+def test_get_monitor_config_unknown():
+    app = MagicMock()
+    app.timezone = "Europe/Vienna"
+
+    unknown_celery_schedule = MagicMock()
+    monitor_config = _get_monitor_config(unknown_celery_schedule, app, "foo")
+    assert monitor_config == {}
+
+
+def test_get_monitor_config_default_timezone():
+    app = MagicMock()
+    app.timezone = None
+
+    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
+
+    monitor_config = _get_monitor_config(celery_schedule, app, "dummy_monitor_name")
+
+    assert monitor_config["timezone"] == "UTC"
+
+
+def test_get_monitor_config_timezone_in_app_conf():
+    app = MagicMock()
+    app.timezone = "Asia/Karachi"
+
+    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
+    celery_schedule.tz = None
+
+    monitor_config = _get_monitor_config(celery_schedule, app, "dummy_monitor_name")
+
+    assert monitor_config["timezone"] == "Asia/Karachi"
+
+
+@pytest.mark.skipif(
+    sys.version_info < (3, 0),
+    reason="no datetime.timezone for Python 2, so skipping this test.",
+)
+def test_get_monitor_config_timezone_in_celery_schedule():
+    app = MagicMock()
+    app.timezone = "Asia/Karachi"
+
+    panama_tz = datetime.timezone(datetime.timedelta(hours=-5), name="America/Panama")
+
+    celery_schedule = crontab(day_of_month="3", hour="12", minute="*/10")
+    celery_schedule.tz = panama_tz
+
+    monitor_config = _get_monitor_config(celery_schedule, app, "dummy_monitor_name")
+
+    assert monitor_config["timezone"] == str(panama_tz)
+
+
+@pytest.mark.parametrize(
+    "task_name,exclude_beat_tasks,task_in_excluded_beat_tasks",
+    [
+        ["some_task_name", ["xxx", "some_task.*"], True],
+        ["some_task_name", ["xxx", "some_other_task.*"], False],
+    ],
+)
+def test_exclude_beat_tasks_option(
+    task_name, exclude_beat_tasks, task_in_excluded_beat_tasks
+):
+    """
+    Test excluding Celery Beat tasks from automatic instrumentation.
+    """
+    fake_apply_entry = MagicMock()
+
+    fake_scheduler = MagicMock()
+    fake_scheduler.apply_entry = fake_apply_entry
+
+    fake_integration = MagicMock()
+    fake_integration.exclude_beat_tasks = exclude_beat_tasks
+
+    fake_schedule_entry = MagicMock()
+    fake_schedule_entry.name = task_name
+
+    fake_get_monitor_config = MagicMock()
+
+    with mock.patch(
+        "sentry_sdk.integrations.celery.Scheduler", fake_scheduler
+    ) as Scheduler:  # noqa: N806
+        with mock.patch(
+            "sentry_sdk.integrations.celery.Hub.current.get_integration",
+            return_value=fake_integration,
+        ):
+            with mock.patch(
+                "sentry_sdk.integrations.celery._get_monitor_config",
+                fake_get_monitor_config,
+            ) as _get_monitor_config:
+                # Mimic CeleryIntegration patching of Scheduler.apply_entry()
+                _patch_beat_apply_entry()
+                # Mimic Celery Beat calling a task from the Beat schedule
+                Scheduler.apply_entry(fake_scheduler, fake_schedule_entry)
+
+                if task_in_excluded_beat_tasks:
+                    # Only the original Scheduler.apply_entry() is called, _get_monitor_config is NOT called.
+                    assert fake_apply_entry.call_count == 1
+                    _get_monitor_config.assert_not_called()
+
+                else:
+                    # The original Scheduler.apply_entry() is called, AND _get_monitor_config is called.
+                    assert fake_apply_entry.call_count == 1
+                    assert _get_monitor_config.call_count == 1
+
+
+@pytest.mark.parametrize(
+    "task_name,exclude_beat_tasks,task_in_excluded_beat_tasks",
+    [
+        ["some_task_name", ["xxx", "some_task.*"], True],
+        ["some_task_name", ["xxx", "some_other_task.*"], False],
+    ],
+)
+def test_exclude_redbeat_tasks_option(
+    task_name, exclude_beat_tasks, task_in_excluded_beat_tasks
+):
+    """
+    Test excluding Celery RedBeat tasks from automatic instrumentation.
+    """
+    fake_maybe_due = MagicMock()
+
+    fake_redbeat_scheduler = MagicMock()
+    fake_redbeat_scheduler.maybe_due = fake_maybe_due
+
+    fake_integration = MagicMock()
+    fake_integration.exclude_beat_tasks = exclude_beat_tasks
+
+    fake_schedule_entry = MagicMock()
+    fake_schedule_entry.name = task_name
+
+    fake_get_monitor_config = MagicMock()
+
+    with mock.patch(
+        "sentry_sdk.integrations.celery.RedBeatScheduler", fake_redbeat_scheduler
+    ) as RedBeatScheduler:  # noqa: N806
+        with mock.patch(
+            "sentry_sdk.integrations.celery.Hub.current.get_integration",
+            return_value=fake_integration,
+        ):
+            with mock.patch(
+                "sentry_sdk.integrations.celery._get_monitor_config",
+                fake_get_monitor_config,
+            ) as _get_monitor_config:
+                # Mimic CeleryIntegration patching of RedBeatScheduler.maybe_due()
+                _patch_redbeat_maybe_due()
+                # Mimic Celery RedBeat calling a task from the RedBeat schedule
+                RedBeatScheduler.maybe_due(fake_redbeat_scheduler, fake_schedule_entry)
+
+                if task_in_excluded_beat_tasks:
+                    # Only the original RedBeatScheduler.maybe_due() is called, _get_monitor_config is NOT called.
+                    assert fake_maybe_due.call_count == 1
+                    _get_monitor_config.assert_not_called()
+
+                else:
+                    # The original RedBeatScheduler.maybe_due() is called, AND _get_monitor_config is called.
+                    assert fake_maybe_due.call_count == 1
+                    assert _get_monitor_config.call_count == 1
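
The `test_get_humanized_interval` table above pins down how a Beat interval in seconds is bucketed into a monitor schedule unit: the largest fitting unit wins and the value is floored, with seconds as the fallback. A sketch that reproduces the table (an illustration, not the SDK's actual implementation):

```python
def humanize_interval(seconds):
    # Largest unit first with floor division; mirrors the expectations in
    # test_get_humanized_interval (e.g. 10000 seconds -> (2, "hour")).
    for unit, size in (("day", 86400), ("hour", 3600), ("minute", 60)):
        if seconds >= size:
            return int(seconds // size), unit
    return int(seconds), "second"


assert humanize_interval(59) == (59, "second")
assert humanize_interval(60) == (1, "minute")
assert humanize_interval(10000) == (2, "hour")
assert humanize_interval(100000000) == (1157, "day")
```
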
diff --git a/tests/integrations/chalice/test_chalice.py b/tests/integrations/chalice/test_chalice.py
index 8bb33a5cb6..fbd4be4e59 100644
--- a/tests/integrations/chalice/test_chalice.py
+++ b/tests/integrations/chalice/test_chalice.py
@@ -3,7 +3,9 @@
 from chalice import Chalice, BadRequestError
 from chalice.local import LambdaContext, LocalGateway
 
-from sentry_sdk.integrations.chalice import ChaliceIntegration
+from sentry_sdk import capture_message
+from sentry_sdk.integrations.chalice import CHALICE_VERSION, ChaliceIntegration
+from sentry_sdk.utils import parse_version
 
 from pytest_chalice.handlers import RequestHandler
 
@@ -41,6 +43,16 @@ def has_request():
     def badrequest():
         raise BadRequestError("bad-request")
 
+    @app.route("/message")
+    def hi():
+        capture_message("hi")
+        return {"status": "ok"}
+
+    @app.route("/message/{message_id}")
+    def hi_with_id(message_id):
+        capture_message("hi again")
+        return {"status": "ok"}
+
     LocalGateway._generate_lambda_context = _generate_lambda_context
 
     return app
@@ -54,12 +66,10 @@ def lambda_context_args():
 def test_exception_boom(app, client: RequestHandler) -> None:
     response = client.get("/boom")
     assert response.status_code == 500
-    assert response.json == dict(
-        [
-            ("Code", "InternalServerError"),
-            ("Message", "An internal server error occurred."),
-        ]
-    )
+    assert response.json == {
+        "Code": "InternalServerError",
+        "Message": "An internal server error occurred.",
+    }
 
 
 def test_has_request(app, capture_events, client: RequestHandler):
@@ -99,13 +109,54 @@ def every_hour(event):
     assert str(exc_info.value) == "schedule event!"
 
 
-def test_bad_reques(client: RequestHandler) -> None:
+@pytest.mark.skipif(
+    parse_version(CHALICE_VERSION) >= (1, 28),
+    reason="different behavior based on chalice version",
+)
+def test_bad_request_old(client: RequestHandler) -> None:
     response = client.get("/badrequest")
 
     assert response.status_code == 400
-    assert response.json == dict(
-        [
-            ("Code", "BadRequestError"),
-            ("Message", "BadRequestError: bad-request"),
-        ]
-    )
+    assert response.json == {
+        "Code": "BadRequestError",
+        "Message": "BadRequestError: bad-request",
+    }
+
+
+@pytest.mark.skipif(
+    parse_version(CHALICE_VERSION) < (1, 28),
+    reason="different behavior based on chalice version",
+)
+def test_bad_request(client: RequestHandler) -> None:
+    response = client.get("/badrequest")
+
+    assert response.status_code == 400
+    assert response.json == {
+        "Code": "BadRequestError",
+        "Message": "bad-request",
+    }
+
+
+@pytest.mark.parametrize(
+    "url,expected_transaction,expected_source",
+    [
+        ("/message", "api_handler", "component"),
+        ("/message/123456", "api_handler", "component"),
+    ],
+)
+def test_transaction(
+    app,
+    client: RequestHandler,
+    capture_events,
+    url,
+    expected_transaction,
+    expected_source,
+):
+    events = capture_events()
+
+    response = client.get(url)
+    assert response.status_code == 200
+
+    (event,) = events
+    assert event["transaction"] == expected_transaction
+    assert event["transaction_info"] == {"source": expected_source}
diff --git a/tests/integrations/clickhouse_driver/__init__.py b/tests/integrations/clickhouse_driver/__init__.py
new file mode 100644
index 0000000000..602c4e553c
--- /dev/null
+++ b/tests/integrations/clickhouse_driver/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("clickhouse_driver")
diff --git a/tests/integrations/clickhouse_driver/test_clickhouse_driver.py b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py
new file mode 100644
index 0000000000..b39f722c52
--- /dev/null
+++ b/tests/integrations/clickhouse_driver/test_clickhouse_driver.py
@@ -0,0 +1,893 @@
+"""
+These tests need a local ClickHouse instance. The easiest way to start one is:
+```sh
+docker run -d -p 18123:8123 -p 9000:9000 --name clickhouse-test --ulimit nofile=262144:262144 --rm clickhouse/clickhouse-server
+```
+"""
+
+import clickhouse_driver
+from clickhouse_driver import Client, connect
+
+from sentry_sdk import start_transaction, capture_message
+from sentry_sdk.integrations.clickhouse_driver import ClickhouseDriverIntegration
+from tests.conftest import ApproxDict
+
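+# clickhouse-driver versions older than 0.2.6 do not report the bound
+# parameters of SELECT statements, so on those versions the tests below drop
+# the "db.params" expectation from the final SELECT breadcrumb/span.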
+EXPECT_PARAMS_IN_SELECT = True
+if clickhouse_driver.VERSION < (0, 2, 6):
+    EXPECT_PARAMS_IN_SELECT = False
+
+
+def test_clickhouse_client_breadcrumbs(sentry_init, capture_events) -> None:
+    sentry_init(
+        integrations=[ClickhouseDriverIntegration()],
+        _experiments={"record_sql_params": True},
+    )
+    events = capture_events()
+
+    client = Client("localhost")
+    client.execute("DROP TABLE IF EXISTS test")
+    client.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
+    client.execute("INSERT INTO test (x) VALUES", [{"x": 100}])
+    client.execute("INSERT INTO test (x) VALUES", [[170], [200]])
+
+    res = client.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
+    assert res[0][0] == 370
+
+    capture_message("hi")
+
+    (event,) = events
+
+    expected_breadcrumbs = [
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "message": "DROP TABLE IF EXISTS test",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "message": "CREATE TABLE test (x Int32) ENGINE = Memory",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "message": "INSERT INTO test (x) VALUES",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "message": "INSERT INTO test (x) VALUES",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "message": "SELECT sum(x) FROM test WHERE x > 150",
+            "type": "default",
+        },
+    ]
+
+    if not EXPECT_PARAMS_IN_SELECT:
+        expected_breadcrumbs[-1]["data"].pop("db.params", None)
+
+    for crumb in expected_breadcrumbs:
+        crumb["data"] = ApproxDict(crumb["data"])
+
+    for crumb in event["breadcrumbs"]["values"]:
+        crumb.pop("timestamp", None)
+
+    assert event["breadcrumbs"]["values"] == expected_breadcrumbs
+
+
+def test_clickhouse_client_breadcrumbs_with_pii(sentry_init, capture_events) -> None:
+    sentry_init(
+        integrations=[ClickhouseDriverIntegration()],
+        send_default_pii=True,
+        _experiments={"record_sql_params": True},
+    )
+    events = capture_events()
+
+    client = Client("localhost")
+    client.execute("DROP TABLE IF EXISTS test")
+    client.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
+    client.execute("INSERT INTO test (x) VALUES", [{"x": 100}])
+    client.execute("INSERT INTO test (x) VALUES", [[170], [200]])
+
+    res = client.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
+    assert res[0][0] == 370
+
+    capture_message("hi")
+
+    (event,) = events
+
+    expected_breadcrumbs = [
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.result": [],
+            },
+            "message": "DROP TABLE IF EXISTS test",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.result": [],
+            },
+            "message": "CREATE TABLE test (x Int32) ENGINE = Memory",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.params": [{"x": 100}],
+            },
+            "message": "INSERT INTO test (x) VALUES",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.params": [[170], [200]],
+            },
+            "message": "INSERT INTO test (x) VALUES",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.result": [[370]],
+                "db.params": {"minv": 150},
+            },
+            "message": "SELECT sum(x) FROM test WHERE x > 150",
+            "type": "default",
+        },
+    ]
+
+    if not EXPECT_PARAMS_IN_SELECT:
+        expected_breadcrumbs[-1]["data"].pop("db.params", None)
+
+    for crumb in expected_breadcrumbs:
+        crumb["data"] = ApproxDict(crumb["data"])
+
+    for crumb in event["breadcrumbs"]["values"]:
+        crumb.pop("timestamp", None)
+
+    assert event["breadcrumbs"]["values"] == expected_breadcrumbs
+
+
+def test_clickhouse_client_spans(
+    sentry_init, capture_events, capture_envelopes
+) -> None:
+    sentry_init(
+        integrations=[ClickhouseDriverIntegration()],
+        _experiments={"record_sql_params": True},
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    transaction_trace_id = None
+    transaction_span_id = None
+
+    with start_transaction(name="test_clickhouse_transaction") as transaction:
+        transaction_trace_id = transaction.trace_id
+        transaction_span_id = transaction.span_id
+
+        client = Client("localhost")
+        client.execute("DROP TABLE IF EXISTS test")
+        client.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
+        client.execute("INSERT INTO test (x) VALUES", [{"x": 100}])
+        client.execute("INSERT INTO test (x) VALUES", [[170], [200]])
+
+        res = client.execute(
+            "SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150}
+        )
+        assert res[0][0] == 370
+
+    (event,) = events
+
+    expected_spans = [
+        {
+            "op": "db",
+            "description": "DROP TABLE IF EXISTS test",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+        {
+            "op": "db",
+            "description": "CREATE TABLE test (x Int32) ENGINE = Memory",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+        {
+            "op": "db",
+            "description": "INSERT INTO test (x) VALUES",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+        {
+            "op": "db",
+            "description": "INSERT INTO test (x) VALUES",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+        {
+            "op": "db",
+            "description": "SELECT sum(x) FROM test WHERE x > 150",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+    ]
+
+    if not EXPECT_PARAMS_IN_SELECT:
+        expected_spans[-1]["data"].pop("db.params", None)
+
+    for span in expected_spans:
+        span["data"] = ApproxDict(span["data"])
+
+    for span in event["spans"]:
+        span.pop("span_id", None)
+        span.pop("start_timestamp", None)
+        span.pop("timestamp", None)
+
+    assert event["spans"] == expected_spans
+
+
+def test_clickhouse_client_spans_with_pii(
+    sentry_init, capture_events, capture_envelopes
+) -> None:
+    sentry_init(
+        integrations=[ClickhouseDriverIntegration()],
+        _experiments={"record_sql_params": True},
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+    )
+    events = capture_events()
+
+    transaction_trace_id = None
+    transaction_span_id = None
+
+    with start_transaction(name="test_clickhouse_transaction") as transaction:
+        transaction_trace_id = transaction.trace_id
+        transaction_span_id = transaction.span_id
+
+        client = Client("localhost")
+        client.execute("DROP TABLE IF EXISTS test")
+        client.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
+        client.execute("INSERT INTO test (x) VALUES", [{"x": 100}])
+        client.execute("INSERT INTO test (x) VALUES", [[170], [200]])
+
+        res = client.execute(
+            "SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150}
+        )
+        assert res[0][0] == 370
+
+    (event,) = events
+
+    expected_spans = [
+        {
+            "op": "db",
+            "description": "DROP TABLE IF EXISTS test",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.result": [],
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+        {
+            "op": "db",
+            "description": "CREATE TABLE test (x Int32) ENGINE = Memory",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.result": [],
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+        {
+            "op": "db",
+            "description": "INSERT INTO test (x) VALUES",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.params": [{"x": 100}],
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+        {
+            "op": "db",
+            "description": "INSERT INTO test (x) VALUES",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.params": [[170], [200]],
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+        {
+            "op": "db",
+            "description": "SELECT sum(x) FROM test WHERE x > 150",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.params": {"minv": 150},
+                "db.result": [[370]],
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+    ]
+
+    if not EXPECT_PARAMS_IN_SELECT:
+        expected_spans[-1]["data"].pop("db.params", None)
+
+    for span in expected_spans:
+        span["data"] = ApproxDict(span["data"])
+
+    for span in event["spans"]:
+        span.pop("span_id", None)
+        span.pop("start_timestamp", None)
+        span.pop("timestamp", None)
+
+    assert event["spans"] == expected_spans
+
+
+def test_clickhouse_dbapi_breadcrumbs(sentry_init, capture_events) -> None:
+    sentry_init(
+        integrations=[ClickhouseDriverIntegration()],
+    )
+    events = capture_events()
+
+    conn = connect("clickhouse://localhost")
+    cursor = conn.cursor()
+    cursor.execute("DROP TABLE IF EXISTS test")
+    cursor.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
+    cursor.executemany("INSERT INTO test (x) VALUES", [{"x": 100}])
+    cursor.executemany("INSERT INTO test (x) VALUES", [[170], [200]])
+    cursor.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
+    res = cursor.fetchall()
+
+    assert res[0][0] == 370
+
+    capture_message("hi")
+
+    (event,) = events
+
+    expected_breadcrumbs = [
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "message": "DROP TABLE IF EXISTS test",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "message": "CREATE TABLE test (x Int32) ENGINE = Memory",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "message": "INSERT INTO test (x) VALUES",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "message": "INSERT INTO test (x) VALUES",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "message": "SELECT sum(x) FROM test WHERE x > 150",
+            "type": "default",
+        },
+    ]
+
+    if not EXPECT_PARAMS_IN_SELECT:
+        expected_breadcrumbs[-1]["data"].pop("db.params", None)
+
+    for crumb in expected_breadcrumbs:
+        crumb["data"] = ApproxDict(crumb["data"])
+
+    for crumb in event["breadcrumbs"]["values"]:
+        crumb.pop("timestamp", None)
+
+    assert event["breadcrumbs"]["values"] == expected_breadcrumbs
+
+
+def test_clickhouse_dbapi_breadcrumbs_with_pii(sentry_init, capture_events) -> None:
+    sentry_init(
+        integrations=[ClickhouseDriverIntegration()],
+        send_default_pii=True,
+    )
+    events = capture_events()
+
+    conn = connect("clickhouse://localhost")
+    cursor = conn.cursor()
+    cursor.execute("DROP TABLE IF EXISTS test")
+    cursor.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
+    cursor.executemany("INSERT INTO test (x) VALUES", [{"x": 100}])
+    cursor.executemany("INSERT INTO test (x) VALUES", [[170], [200]])
+    cursor.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
+    res = cursor.fetchall()
+
+    assert res[0][0] == 370
+
+    capture_message("hi")
+
+    (event,) = events
+
+    expected_breadcrumbs = [
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.result": [[], []],
+            },
+            "message": "DROP TABLE IF EXISTS test",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.result": [[], []],
+            },
+            "message": "CREATE TABLE test (x Int32) ENGINE = Memory",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.params": [{"x": 100}],
+            },
+            "message": "INSERT INTO test (x) VALUES",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.params": [[170], [200]],
+            },
+            "message": "INSERT INTO test (x) VALUES",
+            "type": "default",
+        },
+        {
+            "category": "query",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.params": {"minv": 150},
+                "db.result": [[["370"]], [["'sum(x)'", "'Int64'"]]],
+            },
+            "message": "SELECT sum(x) FROM test WHERE x > 150",
+            "type": "default",
+        },
+    ]
+
+    if not EXPECT_PARAMS_IN_SELECT:
+        expected_breadcrumbs[-1]["data"].pop("db.params", None)
+
+    for crumb in expected_breadcrumbs:
+        crumb["data"] = ApproxDict(crumb["data"])
+
+    for crumb in event["breadcrumbs"]["values"]:
+        crumb.pop("timestamp", None)
+
+    assert event["breadcrumbs"]["values"] == expected_breadcrumbs
+
+
+def test_clickhouse_dbapi_spans(sentry_init, capture_events, capture_envelopes) -> None:
+    sentry_init(
+        integrations=[ClickhouseDriverIntegration()],
+        _experiments={"record_sql_params": True},
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    transaction_trace_id = None
+    transaction_span_id = None
+
+    with start_transaction(name="test_clickhouse_transaction") as transaction:
+        transaction_trace_id = transaction.trace_id
+        transaction_span_id = transaction.span_id
+
+        conn = connect("clickhouse://localhost")
+        cursor = conn.cursor()
+        cursor.execute("DROP TABLE IF EXISTS test")
+        cursor.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
+        cursor.executemany("INSERT INTO test (x) VALUES", [{"x": 100}])
+        cursor.executemany("INSERT INTO test (x) VALUES", [[170], [200]])
+        cursor.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
+        res = cursor.fetchall()
+
+        assert res[0][0] == 370
+
+    (event,) = events
+
+    expected_spans = [
+        {
+            "op": "db",
+            "description": "DROP TABLE IF EXISTS test",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+        {
+            "op": "db",
+            "description": "CREATE TABLE test (x Int32) ENGINE = Memory",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+        {
+            "op": "db",
+            "description": "INSERT INTO test (x) VALUES",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+        {
+            "op": "db",
+            "description": "INSERT INTO test (x) VALUES",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+        {
+            "op": "db",
+            "description": "SELECT sum(x) FROM test WHERE x > 150",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+    ]
+
+    if not EXPECT_PARAMS_IN_SELECT:
+        expected_spans[-1]["data"].pop("db.params", None)
+
+    for span in expected_spans:
+        span["data"] = ApproxDict(span["data"])
+
+    for span in event["spans"]:
+        span.pop("span_id", None)
+        span.pop("start_timestamp", None)
+        span.pop("timestamp", None)
+
+    assert event["spans"] == expected_spans
+
+
+def test_clickhouse_dbapi_spans_with_pii(
+    sentry_init, capture_events, capture_envelopes
+) -> None:
+    sentry_init(
+        integrations=[ClickhouseDriverIntegration()],
+        _experiments={"record_sql_params": True},
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+    )
+    events = capture_events()
+
+    transaction_trace_id = None
+    transaction_span_id = None
+
+    with start_transaction(name="test_clickhouse_transaction") as transaction:
+        transaction_trace_id = transaction.trace_id
+        transaction_span_id = transaction.span_id
+
+        conn = connect("clickhouse://localhost")
+        cursor = conn.cursor()
+        cursor.execute("DROP TABLE IF EXISTS test")
+        cursor.execute("CREATE TABLE test (x Int32) ENGINE = Memory")
+        cursor.executemany("INSERT INTO test (x) VALUES", [{"x": 100}])
+        cursor.executemany("INSERT INTO test (x) VALUES", [[170], [200]])
+        cursor.execute("SELECT sum(x) FROM test WHERE x > %(minv)i", {"minv": 150})
+        res = cursor.fetchall()
+
+        assert res[0][0] == 370
+
+    (event,) = events
+
+    expected_spans = [
+        {
+            "op": "db",
+            "description": "DROP TABLE IF EXISTS test",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.result": [[], []],
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+        {
+            "op": "db",
+            "description": "CREATE TABLE test (x Int32) ENGINE = Memory",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.result": [[], []],
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+        {
+            "op": "db",
+            "description": "INSERT INTO test (x) VALUES",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.params": [{"x": 100}],
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+        {
+            "op": "db",
+            "description": "INSERT INTO test (x) VALUES",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.params": [[170], [200]],
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+        {
+            "op": "db",
+            "description": "SELECT sum(x) FROM test WHERE x > 150",
+            "data": {
+                "db.system": "clickhouse",
+                "db.name": "",
+                "db.user": "default",
+                "server.address": "localhost",
+                "server.port": 9000,
+                "db.params": {"minv": 150},
+                "db.result": [[[370]], [["sum(x)", "Int64"]]],
+            },
+            "same_process_as_parent": True,
+            "trace_id": transaction_trace_id,
+            "parent_span_id": transaction_span_id,
+        },
+    ]
+
+    if not EXPECT_PARAMS_IN_SELECT:
+        expected_spans[-1]["data"].pop("db.params", None)
+
+    for span in expected_spans:
+        span["data"] = ApproxDict(span["data"])
+
+    for span in event["spans"]:
+        span.pop("span_id", None)
+        span.pop("start_timestamp", None)
+        span.pop("timestamp", None)
+
+    assert event["spans"] == expected_spans
diff --git a/tests/integrations/cloud_resource_context/__init__.py b/tests/integrations/cloud_resource_context/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/integrations/cloud_resource_context/test_cloud_resource_context.py b/tests/integrations/cloud_resource_context/test_cloud_resource_context.py
new file mode 100644
index 0000000000..b36f795a2b
--- /dev/null
+++ b/tests/integrations/cloud_resource_context/test_cloud_resource_context.py
@@ -0,0 +1,410 @@
+import json
+
+import pytest
+
+try:
+    from unittest import mock  # python 3.3 and above
+    from unittest.mock import MagicMock
+except ImportError:
+    import mock  # python < 3.3
+    from mock import MagicMock
+
+from sentry_sdk.integrations.cloud_resource_context import (
+    CLOUD_PLATFORM,
+    CLOUD_PROVIDER,
+)
+
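+# Example EC2 instance identity document, as served by the instance metadata
+# service (IMDSv2); values are dummies, used to exercise _get_aws_context().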
+AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD = {
+    "accountId": "298817902971",
+    "architecture": "x86_64",
+    "availabilityZone": "us-east-1b",
+    "billingProducts": None,
+    "devpayProductCodes": None,
+    "marketplaceProductCodes": None,
+    "imageId": "ami-00874d747dde344fa",
+    "instanceId": "i-07d3301297fe0a55a",
+    "instanceType": "t2.small",
+    "kernelId": None,
+    "pendingTime": "2023-02-08T07:54:05Z",
+    "privateIp": "171.131.65.115",
+    "ramdiskId": None,
+    "region": "us-east-1",
+    "version": "2017-09-30",
+}
+
+try:
+    # Python 3
+    AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD_BYTES = bytes(
+        json.dumps(AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD), "utf-8"
+    )
+except TypeError:
+    # Python 2
+    AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD_BYTES = bytes(
+        json.dumps(AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD)
+    ).encode("utf-8")
+
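+# Example (trimmed) recursive response from the GCE metadata server, with
+# dummy values; used to exercise _get_gcp_context().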
+GCP_GCE_EXAMPLE_METADATA_PAYLOAD = {
+    "instance": {
+        "attributes": {},
+        "cpuPlatform": "Intel Broadwell",
+        "description": "",
+        "disks": [
+            {
+                "deviceName": "tests-cloud-contexts-in-python-sdk",
+                "index": 0,
+                "interface": "SCSI",
+                "mode": "READ_WRITE",
+                "type": "PERSISTENT-BALANCED",
+            }
+        ],
+        "guestAttributes": {},
+        "hostname": "tests-cloud-contexts-in-python-sdk.c.client-infra-internal.internal",
+        "id": 1535324527892303790,
+        "image": "projects/debian-cloud/global/images/debian-11-bullseye-v20221206",
+        "licenses": [{"id": "2853224013536823851"}],
+        "machineType": "projects/542054129475/machineTypes/e2-medium",
+        "maintenanceEvent": "NONE",
+        "name": "tests-cloud-contexts-in-python-sdk",
+        "networkInterfaces": [
+            {
+                "accessConfigs": [
+                    {"externalIp": "134.30.53.15", "type": "ONE_TO_ONE_NAT"}
+                ],
+                "dnsServers": ["169.254.169.254"],
+                "forwardedIps": [],
+                "gateway": "10.188.0.1",
+                "ip": "10.188.0.3",
+                "ipAliases": [],
+                "mac": "42:01:0c:7c:00:13",
+                "mtu": 1460,
+                "network": "projects/544954029479/networks/default",
+                "subnetmask": "255.255.240.0",
+                "targetInstanceIps": [],
+            }
+        ],
+        "preempted": "FALSE",
+        "remainingCpuTime": -1,
+        "scheduling": {
+            "automaticRestart": "TRUE",
+            "onHostMaintenance": "MIGRATE",
+            "preemptible": "FALSE",
+        },
+        "serviceAccounts": {},
+        "tags": ["http-server", "https-server"],
+        "virtualClock": {"driftToken": "0"},
+        "zone": "projects/142954069479/zones/northamerica-northeast2-b",
+    },
+    "oslogin": {"authenticate": {"sessions": {}}},
+    "project": {
+        "attributes": {},
+        "numericProjectId": 204954049439,
+        "projectId": "my-project-internal",
+    },
+}
+
+try:
+    # Python 3
+    GCP_GCE_EXAMPLE_METADATA_PAYLOAD_BYTES = bytes(
+        json.dumps(GCP_GCE_EXAMPLE_METADATA_PAYLOAD), "utf-8"
+    )
+except TypeError:
+    # Python 2
+    GCP_GCE_EXAMPLE_METADATA_PAYLOAD_BYTES = bytes(
+        json.dumps(GCP_GCE_EXAMPLE_METADATA_PAYLOAD)
+    ).encode("utf-8")
+
+
+def test_is_aws_http_error():
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    response = MagicMock()
+    response.status = 405
+
+    CloudResourceContextIntegration.http = MagicMock()
+    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)
+
+    assert CloudResourceContextIntegration._is_aws() is False
+    assert CloudResourceContextIntegration.aws_token == ""
+
+
+def test_is_aws_ok():
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    response = MagicMock()
+    response.status = 200
+    response.data = b"something"
+    CloudResourceContextIntegration.http = MagicMock()
+    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)
+
+    assert CloudResourceContextIntegration._is_aws() is True
+    assert CloudResourceContextIntegration.aws_token == "something"
+
+    CloudResourceContextIntegration.http.request = MagicMock(
+        side_effect=Exception("Test")
+    )
+    assert CloudResourceContextIntegration._is_aws() is False
+
+
+def test_is_aws_exception():
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    CloudResourceContextIntegration.http = MagicMock()
+    CloudResourceContextIntegration.http.request = MagicMock(
+        side_effect=Exception("Test")
+    )
+
+    assert CloudResourceContextIntegration._is_aws() is False
+
+
+@pytest.mark.parametrize(
+    "http_status, response_data, expected_context",
+    [
+        [
+            405,
+            b"",
+            {
+                "cloud.provider": CLOUD_PROVIDER.AWS,
+                "cloud.platform": CLOUD_PLATFORM.AWS_EC2,
+            },
+        ],
+        [
+            200,
+            b"something-but-not-json",
+            {
+                "cloud.provider": CLOUD_PROVIDER.AWS,
+                "cloud.platform": CLOUD_PLATFORM.AWS_EC2,
+            },
+        ],
+        [
+            200,
+            AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD_BYTES,
+            {
+                "cloud.provider": "aws",
+                "cloud.platform": "aws_ec2",
+                "cloud.account.id": "298817902971",
+                "cloud.availability_zone": "us-east-1b",
+                "cloud.region": "us-east-1",
+                "host.id": "i-07d3301297fe0a55a",
+                "host.type": "t2.small",
+            },
+        ],
+    ],
+)
+def test_get_aws_context(http_status, response_data, expected_context):
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    response = MagicMock()
+    response.status = http_status
+    response.data = response_data
+
+    CloudResourceContextIntegration.http = MagicMock()
+    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)
+
+    assert CloudResourceContextIntegration._get_aws_context() == expected_context
+
+
+def test_is_gcp_http_error():
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    response = MagicMock()
+    response.status = 405
+    response.data = b'{"some": "json"}'
+    CloudResourceContextIntegration.http = MagicMock()
+    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)
+
+    assert CloudResourceContextIntegration._is_gcp() is False
+    assert CloudResourceContextIntegration.gcp_metadata is None
+
+
+def test_is_gcp_ok():
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    response = MagicMock()
+    response.status = 200
+    response.data = b'{"some": "json"}'
+    CloudResourceContextIntegration.http = MagicMock()
+    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)
+
+    assert CloudResourceContextIntegration._is_gcp() is True
+    assert CloudResourceContextIntegration.gcp_metadata == {"some": "json"}
+
+
+def test_is_gcp_exception():
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    CloudResourceContextIntegration.http = MagicMock()
+    CloudResourceContextIntegration.http.request = MagicMock(
+        side_effect=Exception("Test")
+    )
+    assert CloudResourceContextIntegration._is_gcp() is False
+
+
+@pytest.mark.parametrize(
+    "http_status, response_data, expected_context",
+    [
+        [
+            405,
+            None,
+            {
+                "cloud.provider": CLOUD_PROVIDER.GCP,
+                "cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE,
+            },
+        ],
+        [
+            200,
+            b"something-but-not-json",
+            {
+                "cloud.provider": CLOUD_PROVIDER.GCP,
+                "cloud.platform": CLOUD_PLATFORM.GCP_COMPUTE_ENGINE,
+            },
+        ],
+        [
+            200,
+            GCP_GCE_EXAMPLE_METADATA_PAYLOAD_BYTES,
+            {
+                "cloud.provider": "gcp",
+                "cloud.platform": "gcp_compute_engine",
+                "cloud.account.id": "my-project-internal",
+                "cloud.availability_zone": "northamerica-northeast2-b",
+                "host.id": 1535324527892303790,
+            },
+        ],
+    ],
+)
+def test_get_gcp_context(http_status, response_data, expected_context):
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    CloudResourceContextIntegration.gcp_metadata = None
+
+    response = MagicMock()
+    response.status = http_status
+    response.data = response_data
+
+    CloudResourceContextIntegration.http = MagicMock()
+    CloudResourceContextIntegration.http.request = MagicMock(return_value=response)
+
+    assert CloudResourceContextIntegration._get_gcp_context() == expected_context
+
+
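+# The parametrization below encodes the probe precedence: when both the AWS
+# and the GCP check report True, the provider is expected to be AWS.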
+@pytest.mark.parametrize(
+    "is_aws, is_gcp, expected_provider",
+    [
+        [False, False, ""],
+        [False, True, CLOUD_PROVIDER.GCP],
+        [True, False, CLOUD_PROVIDER.AWS],
+        [True, True, CLOUD_PROVIDER.AWS],
+    ],
+)
+def test_get_cloud_provider(is_aws, is_gcp, expected_provider):
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    CloudResourceContextIntegration._is_aws = MagicMock(return_value=is_aws)
+    CloudResourceContextIntegration._is_gcp = MagicMock(return_value=is_gcp)
+
+    assert CloudResourceContextIntegration._get_cloud_provider() == expected_provider
+
+
+@pytest.mark.parametrize(
+    "cloud_provider",
+    [
+        CLOUD_PROVIDER.ALIBABA,
+        CLOUD_PROVIDER.AZURE,
+        CLOUD_PROVIDER.IBM,
+        CLOUD_PROVIDER.TENCENT,
+    ],
+)
+def test_get_cloud_resource_context_unsupported_providers(cloud_provider):
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    CloudResourceContextIntegration._get_cloud_provider = MagicMock(
+        return_value=cloud_provider
+    )
+
+    assert CloudResourceContextIntegration._get_cloud_resource_context() == {}
+
+
+@pytest.mark.parametrize(
+    "cloud_provider",
+    [
+        CLOUD_PROVIDER.AWS,
+        CLOUD_PROVIDER.GCP,
+    ],
+)
+def test_get_cloud_resource_context_supported_providers(cloud_provider):
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    CloudResourceContextIntegration._get_cloud_provider = MagicMock(
+        return_value=cloud_provider
+    )
+
+    assert CloudResourceContextIntegration._get_cloud_resource_context() != {}
+
+
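+# Columns: provider detected at setup, context returned by the probe, whether
+# a warning is logged (unsupported provider), whether set_context is called.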
+@pytest.mark.parametrize(
+    "cloud_provider, cloud_resource_context, warning_called, set_context_called",
+    [
+        ["", {}, False, False],
+        [CLOUD_PROVIDER.AWS, {}, False, False],
+        [CLOUD_PROVIDER.GCP, {}, False, False],
+        [CLOUD_PROVIDER.AZURE, {}, True, False],
+        [CLOUD_PROVIDER.ALIBABA, {}, True, False],
+        [CLOUD_PROVIDER.IBM, {}, True, False],
+        [CLOUD_PROVIDER.TENCENT, {}, True, False],
+        ["", {"some": "context"}, False, True],
+        [CLOUD_PROVIDER.AWS, {"some": "context"}, False, True],
+        [CLOUD_PROVIDER.GCP, {"some": "context"}, False, True],
+    ],
+)
+def test_setup_once(
+    cloud_provider, cloud_resource_context, warning_called, set_context_called
+):
+    from sentry_sdk.integrations.cloud_resource_context import (
+        CloudResourceContextIntegration,
+    )
+
+    CloudResourceContextIntegration.cloud_provider = cloud_provider
+    CloudResourceContextIntegration._get_cloud_resource_context = MagicMock(
+        return_value=cloud_resource_context
+    )
+
+    with mock.patch(
+        "sentry_sdk.integrations.cloud_resource_context.set_context"
+    ) as fake_set_context:
+        with mock.patch(
+            "sentry_sdk.integrations.cloud_resource_context.logger.warning"
+        ) as fake_warning:
+            CloudResourceContextIntegration.setup_once()
+
+            if set_context_called:
+                fake_set_context.assert_called_once_with(
+                    "cloud_resource", cloud_resource_context
+                )
+            else:
+                fake_set_context.assert_not_called()
+
+            if warning_called:
+                assert fake_warning.call_count == 1
+            else:
+                fake_warning.assert_not_called()
diff --git a/tests/integrations/django/__init__.py b/tests/integrations/django/__init__.py
index d2555a8d48..41d72f92a5 100644
--- a/tests/integrations/django/__init__.py
+++ b/tests/integrations/django/__init__.py
@@ -1,3 +1,9 @@
+import os
+import sys
 import pytest
 
-django = pytest.importorskip("django")
+pytest.importorskip("django")
+
+# Add `django_helpers` to the module search path so that query source path names
+# can be tested relative to a module. See `test_query_source_with_module_in_search_path`.
+sys.path.insert(0, os.path.dirname(__file__))
diff --git a/tests/integrations/django/asgi/image.png b/tests/integrations/django/asgi/image.png
new file mode 100644
index 0000000000..8db277a9fc
Binary files /dev/null and b/tests/integrations/django/asgi/image.png differ
diff --git a/tests/integrations/django/asgi/test_asgi.py b/tests/integrations/django/asgi/test_asgi.py
index 0e6dd4f9ff..21a72e4a32 100644
--- a/tests/integrations/django/asgi/test_asgi.py
+++ b/tests/integrations/django/asgi/test_asgi.py
@@ -1,3 +1,7 @@
+import base64
+import json
+import os
+
 import django
 import pytest
 from channels.testing import HttpCommunicator
@@ -5,6 +9,16 @@
 from sentry_sdk.integrations.django import DjangoIntegration
 from tests.integrations.django.myapp.asgi import channels_application
 
+try:
+    from django.urls import reverse
+except ImportError:
+    from django.core.urlresolvers import reverse
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
 APPS = [channels_application]
 if django.VERSION >= (3, 0):
     from tests.integrations.django.myapp.asgi import asgi_application
@@ -14,6 +28,7 @@
 
 @pytest.mark.parametrize("application", APPS)
 @pytest.mark.asyncio
+@pytest.mark.forked
 async def test_basic(sentry_init, capture_events, application):
     sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
 
@@ -46,6 +61,7 @@ async def test_basic(sentry_init, capture_events, application):
 
 @pytest.mark.parametrize("application", APPS)
 @pytest.mark.asyncio
+@pytest.mark.forked
 @pytest.mark.skipif(
     django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
 )
@@ -70,11 +86,49 @@ async def test_async_views(sentry_init, capture_events, application):
     }
 
 
+@pytest.mark.parametrize("application", APPS)
+@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
 @pytest.mark.asyncio
+@pytest.mark.forked
 @pytest.mark.skipif(
     django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
 )
-async def test_async_views_concurrent_execution(sentry_init, capture_events, settings):
+async def test_active_thread_id(sentry_init, capture_envelopes, endpoint, application):
+    with mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0):
+        sentry_init(
+            integrations=[DjangoIntegration()],
+            traces_sample_rate=1.0,
+            _experiments={"profiles_sample_rate": 1.0},
+        )
+
+        envelopes = capture_envelopes()
+
+        comm = HttpCommunicator(application, "GET", endpoint)
+        response = await comm.get_response()
+        assert response["status"] == 200, response["body"]
+
+        await comm.wait()
+
+        data = json.loads(response["body"])
+
+        assert len(envelopes) == 1
+
+        profiles = [item for item in envelopes[0].items if item.type == "profile"]
+        assert len(profiles) == 1
+
+        for profile in profiles:
+            transactions = profile.payload.json["transactions"]
+            assert len(transactions) == 1
+            assert str(data["active"]) == transactions[0]["active_thread_id"]
+
+
+@pytest.mark.asyncio
+@pytest.mark.forked
+@pytest.mark.skipif(
+    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
+)
+async def test_async_views_concurrent_execution(sentry_init, settings):
     import asyncio
     import time
 
@@ -104,11 +158,12 @@ async def test_async_views_concurrent_execution(sentry_init, capture_events, set
 
 
 @pytest.mark.asyncio
+@pytest.mark.forked
 @pytest.mark.skipif(
     django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
 )
 async def test_async_middleware_that_is_function_concurrent_execution(
-    sentry_init, capture_events, settings
+    sentry_init, settings
 ):
     import asyncio
     import time
@@ -141,6 +196,7 @@ async def test_async_middleware_that_is_function_concurrent_execution(
 
 
 @pytest.mark.asyncio
+@pytest.mark.forked
 @pytest.mark.skipif(
     django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
 )
@@ -175,10 +231,290 @@ async def test_async_middleware_spans(
         render_span_tree(transaction)
         == """\
 - op="http.server": description=null
-  - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.__acall__"
-    - op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.__acall__"
-      - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.__acall__"
-        - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.__acall__"
-          - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
-          - op="django.view": description="async_message\""""
+  - op="event.django": description="django.db.reset_queries"
+  - op="event.django": description="django.db.close_old_connections"
+  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.__acall__"
+    - op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.__acall__"
+      - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.__acall__"
+        - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.__acall__"
+          - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
+          - op="view.render": description="async_message"
+  - op="event.django": description="django.db.close_old_connections"
+  - op="event.django": description="django.core.cache.close_caches"
+  - op="event.django": description="django.core.handlers.base.reset_urlconf\""""
+    )
+
+
+@pytest.mark.asyncio
+@pytest.mark.forked
+@pytest.mark.skipif(
+    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
+)
+async def test_has_trace_if_performance_enabled(sentry_init, capture_events):
+    sentry_init(integrations=[DjangoIntegration()], traces_sample_rate=1.0)
+
+    events = capture_events()
+
+    comm = HttpCommunicator(asgi_application, "GET", "/view-exc-with-msg")
+    response = await comm.get_response()
+    assert response["status"] == 500
+
+    # ASGI Django does not create transactions by default,
+    # so we do not have a transaction_event here.
+    (msg_event, error_event) = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
     )
+
+
+@pytest.mark.asyncio
+@pytest.mark.forked
+@pytest.mark.skipif(
+    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
+)
+async def test_has_trace_if_performance_disabled(sentry_init, capture_events):
+    sentry_init(integrations=[DjangoIntegration()])
+
+    events = capture_events()
+
+    comm = HttpCommunicator(asgi_application, "GET", "/view-exc-with-msg")
+    response = await comm.get_response()
+    assert response["status"] == 500
+
+    (msg_event, error_event) = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+@pytest.mark.asyncio
+@pytest.mark.forked
+@pytest.mark.skipif(
+    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
+)
+async def test_trace_from_headers_if_performance_enabled(sentry_init, capture_events):
+    sentry_init(integrations=[DjangoIntegration()], traces_sample_rate=1.0)
+
+    events = capture_events()
+
+    trace_id = "582b43a4192642f0b136d5159a501701"
+    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
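+    # sentry-trace header format: "<trace_id>-<parent_span_id>-<sampled>"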
+
+    comm = HttpCommunicator(
+        asgi_application,
+        "GET",
+        "/view-exc-with-msg",
+        headers=[(b"sentry-trace", sentry_trace_header.encode())],
+    )
+    response = await comm.get_response()
+    assert response["status"] == 500
+
+    # ASGI Django does not create transactions by default,
+    # so we do not have a transaction_event here.
+    (msg_event, error_event) = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
+
+
+@pytest.mark.asyncio
+@pytest.mark.forked
+@pytest.mark.skipif(
+    django.VERSION < (3, 1), reason="async views have been introduced in Django 3.1"
+)
+async def test_trace_from_headers_if_performance_disabled(sentry_init, capture_events):
+    sentry_init(integrations=[DjangoIntegration()])
+
+    events = capture_events()
+
+    trace_id = "582b43a4192642f0b136d5159a501701"
+    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
+
+    comm = HttpCommunicator(
+        asgi_application,
+        "GET",
+        "/view-exc-with-msg",
+        headers=[(b"sentry-trace", sentry_trace_header.encode())],
+    )
+    response = await comm.get_response()
+    assert response["status"] == 500
+
+    (msg_event, error_event) = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
+
+
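+# A multipart/form-data request body with a fixed boundary ("fd721ef49ea403a6"):
+# two text fields plus a base64-encoded PNG upload, used below to check how form
+# fields and file uploads show up in the captured event payload.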
+PICTURE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "image.png")
+BODY_FORM = """--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="username"\r\n\r\nJane\r\n--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="password"\r\n\r\nhello123\r\n--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="photo"; filename="image.png"\r\nContent-Type: image/png\r\nContent-Transfer-Encoding: base64\r\n\r\n{{image_data}}\r\n--fd721ef49ea403a6--\r\n""".replace(
+    "{{image_data}}", base64.b64encode(open(PICTURE, "rb").read()).decode("utf-8")
+).encode(
+    "utf-8"
+)
+BODY_FORM_CONTENT_LENGTH = str(len(BODY_FORM)).encode("utf-8")
+
+
+@pytest.mark.parametrize("application", APPS)
+@pytest.mark.parametrize(
+    "send_default_pii,method,headers,url_name,body,expected_data",
+    [
+        (
+            True,
+            "POST",
+            [(b"content-type", b"text/plain")],
+            "post_echo_async",
+            b"",
+            None,
+        ),
+        (
+            True,
+            "POST",
+            [(b"content-type", b"text/plain")],
+            "post_echo_async",
+            b"some raw text body",
+            "",
+        ),
+        (
+            True,
+            "POST",
+            [(b"content-type", b"application/json")],
+            "post_echo_async",
+            b'{"username":"xyz","password":"xyz"}',
+            {"username": "xyz", "password": "xyz"},
+        ),
+        (
+            True,
+            "POST",
+            [(b"content-type", b"application/xml")],
+            "post_echo_async",
+            b'<?xml version="1.0" encoding="UTF-8"?><root></root>',
+            "",
+        ),
+        (
+            True,
+            "POST",
+            [
+                (b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"),
+                (b"content-length", BODY_FORM_CONTENT_LENGTH),
+            ],
+            "post_echo_async",
+            BODY_FORM,
+            {"password": "hello123", "photo": "", "username": "Jane"},
+        ),
+        (
+            False,
+            "POST",
+            [(b"content-type", b"text/plain")],
+            "post_echo_async",
+            b"",
+            None,
+        ),
+        (
+            False,
+            "POST",
+            [(b"content-type", b"text/plain")],
+            "post_echo_async",
+            b"some raw text body",
+            "",
+        ),
+        (
+            False,
+            "POST",
+            [(b"content-type", b"application/json")],
+            "post_echo_async",
+            b'{"username":"xyz","password":"xyz"}',
+            {"username": "xyz", "password": "[Filtered]"},
+        ),
+        (
+            False,
+            "POST",
+            [(b"content-type", b"application/xml")],
+            "post_echo_async",
+            b'<?xml version="1.0" encoding="UTF-8"?><root></root>',
+            "",
+        ),
+        (
+            False,
+            "POST",
+            [
+                (b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"),
+                (b"content-length", BODY_FORM_CONTENT_LENGTH),
+            ],
+            "post_echo_async",
+            BODY_FORM,
+            {"password": "[Filtered]", "photo": "", "username": "Jane"},
+        ),
+    ],
+)
+@pytest.mark.asyncio
+@pytest.mark.forked
+@pytest.mark.skipif(
+    django.VERSION < (3, 1), reason="async views were introduced in Django 3.1"
+)
+async def test_asgi_request_body(
+    sentry_init,
+    capture_envelopes,
+    application,
+    send_default_pii,
+    method,
+    headers,
+    url_name,
+    body,
+    expected_data,
+):
+    sentry_init(
+        send_default_pii=send_default_pii,
+        integrations=[
+            DjangoIntegration(),
+        ],
+    )
+
+    envelopes = capture_envelopes()
+
+    comm = HttpCommunicator(
+        application,
+        method=method,
+        headers=headers,
+        path=reverse(url_name),
+        body=body,
+    )
+    response = await comm.get_response()
+    assert response["status"] == 200
+
+    await comm.wait()
+    assert response["body"] == body
+
+    (envelope,) = envelopes
+    event = envelope.get_event()
+
+    if expected_data is not None:
+        assert event["request"]["data"] == expected_data
+    else:
+        assert "data" not in event["request"]
diff --git a/tests/integrations/django/django_helpers/__init__.py b/tests/integrations/django/django_helpers/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/integrations/django/django_helpers/views.py b/tests/integrations/django/django_helpers/views.py
new file mode 100644
index 0000000000..a5759a5199
--- /dev/null
+++ b/tests/integrations/django/django_helpers/views.py
@@ -0,0 +1,9 @@
+from django.contrib.auth.models import User
+from django.http import HttpResponse
+from django.views.decorators.csrf import csrf_exempt
+
+
+@csrf_exempt
+def postgres_select_orm(request, *args, **kwargs):
+    user = User.objects.using("postgres").all().first()
+    return HttpResponse("ok {}".format(user))
diff --git a/tests/integrations/django/myapp/custom_urls.py b/tests/integrations/django/myapp/custom_urls.py
index 6dfa2ed2f1..bc703e0afe 100644
--- a/tests/integrations/django/myapp/custom_urls.py
+++ b/tests/integrations/django/myapp/custom_urls.py
@@ -13,6 +13,7 @@
     1. Import the include() function: from django.urls import include, path
     2. Add a URL to urlpatterns:  path('blog/', include('blog.urls'))
 """
+
 from __future__ import absolute_import
 
 try:
diff --git a/tests/integrations/django/myapp/routing.py b/tests/integrations/django/myapp/routing.py
index b5755549ec..30cab968ad 100644
--- a/tests/integrations/django/myapp/routing.py
+++ b/tests/integrations/django/myapp/routing.py
@@ -1,11 +1,18 @@
 import channels
-
-from channels.http import AsgiHandler
 from channels.routing import ProtocolTypeRouter
 
-if channels.__version__ < "3.0.0":
-    channels_handler = AsgiHandler
-else:
-    channels_handler = AsgiHandler()
+try:
+    from channels.http import AsgiHandler
+
+    if channels.__version__ < "3.0.0":
+        django_asgi_app = AsgiHandler
+    else:
+        django_asgi_app = AsgiHandler()
+
+except ModuleNotFoundError:
+    # Since channels 4.0, ASGI handling is done by Django itself
+    from django.core.asgi import get_asgi_application
+
+    django_asgi_app = get_asgi_application()
 
-application = ProtocolTypeRouter({"http": channels_handler})
+application = ProtocolTypeRouter({"http": django_asgi_app})
diff --git a/tests/integrations/django/myapp/settings.py b/tests/integrations/django/myapp/settings.py
index cc4d249082..ac06d9204e 100644
--- a/tests/integrations/django/myapp/settings.py
+++ b/tests/integrations/django/myapp/settings.py
@@ -10,7 +10,6 @@
 https://docs.djangoproject.com/en/2.0/ref/settings/
 """
 
-
 # We shouldn't access settings while setting up integrations. Initialize SDK
 # here to provoke any errors that might occur.
 import sentry_sdk
@@ -121,16 +120,24 @@ def middleware(request):
 try:
     import psycopg2  # noqa
 
+    db_engine = "django.db.backends.postgresql"
+    try:
+        from django.db.backends import postgresql  # noqa: F401
+    except ImportError:
+        db_engine = "django.db.backends.postgresql_psycopg2"
+
     DATABASES["postgres"] = {
-        "ENGINE": "django.db.backends.postgresql_psycopg2",
+        "ENGINE": db_engine,
         "NAME": os.environ["SENTRY_PYTHON_TEST_POSTGRES_NAME"],
         "USER": os.environ["SENTRY_PYTHON_TEST_POSTGRES_USER"],
         "PASSWORD": os.environ["SENTRY_PYTHON_TEST_POSTGRES_PASSWORD"],
-        "HOST": "localhost",
+        "HOST": os.environ.get("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost"),
         "PORT": 5432,
     }
 except (ImportError, KeyError):
-    pass
+    from sentry_sdk.utils import logger
+
+    logger.warning("No psycopg2 found, testing with SQLite.")
 
 
 # Password validation
diff --git a/tests/integrations/django/myapp/signals.py b/tests/integrations/django/myapp/signals.py
new file mode 100644
index 0000000000..3dab92b8d9
--- /dev/null
+++ b/tests/integrations/django/myapp/signals.py
@@ -0,0 +1,15 @@
+from django.core import signals
+from django.dispatch import receiver
+
+myapp_custom_signal = signals.Signal()
+myapp_custom_signal_silenced = signals.Signal()
+
+
+@receiver(myapp_custom_signal)
+def signal_handler(sender, **kwargs):
+    assert sender == "hello"
+
+
+@receiver(myapp_custom_signal_silenced)
+def signal_handler_silenced(sender, **kwargs):
+    assert sender == "hello"
diff --git a/tests/integrations/django/myapp/templates/trace_meta.html b/tests/integrations/django/myapp/templates/trace_meta.html
new file mode 100644
index 0000000000..139fd16101
--- /dev/null
+++ b/tests/integrations/django/myapp/templates/trace_meta.html
@@ -0,0 +1 @@
+{{ sentry_trace_meta }}
diff --git a/tests/integrations/django/myapp/urls.py b/tests/integrations/django/myapp/urls.py
index 8e43460bba..672a9b15ae 100644
--- a/tests/integrations/django/myapp/urls.py
+++ b/tests/integrations/django/myapp/urls.py
@@ -13,6 +13,7 @@
     1. Import the include() function: from django.urls import include, path
     2. Add a URL to urlpatterns:  path('blog/', include('blog.urls'))
 """
+
 from __future__ import absolute_import
 
 try:
@@ -25,9 +26,18 @@ def path(path, *args, **kwargs):
 
 
 from . import views
+from django_helpers import views as helper_views
 
 urlpatterns = [
     path("view-exc", views.view_exc, name="view_exc"),
+    path("view-exc-with-msg", views.view_exc_with_msg, name="view_exc_with_msg"),
+    path("cached-view", views.cached_view, name="cached_view"),
+    path("not-cached-view", views.not_cached_view, name="not_cached_view"),
+    path(
+        "view-with-cached-template-fragment",
+        views.view_with_cached_template_fragment,
+        name="view_with_cached_template_fragment",
+    ),
     path(
         "read-body-and-view-exc",
         views.read_body_and_view_exc,
@@ -47,7 +57,14 @@ def path(path, *args, **kwargs):
     path("template-exc", views.template_exc, name="template_exc"),
     path("template-test", views.template_test, name="template_test"),
     path("template-test2", views.template_test2, name="template_test2"),
+    path("template-test3", views.template_test3, name="template_test3"),
     path("postgres-select", views.postgres_select, name="postgres_select"),
+    path("postgres-select-slow", views.postgres_select_orm, name="postgres_select_orm"),
+    path(
+        "postgres-select-slow-from-supplement",
+        helper_views.postgres_select_orm,
+        name="postgres_select_slow_from_supplement",
+    ),
     path(
         "permission-denied-exc",
         views.permission_denied_exc,
@@ -58,6 +75,12 @@ def path(path, *args, **kwargs):
         views.csrf_hello_not_exempt,
         name="csrf_hello_not_exempt",
     ),
+    path("sync/thread_ids", views.thread_ids_sync, name="thread_ids_sync"),
+    path(
+        "send-myapp-custom-signal",
+        views.send_myapp_custom_signal,
+        name="send_myapp_custom_signal",
+    ),
 ]
 
 # async views
@@ -67,6 +90,16 @@ def path(path, *args, **kwargs):
 if views.my_async_view is not None:
     urlpatterns.append(path("my_async_view", views.my_async_view, name="my_async_view"))
 
+if views.thread_ids_async is not None:
+    urlpatterns.append(
+        path("async/thread_ids", views.thread_ids_async, name="thread_ids_async")
+    )
+
+if views.post_echo_async is not None:
+    urlpatterns.append(
+        path("post_echo_async", views.post_echo_async, name="post_echo_async")
+    )
+
 # rest framework
 try:
     urlpatterns.append(
@@ -80,6 +113,9 @@ def path(path, *args, **kwargs):
         )
     )
     urlpatterns.append(path("rest-hello", views.rest_hello, name="rest_hello"))
+    urlpatterns.append(
+        path("rest-json-response", views.rest_json_response, name="rest_json_response")
+    )
     urlpatterns.append(
         path(
             "rest-permission-denied-exc",
diff --git a/tests/integrations/django/myapp/views.py b/tests/integrations/django/myapp/views.py
index 02c67ca150..294895430b 100644
--- a/tests/integrations/django/myapp/views.py
+++ b/tests/integrations/django/myapp/views.py
@@ -1,16 +1,27 @@
+import json
+import threading
+
 from django import VERSION
 from django.contrib.auth import login
 from django.contrib.auth.models import User
 from django.core.exceptions import PermissionDenied
 from django.http import HttpResponse, HttpResponseNotFound, HttpResponseServerError
 from django.shortcuts import render
+from django.template import Context, Template
 from django.template.response import TemplateResponse
 from django.utils.decorators import method_decorator
+from django.views.decorators.cache import cache_page
 from django.views.decorators.csrf import csrf_exempt
 from django.views.generic import ListView
 
+from tests.integrations.django.myapp.signals import (
+    myapp_custom_signal,
+    myapp_custom_signal_silenced,
+)
+
 try:
     from rest_framework.decorators import api_view
+    from rest_framework.response import Response
 
     @api_view(["POST"])
     def rest_framework_exc(request):
@@ -29,11 +40,16 @@ def rest_hello(request):
     def rest_permission_denied_exc(request):
         raise PermissionDenied("bye")
 
+    @api_view(["GET"])
+    def rest_json_response(request):
+        return Response(dict(ok=True))
+
 except ImportError:
     pass
 
 
 import sentry_sdk
+from sentry_sdk import capture_message
 
 
 @csrf_exempt
@@ -41,6 +57,34 @@ def view_exc(request):
     1 / 0
 
 
+@csrf_exempt
+def view_exc_with_msg(request):
+    capture_message("oops")
+    1 / 0
+
+
+@cache_page(60)
+def cached_view(request):
+    return HttpResponse("ok")
+
+
+def not_cached_view(request):
+    return HttpResponse("ok")
+
+
+def view_with_cached_template_fragment(request):
+    template = Template(
+        """{% load cache %}
+        Not cached content goes here.
+        {% cache 500 some_identifier %}
+            And here some cached content.
+        {% endcache %}
+        """
+    )
+    rendered = template.render(Context({}))
+    return HttpResponse(rendered)
+
+
 # This is a "class based view" as previously found in the sentry codebase. The
 # interesting property of this one is that csrf_exempt, as a class attribute,
 # is not in __dict__, so regular use of functools.wraps will not forward the
@@ -136,6 +180,15 @@ def template_test2(request, *args, **kwargs):
     )
 
 
+@csrf_exempt
+def template_test3(request, *args, **kwargs):
+    from sentry_sdk import Hub
+
+    hub = Hub.current
+    capture_message(hub.get_traceparent() + "\n" + hub.get_baggage())
+    return render(request, "trace_meta.html", {})
+
+
 @csrf_exempt
 def postgres_select(request, *args, **kwargs):
     from django.db import connections
@@ -145,6 +198,12 @@ def postgres_select(request, *args, **kwargs):
     return HttpResponse("ok")
 
 
+@csrf_exempt
+def postgres_select_orm(request, *args, **kwargs):
+    user = User.objects.using("postgres").all().first()
+    return HttpResponse("ok {}".format(user))
+
+
 @csrf_exempt
 def permission_denied_exc(*args, **kwargs):
     raise PermissionDenied("bye")
@@ -154,6 +213,16 @@ def csrf_hello_not_exempt(*args, **kwargs):
     return HttpResponse("ok")
 
 
+def thread_ids_sync(*args, **kwargs):
+    response = json.dumps(
+        {
+            "main": threading.main_thread().ident,
+            "active": threading.current_thread().ident,
+        }
+    )
+    return HttpResponse(response)
+
+
 if VERSION >= (3, 1):
     # Use exec to produce valid Python 2
     exec(
@@ -168,6 +237,31 @@ def csrf_hello_not_exempt(*args, **kwargs):
     await asyncio.sleep(1)
     return HttpResponse('Hello World')"""
     )
+
+    exec(
+        """async def thread_ids_async(request):
+    response = json.dumps({
+        "main": threading.main_thread().ident,
+        "active": threading.current_thread().ident,
+    })
+    return HttpResponse(response)"""
+    )
+
+    exec(
+        """async def post_echo_async(request):
+    sentry_sdk.capture_message("hi")
+    return HttpResponse(request.body)
+post_echo_async.csrf_exempt = True"""
+    )
 else:
     async_message = None
     my_async_view = None
+    thread_ids_async = None
+    post_echo_async = None
+
+
+@csrf_exempt
+def send_myapp_custom_signal(request):
+    myapp_custom_signal.send(sender="hello")
+    myapp_custom_signal_silenced.send(sender="hello")
+    return HttpResponse("ok")
diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py
index 6106131375..1efe4be278 100644
--- a/tests/integrations/django/test_basic.py
+++ b/tests/integrations/django/test_basic.py
@@ -1,44 +1,38 @@
 from __future__ import absolute_import
 
-import pytest
-import pytest_django
 import json
+import os
+import random
+import re
+import pytest
+from functools import partial
 
 from werkzeug.test import Client
+
 from django import VERSION as DJANGO_VERSION
 from django.contrib.auth.models import User
 from django.core.management import execute_from_command_line
 from django.db.utils import OperationalError, ProgrammingError, DataError
 
-from sentry_sdk.integrations.executing import ExecutingIntegration
-
 try:
     from django.urls import reverse
 except ImportError:
     from django.core.urlresolvers import reverse
 
+from sentry_sdk._compat import PY2, PY310
 from sentry_sdk import capture_message, capture_exception, configure_scope
-from sentry_sdk.integrations.django import DjangoIntegration
-from functools import partial
-
+from sentry_sdk.consts import SPANDATA
+from sentry_sdk.integrations.django import DjangoIntegration, _set_db_data
+from sentry_sdk.integrations.django.signals_handlers import _get_receiver_name
+from sentry_sdk.integrations.django.caching import _get_span_description
+from sentry_sdk.integrations.executing import ExecutingIntegration
+from sentry_sdk.tracing import Span
+from tests.conftest import ApproxDict, unpack_werkzeug_response
 from tests.integrations.django.myapp.wsgi import application
+from tests.integrations.django.myapp.signals import myapp_custom_signal_silenced
+from tests.integrations.django.utils import pytest_mark_django_db_decorator
 
-# Hack to prevent from experimental feature introduced in version `4.3.0` in `pytest-django` that
-# requires explicit database allow from failing the test
-pytest_mark_django_db_decorator = partial(pytest.mark.django_db)
-try:
-    pytest_version = tuple(map(int, pytest_django.__version__.split(".")))
-    if pytest_version > (4, 2, 0):
-        pytest_mark_django_db_decorator = partial(
-            pytest.mark.django_db, databases="__all__"
-        )
-except ValueError:
-    if "dev" in pytest_django.__version__:
-        pytest_mark_django_db_decorator = partial(
-            pytest.mark.django_db, databases="__all__"
-        )
-except AttributeError:
-    pass
+DJANGO_VERSION = DJANGO_VERSION[:2]
 
 
 @pytest.fixture
@@ -46,6 +40,36 @@ def client():
     return Client(application)
 
 
+@pytest.fixture
+def use_django_caching(settings):
+    settings.CACHES = {
+        "default": {
+            "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
+            "LOCATION": "unique-snowflake-%s" % random.randint(1, 1000000),
+        }
+    }
+
+
+@pytest.fixture
+def use_django_caching_with_middlewares(settings):
+    settings.CACHES = {
+        "default": {
+            "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
+            "LOCATION": "unique-snowflake-%s" % random.randint(1, 1000000),
+        }
+    }
+    if hasattr(settings, "MIDDLEWARE"):
+        middleware = settings.MIDDLEWARE
+    elif hasattr(settings, "MIDDLEWARE_CLASSES"):
+        middleware = settings.MIDDLEWARE_CLASSES
+    else:
+        middleware = None
+
+    if middleware is not None:
+        middleware.insert(0, "django.middleware.cache.UpdateCacheMiddleware")
+        middleware.append("django.middleware.cache.FetchFromCacheMiddleware")
+
+
 def test_view_exceptions(sentry_init, client, capture_exceptions, capture_events):
     sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
     exceptions = capture_exceptions()
@@ -111,8 +135,9 @@ def test_middleware_exceptions(sentry_init, client, capture_exceptions):
 def test_request_captured(sentry_init, client, capture_events):
     sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
     events = capture_events()
-    content, status, headers = client.get(reverse("message"))
-    assert b"".join(content) == b"ok"
+    content, status, headers = unpack_werkzeug_response(client.get(reverse("message")))
+
+    assert content == b"ok"
 
     (event,) = events
     assert event["transaction"] == "/message"
@@ -132,7 +157,9 @@ def test_transaction_with_class_view(sentry_init, client, capture_events):
         send_default_pii=True,
     )
     events = capture_events()
-    content, status, headers = client.head(reverse("classbased"))
+    content, status, headers = unpack_werkzeug_response(
+        client.head(reverse("classbased"))
+    )
     assert status.lower() == "200 ok"
 
     (event,) = events
@@ -143,18 +170,124 @@ def test_transaction_with_class_view(sentry_init, client, capture_events):
     assert event["message"] == "hi"
 
 
+def test_has_trace_if_performance_enabled(sentry_init, client, capture_events):
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+    client.head(reverse("view_exc_with_msg"))
+
+    (msg_event, error_event, transaction_event) = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert transaction_event["contexts"]["trace"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+def test_has_trace_if_performance_disabled(sentry_init, client, capture_events):
+    sentry_init(
+        integrations=[DjangoIntegration()],
+    )
+    events = capture_events()
+    client.head(reverse("view_exc_with_msg"))
+
+    (msg_event, error_event) = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+def test_trace_from_headers_if_performance_enabled(sentry_init, client, capture_events):
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        traces_sample_rate=1.0,
+    )
+
+    events = capture_events()
+
+    trace_id = "582b43a4192642f0b136d5159a501701"
+    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
+
+    client.head(
+        reverse("view_exc_with_msg"), headers={"sentry-trace": sentry_trace_header}
+    )
+
+    (msg_event, error_event, transaction_event) = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert transaction_event["contexts"]["trace"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id
+
+
+def test_trace_from_headers_if_performance_disabled(
+    sentry_init, client, capture_events
+):
+    sentry_init(
+        integrations=[DjangoIntegration()],
+    )
+
+    events = capture_events()
+
+    trace_id = "582b43a4192642f0b136d5159a501701"
+    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
+
+    client.head(
+        reverse("view_exc_with_msg"), headers={"sentry-trace": sentry_trace_header}
+    )
+
+    (msg_event, error_event) = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
+
+
 @pytest.mark.forked
 @pytest.mark.django_db
 def test_user_captured(sentry_init, client, capture_events):
     sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
     events = capture_events()
-    content, status, headers = client.get(reverse("mylogin"))
-    assert b"".join(content) == b"ok"
+    content, status, headers = unpack_werkzeug_response(client.get(reverse("mylogin")))
+    assert content == b"ok"
 
     assert not events
 
-    content, status, headers = client.get(reverse("message"))
-    assert b"".join(content) == b"ok"
+    content, status, headers = unpack_werkzeug_response(client.get(reverse("message")))
+    assert content == b"ok"
 
     (event,) = events
 
@@ -191,7 +324,7 @@ def test_queryset_repr(sentry_init, capture_events):
 def test_custom_error_handler_request_context(sentry_init, client, capture_events):
     sentry_init(integrations=[DjangoIntegration()])
     events = capture_events()
-    content, status, headers = client.post("/404")
+    content, status, headers = unpack_werkzeug_response(client.post("/404"))
     assert status.lower() == "404 not found"
 
     (event,) = events
@@ -211,9 +344,9 @@ def test_500(sentry_init, client, capture_events):
     sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
     events = capture_events()
 
-    content, status, headers = client.get("/view-exc")
+    content, status, headers = unpack_werkzeug_response(client.get("/view-exc"))
     assert status.lower() == "500 internal server error"
-    content = b"".join(content).decode("utf-8")
+    content = content.decode("utf-8")
 
     (event,) = events
     event_id = event["event_id"]
@@ -299,6 +432,27 @@ def test_sql_dict_query_params(sentry_init, capture_events):
     assert crumb["data"]["db.params"] == {"my_foo": 10}
 
 
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+def test_response_trace(sentry_init, client, capture_events, render_span_tree):
+    pytest.importorskip("rest_framework")
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        traces_sample_rate=1.0,
+    )
+
+    events = capture_events()
+    content, status, headers = unpack_werkzeug_response(
+        client.get(reverse("rest_json_response"))
+    )
+    assert status == "200 OK"
+
+    assert (
+        '- op="view.response.render": description="serialize response"'
+        in render_span_tree(events[0])
+    )
+
+
 @pytest.mark.parametrize(
     "query",
     [
@@ -424,17 +578,24 @@ def test_django_connect_trace(sentry_init, client, capture_events, render_span_t
 
     events = capture_events()
 
-    content, status, headers = client.get(reverse("postgres_select"))
+    content, status, headers = unpack_werkzeug_response(
+        client.get(reverse("postgres_select"))
+    )
     assert status == "200 OK"
 
-    assert '- op="db": description="connect"' in render_span_tree(events[0])
+    (event,) = events
+
+    for span in event["spans"]:
+        if span.get("op") == "db":
+            data = span.get("data")
+            assert data.get(SPANDATA.DB_SYSTEM) == "postgresql"
+
+    assert '- op="db": description="connect"' in render_span_tree(event)
 
 
 @pytest.mark.forked
 @pytest_mark_django_db_decorator(transaction=True)
-def test_django_connect_breadcrumbs(
-    sentry_init, client, capture_events, render_span_tree
-):
+def test_django_connect_breadcrumbs(sentry_init, capture_events):
     """
     Verify we record a breadcrumb when opening a new database.
     """
@@ -468,53 +629,127 @@ def test_django_connect_breadcrumbs(
     ]
 
 
+@pytest.mark.forked
+@pytest_mark_django_db_decorator(transaction=True)
+def test_db_connection_span_data(sentry_init, client, capture_events):
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        send_default_pii=True,
+        traces_sample_rate=1.0,
+    )
+    from django.db import connections
+
+    if "postgres" not in connections:
+        pytest.skip("postgres tests disabled")
+
+    # trigger Django to open a new connection by marking the existing one as None.
+    connections["postgres"].connection = None
+
+    events = capture_events()
+
+    content, status, headers = unpack_werkzeug_response(
+        client.get(reverse("postgres_select"))
+    )
+    assert status == "200 OK"
+
+    (event,) = events
+
+    for span in event["spans"]:
+        if span.get("op") == "db":
+            data = span.get("data")
+            assert data.get(SPANDATA.DB_SYSTEM) == "postgresql"
+            conn_params = connections["postgres"].get_connection_params()
+            assert data.get(SPANDATA.DB_NAME) is not None
+            assert data.get(SPANDATA.DB_NAME) == (
+                conn_params.get("database") or conn_params.get("dbname")
+            )
+            assert data.get(SPANDATA.SERVER_ADDRESS) == os.environ.get(
+                "SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost"
+            )
+            assert data.get(SPANDATA.SERVER_PORT) == "5432"
+
+
+def test_set_db_data_custom_backend():
+    class DummyBackend(object):
+        # https://github.com/mongodb/mongo-python-driver/blob/6ffae5522c960252b8c9adfe2a19b29ff28187cb/pymongo/collection.py#L126
+        def __getattr__(self, attr):
+            return self
+
+        def __call__(self):
+            raise TypeError
+
+        def get_connection_params(self):
+            return {}
+
+    try:
+        _set_db_data(Span(), DummyBackend())
+    except TypeError:
+        pytest.fail("A TypeError was raised")
+
+
 @pytest.mark.parametrize(
-    "transaction_style,expected_transaction",
+    "transaction_style,client_url,expected_transaction,expected_source,expected_response",
     [
-        ("function_name", "tests.integrations.django.myapp.views.message"),
-        ("url", "/message"),
+        (
+            "function_name",
+            "/message",
+            "tests.integrations.django.myapp.views.message",
+            "component",
+            b"ok",
+        ),
+        ("url", "/message", "/message", "route", b"ok"),
+        ("url", "/404", "/404", "url", b"404"),
     ],
 )
 def test_transaction_style(
-    sentry_init, client, capture_events, transaction_style, expected_transaction
+    sentry_init,
+    client,
+    capture_events,
+    transaction_style,
+    client_url,
+    expected_transaction,
+    expected_source,
+    expected_response,
 ):
     sentry_init(
         integrations=[DjangoIntegration(transaction_style=transaction_style)],
         send_default_pii=True,
     )
     events = capture_events()
-    content, status, headers = client.get(reverse("message"))
-    assert b"".join(content) == b"ok"
+    content, status, headers = unpack_werkzeug_response(client.get(client_url))
+    assert content == expected_response
 
     (event,) = events
     assert event["transaction"] == expected_transaction
+    assert event["transaction_info"] == {"source": expected_source}
 
 
 def test_request_body(sentry_init, client, capture_events):
     sentry_init(integrations=[DjangoIntegration()])
     events = capture_events()
-    content, status, headers = client.post(
-        reverse("post_echo"), data=b"heyooo", content_type="text/plain"
+    content, status, headers = unpack_werkzeug_response(
+        client.post(reverse("post_echo"), data=b"heyooo", content_type="text/plain")
     )
     assert status.lower() == "200 ok"
-    assert b"".join(content) == b"heyooo"
+    assert content == b"heyooo"
 
     (event,) = events
 
     assert event["message"] == "hi"
     assert event["request"]["data"] == ""
     assert event["_meta"]["request"]["data"][""] == {
-        "len": 6,
-        "rem": [["!raw", "x", 0, 6]],
+        "rem": [["!raw", "x"]],
     }
 
     del events[:]
 
-    content, status, headers = client.post(
-        reverse("post_echo"), data=b'{"hey": 42}', content_type="application/json"
+    content, status, headers = unpack_werkzeug_response(
+        client.post(
+            reverse("post_echo"), data=b'{"hey": 42}', content_type="application/json"
+        )
     )
     assert status.lower() == "200 ok"
-    assert b"".join(content) == b'{"hey": 42}'
+    assert content == b'{"hey": 42}'
 
     (event,) = events
 
@@ -528,10 +763,12 @@ def test_read_request(sentry_init, client, capture_events):
     sentry_init(integrations=[DjangoIntegration()])
     events = capture_events()
 
-    content, status, headers = client.post(
-        reverse("read_body_and_view_exc"),
-        data=b'{"hey": 42}',
-        content_type="application/json",
+    content, status, headers = unpack_werkzeug_response(
+        client.post(
+            reverse("read_body_and_view_exc"),
+            data=b'{"hey": 42}',
+            content_type="application/json",
+        )
     )
 
     assert status.lower() == "500 internal server error"
@@ -541,6 +778,29 @@ def test_read_request(sentry_init, client, capture_events):
     assert "data" not in event["request"]
 
 
+def test_template_tracing_meta(sentry_init, client, capture_events):
+    sentry_init(integrations=[DjangoIntegration()])
+    events = capture_events()
+
+    content, _, _ = unpack_werkzeug_response(client.get(reverse("template_test3")))
+    rendered_meta = content.decode("utf-8")
+
+    traceparent, baggage = events[0]["message"].split("\n")
+    assert traceparent != ""
+    assert baggage != ""
+
+    match = re.match(
+        r'^<meta name="sentry-trace" content="([^\"]*)"><meta name="baggage" content="([^\"]*)">\n',
+        rendered_meta,
+    )
+    assert match is not None
+    assert match.group(1) == traceparent
+
+    # Python 2 does not preserve dict order, so compare baggage items sorted
+    rendered_baggage = match.group(2)
+    assert sorted(rendered_baggage.split(",")) == sorted(baggage.split(","))
+
+
 @pytest.mark.parametrize("with_executing_integration", [[], [ExecutingIntegration()]])
 def test_template_exception(
     sentry_init, client, capture_events, with_executing_integration
@@ -548,7 +808,9 @@ def test_template_exception(
     sentry_init(integrations=[DjangoIntegration()] + with_executing_integration)
     events = capture_events()
 
-    content, status, headers = client.get(reverse("template_exc"))
+    content, status, headers = unpack_werkzeug_response(
+        client.get(reverse("template_exc"))
+    )
     assert status.lower() == "500 internal server error"
 
     (event,) = events
@@ -567,7 +829,6 @@ def test_template_exception(
 
     assert template_frame["post_context"] == ["11\n", "12\n", "13\n", "14\n", "15\n"]
     assert template_frame["lineno"] == 10
-    assert template_frame["in_app"]
     assert template_frame["filename"].endswith("error.html")
 
     filenames = [
@@ -615,7 +876,7 @@ def test_rest_framework_basic(
     elif ct == "application/x-www-form-urlencoded":
         client.post(reverse(route), data=body)
     else:
-        assert False
+        raise AssertionError("unreachable")
 
     (error,) = exceptions
     assert isinstance(error, ZeroDivisionError)
@@ -637,7 +898,7 @@ def test_does_not_capture_403(sentry_init, client, capture_events, endpoint):
     sentry_init(integrations=[DjangoIntegration()])
     events = capture_events()
 
-    _content, status, _headers = client.get(reverse(endpoint))
+    _, status, _ = unpack_werkzeug_response(client.get(reverse(endpoint)))
     assert status.lower() == "403 forbidden"
 
     assert not events
@@ -651,85 +912,171 @@ def test_render_spans(sentry_init, client, capture_events, render_span_tree):
     views_tests = [
         (
             reverse("template_test2"),
-            '- op="django.template.render": description="[user_name.html, ...]"',
+            '- op="template.render": description="[user_name.html, ...]"',
         ),
     ]
     if DJANGO_VERSION >= (1, 7):
         views_tests.append(
             (
                 reverse("template_test"),
-                '- op="django.template.render": description="user_name.html"',
+                '- op="template.render": description="user_name.html"',
             ),
         )
 
     for url, expected_line in views_tests:
         events = capture_events()
-        _content, status, _headers = client.get(url)
+        client.get(url)
         transaction = events[0]
         assert expected_line in render_span_tree(transaction)
 
 
+if DJANGO_VERSION >= (1, 10):
+    EXPECTED_MIDDLEWARE_SPANS = """\
+- op="http.server": description=null
+  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.__call__"
+    - op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.__call__"
+      - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.__call__"
+        - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.__call__"
+          - op="middleware.django": description="tests.integrations.django.myapp.settings.TestFunctionMiddleware.__call__"
+            - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
+            - op="view.render": description="message"\
+"""
+else:
+    EXPECTED_MIDDLEWARE_SPANS = """\
+- op="http.server": description=null
+  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.process_request"
+  - op="middleware.django": description="django.contrib.auth.middleware.AuthenticationMiddleware.process_request"
+  - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.process_request"
+  - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
+  - op="view.render": description="message"
+  - op="middleware.django": description="tests.integrations.django.myapp.settings.TestMiddleware.process_response"
+  - op="middleware.django": description="django.middleware.csrf.CsrfViewMiddleware.process_response"
+  - op="middleware.django": description="django.contrib.sessions.middleware.SessionMiddleware.process_response"\
+"""
+
+
 def test_middleware_spans(sentry_init, client, capture_events, render_span_tree):
     sentry_init(
-        integrations=[DjangoIntegration()],
+        integrations=[
+            DjangoIntegration(signals_spans=False),
+        ],
         traces_sample_rate=1.0,
-        _experiments={"record_sql_params": True},
     )
     events = capture_events()
 
-    _content, status, _headers = client.get(reverse("message"))
+    client.get(reverse("message"))
 
     message, transaction = events
 
     assert message["message"] == "hi"
+    assert render_span_tree(transaction) == EXPECTED_MIDDLEWARE_SPANS
 
-    if DJANGO_VERSION >= (1, 10):
-        assert (
-            render_span_tree(transaction)
-            == """\
-- op="http.server": description=null
-  - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.__call__"
-    - op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.__call__"
-      - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.__call__"
-        - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.__call__"
-          - op="django.middleware": description="tests.integrations.django.myapp.settings.TestFunctionMiddleware.__call__"
-            - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
-            - op="django.view": description="message"\
-"""
-        )
 
-    else:
-        assert (
-            render_span_tree(transaction)
-            == """\
+def test_middleware_spans_disabled(sentry_init, client, capture_events):
+    sentry_init(
+        integrations=[
+            DjangoIntegration(middleware_spans=False, signals_spans=False),
+        ],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    client.get(reverse("message"))
+
+    message, transaction = events
+
+    assert message["message"] == "hi"
+    assert not len(transaction["spans"])
+
+
+EXPECTED_SIGNALS_SPANS = """\
 - op="http.server": description=null
-  - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.process_request"
-  - op="django.middleware": description="django.contrib.auth.middleware.AuthenticationMiddleware.process_request"
-  - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.process_request"
-  - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_view"
-  - op="django.view": description="message"
-  - op="django.middleware": description="tests.integrations.django.myapp.settings.TestMiddleware.process_response"
-  - op="django.middleware": description="django.middleware.csrf.CsrfViewMiddleware.process_response"
-  - op="django.middleware": description="django.contrib.sessions.middleware.SessionMiddleware.process_response"\
+  - op="event.django": description="django.db.reset_queries"
+  - op="event.django": description="django.db.close_old_connections"\
 """
-        )
 
 
-def test_middleware_spans_disabled(sentry_init, client, capture_events):
+def test_signals_spans(sentry_init, client, capture_events, render_span_tree):
     sentry_init(
-        integrations=[DjangoIntegration(middleware_spans=False)], traces_sample_rate=1.0
+        integrations=[
+            DjangoIntegration(middleware_spans=False),
+        ],
+        traces_sample_rate=1.0,
     )
     events = capture_events()
 
-    _content, status, _headers = client.get(reverse("message"))
+    client.get(reverse("message"))
 
     message, transaction = events
 
     assert message["message"] == "hi"
+    assert render_span_tree(transaction) == EXPECTED_SIGNALS_SPANS
+
+    assert transaction["spans"][0]["op"] == "event.django"
+    assert transaction["spans"][0]["description"] == "django.db.reset_queries"
+
+    assert transaction["spans"][1]["op"] == "event.django"
+    assert transaction["spans"][1]["description"] == "django.db.close_old_connections"
 
+
+def test_signals_spans_disabled(sentry_init, client, capture_events):
+    sentry_init(
+        integrations=[
+            DjangoIntegration(middleware_spans=False, signals_spans=False),
+        ],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    client.get(reverse("message"))
+
+    message, transaction = events
+
+    assert message["message"] == "hi"
     assert not transaction["spans"]
 
 
+EXPECTED_SIGNALS_SPANS_FILTERED = """\
+- op="http.server": description=null
+  - op="event.django": description="django.db.reset_queries"
+  - op="event.django": description="django.db.close_old_connections"
+  - op="event.django": description="tests.integrations.django.myapp.signals.signal_handler"\
+"""
+
+
+def test_signals_spans_filtering(sentry_init, client, capture_events, render_span_tree):
+    sentry_init(
+        integrations=[
+            DjangoIntegration(
+                middleware_spans=False,
+                signals_denylist=[
+                    myapp_custom_signal_silenced,
+                ],
+            ),
+        ],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    client.get(reverse("send_myapp_custom_signal"))
+
+    (transaction,) = events
+
+    assert render_span_tree(transaction) == EXPECTED_SIGNALS_SPANS_FILTERED
+
+    assert transaction["spans"][0]["op"] == "event.django"
+    assert transaction["spans"][0]["description"] == "django.db.reset_queries"
+
+    assert transaction["spans"][1]["op"] == "event.django"
+    assert transaction["spans"][1]["description"] == "django.db.close_old_connections"
+
+    assert transaction["spans"][2]["op"] == "event.django"
+    assert (
+        transaction["spans"][2]["description"]
+        == "tests.integrations.django.myapp.signals.signal_handler"
+    )
+
+
 def test_csrf(sentry_init, client):
     """
     Assert that CSRF view decorator works even with the view wrapped in our own
@@ -738,23 +1085,33 @@ def test_csrf(sentry_init, client):
 
     sentry_init(integrations=[DjangoIntegration()])
 
-    content, status, _headers = client.post(reverse("csrf_hello_not_exempt"))
+    content, status, _headers = unpack_werkzeug_response(
+        client.post(reverse("csrf_hello_not_exempt"))
+    )
     assert status.lower() == "403 forbidden"
 
-    content, status, _headers = client.post(reverse("sentryclass_csrf"))
+    content, status, _headers = unpack_werkzeug_response(
+        client.post(reverse("sentryclass_csrf"))
+    )
     assert status.lower() == "403 forbidden"
 
-    content, status, _headers = client.post(reverse("sentryclass"))
+    content, status, _headers = unpack_werkzeug_response(
+        client.post(reverse("sentryclass"))
+    )
     assert status.lower() == "200 ok"
-    assert b"".join(content) == b"ok"
+    assert content == b"ok"
 
-    content, status, _headers = client.post(reverse("classbased"))
+    content, status, _headers = unpack_werkzeug_response(
+        client.post(reverse("classbased"))
+    )
     assert status.lower() == "200 ok"
-    assert b"".join(content) == b"ok"
+    assert content == b"ok"
 
-    content, status, _headers = client.post(reverse("message"))
+    content, status, _headers = unpack_werkzeug_response(
+        client.post(reverse("message"))
+    )
     assert status.lower() == "200 ok"
-    assert b"".join(content) == b"ok"
+    assert content == b"ok"
 
 
 @pytest.mark.skipif(DJANGO_VERSION < (2, 0), reason="Requires Django > 2.0")
@@ -773,15 +1130,15 @@ def test_custom_urlconf_middleware(
     sentry_init(integrations=[DjangoIntegration()], traces_sample_rate=1.0)
     events = capture_events()
 
-    content, status, _headers = client.get("/custom/ok")
+    content, status, _headers = unpack_werkzeug_response(client.get("/custom/ok"))
     assert status.lower() == "200 ok"
-    assert b"".join(content) == b"custom ok"
+    assert content == b"custom ok"
 
     event = events.pop(0)
     assert event["transaction"] == "/custom/ok"
     assert "custom_urlconf_middleware" in render_span_tree(event)
 
-    _content, status, _headers = client.get("/custom/exc")
+    _content, status, _headers = unpack_werkzeug_response(client.get("/custom/exc"))
     assert status.lower() == "500 internal server error"
 
     error_event, transaction_event = events
@@ -791,3 +1148,262 @@ def test_custom_urlconf_middleware(
     assert "custom_urlconf_middleware" in render_span_tree(transaction_event)
 
     settings.MIDDLEWARE.pop(0)
+
+
+def test_get_receiver_name():
+    def dummy(a, b):
+        return a + b
+
+    name = _get_receiver_name(dummy)
+
+    if PY2:
+        assert name == "tests.integrations.django.test_basic.dummy"
+    else:
+        assert (
+            name
+            == "tests.integrations.django.test_basic.test_get_receiver_name..dummy"
+        )
+
+    a_partial = partial(dummy)
+    name = _get_receiver_name(a_partial)
+    if PY310:
+        assert name == "functools.partial()"
+    else:
+        assert name == "partial()"
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
+def test_cache_spans_disabled_middleware(
+    sentry_init, client, capture_events, use_django_caching_with_middlewares
+):
+    sentry_init(
+        integrations=[
+            DjangoIntegration(
+                cache_spans=False,
+                middleware_spans=False,
+                signals_spans=False,
+            )
+        ],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    client.get(reverse("not_cached_view"))
+    client.get(reverse("not_cached_view"))
+
+    (first_event, second_event) = events
+    assert len(first_event["spans"]) == 0
+    assert len(second_event["spans"]) == 0
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
+def test_cache_spans_disabled_decorator(
+    sentry_init, client, capture_events, use_django_caching
+):
+    sentry_init(
+        integrations=[
+            DjangoIntegration(
+                cache_spans=False,
+                middleware_spans=False,
+                signals_spans=False,
+            )
+        ],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    client.get(reverse("cached_view"))
+    client.get(reverse("cached_view"))
+
+    (first_event, second_event) = events
+    assert len(first_event["spans"]) == 0
+    assert len(second_event["spans"]) == 0
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
+def test_cache_spans_disabled_templatetag(
+    sentry_init, client, capture_events, use_django_caching
+):
+    sentry_init(
+        integrations=[
+            DjangoIntegration(
+                cache_spans=False,
+                middleware_spans=False,
+                signals_spans=False,
+            )
+        ],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    client.get(reverse("view_with_cached_template_fragment"))
+    client.get(reverse("view_with_cached_template_fragment"))
+
+    (first_event, second_event) = events
+    assert len(first_event["spans"]) == 0
+    assert len(second_event["spans"]) == 0
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
+def test_cache_spans_middleware(
+    sentry_init, client, capture_events, use_django_caching_with_middlewares
+):
+    sentry_init(
+        integrations=[
+            DjangoIntegration(
+                cache_spans=True,
+                middleware_spans=False,
+                signals_spans=False,
+            )
+        ],
+        traces_sample_rate=1.0,
+    )
+
+    client.application.load_middleware()
+    events = capture_events()
+
+    client.get(reverse("not_cached_view"))
+    client.get(reverse("not_cached_view"))
+
+    (first_event, second_event) = events
+    assert len(first_event["spans"]) == 1
+    assert first_event["spans"][0]["op"] == "cache.get_item"
+    assert first_event["spans"][0]["description"].startswith(
+        "get views.decorators.cache.cache_header."
+    )
+    assert first_event["spans"][0]["data"] == ApproxDict({"cache.hit": False})
+
+    assert len(second_event["spans"]) == 2
+    assert second_event["spans"][0]["op"] == "cache.get_item"
+    assert second_event["spans"][0]["description"].startswith(
+        "get views.decorators.cache.cache_header."
+    )
+    assert second_event["spans"][0]["data"] == ApproxDict({"cache.hit": False})
+
+    assert second_event["spans"][1]["op"] == "cache.get_item"
+    assert second_event["spans"][1]["description"].startswith(
+        "get views.decorators.cache.cache_page."
+    )
+    assert second_event["spans"][1]["data"]["cache.hit"]
+    assert "cache.item_size" in second_event["spans"][1]["data"]
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
+def test_cache_spans_decorator(sentry_init, client, capture_events, use_django_caching):
+    sentry_init(
+        integrations=[
+            DjangoIntegration(
+                cache_spans=True,
+                middleware_spans=False,
+                signals_spans=False,
+            )
+        ],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    client.get(reverse("cached_view"))
+    client.get(reverse("cached_view"))
+
+    (first_event, second_event) = events
+    assert len(first_event["spans"]) == 1
+    assert first_event["spans"][0]["op"] == "cache.get_item"
+    assert first_event["spans"][0]["description"].startswith(
+        "get views.decorators.cache.cache_header."
+    )
+    assert first_event["spans"][0]["data"] == ApproxDict({"cache.hit": False})
+
+    assert len(second_event["spans"]) == 2
+    assert second_event["spans"][0]["op"] == "cache.get_item"
+    assert second_event["spans"][0]["description"].startswith(
+        "get views.decorators.cache.cache_header."
+    )
+    assert second_event["spans"][0]["data"] == ApproxDict({"cache.hit": False})
+
+    assert second_event["spans"][1]["op"] == "cache.get_item"
+    assert second_event["spans"][1]["description"].startswith(
+        "get views.decorators.cache.cache_page."
+    )
+    assert second_event["spans"][1]["data"]["cache.hit"]
+    assert "cache.item_size" in second_event["spans"][1]["data"]
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+@pytest.mark.skipif(DJANGO_VERSION < (1, 9), reason="Requires Django >= 1.9")
+def test_cache_spans_templatetag(
+    sentry_init, client, capture_events, use_django_caching
+):
+    sentry_init(
+        integrations=[
+            DjangoIntegration(
+                cache_spans=True,
+                middleware_spans=False,
+                signals_spans=False,
+            )
+        ],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    client.get(reverse("view_with_cached_template_fragment"))
+    client.get(reverse("view_with_cached_template_fragment"))
+
+    (first_event, second_event) = events
+    assert len(first_event["spans"]) == 1
+    assert first_event["spans"][0]["op"] == "cache.get_item"
+    assert first_event["spans"][0]["description"].startswith(
+        "get template.cache.some_identifier."
+    )
+    assert first_event["spans"][0]["data"] == ApproxDict({"cache.hit": False})
+
+    assert len(second_event["spans"]) == 1
+    assert second_event["spans"][0]["op"] == "cache.get_item"
+    assert second_event["spans"][0]["description"].startswith(
+        "get template.cache.some_identifier."
+    )
+    assert second_event["spans"][0]["data"]["cache.hit"]
+    assert "cache.item_size" in second_event["spans"][0]["data"]
+
+
+@pytest.mark.parametrize(
+    "method_name, args, kwargs, expected_description",
+    [
+        ("get", None, None, "get "),
+        ("get", [], {}, "get "),
+        ("get", ["bla", "blub", "foo"], {}, "get bla"),
+        (
+            "get_many",
+            [["bla 1", "bla 2", "bla 3"], "blub", "foo"],
+            {},
+            "get_many ['bla 1', 'bla 2', 'bla 3']",
+        ),
+        (
+            "get_many",
+            [["bla 1", "bla 2", "bla 3"], "blub", "foo"],
+            {"key": "bar"},
+            "get_many ['bla 1', 'bla 2', 'bla 3']",
+        ),
+        ("get", [], {"key": "bar"}, "get bar"),
+        (
+            "get",
+            "something",
+            {},
+            "get s",
+        ),  # this should never happen; just make sure we do not raise an exception in that case.
+    ],
+)
+def test_cache_spans_get_span_description(
+    method_name, args, kwargs, expected_description
+):
+    assert _get_span_description(method_name, args, kwargs) == expected_description
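A behavior sketch consistent with the parametrized cases above (illustrative only, not the SDK's actual `_get_span_description`): the description is the method name followed by the first positional argument, falling back to the `key` keyword argument, then to an empty string:

def get_span_description(method_name, args, kwargs):
    # First positional argument wins; otherwise fall back to kwargs["key"], else "".
    key = args[0] if args else (kwargs or {}).get("key", "")
    return "{} {}".format(method_name, key)

assert get_span_description("get", [], {"key": "bar"}) == "get bar"
assert get_span_description("get", None, None) == "get "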
diff --git a/tests/integrations/django/test_data_scrubbing.py b/tests/integrations/django/test_data_scrubbing.py
new file mode 100644
index 0000000000..128da9b97e
--- /dev/null
+++ b/tests/integrations/django/test_data_scrubbing.py
@@ -0,0 +1,84 @@
+import pytest
+
+from werkzeug.test import Client
+
+from sentry_sdk.integrations.django import DjangoIntegration
+from tests.conftest import werkzeug_set_cookie
+from tests.integrations.django.myapp.wsgi import application
+from tests.integrations.django.utils import pytest_mark_django_db_decorator
+
+try:
+    from django.urls import reverse
+except ImportError:
+    from django.core.urlresolvers import reverse
+
+
+@pytest.fixture
+def client():
+    return Client(application)
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+def test_scrub_django_session_cookies_removed(
+    sentry_init,
+    client,
+    capture_events,
+):
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=False)
+    events = capture_events()
+    werkzeug_set_cookie(client, "localhost", "sessionid", "123")
+    werkzeug_set_cookie(client, "localhost", "csrftoken", "456")
+    werkzeug_set_cookie(client, "localhost", "foo", "bar")
+    client.get(reverse("view_exc"))
+
+    (event,) = events
+    assert "cookies" not in event["request"]
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+def test_scrub_django_session_cookies_filtered(
+    sentry_init,
+    client,
+    capture_events,
+):
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
+    events = capture_events()
+    werkzeug_set_cookie(client, "localhost", "sessionid", "123")
+    werkzeug_set_cookie(client, "localhost", "csrftoken", "456")
+    werkzeug_set_cookie(client, "localhost", "foo", "bar")
+    client.get(reverse("view_exc"))
+
+    (event,) = events
+    assert event["request"]["cookies"] == {
+        "sessionid": "[Filtered]",
+        "csrftoken": "[Filtered]",
+        "foo": "bar",
+    }
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator()
+def test_scrub_django_custom_session_cookies_filtered(
+    sentry_init,
+    client,
+    capture_events,
+    settings,
+):
+    settings.SESSION_COOKIE_NAME = "my_sess"
+    settings.CSRF_COOKIE_NAME = "csrf_secret"
+
+    sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
+    events = capture_events()
+    werkzeug_set_cookie(client, "localhost", "my_sess", "123")
+    werkzeug_set_cookie(client, "localhost", "csrf_secret", "456")
+    werkzeug_set_cookie(client, "localhost", "foo", "bar")
+    client.get(reverse("view_exc"))
+
+    (event,) = events
+    assert event["request"]["cookies"] == {
+        "my_sess": "[Filtered]",
+        "csrf_secret": "[Filtered]",
+        "foo": "bar",
+    }
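The scrubbing behavior these three tests pin down, sketched for reference (illustrative, not the SDK implementation): without `send_default_pii`, cookies are dropped from the event entirely; with it, the session and CSRF cookies (per `SESSION_COOKIE_NAME` / `CSRF_COOKIE_NAME`) are replaced by a placeholder while other cookies pass through:

def scrub_cookies(cookies, send_default_pii, sensitive=("sessionid", "csrftoken")):
    if not send_default_pii:
        return None  # the "cookies" key is omitted from the request payload
    return {k: "[Filtered]" if k in sensitive else v for k, v in cookies.items()}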
diff --git a/tests/integrations/django/test_db_query_data.py b/tests/integrations/django/test_db_query_data.py
new file mode 100644
index 0000000000..92b1415f78
--- /dev/null
+++ b/tests/integrations/django/test_db_query_data.py
@@ -0,0 +1,465 @@
+from __future__ import absolute_import
+
+import os
+import pytest
+from datetime import datetime
+
+from sentry_sdk._compat import PY2
+from django import VERSION as DJANGO_VERSION
+from django.db import connections
+
+try:
+    from django.urls import reverse
+except ImportError:
+    from django.core.urlresolvers import reverse
+
+from werkzeug.test import Client
+
+from sentry_sdk.consts import SPANDATA
+from sentry_sdk.integrations.django import DjangoIntegration
+from sentry_sdk.tracing_utils import record_sql_queries
+
+from tests.conftest import unpack_werkzeug_response
+from tests.integrations.django.utils import pytest_mark_django_db_decorator
+from tests.integrations.django.myapp.wsgi import application
+
+try:
+    from unittest import mock
+except ImportError:
+    import mock
+
+
+@pytest.fixture
+def client():
+    return Client(application)
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator(transaction=True)
+def test_query_source_disabled(sentry_init, client, capture_events):
+    sentry_options = {
+        "integrations": [DjangoIntegration()],
+        "send_default_pii": True,
+        "traces_sample_rate": 1.0,
+        "enable_db_query_source": False,
+        "db_query_source_threshold_ms": 0,
+    }
+
+    sentry_init(**sentry_options)
+
+    if "postgres" not in connections:
+        pytest.skip("postgres tests disabled")
+
+    # Trigger Django to open a new connection by setting the existing one to None.
+    connections["postgres"].connection = None
+
+    events = capture_events()
+
+    _, status, _ = unpack_werkzeug_response(client.get(reverse("postgres_select_orm")))
+    assert status == "200 OK"
+
+    (event,) = events
+    for span in event["spans"]:
+        if span.get("op") == "db" and "auth_user" in span.get("description"):
+            data = span.get("data", {})
+
+            assert SPANDATA.CODE_LINENO not in data
+            assert SPANDATA.CODE_NAMESPACE not in data
+            assert SPANDATA.CODE_FILEPATH not in data
+            assert SPANDATA.CODE_FUNCTION not in data
+            break
+    else:
+        raise AssertionError("No db span found")
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator(transaction=True)
+@pytest.mark.parametrize("enable_db_query_source", [None, True])
+def test_query_source_enabled(
+    sentry_init, client, capture_events, enable_db_query_source
+):
+    sentry_options = {
+        "integrations": [DjangoIntegration()],
+        "send_default_pii": True,
+        "traces_sample_rate": 1.0,
+        "db_query_source_threshold_ms": 0,
+    }
+
+    if enable_db_query_source is not None:
+        sentry_options["enable_db_query_source"] = enable_db_query_source
+
+    sentry_init(**sentry_options)
+
+    if "postgres" not in connections:
+        pytest.skip("postgres tests disabled")
+
+    # Trigger Django to open a new connection by setting the existing one to None.
+    connections["postgres"].connection = None
+
+    events = capture_events()
+
+    _, status, _ = unpack_werkzeug_response(client.get(reverse("postgres_select_orm")))
+    assert status == "200 OK"
+
+    (event,) = events
+    for span in event["spans"]:
+        if span.get("op") == "db" and "auth_user" in span.get("description"):
+            data = span.get("data", {})
+
+            assert SPANDATA.CODE_LINENO in data
+            assert SPANDATA.CODE_NAMESPACE in data
+            assert SPANDATA.CODE_FILEPATH in data
+            assert SPANDATA.CODE_FUNCTION in data
+
+            break
+    else:
+        raise AssertionError("No db span found")
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator(transaction=True)
+def test_query_source(sentry_init, client, capture_events):
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        send_default_pii=True,
+        traces_sample_rate=1.0,
+        enable_db_query_source=True,
+        db_query_source_threshold_ms=0,
+    )
+
+    if "postgres" not in connections:
+        pytest.skip("postgres tests disabled")
+
+    # Trigger Django to open a new connection by setting the existing one to None.
+    connections["postgres"].connection = None
+
+    events = capture_events()
+
+    _, status, _ = unpack_werkzeug_response(client.get(reverse("postgres_select_orm")))
+    assert status == "200 OK"
+
+    (event,) = events
+    for span in event["spans"]:
+        if span.get("op") == "db" and "auth_user" in span.get("description"):
+            data = span.get("data", {})
+
+            assert SPANDATA.CODE_LINENO in data
+            assert SPANDATA.CODE_NAMESPACE in data
+            assert SPANDATA.CODE_FILEPATH in data
+            assert SPANDATA.CODE_FUNCTION in data
+
+            assert type(data.get(SPANDATA.CODE_LINENO)) == int
+            assert data.get(SPANDATA.CODE_LINENO) > 0
+
+            assert (
+                data.get(SPANDATA.CODE_NAMESPACE)
+                == "tests.integrations.django.myapp.views"
+            )
+            assert data.get(SPANDATA.CODE_FILEPATH).endswith(
+                "tests/integrations/django/myapp/views.py"
+            )
+
+            is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
+            assert is_relative_path
+
+            assert data.get(SPANDATA.CODE_FUNCTION) == "postgres_select_orm"
+
+            break
+    else:
+        raise AssertionError("No db span found")
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator(transaction=True)
+def test_query_source_with_module_in_search_path(sentry_init, client, capture_events):
+    """
+    Test that query source is relative to the path of the module it ran in
+    """
+    client = Client(application)
+
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        send_default_pii=True,
+        traces_sample_rate=1.0,
+        enable_db_query_source=True,
+        db_query_source_threshold_ms=0,
+    )
+
+    if "postgres" not in connections:
+        pytest.skip("postgres tests disabled")
+
+    # Trigger Django to open a new connection by setting the existing one to None.
+    connections["postgres"].connection = None
+
+    events = capture_events()
+
+    _, status, _ = unpack_werkzeug_response(
+        client.get(reverse("postgres_select_slow_from_supplement"))
+    )
+    assert status == "200 OK"
+
+    (event,) = events
+    for span in event["spans"]:
+        if span.get("op") == "db" and "auth_user" in span.get("description"):
+            data = span.get("data", {})
+
+            assert SPANDATA.CODE_LINENO in data
+            assert SPANDATA.CODE_NAMESPACE in data
+            assert SPANDATA.CODE_FILEPATH in data
+            assert SPANDATA.CODE_FUNCTION in data
+
+            assert type(data.get(SPANDATA.CODE_LINENO)) == int
+            assert data.get(SPANDATA.CODE_LINENO) > 0
+
+            if not PY2:
+                assert data.get(SPANDATA.CODE_NAMESPACE) == "django_helpers.views"
+                assert data.get(SPANDATA.CODE_FILEPATH) == "django_helpers/views.py"
+
+            is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
+            assert is_relative_path
+
+            assert data.get(SPANDATA.CODE_FUNCTION) == "postgres_select_orm"
+
+            break
+    else:
+        raise AssertionError("No db span found")
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator(transaction=True)
+def test_query_source_with_in_app_exclude(sentry_init, client, capture_events):
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        send_default_pii=True,
+        traces_sample_rate=1.0,
+        enable_db_query_source=True,
+        db_query_source_threshold_ms=0,
+        in_app_exclude=["tests.integrations.django.myapp.views"],
+    )
+
+    if "postgres" not in connections:
+        pytest.skip("postgres tests disabled")
+
+    # Trigger Django to open a new connection by setting the existing one to None.
+    connections["postgres"].connection = None
+
+    events = capture_events()
+
+    _, status, _ = unpack_werkzeug_response(client.get(reverse("postgres_select_orm")))
+    assert status == "200 OK"
+
+    (event,) = events
+    for span in event["spans"]:
+        if span.get("op") == "db" and "auth_user" in span.get("description"):
+            data = span.get("data", {})
+
+            assert SPANDATA.CODE_LINENO in data
+            assert SPANDATA.CODE_NAMESPACE in data
+            assert SPANDATA.CODE_FILEPATH in data
+            assert SPANDATA.CODE_FUNCTION in data
+
+            assert type(data.get(SPANDATA.CODE_LINENO)) == int
+            assert data.get(SPANDATA.CODE_LINENO) > 0
+
+            if DJANGO_VERSION >= (1, 11):
+                assert (
+                    data.get(SPANDATA.CODE_NAMESPACE)
+                    == "tests.integrations.django.myapp.settings"
+                )
+                assert data.get(SPANDATA.CODE_FILEPATH).endswith(
+                    "tests/integrations/django/myapp/settings.py"
+                )
+                assert data.get(SPANDATA.CODE_FUNCTION) == "middleware"
+            else:
+                assert (
+                    data.get(SPANDATA.CODE_NAMESPACE)
+                    == "tests.integrations.django.test_db_query_data"
+                )
+                assert data.get(SPANDATA.CODE_FILEPATH).endswith(
+                    "tests/integrations/django/test_db_query_data.py"
+                )
+                assert (
+                    data.get(SPANDATA.CODE_FUNCTION)
+                    == "test_query_source_with_in_app_exclude"
+                )
+
+            break
+    else:
+        raise AssertionError("No db span found")
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator(transaction=True)
+def test_query_source_with_in_app_include(sentry_init, client, capture_events):
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        send_default_pii=True,
+        traces_sample_rate=1.0,
+        enable_db_query_source=True,
+        db_query_source_threshold_ms=0,
+        in_app_include=["django"],
+    )
+
+    if "postgres" not in connections:
+        pytest.skip("postgres tests disabled")
+
+    # Trigger Django to open a new connection by setting the existing one to None.
+    connections["postgres"].connection = None
+
+    events = capture_events()
+
+    _, status, _ = unpack_werkzeug_response(client.get(reverse("postgres_select_orm")))
+    assert status == "200 OK"
+
+    (event,) = events
+    for span in event["spans"]:
+        if span.get("op") == "db" and "auth_user" in span.get("description"):
+            data = span.get("data", {})
+
+            assert SPANDATA.CODE_LINENO in data
+            assert SPANDATA.CODE_NAMESPACE in data
+            assert SPANDATA.CODE_FILEPATH in data
+            assert SPANDATA.CODE_FUNCTION in data
+
+            assert type(data.get(SPANDATA.CODE_LINENO)) == int
+            assert data.get(SPANDATA.CODE_LINENO) > 0
+
+            assert data.get(SPANDATA.CODE_NAMESPACE) == "django.db.models.sql.compiler"
+            assert data.get(SPANDATA.CODE_FILEPATH).endswith(
+                "django/db/models/sql/compiler.py"
+            )
+            assert data.get(SPANDATA.CODE_FUNCTION) == "execute_sql"
+            break
+    else:
+        raise AssertionError("No db span found")
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator(transaction=True)
+def test_no_query_source_if_duration_too_short(sentry_init, client, capture_events):
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        send_default_pii=True,
+        traces_sample_rate=1.0,
+        enable_db_query_source=True,
+        db_query_source_threshold_ms=100,
+    )
+
+    if "postgres" not in connections:
+        pytest.skip("postgres tests disabled")
+
+    # Trigger Django to open a new connection by setting the existing one to None.
+    connections["postgres"].connection = None
+
+    events = capture_events()
+
+    class fake_record_sql_queries:  # noqa: N801
+        def __init__(self, *args, **kwargs):
+            with record_sql_queries(*args, **kwargs) as span:
+                self.span = span
+
+            self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0)
+            self.span.timestamp = datetime(2024, 1, 1, microsecond=99999)
+
+        def __enter__(self):
+            return self.span
+
+        def __exit__(self, type, value, traceback):
+            pass
+
+    with mock.patch(
+        "sentry_sdk.integrations.django.record_sql_queries",
+        fake_record_sql_queries,
+    ):
+        _, status, _ = unpack_werkzeug_response(
+            client.get(reverse("postgres_select_orm"))
+        )
+
+    assert status == "200 OK"
+
+    (event,) = events
+    for span in event["spans"]:
+        if span.get("op") == "db" and "auth_user" in span.get("description"):
+            data = span.get("data", {})
+
+            assert SPANDATA.CODE_LINENO not in data
+            assert SPANDATA.CODE_NAMESPACE not in data
+            assert SPANDATA.CODE_FILEPATH not in data
+            assert SPANDATA.CODE_FUNCTION not in data
+
+            break
+    else:
+        raise AssertionError("No db span found")
+
+
+@pytest.mark.forked
+@pytest_mark_django_db_decorator(transaction=True)
+def test_query_source_if_duration_over_threshold(sentry_init, client, capture_events):
+    sentry_init(
+        integrations=[DjangoIntegration()],
+        send_default_pii=True,
+        traces_sample_rate=1.0,
+        enable_db_query_source=True,
+        db_query_source_threshold_ms=100,
+    )
+
+    if "postgres" not in connections:
+        pytest.skip("postgres tests disabled")
+
+    # Trigger Django to open a new connection by setting the existing one to None.
+    connections["postgres"].connection = None
+
+    events = capture_events()
+
+    class fake_record_sql_queries:  # noqa: N801
+        def __init__(self, *args, **kwargs):
+            with record_sql_queries(*args, **kwargs) as span:
+                self.span = span
+
+            self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0)
+            self.span.timestamp = datetime(2024, 1, 1, microsecond=101000)
+
+        def __enter__(self):
+            return self.span
+
+        def __exit__(self, type, value, traceback):
+            pass
+
+    with mock.patch(
+        "sentry_sdk.integrations.django.record_sql_queries",
+        fake_record_sql_queries,
+    ):
+        _, status, _ = unpack_werkzeug_response(
+            client.get(reverse("postgres_select_orm"))
+        )
+
+    assert status == "200 OK"
+
+    (event,) = events
+    for span in event["spans"]:
+        if span.get("op") == "db" and "auth_user" in span.get("description"):
+            data = span.get("data", {})
+
+            assert SPANDATA.CODE_LINENO in data
+            assert SPANDATA.CODE_NAMESPACE in data
+            assert SPANDATA.CODE_FILEPATH in data
+            assert SPANDATA.CODE_FUNCTION in data
+
+            assert type(data.get(SPANDATA.CODE_LINENO)) == int
+            assert data.get(SPANDATA.CODE_LINENO) > 0
+
+            assert (
+                data.get(SPANDATA.CODE_NAMESPACE)
+                == "tests.integrations.django.myapp.views"
+            )
+            assert data.get(SPANDATA.CODE_FILEPATH).endswith(
+                "tests/integrations/django/myapp/views.py"
+            )
+
+            is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
+            assert is_relative_path
+
+            assert data.get(SPANDATA.CODE_FUNCTION) == "postgres_select_orm"
+            break
+    else:
+        raise AssertionError("No db span found")
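This file exercises two switches: enable_db_query_source, which test_query_source_enabled shows is on by default (the option is simply left unset for the None case), and db_query_source_threshold_ms, below which no source location is attached to the span. A sketch of the gate these tests imply, with option names taken from the tests themselves (illustrative, not the SDK's actual code path):

    def should_attach_query_source(options, duration_seconds):
        # Feature toggle; the tests treat a missing value as enabled.
        if options.get("enable_db_query_source") is False:
            return False
        # Only queries slower than the threshold carry code.* span data.
        # The 100ms default here is an assumption; the tests always set it explicitly.
        threshold_ms = options.get("db_query_source_threshold_ms", 100)
        return duration_seconds * 1000 > threshold_ms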
diff --git a/tests/integrations/django/test_transactions.py b/tests/integrations/django/test_transactions.py
index a87dc621a9..c9914c8ec5 100644
--- a/tests/integrations/django/test_transactions.py
+++ b/tests/integrations/django/test_transactions.py
@@ -3,43 +3,55 @@
 import pytest
 import django
 
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+
+# django<2.0 has only `url` with regex-based patterns.
+# django>=2.0 renames `url` to `re_path`, and additionally introduces `path`
+# for new-style URL patterns, e.g. <int:pk>.
 if django.VERSION >= (2, 0):
-    # TODO: once we stop supporting django < 2, use the real name of this
-    # function (re_path)
-    from django.urls import re_path as url
+    from django.urls import path, re_path
+    from django.urls.converters import PathConverter
     from django.conf.urls import include
 else:
-    from django.conf.urls import url, include
+    from django.conf.urls import url as re_path, include
 
 if django.VERSION < (1, 9):
-    included_url_conf = (url(r"^foo/bar/(?P<param>[\w]+)", lambda x: ""),), "", ""
+    included_url_conf = (re_path(r"^foo/bar/(?P<param>[\w]+)", lambda x: ""),), "", ""
 else:
-    included_url_conf = ((url(r"^foo/bar/(?P<param>[\w]+)", lambda x: ""),), "")
+    included_url_conf = ((re_path(r"^foo/bar/(?P<param>[\w]+)", lambda x: ""),), "")
 
 from sentry_sdk.integrations.django.transactions import RavenResolver
 
 
 example_url_conf = (
-    url(r"^api/(?P[\w_-]+)/store/$", lambda x: ""),
-    url(r"^api/(?P(v1|v2))/author/$", lambda x: ""),
-    url(r"^report/", lambda x: ""),
-    url(r"^example/", include(included_url_conf)),
+    re_path(r"^api/(?P[\w_-]+)/store/$", lambda x: ""),
+    re_path(r"^api/(?P(v1|v2))/author/$", lambda x: ""),
+    re_path(
+        r"^api/(?P[^\/]+)/product/(?P(?:\d+|[A-Fa-f0-9-]{32,36}))/$",
+        lambda x: "",
+    ),
+    re_path(r"^report/", lambda x: ""),
+    re_path(r"^example/", include(included_url_conf)),
 )
 
 
-def test_legacy_resolver_no_match():
+def test_resolver_no_match():
     resolver = RavenResolver()
     result = resolver.resolve("/foo/bar", example_url_conf)
-    assert result == "/foo/bar"
+    assert result is None
 
 
-def test_legacy_resolver_complex_match():
+def test_resolver_re_path_complex_match():
     resolver = RavenResolver()
     result = resolver.resolve("/api/1234/store/", example_url_conf)
     assert result == "/api/{project_id}/store/"
 
 
-def test_legacy_resolver_complex_either_match():
+def test_resolver_re_path_complex_either_match():
     resolver = RavenResolver()
     result = resolver.resolve("/api/v1/author/", example_url_conf)
     assert result == "/api/{version}/author/"
@@ -47,17 +59,65 @@ def test_legacy_resolver_complex_either_match():
     assert result == "/api/{version}/author/"
 
 
-def test_legacy_resolver_included_match():
+def test_resolver_re_path_included_match():
     resolver = RavenResolver()
     result = resolver.resolve("/example/foo/bar/baz", example_url_conf)
     assert result == "/example/foo/bar/{param}"
 
 
-@pytest.mark.skipif(django.VERSION < (2, 0), reason="Requires Django > 2.0")
-def test_legacy_resolver_newstyle_django20_urlconf():
-    from django.urls import path
+def test_resolver_re_path_multiple_groups():
+    resolver = RavenResolver()
+    result = resolver.resolve(
+        "/api/myproject/product/cb4ef1caf3554c34ae134f3c1b3d605f/", example_url_conf
+    )
+    assert result == "/api/{project_id}/product/{pid}/"
+
 
+@pytest.mark.skipif(
+    django.VERSION < (2, 0),
+    reason="Django>=2.0 required for  patterns",
+)
+def test_resolver_path_group():
     url_conf = (path("api/v2//store/", lambda x: ""),)
     resolver = RavenResolver()
     result = resolver.resolve("/api/v2/1234/store/", url_conf)
     assert result == "/api/v2/{project_id}/store/"
+
+
+@pytest.mark.skipif(
+    django.VERSION < (2, 0),
+    reason="Django>=2.0 required for  patterns",
+)
+def test_resolver_path_multiple_groups():
+    url_conf = (path("api/v2//product/", lambda x: ""),)
+    resolver = RavenResolver()
+    result = resolver.resolve("/api/v2/myproject/product/5689", url_conf)
+    assert result == "/api/v2/{project_id}/product/{pid}"
+
+
+@pytest.mark.skipif(
+    django.VERSION < (2, 0),
+    reason="Django>=2.0 required for  patterns",
+)
+def test_resolver_path_complex_path():
+    class CustomPathConverter(PathConverter):
+        regex = r"[^/]+(/[^/]+){0,2}"
+
+    with mock.patch(
+        "django.urls.resolvers.get_converter", return_value=CustomPathConverter
+    ):
+        url_conf = (path("api/v3/", lambda x: ""),)
+        resolver = RavenResolver()
+        result = resolver.resolve("/api/v3/abc/def/ghi", url_conf)
+        assert result == "/api/v3/{my_path}"
+
+
+@pytest.mark.skipif(
+    django.VERSION < (2, 0),
+    reason="Django>=2.0 required for  patterns",
+)
+def test_resolver_path_no_converter():
+    url_conf = (path("api/v4/", lambda x: ""),)
+    resolver = RavenResolver()
+    result = resolver.resolve("/api/v4/myproject", url_conf)
+    assert result == "/api/v4/{project_id}"
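For readers unfamiliar with RavenResolver: the assertions above describe matching the request path against the urlconf and replacing each named group with a {name} placeholder. A rough, illustrative reduction of that idea for the re_path cases (the real resolver also walks included urlconfs and handles path() converters):

    import re

    _NAMED_GROUP = re.compile(r"\(\?P<(\w+)>(?:[^()]|\([^()]*\))*\)")

    def template_from_regex(pattern):
        # r"^api/(?P<version>(v1|v2))/author/$" -> "/api/{version}/author/"
        templated = _NAMED_GROUP.sub(lambda m: "{%s}" % m.group(1), pattern)
        return "/" + templated.strip("^$")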
diff --git a/tests/integrations/django/utils.py b/tests/integrations/django/utils.py
new file mode 100644
index 0000000000..8f68c8fa14
--- /dev/null
+++ b/tests/integrations/django/utils.py
@@ -0,0 +1,22 @@
+from functools import partial
+
+import pytest
+import pytest_django
+
+
+# Hack to prevent the experimental feature introduced in `pytest-django` 4.3.0,
+# which requires database access to be allowed explicitly, from failing the tests.
+pytest_mark_django_db_decorator = partial(pytest.mark.django_db)
+try:
+    pytest_version = tuple(map(int, pytest_django.__version__.split(".")))
+    if pytest_version > (4, 2, 0):
+        pytest_mark_django_db_decorator = partial(
+            pytest.mark.django_db, databases="__all__"
+        )
+except ValueError:
+    if "dev" in pytest_django.__version__:
+        pytest_mark_django_db_decorator = partial(
+            pytest.mark.django_db, databases="__all__"
+        )
+except AttributeError:
+    pass
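The nested except ValueError branch exists because development builds of pytest-django carry non-numeric version parts, which break the naive tuple conversion (the version strings below are illustrative):

    tuple(map(int, "4.5.2".split(".")))  # (4, 5, 2), comparable to (4, 2, 0)
    try:
        tuple(map(int, "4.5.2.dev1".split(".")))
    except ValueError:
        pass  # non-numeric part; the "dev" fallback above kicks in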
diff --git a/tests/integrations/falcon/__init__.py b/tests/integrations/falcon/__init__.py
new file mode 100644
index 0000000000..2319937c18
--- /dev/null
+++ b/tests/integrations/falcon/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("falcon")
diff --git a/tests/integrations/falcon/test_falcon.py b/tests/integrations/falcon/test_falcon.py
index 84e8d228f0..65140a9fd7 100644
--- a/tests/integrations/falcon/test_falcon.py
+++ b/tests/integrations/falcon/test_falcon.py
@@ -4,13 +4,23 @@
 
 import pytest
 
-pytest.importorskip("falcon")
-
 import falcon
 import falcon.testing
 import sentry_sdk
 from sentry_sdk.integrations.falcon import FalconIntegration
 from sentry_sdk.integrations.logging import LoggingIntegration
+from sentry_sdk.utils import parse_version
+
+
+try:
+    import falcon.asgi
+except ImportError:
+    pass
+else:
+    import falcon.inspect  # We only need this module for the ASGI test
+
+
+FALCON_VERSION = parse_version(falcon.__version__)
 
 
 @pytest.fixture
@@ -21,8 +31,27 @@ def on_get(self, req, resp):
                 sentry_sdk.capture_message("hi")
                 resp.media = "hi"
 
+        class MessageByIdResource:
+            def on_get(self, req, resp, message_id):
+                sentry_sdk.capture_message("hi")
+                resp.media = "hi"
+
+        class CustomError(Exception):
+            pass
+
+        class CustomErrorResource:
+            def on_get(self, req, resp):
+                raise CustomError()
+
+        def custom_error_handler(*args, **kwargs):
+            raise falcon.HTTPError(status=falcon.HTTP_400)
+
         app = falcon.API()
         app.add_route("/message", MessageResource())
+        app.add_route("/message/{message_id:int}", MessageByIdResource())
+        app.add_route("/custom-error", CustomErrorResource())
+
+        app.add_error_handler(CustomError, custom_error_handler)
 
         return app
 
@@ -53,22 +82,34 @@ def test_has_context(sentry_init, capture_events, make_client):
 
 
 @pytest.mark.parametrize(
-    "transaction_style,expected_transaction",
-    [("uri_template", "/message"), ("path", "/message")],
+    "url,transaction_style,expected_transaction,expected_source",
+    [
+        ("/message", "uri_template", "/message", "route"),
+        ("/message", "path", "/message", "url"),
+        ("/message/123456", "uri_template", "/message/{message_id:int}", "route"),
+        ("/message/123456", "path", "/message/123456", "url"),
+    ],
 )
 def test_transaction_style(
-    sentry_init, make_client, capture_events, transaction_style, expected_transaction
+    sentry_init,
+    make_client,
+    capture_events,
+    url,
+    transaction_style,
+    expected_transaction,
+    expected_source,
 ):
     integration = FalconIntegration(transaction_style=transaction_style)
     sentry_init(integrations=[integration])
     events = capture_events()
 
     client = make_client()
-    response = client.simulate_get("/message")
+    response = client.simulate_get(url)
     assert response.status == falcon.HTTP_200
 
     (event,) = events
     assert event["transaction"] == expected_transaction
+    assert event["transaction_info"] == {"source": expected_source}
 
 
 def test_unhandled_errors(sentry_init, capture_exceptions, capture_events):
@@ -189,9 +230,9 @@ def on_post(self, req, resp):
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]["bar"]) == 512
+    assert len(event["request"]["data"]["foo"]["bar"]) == 1024
 
 
 @pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
@@ -373,3 +414,44 @@ def generator():
 
     with sentry_sdk.configure_scope() as scope:
         assert not scope._tags["request_data"]
+
+
+@pytest.mark.skipif(
+    not hasattr(falcon, "asgi"), reason="This Falcon version lacks ASGI support."
+)
+def test_falcon_not_breaking_asgi(sentry_init):
+    """
+    This test simply verifies that the Falcon integration does not break ASGI
+    Falcon apps.
+
+    The test does not verify ASGI Falcon support, since our Falcon integration
+    currently lacks support for ASGI Falcon apps.
+    """
+    sentry_init(integrations=[FalconIntegration()])
+
+    asgi_app = falcon.asgi.App()
+
+    try:
+        falcon.inspect.inspect_app(asgi_app)
+    except TypeError:
+        pytest.fail("Falcon integration causing errors in ASGI apps.")
+
+
+@pytest.mark.skipif(
+    (FALCON_VERSION or ()) < (3,),
+    reason="The Sentry Falcon integration only supports custom error handlers on Falcon 3+",
+)
+def test_falcon_custom_error_handler(sentry_init, make_app, capture_events):
+    """
+    When a custom error handler handles what otherwise would have resulted in a 5xx error,
+    changing the HTTP status to a non-5xx status, no error event should be sent to Sentry.
+    """
+    sentry_init(integrations=[FalconIntegration()])
+    events = capture_events()
+
+    app = make_app()
+    client = falcon.testing.TestClient(app)
+
+    client.simulate_get("/custom-error")
+
+    assert len(events) == 0
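One detail in the last skipif is easy to misread: parse_version may return None when it cannot parse falcon.__version__, and comparing None to a tuple raises TypeError on Python 3. Falling back to an empty tuple makes an unknown version compare as too old, so the test is skipped instead of erroring out:

    assert () < (3,)                 # unknown version: skip condition is True
    assert not ((3, 1, 0) < (3,))    # Falcon 3.1: condition is False, test runs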
diff --git a/tests/integrations/fastapi/__init__.py b/tests/integrations/fastapi/__init__.py
new file mode 100644
index 0000000000..7f667e6f75
--- /dev/null
+++ b/tests/integrations/fastapi/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("fastapi")
diff --git a/tests/integrations/fastapi/test_fastapi.py b/tests/integrations/fastapi/test_fastapi.py
new file mode 100644
index 0000000000..56d52be474
--- /dev/null
+++ b/tests/integrations/fastapi/test_fastapi.py
@@ -0,0 +1,513 @@
+import json
+import logging
+import threading
+
+import pytest
+from sentry_sdk.integrations.fastapi import FastApiIntegration
+
+from fastapi import FastAPI, Request
+from fastapi.testclient import TestClient
+from fastapi.middleware.trustedhost import TrustedHostMiddleware
+from sentry_sdk import capture_message
+from sentry_sdk.integrations.starlette import StarletteIntegration
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+
+def fastapi_app_factory():
+    app = FastAPI()
+
+    @app.get("/error")
+    async def _error():
+        capture_message("Hi")
+        1 / 0
+        return {"message": "Hi"}
+
+    @app.get("/message")
+    async def _message():
+        capture_message("Hi")
+        return {"message": "Hi"}
+
+    @app.get("/message/{message_id}")
+    async def _message_with_id(message_id):
+        capture_message("Hi")
+        return {"message": "Hi"}
+
+    @app.get("/sync/thread_ids")
+    def _thread_ids_sync():
+        return {
+            "main": str(threading.main_thread().ident),
+            "active": str(threading.current_thread().ident),
+        }
+
+    @app.get("/async/thread_ids")
+    async def _thread_ids_async():
+        return {
+            "main": str(threading.main_thread().ident),
+            "active": str(threading.current_thread().ident),
+        }
+
+    return app
+
+
+@pytest.mark.asyncio
+async def test_response(sentry_init, capture_events):
+    # FastAPI is heavily based on Starlette, so we also need
+    # to enable StarletteIntegration.
+    # In the future this will be auto-enabled.
+    sentry_init(
+        integrations=[StarletteIntegration(), FastApiIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+        debug=True,
+    )
+
+    app = fastapi_app_factory()
+
+    events = capture_events()
+
+    client = TestClient(app)
+    response = client.get("/message")
+
+    assert response.json() == {"message": "Hi"}
+
+    assert len(events) == 2
+
+    (message_event, transaction_event) = events
+    assert message_event["message"] == "Hi"
+    assert transaction_event["transaction"] == "/message"
+
+
+@pytest.mark.parametrize(
+    "url,transaction_style,expected_transaction,expected_source",
+    [
+        (
+            "/message",
+            "url",
+            "/message",
+            "route",
+        ),
+        (
+            "/message",
+            "endpoint",
+            "tests.integrations.fastapi.test_fastapi.fastapi_app_factory.._message",
+            "component",
+        ),
+        (
+            "/message/123456",
+            "url",
+            "/message/{message_id}",
+            "route",
+        ),
+        (
+            "/message/123456",
+            "endpoint",
+            "tests.integrations.fastapi.test_fastapi.fastapi_app_factory.._message_with_id",
+            "component",
+        ),
+    ],
+)
+def test_transaction_style(
+    sentry_init,
+    capture_events,
+    url,
+    transaction_style,
+    expected_transaction,
+    expected_source,
+):
+    sentry_init(
+        integrations=[
+            StarletteIntegration(transaction_style=transaction_style),
+            FastApiIntegration(transaction_style=transaction_style),
+        ],
+    )
+    app = fastapi_app_factory()
+
+    events = capture_events()
+
+    client = TestClient(app)
+    client.get(url)
+
+    (event,) = events
+    assert event["transaction"] == expected_transaction
+    assert event["transaction_info"] == {"source": expected_source}
+
+    # Assert that state is not leaked
+    events.clear()
+    capture_message("foo")
+    (event,) = events
+
+    assert "request" not in event
+    assert "transaction" not in event
+
+
+def test_legacy_setup(
+    sentry_init,
+    capture_events,
+):
+    # Check that behaviour does not change
+    # if the user just adds the new Integrations
+    # and forgets to remove SentryAsgiMiddleware
+    sentry_init()
+    app = fastapi_app_factory()
+    asgi_app = SentryAsgiMiddleware(app)
+
+    events = capture_events()
+
+    client = TestClient(asgi_app)
+    client.get("/message/123456")
+
+    (event,) = events
+    assert event["transaction"] == "/message/{message_id}"
+
+
+@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
+@mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0)
+def test_active_thread_id(sentry_init, capture_envelopes, teardown_profiling, endpoint):
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
+    )
+    app = fastapi_app_factory()
+    asgi_app = SentryAsgiMiddleware(app)
+
+    envelopes = capture_envelopes()
+
+    client = TestClient(asgi_app)
+    response = client.get(endpoint)
+    assert response.status_code == 200
+
+    data = json.loads(response.content)
+
+    envelopes = [envelope for envelope in envelopes]
+    assert len(envelopes) == 1
+
+    profiles = [item for item in envelopes[0].items if item.type == "profile"]
+    assert len(profiles) == 1
+
+    for profile in profiles:
+        transactions = profile.payload.json["transactions"]
+        assert len(transactions) == 1
+        assert str(data["active"]) == transactions[0]["active_thread_id"]
+
+
+@pytest.mark.asyncio
+async def test_original_request_not_scrubbed(sentry_init, capture_events):
+    sentry_init(
+        integrations=[StarletteIntegration(), FastApiIntegration()],
+        traces_sample_rate=1.0,
+        debug=True,
+    )
+
+    app = FastAPI()
+
+    @app.post("/error")
+    async def _error(request: Request):
+        logging.critical("Oh no!")
+        assert request.headers["Authorization"] == "Bearer ohno"
+        assert await request.json() == {"password": "secret"}
+
+        return {"error": "Oh no!"}
+
+    events = capture_events()
+
+    client = TestClient(app)
+    client.post(
+        "/error", json={"password": "secret"}, headers={"Authorization": "Bearer ohno"}
+    )
+
+    event = events[0]
+    assert event["request"]["data"] == {"password": "[Filtered]"}
+    assert event["request"]["headers"]["authorization"] == "[Filtered]"
+
+
+@pytest.mark.asyncio
+def test_response_status_code_ok_in_transaction_context(sentry_init, capture_envelopes):
+    """
+    Tests that the response status code is added to the transaction "response" context.
+    """
+    sentry_init(
+        integrations=[StarletteIntegration(), FastApiIntegration()],
+        traces_sample_rate=1.0,
+        release="demo-release",
+    )
+
+    envelopes = capture_envelopes()
+
+    app = fastapi_app_factory()
+
+    client = TestClient(app)
+    client.get("/message")
+
+    (_, transaction_envelope) = envelopes
+    transaction = transaction_envelope.get_transaction_event()
+
+    assert transaction["type"] == "transaction"
+    assert len(transaction["contexts"]) > 0
+    assert (
+        "response" in transaction["contexts"].keys()
+    ), "Response context not found in transaction"
+    assert transaction["contexts"]["response"]["status_code"] == 200
+
+
+@pytest.mark.asyncio
+def test_response_status_code_error_in_transaction_context(
+    sentry_init,
+    capture_envelopes,
+):
+    """
+    Tests that the response status code is added to the transaction "response" context.
+    """
+    sentry_init(
+        integrations=[StarletteIntegration(), FastApiIntegration()],
+        traces_sample_rate=1.0,
+        release="demo-release",
+    )
+
+    envelopes = capture_envelopes()
+
+    app = fastapi_app_factory()
+
+    client = TestClient(app)
+    with pytest.raises(ZeroDivisionError):
+        client.get("/error")
+
+    (
+        _,
+        _,
+        transaction_envelope,
+    ) = envelopes
+    transaction = transaction_envelope.get_transaction_event()
+
+    assert transaction["type"] == "transaction"
+    assert len(transaction["contexts"]) > 0
+    assert (
+        "response" in transaction["contexts"].keys()
+    ), "Response context not found in transaction"
+    assert transaction["contexts"]["response"]["status_code"] == 500
+
+
+@pytest.mark.asyncio
+def test_response_status_code_not_found_in_transaction_context(
+    sentry_init,
+    capture_envelopes,
+):
+    """
+    Tests that the response status code is added to the transaction "response" context.
+    """
+    sentry_init(
+        integrations=[StarletteIntegration(), FastApiIntegration()],
+        traces_sample_rate=1.0,
+        release="demo-release",
+    )
+
+    envelopes = capture_envelopes()
+
+    app = fastapi_app_factory()
+
+    client = TestClient(app)
+    client.get("/non-existing-route-123")
+
+    (transaction_envelope,) = envelopes
+    transaction = transaction_envelope.get_transaction_event()
+
+    assert transaction["type"] == "transaction"
+    assert len(transaction["contexts"]) > 0
+    assert (
+        "response" in transaction["contexts"].keys()
+    ), "Response context not found in transaction"
+    assert transaction["contexts"]["response"]["status_code"] == 404
+
+
+@pytest.mark.parametrize(
+    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
+    [
+        (
+            "/message/123456",
+            "endpoint",
+            "tests.integrations.fastapi.test_fastapi.fastapi_app_factory.._message_with_id",
+            "component",
+        ),
+        (
+            "/message/123456",
+            "url",
+            "/message/{message_id}",
+            "route",
+        ),
+    ],
+)
+def test_transaction_name(
+    sentry_init,
+    request_url,
+    transaction_style,
+    expected_transaction_name,
+    expected_transaction_source,
+    capture_envelopes,
+):
+    """
+    Tests that the transaction name is something meaningful.
+    """
+    sentry_init(
+        auto_enabling_integrations=False,  # Make sure that the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
+        integrations=[
+            StarletteIntegration(transaction_style=transaction_style),
+            FastApiIntegration(transaction_style=transaction_style),
+        ],
+        traces_sample_rate=1.0,
+        debug=True,
+    )
+
+    envelopes = capture_envelopes()
+
+    app = fastapi_app_factory()
+
+    client = TestClient(app)
+    client.get(request_url)
+
+    (_, transaction_envelope) = envelopes
+    transaction_event = transaction_envelope.get_transaction_event()
+
+    assert transaction_event["transaction"] == expected_transaction_name
+    assert (
+        transaction_event["transaction_info"]["source"] == expected_transaction_source
+    )
+
+
+def test_route_endpoint_equal_dependant_call(sentry_init):
+    """
+    Tests that the route endpoint name is equal to the wrapped dependant call name.
+    """
+    sentry_init(
+        auto_enabling_integrations=False,  # Make sure that the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
+        integrations=[
+            StarletteIntegration(),
+            FastApiIntegration(),
+        ],
+        traces_sample_rate=1.0,
+        debug=True,
+    )
+
+    app = fastapi_app_factory()
+
+    for route in app.router.routes:
+        if not hasattr(route, "dependant"):
+            continue
+        assert route.endpoint.__qualname__ == route.dependant.call.__qualname__
+
+
+@pytest.mark.parametrize(
+    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
+    [
+        (
+            "/message/123456",
+            "endpoint",
+            "http://testserver/message/123456",
+            "url",
+        ),
+        (
+            "/message/123456",
+            "url",
+            "http://testserver/message/123456",
+            "url",
+        ),
+    ],
+)
+def test_transaction_name_in_traces_sampler(
+    sentry_init,
+    request_url,
+    transaction_style,
+    expected_transaction_name,
+    expected_transaction_source,
+):
+    """
+    Tests that a custom traces_sampler retrieves a meaningful transaction name.
+    In this case the URL or endpoint, because we do not have the route yet.
+    """
+
+    def dummy_traces_sampler(sampling_context):
+        assert (
+            sampling_context["transaction_context"]["name"] == expected_transaction_name
+        )
+        assert (
+            sampling_context["transaction_context"]["source"]
+            == expected_transaction_source
+        )
+
+    sentry_init(
+        auto_enabling_integrations=False,  # Make sure that the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
+        integrations=[StarletteIntegration(transaction_style=transaction_style)],
+        traces_sampler=dummy_traces_sampler,
+        traces_sample_rate=1.0,
+        debug=True,
+    )
+
+    app = fastapi_app_factory()
+
+    client = TestClient(app)
+    client.get(request_url)
+
+
+@pytest.mark.parametrize(
+    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
+    [
+        (
+            "/message/123456",
+            "endpoint",
+            "starlette.middleware.trustedhost.TrustedHostMiddleware",
+            "component",
+        ),
+        (
+            "/message/123456",
+            "url",
+            "http://testserver/message/123456",
+            "url",
+        ),
+    ],
+)
+def test_transaction_name_in_middleware(
+    sentry_init,
+    request_url,
+    transaction_style,
+    expected_transaction_name,
+    expected_transaction_source,
+    capture_envelopes,
+):
+    """
+    Tests that the transaction name is something meaningful.
+    """
+    sentry_init(
+        auto_enabling_integrations=False,  # Make sure that the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
+        integrations=[
+            StarletteIntegration(transaction_style=transaction_style),
+            FastApiIntegration(transaction_style=transaction_style),
+        ],
+        traces_sample_rate=1.0,
+        debug=True,
+    )
+
+    envelopes = capture_envelopes()
+
+    app = fastapi_app_factory()
+
+    app.add_middleware(
+        TrustedHostMiddleware,
+        allowed_hosts=[
+            "example.com",
+        ],
+    )
+
+    client = TestClient(app)
+    client.get(request_url)
+
+    (transaction_envelope,) = envelopes
+    transaction_event = transaction_envelope.get_transaction_event()
+
+    assert transaction_event["contexts"]["response"]["status_code"] == 400
+    assert transaction_event["transaction"] == expected_transaction_name
+    assert (
+        transaction_event["transaction_info"]["source"] == expected_transaction_source
+    )
diff --git a/tests/integrations/flask/__init__.py b/tests/integrations/flask/__init__.py
new file mode 100644
index 0000000000..601f9ed8d5
--- /dev/null
+++ b/tests/integrations/flask/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("flask")
diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py
index 8723a35c86..3d3572e2d3 100644
--- a/tests/integrations/flask/test_flask.py
+++ b/tests/integrations/flask/test_flask.py
@@ -1,11 +1,9 @@
 import json
-import pytest
+import re
 import logging
-
 from io import BytesIO
 
-flask = pytest.importorskip("flask")
-
+import pytest
 from flask import (
     Flask,
     Response,
@@ -15,9 +13,14 @@
     render_template_string,
 )
 from flask.views import View
-
 from flask_login import LoginManager, login_user
 
+try:
+    from werkzeug.wrappers.request import UnsupportedMediaType
+except ImportError:
+    UnsupportedMediaType = None
+
+import sentry_sdk.integrations.flask as flask_sentry
 from sentry_sdk import (
     set_tag,
     configure_scope,
@@ -27,7 +30,7 @@
     Hub,
 )
 from sentry_sdk.integrations.logging import LoggingIntegration
-import sentry_sdk.integrations.flask as flask_sentry
+from sentry_sdk.serializer import MAX_DATABAG_BREADTH
 
 
 login_manager = LoginManager()
@@ -46,6 +49,11 @@ def hi():
         capture_message("hi")
         return "ok"
 
+    @app.route("/message/")
+    def hi_with_id(message_id):
+        capture_message("hi again")
+        return "ok"
+
     return app
 
 
@@ -74,10 +82,22 @@ def test_has_context(sentry_init, app, capture_events):
 
 
 @pytest.mark.parametrize(
-    "transaction_style,expected_transaction", [("endpoint", "hi"), ("url", "/message")]
+    "url,transaction_style,expected_transaction,expected_source",
+    [
+        ("/message", "endpoint", "hi", "component"),
+        ("/message", "url", "/message", "route"),
+        ("/message/123456", "endpoint", "hi_with_id", "component"),
+        ("/message/123456", "url", "/message/", "route"),
+    ],
 )
 def test_transaction_style(
-    sentry_init, app, capture_events, transaction_style, expected_transaction
+    sentry_init,
+    app,
+    capture_events,
+    url,
+    transaction_style,
+    expected_transaction,
+    expected_source,
 ):
     sentry_init(
         integrations=[
@@ -87,11 +107,12 @@ def test_transaction_style(
     events = capture_events()
 
     client = app.test_client()
-    response = client.get("/message")
+    response = client.get(url)
     assert response.status_code == 200
 
     (event,) = events
     assert event["transaction"] == expected_transaction
+    assert event["transaction_info"] == {"source": expected_source}
 
 
 @pytest.mark.parametrize("debug", (True, False))
@@ -245,9 +266,9 @@ def index():
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]["bar"]) == 512
+    assert len(event["request"]["data"]["foo"]["bar"]) == 1024
 
 
 def test_flask_session_tracking(sentry_init, capture_envelopes, app):
@@ -322,7 +343,11 @@ def test_flask_medium_formdata_request(sentry_init, capture_events, app):
     def index():
         assert request.form["foo"] == data["foo"]
         assert not request.get_data()
-        assert not request.get_json()
+        try:
+            assert not request.get_json()
+        except UnsupportedMediaType:
+            # flask/werkzeug 3
+            pass
         capture_message("hi")
         return "ok"
 
@@ -334,9 +359,9 @@ def index():
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]) == 512
+    assert len(event["request"]["data"]["foo"]) == 1024
 
 
 def test_flask_formdata_request_appear_transaction_body(
@@ -354,7 +379,11 @@ def index():
         assert request.form["username"] == data["username"]
         assert request.form["age"] == data["age"]
         assert not request.get_data()
-        assert not request.get_json()
+        try:
+            assert not request.get_json()
+        except UnsupportedMediaType:
+            # flask/werkzeug 3
+            pass
         set_tag("view", "yes")
         capture_message("hi")
         return "ok"
@@ -374,7 +403,9 @@ def index():
 
 @pytest.mark.parametrize("input_char", ["a", b"a"])
 def test_flask_too_large_raw_request(sentry_init, input_char, capture_events, app):
-    sentry_init(integrations=[flask_sentry.FlaskIntegration()], request_bodies="small")
+    sentry_init(
+        integrations=[flask_sentry.FlaskIntegration()], max_request_body_size="small"
+    )
 
     data = input_char * 2000
 
@@ -385,7 +416,11 @@ def index():
             assert request.get_data() == data
         else:
             assert request.get_data() == data.encode("ascii")
-        assert not request.get_json()
+        try:
+            assert not request.get_json()
+        except UnsupportedMediaType:
+            # flask/werkzeug 3
+            pass
         capture_message("hi")
         return "ok"
 
@@ -396,14 +431,14 @@ def index():
     assert response.status_code == 200
 
     (event,) = events
-    assert event["_meta"]["request"]["data"] == {
-        "": {"len": 2000, "rem": [["!config", "x", 0, 2000]]}
-    }
+    assert event["_meta"]["request"]["data"] == {"": {"rem": [["!config", "x"]]}}
     assert not event["request"]["data"]
 
 
 def test_flask_files_and_form(sentry_init, capture_events, app):
-    sentry_init(integrations=[flask_sentry.FlaskIntegration()], request_bodies="always")
+    sentry_init(
+        integrations=[flask_sentry.FlaskIntegration()], max_request_body_size="always"
+    )
 
     data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}
 
@@ -411,7 +446,11 @@ def test_flask_files_and_form(sentry_init, capture_events, app):
     def index():
         assert list(request.form) == ["foo"]
         assert list(request.files) == ["file"]
-        assert not request.get_json()
+        try:
+            assert not request.get_json()
+        except UnsupportedMediaType:
+            # flask/werkzeug 3
+            pass
         capture_message("hi")
         return "ok"
 
@@ -423,16 +462,42 @@ def index():
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]) == 512
+    assert len(event["request"]["data"]["foo"]) == 1024
 
-    assert event["_meta"]["request"]["data"]["file"] == {
-        "": {"len": 0, "rem": [["!raw", "x", 0, 0]]}
-    }
+    assert event["_meta"]["request"]["data"]["file"] == {"": {"rem": [["!raw", "x"]]}}
     assert not event["request"]["data"]["file"]
 
 
+def test_json_not_truncated_if_max_request_body_size_is_always(
+    sentry_init, capture_events, app
+):
+    sentry_init(
+        integrations=[flask_sentry.FlaskIntegration()], max_request_body_size="always"
+    )
+
+    data = {
+        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
+    }
+
+    @app.route("/", methods=["POST"])
+    def index():
+        assert request.get_json() == data
+        assert request.get_data() == json.dumps(data).encode("ascii")
+        capture_message("hi")
+        return "ok"
+
+    events = capture_events()
+
+    client = app.test_client()
+    response = client.post("/", content_type="application/json", data=json.dumps(data))
+    assert response.status_code == 200
+
+    (event,) = events
+    assert event["request"]["data"] == data
+
+
 @pytest.mark.parametrize(
     "integrations",
     [
@@ -499,9 +564,12 @@ def test_cli_commands_raise(app):
     def foo():
         1 / 0
 
+    def create_app(*_):
+        return app
+
     with pytest.raises(ZeroDivisionError):
         app.cli.main(
-            args=["foo"], prog_name="myapp", obj=ScriptInfo(create_app=lambda _: app)
+            args=["foo"], prog_name="myapp", obj=ScriptInfo(create_app=create_app)
         )
 
 
@@ -724,6 +792,25 @@ def error():
     assert exception["type"] == "ZeroDivisionError"
 
 
+def test_error_has_trace_context_if_tracing_disabled(sentry_init, capture_events, app):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+
+    events = capture_events()
+
+    @app.route("/error")
+    def error():
+        1 / 0
+
+    with pytest.raises(ZeroDivisionError):
+        with app.test_client() as client:
+            response = client.get("/error")
+            assert response.status_code == 500
+
+    (error_event,) = events
+
+    assert error_event["contexts"]["trace"]
+
+
 def test_class_based_views(sentry_init, app, capture_events):
     sentry_init(integrations=[flask_sentry.FlaskIntegration()])
     events = capture_events()
@@ -746,22 +833,38 @@ def dispatch_request(self):
     assert event["transaction"] == "hello_class"
 
 
-def test_sentry_trace_context(sentry_init, app, capture_events):
+@pytest.mark.parametrize(
+    "template_string", ["{{ sentry_trace }}", "{{ sentry_trace_meta }}"]
+)
+def test_template_tracing_meta(sentry_init, app, capture_events, template_string):
     sentry_init(integrations=[flask_sentry.FlaskIntegration()])
     events = capture_events()
 
     @app.route("/")
     def index():
-        sentry_span = Hub.current.scope.span
-        capture_message(sentry_span.to_traceparent())
-        return render_template_string("{{ sentry_trace }}")
+        hub = Hub.current
+        capture_message(hub.get_traceparent() + "\n" + hub.get_baggage())
+        return render_template_string(template_string)
 
     with app.test_client() as client:
         response = client.get("/")
         assert response.status_code == 200
-        assert response.data.decode(
-            "utf-8"
-        ) == '<meta name="sentry-trace" content="%s" />' % (events[0]["message"],)
+
+        rendered_meta = response.data.decode("utf-8")
+        traceparent, baggage = events[0]["message"].split("\n")
+        assert traceparent != ""
+        assert baggage != ""
+
+    match = re.match(
+        r'^<meta name="sentry-trace" content="([^\"]*)">\s*<meta name="baggage" content="([^\"]*)">',
+        rendered_meta,
+    )
+    assert match is not None
+    assert match.group(1) == traceparent
+
+    # Python 2 does not preserve the order of the baggage items
+    rendered_baggage = match.group(2)
+    assert sorted(rendered_baggage.split(",")) == sorted(baggage.split(","))
 
 
 def test_dont_override_sentry_trace_context(sentry_init, app):
@@ -775,3 +878,115 @@ def index():
         response = client.get("/")
         assert response.status_code == 200
         assert response.data == b"hi"
+
+
+def test_request_not_modified_by_reference(sentry_init, capture_events, app):
+    sentry_init(integrations=[flask_sentry.FlaskIntegration()])
+
+    @app.route("/", methods=["POST"])
+    def index():
+        logging.critical("oops")
+        assert request.get_json() == {"password": "ohno"}
+        assert request.headers["Authorization"] == "Bearer ohno"
+        return "ok"
+
+    events = capture_events()
+
+    client = app.test_client()
+    client.post(
+        "/", json={"password": "ohno"}, headers={"Authorization": "Bearer ohno"}
+    )
+
+    (event,) = events
+
+    assert event["request"]["data"]["password"] == "[Filtered]"
+    assert event["request"]["headers"]["Authorization"] == "[Filtered]"
+
+
+@pytest.mark.parametrize("traces_sample_rate", [None, 1.0])
+def test_replay_event_context(sentry_init, capture_events, app, traces_sample_rate):
+    """
+    Tests that the replay context is added to the event context.
+    This is not strictly a Flask integration test, but it's the easiest way to test this.
+    """
+    sentry_init(traces_sample_rate=traces_sample_rate)
+
+    @app.route("/error")
+    def error():
+        return 1 / 0
+
+    events = capture_events()
+
+    client = app.test_client()
+    headers = {
+        "baggage": "other-vendor-value-1=foo;bar;baz,sentry-trace_id=771a43a4192642f0b136d5159a501700,sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie,other-vendor-value-2=foo;bar,sentry-replay_id=12312012123120121231201212312012",
+        "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1",
+    }
+    with pytest.raises(ZeroDivisionError):
+        client.get("/error", headers=headers)
+
+    event = events[0]
+
+    assert event["contexts"]
+    assert event["contexts"]["replay"]
+    assert (
+        event["contexts"]["replay"]["replay_id"] == "12312012123120121231201212312012"
+    )
+
+
+def test_response_status_code_ok_in_transaction_context(
+    sentry_init, capture_envelopes, app
+):
+    """
+    Tests that the response status code is added to the transaction context.
+    This should also work when an exception is raised during the request, but the test Flask app does not seem to trigger that case.
+    """
+    sentry_init(
+        integrations=[flask_sentry.FlaskIntegration()],
+        traces_sample_rate=1.0,
+        release="demo-release",
+    )
+
+    envelopes = capture_envelopes()
+
+    client = app.test_client()
+    client.get("/message")
+
+    Hub.current.client.flush()
+
+    (_, transaction_envelope, _) = envelopes
+    transaction = transaction_envelope.get_transaction_event()
+
+    assert transaction["type"] == "transaction"
+    assert len(transaction["contexts"]) > 0
+    assert (
+        "response" in transaction["contexts"].keys()
+    ), "Response context not found in transaction"
+    assert transaction["contexts"]["response"]["status_code"] == 200
+
+
+def test_response_status_code_not_found_in_transaction_context(
+    sentry_init, capture_envelopes, app
+):
+    sentry_init(
+        integrations=[flask_sentry.FlaskIntegration()],
+        traces_sample_rate=1.0,
+        release="demo-release",
+    )
+
+    envelopes = capture_envelopes()
+
+    client = app.test_client()
+    client.get("/not-existing-route")
+
+    Hub.current.client.flush()
+
+    (transaction_envelope, _) = envelopes
+    transaction = transaction_envelope.get_transaction_event()
+
+    assert transaction["type"] == "transaction"
+    assert len(transaction["contexts"]) > 0
+    assert (
+        "response" in transaction["contexts"].keys()
+    ), "Response context not found in transaction"
+    assert transaction["contexts"]["response"]["status_code"] == 404
diff --git a/tests/integrations/gcp/test_gcp.py b/tests/integrations/gcp/test_gcp.py
index 78ac8f2746..9c4e11e8d5 100644
--- a/tests/integrations/gcp/test_gcp.py
+++ b/tests/integrations/gcp/test_gcp.py
@@ -2,6 +2,7 @@
 # GCP Cloud Functions unit tests
 
 """
+
 import json
 from textwrap import dedent
 import tempfile
@@ -93,9 +94,8 @@ def init_sdk(timeout_warning=False, **extra_init_args):
 @pytest.fixture
 def run_cloud_function():
     def inner(code, subprocess_kwargs=()):
-
-        event = []
-        envelope = []
+        events = []
+        envelopes = []
         return_value = None
 
         # STEP : Create a zip of cloud function
@@ -133,10 +133,10 @@ def inner(code, subprocess_kwargs=()):
                 print("GCP:", line)
                 if line.startswith("EVENT: "):
                     line = line[len("EVENT: ") :]
-                    event = json.loads(line)
+                    events.append(json.loads(line))
                 elif line.startswith("ENVELOPE: "):
                     line = line[len("ENVELOPE: ") :]
-                    envelope = json.loads(line)
+                    envelopes.append(json.loads(line))
                 elif line.startswith("RETURN VALUE: "):
                     line = line[len("RETURN VALUE: ") :]
                     return_value = json.loads(line)
@@ -145,13 +145,13 @@ def inner(code, subprocess_kwargs=()):
 
             stream.close()
 
-        return envelope, event, return_value
+        return envelopes, events, return_value
 
     return inner
 
 
 def test_handled_exception(run_cloud_function):
-    envelope, event, return_value = run_cloud_function(
+    _, events, return_value = run_cloud_function(
         dedent(
             """
         functionhandler = None
@@ -168,16 +168,17 @@ def cloud_function(functionhandler, event):
         """
         )
     )
-    assert event["level"] == "error"
-    (exception,) = event["exception"]["values"]
+    assert events[0]["level"] == "error"
+    (exception,) = events[0]["exception"]["values"]
 
     assert exception["type"] == "Exception"
     assert exception["value"] == "something went wrong"
-    assert exception["mechanism"] == {"type": "gcp", "handled": False}
+    assert exception["mechanism"]["type"] == "gcp"
+    assert not exception["mechanism"]["handled"]
 
 
 def test_unhandled_exception(run_cloud_function):
-    envelope, event, return_value = run_cloud_function(
+    _, events, _ = run_cloud_function(
         dedent(
             """
         functionhandler = None
@@ -195,16 +196,17 @@ def cloud_function(functionhandler, event):
         """
         )
     )
-    assert event["level"] == "error"
-    (exception,) = event["exception"]["values"]
+    assert events[0]["level"] == "error"
+    (exception,) = events[0]["exception"]["values"]
 
     assert exception["type"] == "ZeroDivisionError"
     assert exception["value"] == "division by zero"
-    assert exception["mechanism"] == {"type": "gcp", "handled": False}
+    assert exception["mechanism"]["type"] == "gcp"
+    assert not exception["mechanism"]["handled"]
 
 
 def test_timeout_error(run_cloud_function):
-    envelope, event, return_value = run_cloud_function(
+    _, events, _ = run_cloud_function(
         dedent(
             """
         functionhandler = None
@@ -222,19 +224,20 @@ def cloud_function(functionhandler, event):
         """
         )
     )
-    assert event["level"] == "error"
-    (exception,) = event["exception"]["values"]
+    assert events[0]["level"] == "error"
+    (exception,) = events[0]["exception"]["values"]
 
     assert exception["type"] == "ServerlessTimeoutWarning"
     assert (
         exception["value"]
         == "WARNING : Function is expected to get timed out. Configured timeout duration = 3 seconds."
     )
-    assert exception["mechanism"] == {"type": "threading", "handled": False}
+    assert exception["mechanism"]["type"] == "threading"
+    assert not exception["mechanism"]["handled"]
 
 
 def test_performance_no_error(run_cloud_function):
-    envelope, event, return_value = run_cloud_function(
+    envelopes, _, _ = run_cloud_function(
         dedent(
             """
         functionhandler = None
@@ -252,14 +255,15 @@ def cloud_function(functionhandler, event):
         )
     )
 
-    assert envelope["type"] == "transaction"
-    assert envelope["contexts"]["trace"]["op"] == "serverless.function"
-    assert envelope["transaction"].startswith("Google Cloud function")
-    assert envelope["transaction"] in envelope["request"]["url"]
+    assert envelopes[0]["type"] == "transaction"
+    assert envelopes[0]["contexts"]["trace"]["op"] == "function.gcp"
+    assert envelopes[0]["transaction"].startswith("Google Cloud function")
+    assert envelopes[0]["transaction_info"] == {"source": "component"}
+    assert envelopes[0]["transaction"] in envelopes[0]["request"]["url"]
 
 
 def test_performance_error(run_cloud_function):
-    envelope, event, return_value = run_cloud_function(
+    envelopes, events, _ = run_cloud_function(
         dedent(
             """
         functionhandler = None
@@ -277,16 +281,18 @@ def cloud_function(functionhandler, event):
         )
     )
 
-    assert envelope["type"] == "transaction"
-    assert envelope["contexts"]["trace"]["op"] == "serverless.function"
-    assert envelope["transaction"].startswith("Google Cloud function")
-    assert envelope["transaction"] in envelope["request"]["url"]
-    assert event["level"] == "error"
-    (exception,) = event["exception"]["values"]
+    assert envelopes[0]["level"] == "error"
+    (exception,) = envelopes[0]["exception"]["values"]
 
     assert exception["type"] == "Exception"
     assert exception["value"] == "something went wrong"
-    assert exception["mechanism"] == {"type": "gcp", "handled": False}
+    assert exception["mechanism"]["type"] == "gcp"
+    assert not exception["mechanism"]["handled"]
+
+    assert envelopes[1]["type"] == "transaction"
+    assert envelopes[1]["contexts"]["trace"]["op"] == "function.gcp"
+    assert envelopes[1]["transaction"].startswith("Google Cloud function")
+    assert envelopes[1]["transaction"] in envelopes[0]["request"]["url"]
 
 
 def test_traces_sampler_gets_correct_values_in_sampling_context(
@@ -366,3 +372,184 @@ def _safe_is_equal(x, y):
     )
 
     assert return_value["AssertionError raised"] is False
+
+
+def test_error_has_new_trace_context_performance_enabled(run_cloud_function):
+    """
+    Check that a 'trace' context is added to errors and transactions when performance monitoring is enabled.
+    """
+    envelopes, _, _ = run_cloud_function(
+        dedent(
+            """
+        functionhandler = None
+        event = {}
+        def cloud_function(functionhandler, event):
+            sentry_sdk.capture_message("hi")
+            x = 3/0
+            return "3"
+        """
+        )
+        + FUNCTIONS_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=1.0)
+        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
+        """
+        )
+    )
+    (msg_event, error_event, transaction_event) = envelopes
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert "trace" in transaction_event["contexts"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+def test_error_has_new_trace_context_performance_disabled(run_cloud_function):
+    """
+    Check that a 'trace' context is added to errors when performance monitoring is disabled.
+    """
+    _, events, _ = run_cloud_function(
+        dedent(
+            """
+        functionhandler = None
+        event = {}
+        def cloud_function(functionhandler, event):
+            sentry_sdk.capture_message("hi")
+            x = 3/0
+            return "3"
+        """
+        )
+        + FUNCTIONS_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=None)  # this is the default, just added for clarity
+        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
+        """
+        )
+    )
+
+    (msg_event, error_event) = events
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+def test_error_has_existing_trace_context_performance_enabled(run_cloud_function):
+    """
+    Check that a 'trace' context is added to errors and transactions
+    from the incoming 'sentry-trace' header when performance monitoring is enabled.
+    """
+    trace_id = "471a43a4192642f0b136d5159a501701"
+    parent_span_id = "6e8f22c393e68f19"
+    parent_sampled = 1
+    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
+
+    envelopes, _, _ = run_cloud_function(
+        dedent(
+            """
+        functionhandler = None
+
+        from collections import namedtuple
+        GCPEvent = namedtuple("GCPEvent", ["headers"])
+        event = GCPEvent(headers={"sentry-trace": "%s"})
+
+        def cloud_function(functionhandler, event):
+            sentry_sdk.capture_message("hi")
+            x = 3/0
+            return "3"
+        """
+            % sentry_trace_header
+        )
+        + FUNCTIONS_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=1.0)
+        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
+        """
+        )
+    )
+    (msg_event, error_event, transaction_event) = envelopes
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert "trace" in transaction_event["contexts"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+        == "471a43a4192642f0b136d5159a501701"
+    )
+
+
+def test_error_has_existing_trace_context_performance_disabled(run_cloud_function):
+    """
+    Check that a 'trace' context is added to errors
+    from the incoming 'sentry-trace' header when performance monitoring is disabled.
+    """
+    trace_id = "471a43a4192642f0b136d5159a501701"
+    parent_span_id = "6e8f22c393e68f19"
+    parent_sampled = 1
+    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
+
+    _, events, _ = run_cloud_function(
+        dedent(
+            """
+        functionhandler = None
+
+        from collections import namedtuple
+        GCPEvent = namedtuple("GCPEvent", ["headers"])
+        event = GCPEvent(headers={"sentry-trace": "%s"})
+
+        def cloud_function(functionhandler, event):
+            sentry_sdk.capture_message("hi")
+            x = 3/0
+            return "3"
+        """
+            % sentry_trace_header
+        )
+        + FUNCTIONS_PRELUDE
+        + dedent(
+            """
+        init_sdk(traces_sample_rate=None)  # this is the default, just added for clarity
+        gcp_functions.worker_v1.FunctionHandler.invoke_user_function(functionhandler, event)
+        """
+        )
+    )
+    (msg_event, error_event) = events
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == "471a43a4192642f0b136d5159a501701"
+    )
diff --git a/tests/integrations/gql/__init__.py b/tests/integrations/gql/__init__.py
new file mode 100644
index 0000000000..c3361b42f3
--- /dev/null
+++ b/tests/integrations/gql/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("gql")
diff --git a/tests/integrations/gql/test_gql.py b/tests/integrations/gql/test_gql.py
new file mode 100644
index 0000000000..7ae3cfe77d
--- /dev/null
+++ b/tests/integrations/gql/test_gql.py
@@ -0,0 +1,216 @@
+import pytest
+
+import responses
+from gql import gql
+from gql import Client
+from gql.transport.exceptions import TransportQueryError
+from gql.transport.requests import RequestsHTTPTransport
+from graphql import DocumentNode
+from sentry_sdk.integrations.gql import GQLIntegration
+from unittest.mock import MagicMock, patch
+
+
+class _MockClientBase(MagicMock):
+    """
+    Mocked version of the GQL Client class, following the same spec as gql.Client.
+    """
+
+    def __init__(self, *args, **kwargs):
+        kwargs["spec"] = Client
+        super().__init__(*args, **kwargs)
+
+    transport = MagicMock()
+
+
+@responses.activate
+def _execute_mock_query(response_json):
+    url = "http://example.com/graphql"
+    query_string = """
+        query Example {
+            example
+        }
+    """
+
+    # Mock the GraphQL server response
+    responses.add(
+        method=responses.POST,
+        url=url,
+        json=response_json,
+        status=200,
+    )
+
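+    # RequestsHTTPTransport is backed by the `requests` library, so the
+    # `responses` mock registered above intercepts the outgoing HTTP call.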
+    transport = RequestsHTTPTransport(url=url)
+    client = Client(transport=transport)
+    query = gql(query_string)
+
+    return client.execute(query)
+
+
+def _make_erroneous_query(capture_events):
+    """
+    Make an erroneous GraphQL query, and assert that the error was reraised, that
+    exactly one event was recorded, and that the exception recorded was a
+    TransportQueryError. Then, return the event to allow further verifications.
+    """
+    events = capture_events()
+    response_json = {"errors": ["something bad happened"]}
+
+    with pytest.raises(TransportQueryError):
+        _execute_mock_query(response_json)
+
+    assert (
+        len(events) == 1
+    ), "the sdk captured %d events, but 1 event was expected" % len(events)
+
+    (event,) = events
+    (exception,) = event["exception"]["values"]
+
+    assert (
+        exception["type"] == "TransportQueryError"
+    ), "%s was captured, but we expected a TransportQueryError" % exception(type)
+
+    assert "request" in event
+
+    return event
+
+
+def test_gql_init(sentry_init):
+    """
+    Integration test to ensure we can initialize the SDK with the GQL Integration
+    """
+    sentry_init(integrations=[GQLIntegration()])
+
+
+@patch("sentry_sdk.integrations.gql.Hub")
+def test_setup_once_patches_execute_and_patched_function_calls_original(_):
+    """
+    Unit test which ensures the following:
+        1. The GQLIntegration setup_once function patches the gql.Client.execute method
+        2. The patched gql.Client.execute method still calls the original method, and it
+           forwards its arguments to the original method.
+        3. The patched gql.Client.execute method returns the same value that the original
+           method returns.
+    """
+    original_method_return_value = MagicMock()
+
+    class OriginalMockClient(_MockClientBase):
+        """
+        This mock client always returns the mock original_method_return_value when a query
+        is executed. This can be used to simulate successful GraphQL queries.
+        """
+
+        execute = MagicMock(
+            spec=Client.execute, return_value=original_method_return_value
+        )
+
+    original_execute_method = OriginalMockClient.execute
+
+    with patch(
+        "sentry_sdk.integrations.gql.gql.Client", new=OriginalMockClient
+    ) as PatchedMockClient:  # noqa: N806
+        # Below line should patch the PatchedMockClient with Sentry SDK magic
+        GQLIntegration.setup_once()
+
+        # We expect GQLIntegration.setup_once to patch the execute method.
+        assert (
+            PatchedMockClient.execute is not original_execute_method
+        ), "execute method not patched"
+
+        # Now, let's instantiate a client and send it a query. The original execute method should still be called.
+        mock_query = MagicMock(spec=DocumentNode)
+        client_instance = PatchedMockClient()
+        patched_method_return_value = client_instance.execute(mock_query)
+
+    # Here, we check that the original execute was called
+    original_execute_method.assert_called_once_with(client_instance, mock_query)
+
+    # Also, let's verify that the patched execute returns the expected value.
+    assert (
+        patched_method_return_value is original_method_return_value
+    ), "pathced execute method returns a different value than the original execute method"
+
+
+@patch("sentry_sdk.integrations.gql.event_from_exception")
+@patch("sentry_sdk.integrations.gql.Hub")
+def test_patched_gql_execute_captures_and_reraises_graphql_exception(
+    mock_hub, mock_event_from_exception
+):
+    """
+    Unit test ensuring that when calling the execute method results in a
+    TransportQueryError (which gql raises when a GraphQL error occurs), the patched method
+    captures the event on the current Hub and reraises the error.
+    """
+    mock_event_from_exception.return_value = (dict(), MagicMock())
+
+    class OriginalMockClient(_MockClientBase):
+        """
+        This mock client always raises a TransportQueryError when a GraphQL query is attempted.
+        This simulates a GraphQL query which results in errors.
+        """
+
+        execute = MagicMock(
+            spec=Client.execute, side_effect=TransportQueryError("query failed")
+        )
+
+    with patch(
+        "sentry_sdk.integrations.gql.gql.Client", new=OriginalMockClient
+    ) as PatchedMockClient:  # noqa: N806
+        # Below line should patch the PatchedMockClient with Sentry SDK magic
+        GQLIntegration.setup_once()
+
+        mock_query = MagicMock(spec=DocumentNode)
+        client_instance = PatchedMockClient()
+
+        # The error should still get raised even though we have instrumented the execute method.
+        with pytest.raises(TransportQueryError):
+            client_instance.execute(mock_query)
+
+    # However, we should have also captured the error on the hub.
+    mock_capture_event = mock_hub.current.capture_event
+    mock_capture_event.assert_called_once()
+
+
+def test_real_gql_request_no_error(sentry_init, capture_events):
+    """
+    Integration test verifying that the GQLIntegration works as expected with a successful query.
+    """
+    sentry_init(integrations=[GQLIntegration()])
+    events = capture_events()
+
+    response_data = {"example": "This is the example"}
+    response_json = {"data": response_data}
+
+    result = _execute_mock_query(response_json)
+
+    assert (
+        result == response_data
+    ), "client.execute returned a different value from what it received from the server"
+    assert (
+        len(events) == 0
+    ), "the sdk captured an event, even though the query was successful"
+
+
+def test_real_gql_request_with_error_no_pii(sentry_init, capture_events):
+    """
+    Integration test verifying that the GQLIntegration works as expected with a query
+    resulting in a GraphQL error, and that PII is not sent.
+    """
+    sentry_init(integrations=[GQLIntegration()])
+
+    event = _make_erroneous_query(capture_events)
+
+    assert "data" not in event["request"]
+    assert "response" not in event["contexts"]
+
+
+def test_real_gql_request_with_error_with_pii(sentry_init, capture_events):
+    """
+    Integration test verifying that the GQLIntegration works as expected with a query
+    resulting in a GraphQL error, and that PII is sent when send_default_pii is enabled.
+    """
+    sentry_init(integrations=[GQLIntegration()], send_default_pii=True)
+
+    event = _make_erroneous_query(capture_events)
+
+    assert "data" in event["request"]
+    assert "response" in event["contexts"]
diff --git a/tests/integrations/graphene/__init__.py b/tests/integrations/graphene/__init__.py
new file mode 100644
index 0000000000..f81854aed5
--- /dev/null
+++ b/tests/integrations/graphene/__init__.py
@@ -0,0 +1,5 @@
+import pytest
+
+pytest.importorskip("graphene")
+pytest.importorskip("fastapi")
+pytest.importorskip("flask")
diff --git a/tests/integrations/graphene/test_graphene_py3.py b/tests/integrations/graphene/test_graphene_py3.py
new file mode 100644
index 0000000000..02bc34a515
--- /dev/null
+++ b/tests/integrations/graphene/test_graphene_py3.py
@@ -0,0 +1,203 @@
+from fastapi import FastAPI, Request
+from fastapi.testclient import TestClient
+from flask import Flask, request, jsonify
+from graphene import ObjectType, String, Schema
+
+from sentry_sdk.integrations.fastapi import FastApiIntegration
+from sentry_sdk.integrations.flask import FlaskIntegration
+from sentry_sdk.integrations.graphene import GrapheneIntegration
+from sentry_sdk.integrations.starlette import StarletteIntegration
+
+
+class Query(ObjectType):
+    hello = String(first_name=String(default_value="stranger"))
+    goodbye = String()
+
+    def resolve_hello(root, info, first_name):  # noqa: N805
+        return "Hello {}!".format(first_name)
+
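+    # This resolver always raises, letting the tests below exercise error capture.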
+    def resolve_goodbye(root, info):  # noqa: N805
+        raise RuntimeError("oh no!")
+
+
+def test_capture_request_if_available_and_send_pii_is_on_async(
+    sentry_init, capture_events
+):
+    sentry_init(
+        send_default_pii=True,
+        integrations=[
+            GrapheneIntegration(),
+            FastApiIntegration(),
+            StarletteIntegration(),
+        ],
+    )
+    events = capture_events()
+
+    schema = Schema(query=Query)
+
+    async_app = FastAPI()
+
+    @async_app.post("/graphql")
+    async def graphql_server_async(request: Request):
+        data = await request.json()
+        result = await schema.execute_async(data["query"])
+        return result.data
+
+    query = {"query": "query ErrorQuery {goodbye}"}
+    client = TestClient(async_app)
+    client.post("/graphql", json=query)
+
+    assert len(events) == 1
+
+    (event,) = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "graphene"
+    assert event["request"]["api_target"] == "graphql"
+    assert event["request"]["data"] == query
+
+
+def test_capture_request_if_available_and_send_pii_is_on_sync(
+    sentry_init, capture_events
+):
+    sentry_init(
+        send_default_pii=True,
+        integrations=[GrapheneIntegration(), FlaskIntegration()],
+    )
+    events = capture_events()
+
+    schema = Schema(query=Query)
+
+    sync_app = Flask(__name__)
+
+    @sync_app.route("/graphql", methods=["POST"])
+    def graphql_server_sync():
+        data = request.get_json()
+        result = schema.execute(data["query"])
+        return jsonify(result.data), 200
+
+    query = {"query": "query ErrorQuery {goodbye}"}
+    client = sync_app.test_client()
+    client.post("/graphql", json=query)
+
+    assert len(events) == 1
+
+    (event,) = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "graphene"
+    assert event["request"]["api_target"] == "graphql"
+    assert event["request"]["data"] == query
+
+
+def test_do_not_capture_request_if_send_pii_is_off_async(sentry_init, capture_events):
+    sentry_init(
+        integrations=[
+            GrapheneIntegration(),
+            FastApiIntegration(),
+            StarletteIntegration(),
+        ],
+    )
+    events = capture_events()
+
+    schema = Schema(query=Query)
+
+    async_app = FastAPI()
+
+    @async_app.post("/graphql")
+    async def graphql_server_async(request: Request):
+        data = await request.json()
+        result = await schema.execute_async(data["query"])
+        return result.data
+
+    query = {"query": "query ErrorQuery {goodbye}"}
+    client = TestClient(async_app)
+    client.post("/graphql", json=query)
+
+    assert len(events) == 1
+
+    (event,) = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "graphene"
+    assert "data" not in event["request"]
+    assert "response" not in event["contexts"]
+
+
+def test_do_not_capture_request_if_send_pii_is_off_sync(sentry_init, capture_events):
+    sentry_init(
+        integrations=[GrapheneIntegration(), FlaskIntegration()],
+    )
+    events = capture_events()
+
+    schema = Schema(query=Query)
+
+    sync_app = Flask(__name__)
+
+    @sync_app.route("/graphql", methods=["POST"])
+    def graphql_server_sync():
+        data = request.get_json()
+        result = schema.execute(data["query"])
+        return jsonify(result.data), 200
+
+    query = {"query": "query ErrorQuery {goodbye}"}
+    client = sync_app.test_client()
+    client.post("/graphql", json=query)
+
+    assert len(events) == 1
+
+    (event,) = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "graphene"
+    assert "data" not in event["request"]
+    assert "response" not in event["contexts"]
+
+
+def test_no_event_if_no_errors_async(sentry_init, capture_events):
+    sentry_init(
+        integrations=[
+            GrapheneIntegration(),
+            FastApiIntegration(),
+            StarletteIntegration(),
+        ],
+    )
+    events = capture_events()
+
+    schema = Schema(query=Query)
+
+    async_app = FastAPI()
+
+    @async_app.post("/graphql")
+    async def graphql_server_async(request: Request):
+        data = await request.json()
+        result = await schema.execute_async(data["query"])
+        return result.data
+
+    query = {
+        "query": "query GreetingQuery { hello }",
+    }
+    client = TestClient(async_app)
+    client.post("/graphql", json=query)
+
+    assert len(events) == 0
+
+
+def test_no_event_if_no_errors_sync(sentry_init, capture_events):
+    sentry_init(
+        integrations=[
+            GrapheneIntegration(),
+            FlaskIntegration(),
+        ],
+    )
+    events = capture_events()
+
+    schema = Schema(query=Query)
+
+    sync_app = Flask(__name__)
+
+    @sync_app.route("/graphql", methods=["POST"])
+    def graphql_server_sync():
+        data = request.get_json()
+        result = schema.execute(data["query"])
+        return jsonify(result.data), 200
+
+    query = {
+        "query": "query GreetingQuery { hello }",
+    }
+    client = sync_app.test_client()
+    client.post("/graphql", json=query)
+
+    assert len(events) == 0
diff --git a/tests/integrations/grpc/__init__.py b/tests/integrations/grpc/__init__.py
new file mode 100644
index 0000000000..f18dce91e2
--- /dev/null
+++ b/tests/integrations/grpc/__init__.py
@@ -0,0 +1,8 @@
+import sys
+from pathlib import Path
+
+import pytest
+
+# For imports inside gRPC autogenerated code to work
+sys.path.append(str(Path(__file__).parent))
+pytest.importorskip("grpc")
diff --git a/tests/integrations/grpc/compile_test_services.sh b/tests/integrations/grpc/compile_test_services.sh
new file mode 100755
index 0000000000..777a27e6e5
--- /dev/null
+++ b/tests/integrations/grpc/compile_test_services.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env bash
+
+# Run this script from the project root to generate the python code
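+# (requires the grpcio-tools package, e.g. `pip install grpcio-tools`)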
+
+TARGET_PATH=./tests/integrations/grpc
+
+# Create python file
+python -m grpc_tools.protoc \
+    --proto_path=$TARGET_PATH/protos/ \
+    --python_out=$TARGET_PATH/ \
+    --pyi_out=$TARGET_PATH/ \
+    --grpc_python_out=$TARGET_PATH/ \
+    $TARGET_PATH/protos/grpc_test_service.proto
+
+echo Code generation successful
diff --git a/tests/integrations/grpc/grpc_test_service_pb2.py b/tests/integrations/grpc/grpc_test_service_pb2.py
new file mode 100644
index 0000000000..84ea7f632a
--- /dev/null
+++ b/tests/integrations/grpc/grpc_test_service_pb2.py
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: grpc_test_service.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import builder as _builder
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x17grpc_test_service.proto\x12\x10grpc_test_server\"\x1f\n\x0fgRPCTestMessage\x12\x0c\n\x04text\x18\x01 \x01(\t2\xf8\x02\n\x0fgRPCTestService\x12Q\n\tTestServe\x12!.grpc_test_server.gRPCTestMessage\x1a!.grpc_test_server.gRPCTestMessage\x12Y\n\x0fTestUnaryStream\x12!.grpc_test_server.gRPCTestMessage\x1a!.grpc_test_server.gRPCTestMessage0\x01\x12\\\n\x10TestStreamStream\x12!.grpc_test_server.gRPCTestMessage\x1a!.grpc_test_server.gRPCTestMessage(\x01\x30\x01\x12Y\n\x0fTestStreamUnary\x12!.grpc_test_server.gRPCTestMessage\x1a!.grpc_test_server.gRPCTestMessage(\x01\x62\x06proto3')
+
+_globals = globals()
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'grpc_test_service_pb2', _globals)
+if _descriptor._USE_C_DESCRIPTORS == False:
+  DESCRIPTOR._options = None
+  _globals['_GRPCTESTMESSAGE']._serialized_start=45
+  _globals['_GRPCTESTMESSAGE']._serialized_end=76
+  _globals['_GRPCTESTSERVICE']._serialized_start=79
+  _globals['_GRPCTESTSERVICE']._serialized_end=455
+# @@protoc_insertion_point(module_scope)
diff --git a/tests/integrations/grpc/grpc_test_service_pb2.pyi b/tests/integrations/grpc/grpc_test_service_pb2.pyi
new file mode 100644
index 0000000000..f16d8a2d65
--- /dev/null
+++ b/tests/integrations/grpc/grpc_test_service_pb2.pyi
@@ -0,0 +1,11 @@
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from typing import ClassVar as _ClassVar, Optional as _Optional
+
+DESCRIPTOR: _descriptor.FileDescriptor
+
+class gRPCTestMessage(_message.Message):
+    __slots__ = ["text"]
+    TEXT_FIELD_NUMBER: _ClassVar[int]
+    text: str
+    def __init__(self, text: _Optional[str] = ...) -> None: ...
diff --git a/tests/integrations/grpc/grpc_test_service_pb2_grpc.py b/tests/integrations/grpc/grpc_test_service_pb2_grpc.py
new file mode 100644
index 0000000000..ad897608ca
--- /dev/null
+++ b/tests/integrations/grpc/grpc_test_service_pb2_grpc.py
@@ -0,0 +1,165 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
+import grpc_test_service_pb2 as grpc__test__service__pb2
+
+
+class gRPCTestServiceStub(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def __init__(self, channel):
+        """Constructor.
+
+        Args:
+            channel: A grpc.Channel.
+        """
+        self.TestServe = channel.unary_unary(
+                '/grpc_test_server.gRPCTestService/TestServe',
+                request_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
+                response_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
+                )
+        self.TestUnaryStream = channel.unary_stream(
+                '/grpc_test_server.gRPCTestService/TestUnaryStream',
+                request_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
+                response_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
+                )
+        self.TestStreamStream = channel.stream_stream(
+                '/grpc_test_server.gRPCTestService/TestStreamStream',
+                request_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
+                response_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
+                )
+        self.TestStreamUnary = channel.stream_unary(
+                '/grpc_test_server.gRPCTestService/TestStreamUnary',
+                request_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
+                response_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
+                )
+
+
+class gRPCTestServiceServicer(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def TestServe(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def TestUnaryStream(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def TestStreamStream(self, request_iterator, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def TestStreamUnary(self, request_iterator, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+
+def add_gRPCTestServiceServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+            'TestServe': grpc.unary_unary_rpc_method_handler(
+                    servicer.TestServe,
+                    request_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
+                    response_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
+            ),
+            'TestUnaryStream': grpc.unary_stream_rpc_method_handler(
+                    servicer.TestUnaryStream,
+                    request_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
+                    response_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
+            ),
+            'TestStreamStream': grpc.stream_stream_rpc_method_handler(
+                    servicer.TestStreamStream,
+                    request_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
+                    response_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
+            ),
+            'TestStreamUnary': grpc.stream_unary_rpc_method_handler(
+                    servicer.TestStreamUnary,
+                    request_deserializer=grpc__test__service__pb2.gRPCTestMessage.FromString,
+                    response_serializer=grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
+            ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+            'grpc_test_server.gRPCTestService', rpc_method_handlers)
+    server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class gRPCTestService(object):
+    """Missing associated documentation comment in .proto file."""
+
+    @staticmethod
+    def TestServe(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/grpc_test_server.gRPCTestService/TestServe',
+            grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
+            grpc__test__service__pb2.gRPCTestMessage.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def TestUnaryStream(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_stream(request, target, '/grpc_test_server.gRPCTestService/TestUnaryStream',
+            grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
+            grpc__test__service__pb2.gRPCTestMessage.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def TestStreamStream(request_iterator,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.stream_stream(request_iterator, target, '/grpc_test_server.gRPCTestService/TestStreamStream',
+            grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
+            grpc__test__service__pb2.gRPCTestMessage.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def TestStreamUnary(request_iterator,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.stream_unary(request_iterator, target, '/grpc_test_server.gRPCTestService/TestStreamUnary',
+            grpc__test__service__pb2.gRPCTestMessage.SerializeToString,
+            grpc__test__service__pb2.gRPCTestMessage.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
diff --git a/tests/integrations/grpc/protos/grpc_test_service.proto b/tests/integrations/grpc/protos/grpc_test_service.proto
new file mode 100644
index 0000000000..9eba747218
--- /dev/null
+++ b/tests/integrations/grpc/protos/grpc_test_service.proto
@@ -0,0 +1,14 @@
+syntax = "proto3";
+
+package grpc_test_server;
+
+service gRPCTestService{
+  rpc TestServe(gRPCTestMessage) returns (gRPCTestMessage);
+  rpc TestUnaryStream(gRPCTestMessage) returns (stream gRPCTestMessage);
+  rpc TestStreamStream(stream gRPCTestMessage) returns (stream gRPCTestMessage);
+  rpc TestStreamUnary(stream gRPCTestMessage) returns (gRPCTestMessage);
+}
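+
+// On the wire, a call to e.g. TestServe is addressed as
+// "/grpc_test_server.gRPCTestService/TestServe" (package.Service/Method).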
+
+message gRPCTestMessage {
+  string text = 1;
+}
diff --git a/tests/integrations/grpc/test_grpc.py b/tests/integrations/grpc/test_grpc.py
new file mode 100644
index 0000000000..3f49c0a0f4
--- /dev/null
+++ b/tests/integrations/grpc/test_grpc.py
@@ -0,0 +1,341 @@
+from __future__ import absolute_import
+
+import os
+from typing import List, Optional
+from concurrent import futures
+from unittest.mock import Mock
+
+import grpc
+import pytest
+
+from sentry_sdk import Hub, start_transaction
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations.grpc import GRPCIntegration
+from tests.conftest import ApproxDict
+from tests.integrations.grpc.grpc_test_service_pb2 import gRPCTestMessage
+from tests.integrations.grpc.grpc_test_service_pb2_grpc import (
+    gRPCTestServiceServicer,
+    add_gRPCTestServiceServicer_to_server,
+    gRPCTestServiceStub,
+)
+
+PORT = 50051
+PORT += os.getpid() % 100  # avoid port conflicts when running tests in parallel
+
+
+@pytest.mark.forked
+def test_grpc_server_starts_transaction(sentry_init, capture_events_forksafe):
+    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
+    events = capture_events_forksafe()
+
+    server = _set_up()
+
+    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
+        stub = gRPCTestServiceStub(channel)
+        stub.TestServe(gRPCTestMessage(text="test"))
+
+    _tear_down(server=server)
+
+    events.write_file.close()
+    event = events.read_event()
+    span = event["spans"][0]
+
+    assert event["type"] == "transaction"
+    assert event["transaction_info"] == {
+        "source": "custom",
+    }
+    assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
+    assert span["op"] == "test"
+
+
+@pytest.mark.forked
+def test_grpc_server_other_interceptors(sentry_init, capture_events_forksafe):
+    """Ensure compatibility with additional server interceptors."""
+    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
+    events = capture_events_forksafe()
+    mock_intercept = lambda continuation, handler_call_details: continuation(
+        handler_call_details
+    )
+    mock_interceptor = Mock()
+    mock_interceptor.intercept_service.side_effect = mock_intercept
+
+    server = _set_up(interceptors=[mock_interceptor])
+
+    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
+        stub = gRPCTestServiceStub(channel)
+        stub.TestServe(gRPCTestMessage(text="test"))
+
+    _tear_down(server=server)
+
+    mock_interceptor.intercept_service.assert_called_once()
+
+    events.write_file.close()
+    event = events.read_event()
+    span = event["spans"][0]
+
+    assert event["type"] == "transaction"
+    assert event["transaction_info"] == {
+        "source": "custom",
+    }
+    assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
+    assert span["op"] == "test"
+
+
+@pytest.mark.forked
+def test_grpc_server_continues_transaction(sentry_init, capture_events_forksafe):
+    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
+    events = capture_events_forksafe()
+
+    server = _set_up()
+
+    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
+        stub = gRPCTestServiceStub(channel)
+
+        with start_transaction() as transaction:
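+            # gRPC metadata mirrors the HTTP tracing headers: "sentry-trace"
+            # carries "<trace_id>-<parent_span_id>-<sampled>" and "baggage"
+            # carries the comma-separated sentry-* items.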
+            metadata = (
+                (
+                    "baggage",
+                    "sentry-trace_id={trace_id},sentry-environment=test,"
+                    "sentry-transaction=test-transaction,sentry-sample_rate=1.0".format(
+                        trace_id=transaction.trace_id
+                    ),
+                ),
+                (
+                    "sentry-trace",
+                    "{trace_id}-{parent_span_id}-{sampled}".format(
+                        trace_id=transaction.trace_id,
+                        parent_span_id=transaction.span_id,
+                        sampled=1,
+                    ),
+                ),
+            )
+            stub.TestServe(gRPCTestMessage(text="test"), metadata=metadata)
+
+    _tear_down(server=server)
+
+    events.write_file.close()
+    event = events.read_event()
+    span = event["spans"][0]
+
+    assert event["type"] == "transaction"
+    assert event["transaction_info"] == {
+        "source": "custom",
+    }
+    assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
+    assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
+    assert span["op"] == "test"
+
+
+@pytest.mark.forked
+def test_grpc_client_starts_span(sentry_init, capture_events_forksafe):
+    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
+    events = capture_events_forksafe()
+
+    server = _set_up()
+
+    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
+        stub = gRPCTestServiceStub(channel)
+
+        with start_transaction():
+            stub.TestServe(gRPCTestMessage(text="test"))
+
+    _tear_down(server=server)
+
+    events.write_file.close()
+    events.read_event()
+    local_transaction = events.read_event()
+    span = local_transaction["spans"][0]
+
+    assert len(local_transaction["spans"]) == 1
+    assert span["op"] == OP.GRPC_CLIENT
+    assert (
+        span["description"]
+        == "unary unary call to /grpc_test_server.gRPCTestService/TestServe"
+    )
+    assert span["data"] == ApproxDict(
+        {
+            "type": "unary unary",
+            "method": "/grpc_test_server.gRPCTestService/TestServe",
+            "code": "OK",
+        }
+    )
+
+
+@pytest.mark.forked
+def test_grpc_client_unary_stream_starts_span(sentry_init, capture_events_forksafe):
+    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
+    events = capture_events_forksafe()
+
+    server = _set_up()
+
+    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
+        stub = gRPCTestServiceStub(channel)
+
+        with start_transaction():
+            [el for el in stub.TestUnaryStream(gRPCTestMessage(text="test"))]
+
+    _tear_down(server=server)
+
+    events.write_file.close()
+    local_transaction = events.read_event()
+    span = local_transaction["spans"][0]
+
+    assert len(local_transaction["spans"]) == 1
+    assert span["op"] == OP.GRPC_CLIENT
+    assert (
+        span["description"]
+        == "unary stream call to /grpc_test_server.gRPCTestService/TestUnaryStream"
+    )
+    assert span["data"] == ApproxDict(
+        {
+            "type": "unary stream",
+            "method": "/grpc_test_server.gRPCTestService/TestUnaryStream",
+        }
+    )
+
+
+# using unittest.mock.Mock not possible because grpc verifies
+# that the interceptor is of the correct type
+class MockClientInterceptor(grpc.UnaryUnaryClientInterceptor):
+    call_counter = 0
+
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        self.__class__.call_counter += 1
+        return continuation(client_call_details, request)
+
+
+@pytest.mark.forked
+def test_grpc_client_other_interceptor(sentry_init, capture_events_forksafe):
+    """Ensure compatibility with additional client interceptors."""
+    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
+    events = capture_events_forksafe()
+
+    server = _set_up()
+
+    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
+        channel = grpc.intercept_channel(channel, MockClientInterceptor())
+        stub = gRPCTestServiceStub(channel)
+
+        with start_transaction():
+            stub.TestServe(gRPCTestMessage(text="test"))
+
+    _tear_down(server=server)
+
+    assert MockClientInterceptor.call_counter == 1
+
+    events.write_file.close()
+    events.read_event()
+    local_transaction = events.read_event()
+    span = local_transaction["spans"][0]
+
+    assert len(local_transaction["spans"]) == 1
+    assert span["op"] == OP.GRPC_CLIENT
+    assert (
+        span["description"]
+        == "unary unary call to /grpc_test_server.gRPCTestService/TestServe"
+    )
+    assert span["data"] == ApproxDict(
+        {
+            "type": "unary unary",
+            "method": "/grpc_test_server.gRPCTestService/TestServe",
+            "code": "OK",
+        }
+    )
+
+
+@pytest.mark.forked
+def test_grpc_client_and_servers_interceptors_integration(
+    sentry_init, capture_events_forksafe
+):
+    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
+    events = capture_events_forksafe()
+
+    server = _set_up()
+
+    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
+        stub = gRPCTestServiceStub(channel)
+
+        with start_transaction():
+            stub.TestServe(gRPCTestMessage(text="test"))
+
+    _tear_down(server=server)
+
+    events.write_file.close()
+    server_transaction = events.read_event()
+    local_transaction = events.read_event()
+
+    assert (
+        server_transaction["contexts"]["trace"]["trace_id"]
+        == local_transaction["contexts"]["trace"]["trace_id"]
+    )
+
+
+@pytest.mark.forked
+def test_stream_stream(sentry_init):
+    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
+    _set_up()
+    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
+        stub = gRPCTestServiceStub(channel)
+        response_iterator = stub.TestStreamStream(iter((gRPCTestMessage(text="test"),)))
+        for response in response_iterator:
+            assert response.text == "test"
+
+
+def test_stream_unary(sentry_init):
+    """Test to verify stream-stream works.
+    Tracing not supported for it yet.
+    """
+    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
+    _set_up()
+    with grpc.insecure_channel("localhost:{}".format(PORT)) as channel:
+        stub = gRPCTestServiceStub(channel)
+        response = stub.TestStreamUnary(iter((gRPCTestMessage(text="test"),)))
+        assert response.text == "test"
+
+
+def _set_up(interceptors: Optional[List[grpc.ServerInterceptor]] = None):
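+    """Start an insecure test gRPC server on localhost and return it."""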
+    server = grpc.server(
+        futures.ThreadPoolExecutor(max_workers=2),
+        interceptors=interceptors,
+    )
+
+    add_gRPCTestServiceServicer_to_server(TestService(), server)
+    server.add_insecure_port("[::]:{}".format(PORT))
+    server.start()
+
+    return server
+
+
+def _tear_down(server: grpc.Server):
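+    """Stop the test server; grace=None aborts in-flight RPCs immediately."""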
+    server.stop(None)
+
+
+def _find_name(request):
+    return request.__class__
+
+
+class TestService(gRPCTestServiceServicer):
+    events = []
+
+    @staticmethod
+    def TestServe(request, context):  # noqa: N802
+        hub = Hub.current
+        with hub.start_span(op="test", description="test"):
+            pass
+
+        return gRPCTestMessage(text=request.text)
+
+    @staticmethod
+    def TestUnaryStream(request, context):  # noqa: N802
+        for _ in range(3):
+            yield gRPCTestMessage(text=request.text)
+
+    @staticmethod
+    def TestStreamStream(request, context):  # noqa: N802
+        for r in request:
+            yield r
+
+    @staticmethod
+    def TestStreamUnary(request, context):  # noqa: N802
+        requests = [r for r in request]
+        return requests.pop()
diff --git a/tests/integrations/grpc/test_grpc_aio.py b/tests/integrations/grpc/test_grpc_aio.py
new file mode 100644
index 0000000000..3e21188ec8
--- /dev/null
+++ b/tests/integrations/grpc/test_grpc_aio.py
@@ -0,0 +1,259 @@
+from __future__ import absolute_import
+
+import asyncio
+import os
+
+import grpc
+import pytest
+import pytest_asyncio
+import sentry_sdk
+
+from sentry_sdk import Hub, start_transaction
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations.grpc import GRPCIntegration
+from tests.conftest import ApproxDict
+from tests.integrations.grpc.grpc_test_service_pb2 import gRPCTestMessage
+from tests.integrations.grpc.grpc_test_service_pb2_grpc import (
+    gRPCTestServiceServicer,
+    add_gRPCTestServiceServicer_to_server,
+    gRPCTestServiceStub,
+)
+
+AIO_PORT = 50052
+AIO_PORT += os.getpid() % 100  # avoid port conflicts when running tests in parallel
+
+
+@pytest.fixture(scope="function")
+def event_loop(request):
+    """Create an instance of the default event loop for each test case."""
+    loop = asyncio.new_event_loop()
+    yield loop
+    loop.close()
+
+
+@pytest_asyncio.fixture(scope="function")
+async def grpc_server(sentry_init, event_loop):
+    sentry_init(traces_sample_rate=1.0, integrations=[GRPCIntegration()])
+    server = grpc.aio.server()
+    server.add_insecure_port("[::]:{}".format(AIO_PORT))
+    add_gRPCTestServiceServicer_to_server(TestService, server)
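+    # The servicer class itself is registered (not an instance); its handler
+    # methods are classmethods, so no instantiation is needed here.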
+
+    await event_loop.create_task(server.start())
+
+    try:
+        yield server
+    finally:
+        await server.stop(None)
+
+
+@pytest.mark.asyncio
+async def test_grpc_server_starts_transaction(capture_events, grpc_server):
+    events = capture_events()
+
+    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
+        stub = gRPCTestServiceStub(channel)
+        await stub.TestServe(gRPCTestMessage(text="test"))
+
+    (event,) = events
+    span = event["spans"][0]
+
+    assert event["type"] == "transaction"
+    assert event["transaction_info"] == {
+        "source": "custom",
+    }
+    assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
+    assert span["op"] == "test"
+
+
+@pytest.mark.asyncio
+async def test_grpc_server_continues_transaction(capture_events, grpc_server):
+    events = capture_events()
+
+    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
+        stub = gRPCTestServiceStub(channel)
+
+        with sentry_sdk.start_transaction() as transaction:
+            metadata = (
+                (
+                    "baggage",
+                    "sentry-trace_id={trace_id},sentry-environment=test,"
+                    "sentry-transaction=test-transaction,sentry-sample_rate=1.0".format(
+                        trace_id=transaction.trace_id
+                    ),
+                ),
+                (
+                    "sentry-trace",
+                    "{trace_id}-{parent_span_id}-{sampled}".format(
+                        trace_id=transaction.trace_id,
+                        parent_span_id=transaction.span_id,
+                        sampled=1,
+                    ),
+                ),
+            )
+
+            await stub.TestServe(gRPCTestMessage(text="test"), metadata=metadata)
+
+    (event, _) = events
+    span = event["spans"][0]
+
+    assert event["type"] == "transaction"
+    assert event["transaction_info"] == {
+        "source": "custom",
+    }
+    assert event["contexts"]["trace"]["op"] == OP.GRPC_SERVER
+    assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
+    assert span["op"] == "test"
+
+
+@pytest.mark.asyncio
+async def test_grpc_server_exception(capture_events, grpc_server):
+    events = capture_events()
+
+    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
+        stub = gRPCTestServiceStub(channel)
+        with pytest.raises(grpc.RpcError):
+            await stub.TestServe(gRPCTestMessage(text="exception"))
+
+    (event, _) = events
+
+    assert event["exception"]["values"][0]["type"] == "TestService.TestException"
+    assert event["exception"]["values"][0]["value"] == "test"
+    assert event["exception"]["values"][0]["mechanism"]["handled"] is False
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "grpc"
+
+
+@pytest.mark.asyncio
+async def test_grpc_server_abort(capture_events, grpc_server):
+    events = capture_events()
+
+    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
+        stub = gRPCTestServiceStub(channel)
+        with pytest.raises(grpc.RpcError):
+            await stub.TestServe(gRPCTestMessage(text="abort"))
+
+    assert len(events) == 1
+
+
+@pytest.mark.asyncio
+async def test_grpc_client_starts_span(
+    grpc_server, sentry_init, capture_events_forksafe
+):
+    events = capture_events_forksafe()
+
+    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
+        stub = gRPCTestServiceStub(channel)
+        with start_transaction():
+            await stub.TestServe(gRPCTestMessage(text="test"))
+
+    events.write_file.close()
+    events.read_event()
+    local_transaction = events.read_event()
+    span = local_transaction["spans"][0]
+
+    assert len(local_transaction["spans"]) == 1
+    assert span["op"] == OP.GRPC_CLIENT
+    assert (
+        span["description"]
+        == "unary unary call to /grpc_test_server.gRPCTestService/TestServe"
+    )
+    assert span["data"] == ApproxDict(
+        {
+            "type": "unary unary",
+            "method": "/grpc_test_server.gRPCTestService/TestServe",
+            "code": "OK",
+        }
+    )
+
+
+@pytest.mark.asyncio
+async def test_grpc_client_unary_stream_starts_span(
+    grpc_server, capture_events_forksafe
+):
+    events = capture_events_forksafe()
+
+    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
+        stub = gRPCTestServiceStub(channel)
+        with start_transaction():
+            response = stub.TestUnaryStream(gRPCTestMessage(text="test"))
+            [_ async for _ in response]
+
+    events.write_file.close()
+    local_transaction = events.read_event()
+    span = local_transaction["spans"][0]
+
+    assert len(local_transaction["spans"]) == 1
+    assert span["op"] == OP.GRPC_CLIENT
+    assert (
+        span["description"]
+        == "unary stream call to /grpc_test_server.gRPCTestService/TestUnaryStream"
+    )
+    assert span["data"] == ApproxDict(
+        {
+            "type": "unary stream",
+            "method": "/grpc_test_server.gRPCTestService/TestUnaryStream",
+        }
+    )
+
+
+@pytest.mark.asyncio
+async def test_stream_stream(grpc_server):
+    """Test to verify stream-stream works.
+    Tracing is not supported for it yet.
+    """
+    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
+        stub = gRPCTestServiceStub(channel)
+        response = stub.TestStreamStream((gRPCTestMessage(text="test"),))
+        async for r in response:
+            assert r.text == "test"
+
+
+@pytest.mark.asyncio
+async def test_stream_unary(grpc_server):
+    """Test to verify stream-stream works.
+    Tracing not supported for it yet.
+    """
+    async with grpc.aio.insecure_channel("localhost:{}".format(AIO_PORT)) as channel:
+        stub = gRPCTestServiceStub(channel)
+        response = await stub.TestStreamUnary((gRPCTestMessage(text="test"),))
+        assert response.text == "test"
+
+
+class TestService(gRPCTestServiceServicer):
+    class TestException(Exception):
+        def __init__(self):
+            super().__init__("test")
+
+    @classmethod
+    async def TestServe(cls, request, context):  # noqa: N802
+        hub = Hub.current
+        with hub.start_span(op="test", description="test"):
+            pass
+
+        if request.text == "exception":
+            raise cls.TestException()
+
+        if request.text == "abort":
+            await context.abort(grpc.StatusCode.ABORTED)
+
+        return gRPCTestMessage(text=request.text)
+
+    @classmethod
+    async def TestUnaryStream(cls, request, context):  # noqa: N802
+        for _ in range(3):
+            yield gRPCTestMessage(text=request.text)
+
+    @classmethod
+    async def TestStreamStream(cls, request, context):  # noqa: N802
+        async for r in request:
+            yield r
+
+    @classmethod
+    async def TestStreamUnary(cls, request, context):  # noqa: N802
+        requests = [r async for r in request]
+        return requests.pop()
diff --git a/tests/integrations/httpx/test_httpx.py b/tests/integrations/httpx/test_httpx.py
index 4623f13348..c4ca97321c 100644
--- a/tests/integrations/httpx/test_httpx.py
+++ b/tests/integrations/httpx/test_httpx.py
@@ -1,66 +1,308 @@
 import asyncio
 
+import pytest
 import httpx
+import responses
 
 from sentry_sdk import capture_message, start_transaction
+from sentry_sdk.consts import MATCH_ALL, SPANDATA
 from sentry_sdk.integrations.httpx import HttpxIntegration
+from tests.conftest import ApproxDict
 
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
 
-def test_crumb_capture_and_hint(sentry_init, capture_events):
+
+@pytest.mark.parametrize(
+    "httpx_client",
+    (httpx.Client(), httpx.AsyncClient()),
+)
+def test_crumb_capture_and_hint(sentry_init, capture_events, httpx_client):
     def before_breadcrumb(crumb, hint):
         crumb["data"]["extra"] = "foo"
         return crumb
 
     sentry_init(integrations=[HttpxIntegration()], before_breadcrumb=before_breadcrumb)
-    clients = (httpx.Client(), httpx.AsyncClient())
-    for i, c in enumerate(clients):
-        with start_transaction():
-            events = capture_events()
-
-            url = "https://httpbin.org/status/200"
-            if not asyncio.iscoroutinefunction(c.get):
-                response = c.get(url)
-            else:
-                response = asyncio.get_event_loop().run_until_complete(c.get(url))
-
-            assert response.status_code == 200
-            capture_message("Testing!")
-
-            (event,) = events
-            # send request twice so we need get breadcrumb by index
-            crumb = event["breadcrumbs"]["values"][i]
-            assert crumb["type"] == "http"
-            assert crumb["category"] == "httplib"
-            assert crumb["data"] == {
+
+    url = "http://example.com/"
+    responses.add(responses.GET, url, status=200)
+
+    with start_transaction():
+        events = capture_events()
+
+        if asyncio.iscoroutinefunction(httpx_client.get):
+            response = asyncio.get_event_loop().run_until_complete(
+                httpx_client.get(url)
+            )
+        else:
+            response = httpx_client.get(url)
+
+        assert response.status_code == 200
+        capture_message("Testing!")
+
+        (event,) = events
+
+        crumb = event["breadcrumbs"]["values"][0]
+        assert crumb["type"] == "http"
+        assert crumb["category"] == "httplib"
+        assert crumb["data"] == ApproxDict(
+            {
                 "url": url,
-                "method": "GET",
-                "status_code": 200,
+                SPANDATA.HTTP_METHOD: "GET",
+                SPANDATA.HTTP_FRAGMENT: "",
+                SPANDATA.HTTP_QUERY: "",
+                SPANDATA.HTTP_STATUS_CODE: 200,
                 "reason": "OK",
                 "extra": "foo",
             }
+        )
 
 
-def test_outgoing_trace_headers(sentry_init):
+@pytest.mark.parametrize(
+    "httpx_client",
+    (httpx.Client(), httpx.AsyncClient()),
+)
+def test_outgoing_trace_headers(sentry_init, httpx_client):
     sentry_init(traces_sample_rate=1.0, integrations=[HttpxIntegration()])
-    clients = (httpx.Client(), httpx.AsyncClient())
-    for i, c in enumerate(clients):
-        with start_transaction(
-            name="/interactions/other-dogs/new-dog",
-            op="greeting.sniff",
-            # make trace_id difference between transactions
-            trace_id=f"012345678901234567890123456789{i}",
-        ) as transaction:
-            url = "https://httpbin.org/status/200"
-            if not asyncio.iscoroutinefunction(c.get):
-                response = c.get(url)
-            else:
-                response = asyncio.get_event_loop().run_until_complete(c.get(url))
-
-            request_span = transaction._span_recorder.spans[-1]
-            assert response.request.headers[
-                "sentry-trace"
-            ] == "{trace_id}-{parent_span_id}-{sampled}".format(
-                trace_id=transaction.trace_id,
-                parent_span_id=request_span.span_id,
-                sampled=1,
+
+    url = "http://example.com/"
+    responses.add(responses.GET, url, status=200)
+
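+    # The sentry-trace header has the form "<trace_id>-<parent_span_id>-<sampled>",
+    # e.g. "01234567890123456789012345678901-1234567890abcdef-1" for a sampled
+    # trace, which is what the assertion below reconstructs.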
+    with start_transaction(
+        name="/interactions/other-dogs/new-dog",
+        op="greeting.sniff",
+        trace_id="01234567890123456789012345678901",
+    ) as transaction:
+        if asyncio.iscoroutinefunction(httpx_client.get):
+            response = asyncio.get_event_loop().run_until_complete(
+                httpx_client.get(url)
+            )
+        else:
+            response = httpx_client.get(url)
+
+        request_span = transaction._span_recorder.spans[-1]
+        assert response.request.headers[
+            "sentry-trace"
+        ] == "{trace_id}-{parent_span_id}-{sampled}".format(
+            trace_id=transaction.trace_id,
+            parent_span_id=request_span.span_id,
+            sampled=1,
+        )
+
+
+@pytest.mark.parametrize(
+    "httpx_client",
+    (httpx.Client(), httpx.AsyncClient()),
+)
+def test_outgoing_trace_headers_append_to_baggage(sentry_init, httpx_client):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[HttpxIntegration()],
+        release="d08ebdb9309e1b004c6f52202de58a09c2268e42",
+    )
+
+    url = "http://example.com/"
+    responses.add(responses.GET, url, status=200)
+
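+    # An incoming baggage header (in any casing) should be preserved, with
+    # Sentry's own "sentry-*" entries appended after it, as asserted below.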
+    with start_transaction(
+        name="/interactions/other-dogs/new-dog",
+        op="greeting.sniff",
+        trace_id="01234567890123456789012345678901",
+    ) as transaction:
+        if asyncio.iscoroutinefunction(httpx_client.get):
+            response = asyncio.get_event_loop().run_until_complete(
+                httpx_client.get(url, headers={"baGGage": "custom=data"})
             )
+        else:
+            response = httpx_client.get(url, headers={"baGGage": "custom=data"})
+
+        request_span = transaction._span_recorder.spans[-1]
+        assert response.request.headers[
+            "sentry-trace"
+        ] == "{trace_id}-{parent_span_id}-{sampled}".format(
+            trace_id=transaction.trace_id,
+            parent_span_id=request_span.span_id,
+            sampled=1,
+        )
+        assert (
+            response.request.headers["baggage"]
+            == "custom=data,sentry-trace_id=01234567890123456789012345678901,sentry-environment=production,sentry-release=d08ebdb9309e1b004c6f52202de58a09c2268e42,sentry-transaction=/interactions/other-dogs/new-dog,sentry-sample_rate=1.0,sentry-sampled=true"
+        )
+
+
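+# Each trace_propagation_targets entry is matched against the outgoing URL with
+# a regex search, so the pattern "https://example.com/" does not match the
+# slash-less "https://example.com", while the bare "https://example.com"
+# matches both (see the matrix below).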
+@pytest.mark.parametrize(
+    "httpx_client,trace_propagation_targets,url,trace_propagated",
+    [
+        [
+            httpx.Client(),
+            None,
+            "https://example.com/",
+            False,
+        ],
+        [
+            httpx.Client(),
+            [],
+            "https://example.com/",
+            False,
+        ],
+        [
+            httpx.Client(),
+            [MATCH_ALL],
+            "https://example.com/",
+            True,
+        ],
+        [
+            httpx.Client(),
+            ["https://example.com/"],
+            "https://example.com/",
+            True,
+        ],
+        [
+            httpx.Client(),
+            ["https://example.com/"],
+            "https://example.com",
+            False,
+        ],
+        [
+            httpx.Client(),
+            ["https://example.com"],
+            "https://example.com",
+            True,
+        ],
+        [
+            httpx.Client(),
+            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
+            "https://example.net",
+            False,
+        ],
+        [
+            httpx.Client(),
+            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
+            "https://good.example.net",
+            True,
+        ],
+        [
+            httpx.Client(),
+            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
+            "https://good.example.net/some/thing",
+            True,
+        ],
+        [
+            httpx.AsyncClient(),
+            None,
+            "https://example.com/",
+            False,
+        ],
+        [
+            httpx.AsyncClient(),
+            [],
+            "https://example.com/",
+            False,
+        ],
+        [
+            httpx.AsyncClient(),
+            [MATCH_ALL],
+            "https://example.com/",
+            True,
+        ],
+        [
+            httpx.AsyncClient(),
+            ["https://example.com/"],
+            "https://example.com/",
+            True,
+        ],
+        [
+            httpx.AsyncClient(),
+            ["https://example.com/"],
+            "https://example.com",
+            False,
+        ],
+        [
+            httpx.AsyncClient(),
+            ["https://example.com"],
+            "https://example.com",
+            True,
+        ],
+        [
+            httpx.AsyncClient(),
+            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
+            "https://example.net",
+            False,
+        ],
+        [
+            httpx.AsyncClient(),
+            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
+            "https://good.example.net",
+            True,
+        ],
+        [
+            httpx.AsyncClient(),
+            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
+            "https://good.example.net/some/thing",
+            True,
+        ],
+    ],
+)
+def test_option_trace_propagation_targets(
+    sentry_init,
+    httpx_client,
+    httpx_mock,  # this comes from pytest-httpx
+    trace_propagation_targets,
+    url,
+    trace_propagated,
+):
+    httpx_mock.add_response()
+
+    sentry_init(
+        release="test",
+        trace_propagation_targets=trace_propagation_targets,
+        traces_sample_rate=1.0,
+        integrations=[HttpxIntegration()],
+    )
+
+    if asyncio.iscoroutinefunction(httpx_client.get):
+        asyncio.get_event_loop().run_until_complete(httpx_client.get(url))
+    else:
+        httpx_client.get(url)
+
+    request_headers = httpx_mock.get_request().headers
+
+    if trace_propagated:
+        assert "sentry-trace" in request_headers
+    else:
+        assert "sentry-trace" not in request_headers
+
+
+@pytest.mark.tests_internal_exceptions
+def test_omit_url_data_if_parsing_fails(sentry_init, capture_events):
+    sentry_init(integrations=[HttpxIntegration()])
+
+    httpx_client = httpx.Client()
+    url = "http://example.com"
+    responses.add(responses.GET, url, status=200)
+
+    events = capture_events()
+    with mock.patch(
+        "sentry_sdk.integrations.httpx.parse_url",
+        side_effect=ValueError,
+    ):
+        response = httpx_client.get(url)
+
+    assert response.status_code == 200
+    capture_message("Testing!")
+
+    (event,) = events
+    assert event["breadcrumbs"]["values"][0]["data"] == ApproxDict(
+        {
+            SPANDATA.HTTP_METHOD: "GET",
+            SPANDATA.HTTP_STATUS_CODE: 200,
+            "reason": "OK",
+            # no url related data
+        }
+    )
+
+    assert "url" not in event["breadcrumbs"]["values"][0]["data"]
+    assert SPANDATA.HTTP_FRAGMENT not in event["breadcrumbs"]["values"][0]["data"]
+    assert SPANDATA.HTTP_QUERY not in event["breadcrumbs"]["values"][0]["data"]
diff --git a/tests/integrations/huey/__init__.py b/tests/integrations/huey/__init__.py
new file mode 100644
index 0000000000..448a7eb2f7
--- /dev/null
+++ b/tests/integrations/huey/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("huey")
diff --git a/tests/integrations/huey/test_huey.py b/tests/integrations/huey/test_huey.py
new file mode 100644
index 0000000000..48a3da97f4
--- /dev/null
+++ b/tests/integrations/huey/test_huey.py
@@ -0,0 +1,192 @@
+import pytest
+from decimal import DivisionByZero
+
+from sentry_sdk import start_transaction
+from sentry_sdk.integrations.huey import HueyIntegration
+from sentry_sdk.utils import parse_version
+
+from huey import __version__ as HUEY_VERSION
+from huey.api import MemoryHuey, Result
+from huey.exceptions import RetryTask
+
+
+HUEY_VERSION = parse_version(HUEY_VERSION)
+
+
+@pytest.fixture
+def init_huey(sentry_init):
+    def inner():
+        sentry_init(
+            integrations=[HueyIntegration()],
+            traces_sample_rate=1.0,
+            send_default_pii=True,
+            debug=True,
+        )
+
+        return MemoryHuey(name="sentry_sdk")
+
+    return inner
+
+
+@pytest.fixture(autouse=True)
+def flush_huey_tasks(init_huey):
+    huey = init_huey()
+    huey.flush()
+
+
+def execute_huey_task(huey, func, *args, **kwargs):
+    exceptions = kwargs.pop("exceptions", None)
+    result = func(*args, **kwargs)
+    task = huey.dequeue()
+    if exceptions is not None:
+        try:
+            huey.execute(task)
+        except exceptions:
+            pass
+    else:
+        huey.execute(task)
+    return result
+
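+# Usage sketch (with a hypothetical add-style task): enqueueing returns a
+# Result handle, and execute() runs the dequeued task inline on the in-memory
+# huey instance, e.g.
+#     result = execute_huey_task(huey, my_task, 1, 2)
+#     assert result.get() == 3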
+
+def test_task_result(init_huey):
+    huey = init_huey()
+
+    @huey.task()
+    def increase(num):
+        return num + 1
+
+    result = increase(3)
+
+    assert isinstance(result, Result)
+    assert len(huey) == 1
+    task = huey.dequeue()
+    assert huey.execute(task) == 4
+    assert result.get() == 4
+
+
+@pytest.mark.parametrize("task_fails", [True, False], ids=["error", "success"])
+def test_task_transaction(capture_events, init_huey, task_fails):
+    huey = init_huey()
+
+    @huey.task()
+    def division(a, b):
+        return a / b
+
+    events = capture_events()
+    execute_huey_task(
+        huey, division, 1, int(not task_fails), exceptions=(DivisionByZero,)
+    )
+
+    if task_fails:
+        error_event = events.pop(0)
+        assert error_event["exception"]["values"][0]["type"] == "ZeroDivisionError"
+        assert error_event["exception"]["values"][0]["mechanism"]["type"] == "huey"
+
+    (event,) = events
+    assert event["type"] == "transaction"
+    assert event["transaction"] == "division"
+    assert event["transaction_info"] == {"source": "task"}
+
+    if task_fails:
+        assert event["contexts"]["trace"]["status"] == "internal_error"
+    else:
+        assert event["contexts"]["trace"]["status"] == "ok"
+
+    assert "huey_task_id" in event["tags"]
+    assert "huey_task_retry" in event["tags"]
+
+
+def test_task_retry(capture_events, init_huey):
+    huey = init_huey()
+    context = {"retry": True}
+
+    @huey.task()
+    def retry_task(context):
+        if context["retry"]:
+            context["retry"] = False
+            raise RetryTask()
+
+    events = capture_events()
+    result = execute_huey_task(huey, retry_task, context)
+    (event,) = events
+
+    assert event["transaction"] == "retry_task"
+    assert event["tags"]["huey_task_id"] == result.task.id
+    assert len(huey) == 1
+
+    task = huey.dequeue()
+    huey.execute(task)
+    (event, _) = events
+
+    assert event["transaction"] == "retry_task"
+    assert event["tags"]["huey_task_id"] == result.task.id
+    assert len(huey) == 0
+
+
+@pytest.mark.parametrize("lock_name", ["lock.a", "lock.b"], ids=["locked", "unlocked"])
+@pytest.mark.skipif(HUEY_VERSION < (2, 5), reason="is_locked was added in 2.5")
+def test_task_lock(capture_events, init_huey, lock_name):
+    huey = init_huey()
+
+    task_lock_name = "lock.a"
+    should_be_locked = task_lock_name == lock_name
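+    # huey.lock_task acquires a named lock; while "lock.a" is held below, a
+    # task guarded by the same lock name aborts instead of running.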
+
+    @huey.task()
+    @huey.lock_task(task_lock_name)
+    def maybe_locked_task():
+        pass
+
+    events = capture_events()
+
+    with huey.lock_task(lock_name):
+        assert huey.is_locked(task_lock_name) == should_be_locked
+        result = execute_huey_task(huey, maybe_locked_task)
+
+    (event,) = events
+
+    assert event["transaction"] == "maybe_locked_task"
+    assert event["tags"]["huey_task_id"] == result.task.id
+    assert event["contexts"]["trace"]["status"] == (
+        "aborted" if should_be_locked else "ok"
+    )
+    assert len(huey) == 0
+
+
+def test_huey_enqueue(init_huey, capture_events):
+    huey = init_huey()
+
+    @huey.task(name="different_task_name")
+    def dummy_task():
+        pass
+
+    events = capture_events()
+
+    with start_transaction() as transaction:
+        dummy_task()
+
+    (event,) = events
+
+    assert event["contexts"]["trace"]["trace_id"] == transaction.trace_id
+    assert event["contexts"]["trace"]["span_id"] == transaction.span_id
+
+    assert len(event["spans"])
+    assert event["spans"][0]["op"] == "queue.submit.huey"
+    assert event["spans"][0]["description"] == "different_task_name"
+
+
+def test_huey_propagate_trace(init_huey, capture_events):
+    huey = init_huey()
+
+    events = capture_events()
+
+    @huey.task()
+    def propagated_trace_task():
+        pass
+
+    with start_transaction() as outer_transaction:
+        execute_huey_task(huey, propagated_trace_task)
+
+    assert (
+        events[0]["transaction"] == "propagated_trace_task"
+    )  # the "inner" transaction
+    assert events[0]["contexts"]["trace"]["trace_id"] == outer_transaction.trace_id
diff --git a/tests/integrations/logging/test_logging.py b/tests/integrations/logging/test_logging.py
index 73843cc6eb..92d0674c09 100644
--- a/tests/integrations/logging/test_logging.py
+++ b/tests/integrations/logging/test_logging.py
@@ -1,3 +1,4 @@
+# coding: utf-8
 import sys
 
 import pytest
@@ -115,6 +116,45 @@ def test_logging_level(sentry_init, capture_events):
     assert not events
 
 
+def test_custom_log_level_names(sentry_init, capture_events):
+    levels = {
+        logging.DEBUG: "debug",
+        logging.INFO: "info",
+        logging.WARN: "warning",
+        logging.WARNING: "warning",
+        logging.ERROR: "error",
+        logging.CRITICAL: "fatal",
+        logging.FATAL: "fatal",
+    }
+
+    # set custom log level names
+    # fmt: off
+    logging.addLevelName(logging.DEBUG, u"custom level debüg: ")
+    # fmt: on
+    logging.addLevelName(logging.INFO, "")
+    logging.addLevelName(logging.WARN, "custom level warn: ")
+    logging.addLevelName(logging.WARNING, "custom level warning: ")
+    logging.addLevelName(logging.ERROR, None)
+    logging.addLevelName(logging.CRITICAL, "custom level critical: ")
+    logging.addLevelName(logging.FATAL, "custom level 🔥: ")
+
+    for logging_level, sentry_level in levels.items():
+        logger.setLevel(logging_level)
+        sentry_init(
+            integrations=[LoggingIntegration(event_level=logging_level)],
+            default_integrations=False,
+        )
+        events = capture_events()
+
+        logger.log(logging_level, "Trying level %s", logging_level)
+        assert events
+        assert events[0]["level"] == sentry_level
+        assert events[0]["logentry"]["message"] == "Trying level %s"
+        assert events[0]["logentry"]["params"] == [logging_level]
+
+        del events[:]
+
+
 def test_logging_filters(sentry_init, capture_events):
     sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
     events = capture_events()
@@ -145,11 +185,11 @@ def test_logging_captured_warnings(sentry_init, capture_events, recwarn):
     events = capture_events()
 
     logging.captureWarnings(True)
-    warnings.warn("first")
-    warnings.warn("second")
+    warnings.warn("first", stacklevel=2)
+    warnings.warn("second", stacklevel=2)
     logging.captureWarnings(False)
 
-    warnings.warn("third")
+    warnings.warn("third", stacklevel=2)
 
     assert len(events) == 2
 
diff --git a/tests/integrations/loguru/__init__.py b/tests/integrations/loguru/__init__.py
new file mode 100644
index 0000000000..9d67fb3799
--- /dev/null
+++ b/tests/integrations/loguru/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("loguru")
diff --git a/tests/integrations/loguru/test_loguru.py b/tests/integrations/loguru/test_loguru.py
new file mode 100644
index 0000000000..48133aab85
--- /dev/null
+++ b/tests/integrations/loguru/test_loguru.py
@@ -0,0 +1,117 @@
+import pytest
+from loguru import logger
+
+import sentry_sdk
+from sentry_sdk.integrations.loguru import LoguruIntegration, LoggingLevels
+
+logger.remove(0)  # don't print to console
+
+
+@pytest.mark.parametrize(
+    "level,created_event",
+    [
+        # None - no breadcrumb
+        # False - no event
+        # True - event created
+        (LoggingLevels.TRACE, None),
+        (LoggingLevels.DEBUG, None),
+        (LoggingLevels.INFO, False),
+        (LoggingLevels.SUCCESS, False),
+        (LoggingLevels.WARNING, False),
+        (LoggingLevels.ERROR, True),
+        (LoggingLevels.CRITICAL, True),
+    ],
+)
+@pytest.mark.parametrize("disable_breadcrumbs", [True, False])
+@pytest.mark.parametrize("disable_events", [True, False])
+def test_just_log(
+    sentry_init,
+    capture_events,
+    level,
+    created_event,
+    disable_breadcrumbs,
+    disable_events,
+):
+    sentry_init(
+        integrations=[
+            LoguruIntegration(
+                level=None if disable_breadcrumbs else LoggingLevels.INFO.value,
+                event_level=None if disable_events else LoggingLevels.ERROR.value,
+            )
+        ],
+        default_integrations=False,
+    )
+    events = capture_events()
+
+    getattr(logger, level.name.lower())("test")
+
+    formatted_message = (
+        " | "
+        + "{:9}".format(level.name.upper())
+        + "| tests.integrations.loguru.test_loguru:test_just_log:46 - test"
+    )
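+    # loguru's default format starts with a 23-character timestamp
+    # ("YYYY-MM-DD HH:mm:ss.SSS"), hence the [23:] slices below.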
+
+    if not created_event:
+        assert not events
+
+        breadcrumbs = sentry_sdk.Hub.current.scope._breadcrumbs
+        if (
+            not disable_breadcrumbs and created_event is not None
+        ):  # created_event is None only for the TRACE and DEBUG levels
+            (breadcrumb,) = breadcrumbs
+            assert breadcrumb["level"] == level.name.lower()
+            assert breadcrumb["category"] == "tests.integrations.loguru.test_loguru"
+            assert breadcrumb["message"][23:] == formatted_message
+        else:
+            assert not breadcrumbs
+
+        return
+
+    if disable_events:
+        assert not events
+        return
+
+    (event,) = events
+    assert event["level"] == (level.name.lower())
+    assert event["logger"] == "tests.integrations.loguru.test_loguru"
+    assert event["logentry"]["message"][23:] == formatted_message
+
+
+def test_breadcrumb_format(sentry_init, capture_events):
+    sentry_init(
+        integrations=[
+            LoguruIntegration(
+                level=LoggingLevels.INFO.value,
+                event_level=None,
+                breadcrumb_format="{message}",
+            )
+        ],
+        default_integrations=False,
+    )
+
+    logger.info("test")
+    formatted_message = "test"
+
+    breadcrumbs = sentry_sdk.Hub.current.scope._breadcrumbs
+    (breadcrumb,) = breadcrumbs
+    assert breadcrumb["message"] == formatted_message
+
+
+def test_event_format(sentry_init, capture_events):
+    sentry_init(
+        integrations=[
+            LoguruIntegration(
+                level=None,
+                event_level=LoggingLevels.ERROR.value,
+                event_format="{message}",
+            )
+        ],
+        default_integrations=False,
+    )
+    events = capture_events()
+
+    logger.error("test")
+    formatted_message = "test"
+
+    (event,) = events
+    assert event["logentry"]["message"] == formatted_message
diff --git a/tests/integrations/openai/__init__.py b/tests/integrations/openai/__init__.py
new file mode 100644
index 0000000000..d6cc3d5505
--- /dev/null
+++ b/tests/integrations/openai/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("openai")
diff --git a/tests/integrations/openai/test_openai.py b/tests/integrations/openai/test_openai.py
new file mode 100644
index 0000000000..074d859274
--- /dev/null
+++ b/tests/integrations/openai/test_openai.py
@@ -0,0 +1,231 @@
+import pytest
+from openai import OpenAI, Stream, OpenAIError
+from openai.types import CompletionUsage, CreateEmbeddingResponse, Embedding
+from openai.types.chat import ChatCompletion, ChatCompletionMessage, ChatCompletionChunk
+from openai.types.chat.chat_completion import Choice
+from openai.types.chat.chat_completion_chunk import ChoiceDelta, Choice as DeltaChoice
+from openai.types.create_embedding_response import Usage as EmbeddingTokenUsage
+
+from sentry_sdk import start_transaction
+from sentry_sdk.integrations.openai import (
+    OpenAIIntegration,
+    COMPLETION_TOKENS_USED,
+    PROMPT_TOKENS_USED,
+    TOTAL_TOKENS_USED,
+)
+
+from unittest import mock  # python 3.3 and above
+
+
+EXAMPLE_CHAT_COMPLETION = ChatCompletion(
+    id="chat-id",
+    choices=[
+        Choice(
+            index=0,
+            finish_reason="stop",
+            message=ChatCompletionMessage(
+                role="assistant", content="the model response"
+            ),
+        )
+    ],
+    created=10000000,
+    model="model-id",
+    object="chat.completion",
+    usage=CompletionUsage(
+        completion_tokens=10,
+        prompt_tokens=20,
+        total_tokens=30,
+    ),
+)
+
+
+@pytest.mark.parametrize(
+    "send_default_pii, include_prompts",
+    [(True, True), (True, False), (False, True), (False, False)],
+)
+def test_nonstreaming_chat_completion(
+    sentry_init, capture_events, send_default_pii, include_prompts
+):
+    sentry_init(
+        integrations=[OpenAIIntegration(include_prompts=include_prompts)],
+        traces_sample_rate=1.0,
+        send_default_pii=send_default_pii,
+    )
+    events = capture_events()
+
+    client = OpenAI(api_key="z")
+    client.chat.completions._post = mock.Mock(return_value=EXAMPLE_CHAT_COMPLETION)
+
+    with start_transaction(name="openai tx"):
+        response = (
+            client.chat.completions.create(
+                model="some-model", messages=[{"role": "system", "content": "hello"}]
+            )
+            .choices[0]
+            .message.content
+        )
+
+    assert response == "the model response"
+    tx = events[0]
+    assert tx["type"] == "transaction"
+    span = tx["spans"][0]
+    assert span["op"] == "ai.chat_completions.create.openai"
+
+    if send_default_pii and include_prompts:
+        assert "hello" in span["data"]["ai.input_messages"][0]["content"]
+        assert "the model response" in span["data"]["ai.responses"][0]["content"]
+    else:
+        assert "ai.input_messages" not in span["data"]
+        assert "ai.responses" not in span["data"]
+
+    assert span["data"][COMPLETION_TOKENS_USED] == 10
+    assert span["data"][PROMPT_TOKENS_USED] == 20
+    assert span["data"][TOTAL_TOKENS_USED] == 30
+
+
+# noinspection PyTypeChecker
+@pytest.mark.parametrize(
+    "send_default_pii, include_prompts",
+    [(True, True), (True, False), (False, True), (False, False)],
+)
+def test_streaming_chat_completion(
+    sentry_init, capture_events, send_default_pii, include_prompts
+):
+    sentry_init(
+        integrations=[OpenAIIntegration(include_prompts=include_prompts)],
+        traces_sample_rate=1.0,
+        send_default_pii=send_default_pii,
+    )
+    events = capture_events()
+
+    client = OpenAI(api_key="z")
+    returned_stream = Stream(cast_to=None, response=None, client=client)
+    returned_stream._iterator = [
+        ChatCompletionChunk(
+            id="1",
+            choices=[
+                DeltaChoice(
+                    index=0, delta=ChoiceDelta(content="hel"), finish_reason=None
+                )
+            ],
+            created=100000,
+            model="model-id",
+            object="chat.completion.chunk",
+        ),
+        ChatCompletionChunk(
+            id="1",
+            choices=[
+                DeltaChoice(
+                    index=1, delta=ChoiceDelta(content="lo "), finish_reason=None
+                )
+            ],
+            created=100000,
+            model="model-id",
+            object="chat.completion.chunk",
+        ),
+        ChatCompletionChunk(
+            id="1",
+            choices=[
+                DeltaChoice(
+                    index=2, delta=ChoiceDelta(content="world"), finish_reason="stop"
+                )
+            ],
+            created=100000,
+            model="model-id",
+            object="chat.completion.chunk",
+        ),
+    ]
+
+    client.chat.completions._post = mock.Mock(return_value=returned_stream)
+    with start_transaction(name="openai tx"):
+        response_stream = client.chat.completions.create(
+            model="some-model", messages=[{"role": "system", "content": "hello"}]
+        )
+        response_string = "".join(
+            map(lambda x: x.choices[0].delta.content, response_stream)
+        )
+    assert response_string == "hello world"
+    tx = events[0]
+    assert tx["type"] == "transaction"
+    span = tx["spans"][0]
+    assert span["op"] == "ai.chat_completions.create.openai"
+
+    if send_default_pii and include_prompts:
+        assert "hello" in span["data"]["ai.input_messages"][0]["content"]
+        assert "hello world" in span["data"]["ai.responses"][0]
+    else:
+        assert "ai.input_messages" not in span["data"]
+        assert "ai.responses" not in span["data"]
+
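+    # Streamed chunks carry no usage field, so the integration is expected to
+    # count tokens itself when tiktoken is available: "hello" -> 1 prompt
+    # token, "hello world" -> 2 completion tokens.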
+    try:
+        import tiktoken  # type: ignore # noqa # pylint: disable=unused-import
+
+        assert span["data"][COMPLETION_TOKENS_USED] == 2
+        assert span["data"][PROMPT_TOKENS_USED] == 1
+        assert span["data"][TOTAL_TOKENS_USED] == 3
+    except ImportError:
+        pass  # if tiktoken is not installed, we can't guarantee token usage will be calculated properly
+
+
+def test_bad_chat_completion(sentry_init, capture_events):
+    sentry_init(integrations=[OpenAIIntegration()], traces_sample_rate=1.0)
+    events = capture_events()
+
+    client = OpenAI(api_key="z")
+    client.chat.completions._post = mock.Mock(
+        side_effect=OpenAIError("API rate limit reached")
+    )
+    with pytest.raises(OpenAIError):
+        client.chat.completions.create(
+            model="some-model", messages=[{"role": "system", "content": "hello"}]
+        )
+
+    (event,) = events
+    assert event["level"] == "error"
+
+
+@pytest.mark.parametrize(
+    "send_default_pii, include_prompts",
+    [(True, True), (True, False), (False, True), (False, False)],
+)
+def test_embeddings_create(
+    sentry_init, capture_events, send_default_pii, include_prompts
+):
+    sentry_init(
+        integrations=[OpenAIIntegration(include_prompts=include_prompts)],
+        traces_sample_rate=1.0,
+        send_default_pii=send_default_pii,
+    )
+    events = capture_events()
+
+    client = OpenAI(api_key="z")
+
+    returned_embedding = CreateEmbeddingResponse(
+        data=[Embedding(object="embedding", index=0, embedding=[1.0, 2.0, 3.0])],
+        model="some-model",
+        object="list",
+        usage=EmbeddingTokenUsage(
+            prompt_tokens=20,
+            total_tokens=30,
+        ),
+    )
+
+    client.embeddings._post = mock.Mock(return_value=returned_embedding)
+    with start_transaction(name="openai tx"):
+        response = client.embeddings.create(
+            input="hello", model="text-embedding-3-large"
+        )
+
+    assert len(response.data[0].embedding) == 3
+
+    tx = events[0]
+    assert tx["type"] == "transaction"
+    span = tx["spans"][0]
+    assert span["op"] == "ai.embeddings.create.openai"
+    if send_default_pii and include_prompts:
+        assert "hello" in span["data"]["ai.input_messages"][0]
+    else:
+        assert "ai.input_messages" not in span["data"]
+
+    assert span["data"][PROMPT_TOKENS_USED] == 20
+    assert span["data"][TOTAL_TOKENS_USED] == 30
diff --git a/tests/integrations/opentelemetry/__init__.py b/tests/integrations/opentelemetry/__init__.py
new file mode 100644
index 0000000000..75763c2fee
--- /dev/null
+++ b/tests/integrations/opentelemetry/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("opentelemetry")
diff --git a/tests/integrations/opentelemetry/test_experimental.py b/tests/integrations/opentelemetry/test_experimental.py
new file mode 100644
index 0000000000..77286330a5
--- /dev/null
+++ b/tests/integrations/opentelemetry/test_experimental.py
@@ -0,0 +1,34 @@
+try:
+    # python 3.3 and above
+    from unittest.mock import MagicMock
+except ImportError:
+    # python < 3.3
+    from mock import MagicMock
+
+from sentry_sdk.integrations.opentelemetry.integration import OpenTelemetryIntegration
+
+
+def test_integration_enabled_if_option_is_on(sentry_init):
+    OpenTelemetryIntegration.setup_once = MagicMock()
+    sentry_init(
+        _experiments={
+            "otel_powered_performance": True,
+        }
+    )
+    OpenTelemetryIntegration.setup_once.assert_called_once()
+
+
+def test_integration_not_enabled_if_option_is_off(sentry_init):
+    OpenTelemetryIntegration.setup_once = MagicMock()
+    sentry_init(
+        _experiments={
+            "otel_powered_performance": False,
+        }
+    )
+    OpenTelemetryIntegration.setup_once.assert_not_called()
+
+
+def test_integration_not_enabled_if_option_is_missing(sentry_init):
+    OpenTelemetryIntegration.setup_once = MagicMock()
+    sentry_init()
+    OpenTelemetryIntegration.setup_once.assert_not_called()
diff --git a/tests/integrations/opentelemetry/test_propagator.py b/tests/integrations/opentelemetry/test_propagator.py
new file mode 100644
index 0000000000..510118f67f
--- /dev/null
+++ b/tests/integrations/opentelemetry/test_propagator.py
@@ -0,0 +1,252 @@
+try:
+    from unittest import mock  # python 3.3 and above
+    from unittest.mock import MagicMock
+except ImportError:
+    import mock  # python < 3.3
+    from mock import MagicMock
+
+from opentelemetry.context import get_current
+from opentelemetry.trace.propagation import get_current_span
+from opentelemetry.trace import (
+    set_span_in_context,
+    TraceFlags,
+    SpanContext,
+)
+from sentry_sdk.integrations.opentelemetry.consts import (
+    SENTRY_BAGGAGE_KEY,
+    SENTRY_TRACE_KEY,
+)
+
+from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator
+from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor
+from sentry_sdk.tracing_utils import Baggage
+
+
+def test_extract_no_context_no_sentry_trace_header():
+    """
+    No context and NO Sentry trace data in getter.
+    Extract should return empty context.
+    """
+    carrier = None
+    context = None
+    getter = MagicMock()
+    getter.get.return_value = None
+
+    modified_context = SentryPropagator().extract(carrier, context, getter)
+
+    assert modified_context == {}
+
+
+def test_extract_context_no_sentry_trace_header():
+    """
+    Context but NO Sentry trace data in getter.
+    Extract should return context as is.
+    """
+    carrier = None
+    context = {"some": "value"}
+    getter = MagicMock()
+    getter.get.return_value = None
+
+    modified_context = SentryPropagator().extract(carrier, context, getter)
+
+    assert modified_context == context
+
+
+def test_extract_empty_context_sentry_trace_header_no_baggage():
+    """
+    Empty context but Sentry trace data but NO Baggage in getter.
+    Extract should return context that has empty baggage in it and also a NoopSpan with span_id and trace_id.
+    """
+    carrier = None
+    context = {}
+    getter = MagicMock()
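+    # extract() reads the "sentry-trace" header first and "baggage" second,
+    # hence the order of the two side_effect values below.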
+    getter.get.side_effect = [
+        ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"],
+        None,
+    ]
+
+    modified_context = SentryPropagator().extract(carrier, context, getter)
+
+    assert len(modified_context.keys()) == 3
+
+    assert modified_context[SENTRY_TRACE_KEY] == {
+        "trace_id": "1234567890abcdef1234567890abcdef",
+        "parent_span_id": "1234567890abcdef",
+        "parent_sampled": True,
+    }
+    assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == ""
+
+    span_context = get_current_span(modified_context).get_span_context()
+    assert span_context.span_id == int("1234567890abcdef", 16)
+    assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16)
+
+
+def test_extract_context_sentry_trace_header_baggage():
+    """
+    Empty context but Sentry trace data and Baggage in getter.
+    Extract should return context that has baggage in it and also a NoopSpan with span_id and trace_id.
+    """
+    baggage_header = (
+        "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
+        "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
+        "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;"
+    )
+
+    carrier = None
+    context = {"some": "value"}
+    getter = MagicMock()
+    getter.get.side_effect = [
+        ["1234567890abcdef1234567890abcdef-1234567890abcdef-1"],
+        [baggage_header],
+    ]
+
+    modified_context = SentryPropagator().extract(carrier, context, getter)
+
+    assert len(modified_context.keys()) == 4
+
+    assert modified_context[SENTRY_TRACE_KEY] == {
+        "trace_id": "1234567890abcdef1234567890abcdef",
+        "parent_span_id": "1234567890abcdef",
+        "parent_sampled": True,
+    }
+
+    assert modified_context[SENTRY_BAGGAGE_KEY].serialize() == (
+        "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
+        "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
+        "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie"
+    )
+
+    span_context = get_current_span(modified_context).get_span_context()
+    assert span_context.span_id == int("1234567890abcdef", 16)
+    assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16)
+
+
+def test_inject_empty_otel_span_map():
+    """
+    Empty otel_span_map.
+    So there is no sentry_span to be found in inject();
+    the function returns early and no setters are called.
+    """
+    carrier = None
+    context = get_current()
+    setter = MagicMock()
+    setter.set = MagicMock()
+
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        trace_flags=TraceFlags(TraceFlags.SAMPLED),
+        is_remote=True,
+    )
+    span = MagicMock()
+    span.get_span_context.return_value = span_context
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
+        return_value=span,
+    ):
+        full_context = set_span_in_context(span, context)
+        SentryPropagator().inject(carrier, full_context, setter)
+
+        setter.set.assert_not_called()
+
+
+def test_inject_sentry_span_no_baggage():
+    """
+    Inject a sentry span with no baggage.
+    """
+    carrier = None
+    context = get_current()
+    setter = MagicMock()
+    setter.set = MagicMock()
+
+    trace_id = "1234567890abcdef1234567890abcdef"
+    span_id = "1234567890abcdef"
+
+    span_context = SpanContext(
+        trace_id=int(trace_id, 16),
+        span_id=int(span_id, 16),
+        trace_flags=TraceFlags(TraceFlags.SAMPLED),
+        is_remote=True,
+    )
+    span = MagicMock()
+    span.get_span_context.return_value = span_context
+
+    sentry_span = MagicMock()
+    sentry_span.to_traceparent = mock.Mock(
+        return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1"
+    )
+    sentry_span.containing_transaction.get_baggage = mock.Mock(return_value=None)
+
+    span_processor = SentrySpanProcessor()
+    span_processor.otel_span_map[span_id] = sentry_span
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
+        return_value=span,
+    ):
+        full_context = set_span_in_context(span, context)
+        SentryPropagator().inject(carrier, full_context, setter)
+
+        setter.set.assert_called_once_with(
+            carrier,
+            "sentry-trace",
+            "1234567890abcdef1234567890abcdef-1234567890abcdef-1",
+        )
+
+
+def test_inject_sentry_span_baggage():
+    """
+    Inject a sentry span with baggage.
+    """
+    carrier = None
+    context = get_current()
+    setter = MagicMock()
+    setter.set = MagicMock()
+
+    trace_id = "1234567890abcdef1234567890abcdef"
+    span_id = "1234567890abcdef"
+
+    span_context = SpanContext(
+        trace_id=int(trace_id, 16),
+        span_id=int(span_id, 16),
+        trace_flags=TraceFlags(TraceFlags.SAMPLED),
+        is_remote=True,
+    )
+    span = MagicMock()
+    span.get_span_context.return_value = span_context
+
+    sentry_span = MagicMock()
+    sentry_span.to_traceparent = mock.Mock(
+        return_value="1234567890abcdef1234567890abcdef-1234567890abcdef-1"
+    )
+    sentry_items = {
+        "sentry-trace_id": "771a43a4192642f0b136d5159a501700",
+        "sentry-public_key": "49d0f7386ad645858ae85020e393bef3",
+        "sentry-sample_rate": 0.01337,
+        "sentry-user_id": "Amélie",
+    }
+    baggage = Baggage(sentry_items=sentry_items)
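+    # Baggage.serialize() renders these as comma-separated, percent-encoded
+    # "sentry-*" key=value pairs ("Amélie" becomes "Am%C3%A9lie").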
+    sentry_span.containing_transaction.get_baggage = MagicMock(return_value=baggage)
+
+    span_processor = SentrySpanProcessor()
+    span_processor.otel_span_map[span_id] = sentry_span
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.propagator.trace.get_current_span",
+        return_value=span,
+    ):
+        full_context = set_span_in_context(span, context)
+        SentryPropagator().inject(carrier, full_context, setter)
+
+        setter.set.assert_any_call(
+            carrier,
+            "sentry-trace",
+            "1234567890abcdef1234567890abcdef-1234567890abcdef-1",
+        )
+
+        setter.set.assert_any_call(
+            carrier,
+            "baggage",
+            baggage.serialize(),
+        )
diff --git a/tests/integrations/opentelemetry/test_span_processor.py b/tests/integrations/opentelemetry/test_span_processor.py
new file mode 100644
index 0000000000..02e3059ca8
--- /dev/null
+++ b/tests/integrations/opentelemetry/test_span_processor.py
@@ -0,0 +1,625 @@
+from datetime import datetime
+from datetime import timezone
+import time
+import pytest
+
+try:
+    from unittest import mock  # python 3.3 and above
+    from unittest.mock import MagicMock
+except ImportError:
+    import mock
+    from mock import MagicMock  # python < 3.3
+
+from sentry_sdk.integrations.opentelemetry.span_processor import (
+    SentrySpanProcessor,
+    link_trace_context_to_error_event,
+)
+from sentry_sdk.tracing import Span, Transaction
+
+from opentelemetry.trace import SpanKind, SpanContext, Status, StatusCode
+from sentry_sdk.tracing_utils import extract_sentrytrace_data
+
+
+def test_is_sentry_span():
+    otel_span = MagicMock()
+
+    hub = MagicMock()
+    hub.client = None
+
+    span_processor = SentrySpanProcessor()
+    assert not span_processor._is_sentry_span(hub, otel_span)
+
+    client = MagicMock()
+    client.options = {"instrumenter": "otel"}
+    client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
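+    # Only spans whose http.url targets the DSN's own ingest endpoint count as
+    # Sentry-internal and are dropped; all other URLs pass through.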
+
+    hub.client = client
+    assert not span_processor._is_sentry_span(hub, otel_span)
+
+    otel_span.attributes = {
+        "http.url": "https://example.com",
+    }
+    assert not span_processor._is_sentry_span(hub, otel_span)
+
+    otel_span.attributes = {
+        "http.url": "https://o123456.ingest.sentry.io/api/123/envelope",
+    }
+    assert span_processor._is_sentry_span(hub, otel_span)
+
+
+def test_get_otel_context():
+    otel_span = MagicMock()
+    otel_span.attributes = {"foo": "bar"}
+    otel_span.resource = MagicMock()
+    otel_span.resource.attributes = {"baz": "qux"}
+
+    span_processor = SentrySpanProcessor()
+    otel_context = span_processor._get_otel_context(otel_span)
+
+    assert otel_context == {
+        "attributes": {"foo": "bar"},
+        "resource": {"baz": "qux"},
+    }
+
+
+def test_get_trace_data_with_span_and_trace():
+    otel_span = MagicMock()
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+    otel_span.parent = None
+
+    parent_context = {}
+
+    span_processor = SentrySpanProcessor()
+    sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+    assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+    assert sentry_trace_data["span_id"] == "1234567890abcdef"
+    assert sentry_trace_data["parent_span_id"] is None
+    assert sentry_trace_data["parent_sampled"] is None
+    assert sentry_trace_data["baggage"] is None
+
+
+def test_get_trace_data_with_span_and_trace_and_parent():
+    otel_span = MagicMock()
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    span_processor = SentrySpanProcessor()
+    sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+    assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+    assert sentry_trace_data["span_id"] == "1234567890abcdef"
+    assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
+    assert sentry_trace_data["parent_sampled"] is None
+    assert sentry_trace_data["baggage"] is None
+
+
+def test_get_trace_data_with_sentry_trace():
+    otel_span = MagicMock()
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
+        side_effect=[
+            extract_sentrytrace_data(
+                "1234567890abcdef1234567890abcdef-1234567890abcdef-1"
+            ),
+            None,
+        ],
+    ):
+        span_processor = SentrySpanProcessor()
+        sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+        assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+        assert sentry_trace_data["span_id"] == "1234567890abcdef"
+        assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
+        assert sentry_trace_data["parent_sampled"] is True
+        assert sentry_trace_data["baggage"] is None
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
+        side_effect=[
+            extract_sentrytrace_data(
+                "1234567890abcdef1234567890abcdef-1234567890abcdef-0"
+            ),
+            None,
+        ],
+    ):
+        span_processor = SentrySpanProcessor()
+        sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+        assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+        assert sentry_trace_data["span_id"] == "1234567890abcdef"
+        assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
+        assert sentry_trace_data["parent_sampled"] is False
+        assert sentry_trace_data["baggage"] is None
+
+
+def test_get_trace_data_with_sentry_trace_and_baggage():
+    otel_span = MagicMock()
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    baggage = (
+        "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
+        "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
+        "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie"
+    )
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.get_value",
+        side_effect=[
+            extract_sentrytrace_data(
+                "1234567890abcdef1234567890abcdef-1234567890abcdef-1"
+            ),
+            baggage,
+        ],
+    ):
+        span_processor = SentrySpanProcessor()
+        sentry_trace_data = span_processor._get_trace_data(otel_span, parent_context)
+        assert sentry_trace_data["trace_id"] == "1234567890abcdef1234567890abcdef"
+        assert sentry_trace_data["span_id"] == "1234567890abcdef"
+        assert sentry_trace_data["parent_span_id"] == "abcdef1234567890"
+        assert sentry_trace_data["parent_sampled"]
+        assert sentry_trace_data["baggage"] == baggage
+
+
+def test_update_span_with_otel_data_http_method():
+    sentry_span = Span()
+
+    otel_span = MagicMock()
+    otel_span.name = "Test OTel Span"
+    otel_span.kind = SpanKind.CLIENT
+    otel_span.attributes = {
+        "http.method": "GET",
+        "http.status_code": 429,
+        "http.status_text": "xxx",
+        "http.user_agent": "curl/7.64.1",
+        "net.peer.name": "example.com",
+        "http.target": "/",
+    }
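+    # Expected mapping, per the assertions below: SpanKind.CLIENT plus
+    # http.method yields op "http.client", the description is
+    # "<method> <peer name> <target>", and HTTP 429 maps to the span status
+    # "resource_exhausted".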
+
+    span_processor = SentrySpanProcessor()
+    span_processor._update_span_with_otel_data(sentry_span, otel_span)
+
+    assert sentry_span.op == "http.client"
+    assert sentry_span.description == "GET example.com /"
+    assert sentry_span.status == "resource_exhausted"
+
+    assert sentry_span._data["http.method"] == "GET"
+    assert sentry_span._data["http.response.status_code"] == 429
+    assert sentry_span._data["http.status_text"] == "xxx"
+    assert sentry_span._data["http.user_agent"] == "curl/7.64.1"
+    assert sentry_span._data["net.peer.name"] == "example.com"
+    assert sentry_span._data["http.target"] == "/"
+
+
+@pytest.mark.parametrize(
+    "otel_status, expected_status",
+    [
+        pytest.param(Status(StatusCode.UNSET), None, id="unset"),
+        pytest.param(Status(StatusCode.OK), "ok", id="ok"),
+        pytest.param(Status(StatusCode.ERROR), "internal_error", id="error"),
+    ],
+)
+def test_update_span_with_otel_status(otel_status, expected_status):
+    sentry_span = Span()
+
+    otel_span = MagicMock()
+    otel_span.name = "Test OTel Span"
+    otel_span.kind = SpanKind.INTERNAL
+    otel_span.status = otel_status
+
+    span_processor = SentrySpanProcessor()
+    span_processor._update_span_with_otel_status(sentry_span, otel_span)
+
+    assert sentry_span.get_trace_context().get("status") == expected_status
+
+
+def test_update_span_with_otel_data_http_method2():
+    sentry_span = Span()
+
+    otel_span = MagicMock()
+    otel_span.name = "Test OTel Span"
+    otel_span.kind = SpanKind.SERVER
+    otel_span.attributes = {
+        "http.method": "GET",
+        "http.status_code": 429,
+        "http.status_text": "xxx",
+        "http.user_agent": "curl/7.64.1",
+        "http.url": "https://example.com/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef",
+    }
+
+    span_processor = SentrySpanProcessor()
+    span_processor._update_span_with_otel_data(sentry_span, otel_span)
+
+    assert sentry_span.op == "http.server"
+    assert sentry_span.description == "GET https://example.com/status/403"
+    assert sentry_span.status == "resource_exhausted"
+
+    assert sentry_span._data["http.method"] == "GET"
+    assert sentry_span._data["http.response.status_code"] == 429
+    assert sentry_span._data["http.status_text"] == "xxx"
+    assert sentry_span._data["http.user_agent"] == "curl/7.64.1"
+    assert (
+        sentry_span._data["http.url"]
+        == "https://example.com/status/403?password=123&username=test@example.com&author=User123&auth=1234567890abcdef"
+    )
+
+
+def test_update_span_with_otel_data_db_query():
+    sentry_span = Span()
+
+    otel_span = MagicMock()
+    otel_span.name = "Test OTel Span"
+    otel_span.attributes = {
+        "db.system": "postgresql",
+        "db.statement": "SELECT * FROM table where pwd = '123456'",
+    }
+
+    span_processor = SentrySpanProcessor()
+    span_processor._update_span_with_otel_data(sentry_span, otel_span)
+
+    assert sentry_span.op == "db"
+    assert sentry_span.description == "SELECT * FROM table where pwd = '123456'"
+
+    assert sentry_span._data["db.system"] == "postgresql"
+    assert (
+        sentry_span._data["db.statement"] == "SELECT * FROM table where pwd = '123456'"
+    )
+
+
+def test_on_start_transaction():
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.start_time = time.time_ns()
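+    # OTel start times are nanoseconds since the epoch; the processor converts
+    # them to timezone-aware datetimes (see the start_timestamp assertion).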
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    fake_client = MagicMock()
+    fake_client.options = {"instrumenter": "otel"}
+    fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
+
+    current_hub = MagicMock()
+    current_hub.client = fake_client
+
+    fake_hub = MagicMock()
+    fake_hub.current = current_hub
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.Hub", fake_hub
+    ):
+        span_processor = SentrySpanProcessor()
+        span_processor.on_start(otel_span, parent_context)
+
+        fake_hub.current.start_transaction.assert_called_once_with(
+            name="Sample OTel Span",
+            span_id="1234567890abcdef",
+            parent_span_id="abcdef1234567890",
+            trace_id="1234567890abcdef1234567890abcdef",
+            baggage=None,
+            start_timestamp=datetime.fromtimestamp(
+                otel_span.start_time / 1e9, timezone.utc
+            ),
+            instrumenter="otel",
+        )
+
+        assert len(span_processor.otel_span_map.keys()) == 1
+        assert list(span_processor.otel_span_map.keys())[0] == "1234567890abcdef"
+
+
+def test_on_start_child():
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.start_time = time.time_ns()
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+
+    fake_client = MagicMock()
+    fake_client.options = {"instrumenter": "otel"}
+    fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
+
+    current_hub = MagicMock()
+    current_hub.client = fake_client
+
+    fake_hub = MagicMock()
+    fake_hub.current = current_hub
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.Hub", fake_hub
+    ):
+        fake_span = MagicMock()
+
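+        # Registering a Sentry span under the parent's span id makes on_start
+        # take the start_child path instead of starting a new transaction.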
+        span_processor = SentrySpanProcessor()
+        span_processor.otel_span_map["abcdef1234567890"] = fake_span
+        span_processor.on_start(otel_span, parent_context)
+
+        fake_span.start_child.assert_called_once_with(
+            span_id="1234567890abcdef",
+            description="Sample OTel Span",
+            start_timestamp=datetime.fromtimestamp(
+                otel_span.start_time / 1e9, timezone.utc
+            ),
+            instrumenter="otel",
+        )
+
+        assert len(span_processor.otel_span_map.keys()) == 2
+        assert "abcdef1234567890" in span_processor.otel_span_map.keys()
+        assert "1234567890abcdef" in span_processor.otel_span_map.keys()
+
+
+def test_on_end_no_sentry_span():
+    """
+    If on_end is called on a span that is not in the otel_span_map, it should be a no-op.
+    """
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.end_time = time.time_ns()
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+
+    span_processor = SentrySpanProcessor()
+    span_processor.otel_span_map = {}
+    span_processor._get_otel_context = MagicMock()
+    span_processor._update_span_with_otel_data = MagicMock()
+
+    span_processor.on_end(otel_span)
+
+    span_processor._get_otel_context.assert_not_called()
+    span_processor._update_span_with_otel_data.assert_not_called()
+
+
+def test_on_end_sentry_transaction():
+    """
+    Test on_end for a sentry Transaction.
+    """
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.end_time = time.time_ns()
+    otel_span.status = Status(StatusCode.OK)
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+
+    fake_sentry_span = MagicMock(spec=Transaction)
+    fake_sentry_span.set_context = MagicMock()
+    fake_sentry_span.finish = MagicMock()
+
+    span_processor = SentrySpanProcessor()
+    span_processor._get_otel_context = MagicMock()
+    span_processor._update_span_with_otel_data = MagicMock()
+    span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span
+
+    span_processor.on_end(otel_span)
+
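+    # For a Transaction, the OTel data is attached via set_context and the
+    # per-span data update is skipped.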
+    fake_sentry_span.set_context.assert_called_once()
+    span_processor._update_span_with_otel_data.assert_not_called()
+    fake_sentry_span.set_status.assert_called_once_with("ok")
+    fake_sentry_span.finish.assert_called_once()
+
+
+def test_on_end_sentry_span():
+    """
+    Test on_end for a sentry Span.
+    """
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.end_time = time.time_ns()
+    otel_span.status = Status(StatusCode.OK)
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+
+    fake_sentry_span = MagicMock(spec=Span)
+    fake_sentry_span.set_context = MagicMock()
+    fake_sentry_span.finish = MagicMock()
+
+    span_processor = SentrySpanProcessor()
+    span_processor._get_otel_context = MagicMock()
+    span_processor._update_span_with_otel_data = MagicMock()
+    span_processor.otel_span_map["1234567890abcdef"] = fake_sentry_span
+
+    span_processor.on_end(otel_span)
+
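+    # For a plain Span, the OTel data is written onto the span itself and no
+    # extra context is set.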
+    fake_sentry_span.set_context.assert_not_called()
+    span_processor._update_span_with_otel_data.assert_called_once_with(
+        fake_sentry_span, otel_span
+    )
+    fake_sentry_span.set_status.assert_called_once_with("ok")
+    fake_sentry_span.finish.assert_called_once()
+
+
+def test_link_trace_context_to_error_event():
+    """
+    Test that the trace context is added to the error event.
+    """
+    fake_client = MagicMock()
+    fake_client.options = {"instrumenter": "otel"}
+
+    current_hub = MagicMock()
+    current_hub.client = fake_client
+
+    fake_hub = MagicMock()
+    fake_hub.current = current_hub
+
+    span_id = "1234567890abcdef"
+    trace_id = "1234567890abcdef1234567890abcdef"
+
+    fake_trace_context = {
+        "bla": "blub",
+        "foo": "bar",
+        "baz": 123,
+    }
+
+    sentry_span = MagicMock()
+    sentry_span.get_trace_context = MagicMock(return_value=fake_trace_context)
+
+    otel_span_map = {
+        span_id: sentry_span,
+    }
+
+    span_context = SpanContext(
+        trace_id=int(trace_id, 16),
+        span_id=int(span_id, 16),
+        is_remote=True,
+    )
+    otel_span = MagicMock()
+    otel_span.get_span_context = MagicMock(return_value=span_context)
+
+    fake_event = {"event_id": "1234567890abcdef1234567890abcdef"}
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.get_current_span",
+        return_value=otel_span,
+    ):
+        event = link_trace_context_to_error_event(fake_event, otel_span_map)
+
+        assert event
+        assert event == fake_event  # the event is changed in place inside the function
+        assert "contexts" in event
+        assert "trace" in event["contexts"]
+        assert event["contexts"]["trace"] == fake_trace_context
+
+
+def test_pruning_old_spans_on_start():
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.start_time = time.time_ns()
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    parent_context = {}
+    fake_client = MagicMock()
+    fake_client.options = {"instrumenter": "otel"}
+    fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
+
+    current_hub = MagicMock()
+    current_hub.client = fake_client
+
+    fake_hub = MagicMock()
+    fake_hub.current = current_hub
+
+    with mock.patch(
+        "sentry_sdk.integrations.opentelemetry.span_processor.Hub", fake_hub
+    ):
+        span_processor = SentrySpanProcessor()
+
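+        # open_spans buckets span ids by their start minute; judging by the
+        # test data, buckets older than ten minutes are pruned on_start (the
+        # bucket aged 3 minutes survives, the one aged 11 does not).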
+        span_processor.otel_span_map = {
+            "111111111abcdef": MagicMock(),  # should stay
+            "2222222222abcdef": MagicMock(),  # should go
+            "3333333333abcdef": MagicMock(),  # should go
+        }
+        current_time_minutes = int(time.time() / 60)
+        span_processor.open_spans = {
+            current_time_minutes - 3: {"111111111abcdef"},  # should stay
+            current_time_minutes
+            - 11: {"2222222222abcdef", "3333333333abcdef"},  # should go
+        }
+
+        span_processor.on_start(otel_span, parent_context)
+        assert sorted(list(span_processor.otel_span_map.keys())) == [
+            "111111111abcdef",
+            "1234567890abcdef",
+        ]
+        assert sorted(list(span_processor.open_spans.values())) == [
+            {"111111111abcdef"},
+            {"1234567890abcdef"},
+        ]
+
+
+def test_pruning_old_spans_on_end():
+    otel_span = MagicMock()
+    otel_span.name = "Sample OTel Span"
+    otel_span.start_time = time.time_ns()
+    span_context = SpanContext(
+        trace_id=int("1234567890abcdef1234567890abcdef", 16),
+        span_id=int("1234567890abcdef", 16),
+        is_remote=True,
+    )
+    otel_span.get_span_context.return_value = span_context
+    otel_span.parent = MagicMock()
+    otel_span.parent.span_id = int("abcdef1234567890", 16)
+
+    fake_sentry_span = MagicMock(spec=Span)
+    fake_sentry_span.set_context = MagicMock()
+    fake_sentry_span.finish = MagicMock()
+
+    span_processor = SentrySpanProcessor()
+    span_processor._get_otel_context = MagicMock()
+    span_processor._update_span_with_otel_data = MagicMock()
+
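+    # on_end removes the closed span from both maps and prunes buckets older
+    # than the ten-minute window at the same time.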
+    span_processor.otel_span_map = {
+        "111111111abcdef": MagicMock(),  # should stay
+        "2222222222abcdef": MagicMock(),  # should go
+        "3333333333abcdef": MagicMock(),  # should go
+        "1234567890abcdef": fake_sentry_span,  # should go (because it is closed)
+    }
+    current_time_minutes = int(time.time() / 60)
+    span_processor.open_spans = {
+        current_time_minutes: {"1234567890abcdef"},  # should go (because it is closed)
+        current_time_minutes - 3: {"111111111abcdef"},  # should stay
+        current_time_minutes
+        - 11: {"2222222222abcdef", "3333333333abcdef"},  # should go
+    }
+
+    span_processor.on_end(otel_span)
+    assert sorted(list(span_processor.otel_span_map.keys())) == ["111111111abcdef"]
+    assert sorted(list(span_processor.open_spans.values())) == [{"111111111abcdef"}]
diff --git a/tests/integrations/pure_eval/__init__.py b/tests/integrations/pure_eval/__init__.py
index 3f645e75f6..47ad99aa8d 100644
--- a/tests/integrations/pure_eval/__init__.py
+++ b/tests/integrations/pure_eval/__init__.py
@@ -1,3 +1,3 @@
 import pytest
 
-pure_eval = pytest.importorskip("pure_eval")
+pytest.importorskip("pure_eval")
diff --git a/tests/integrations/pure_eval/test_pure_eval.py b/tests/integrations/pure_eval/test_pure_eval.py
index e7da025144..2d1a92026e 100644
--- a/tests/integrations/pure_eval/test_pure_eval.py
+++ b/tests/integrations/pure_eval/test_pure_eval.py
@@ -8,8 +8,8 @@
 
 
 @pytest.mark.parametrize("integrations", [[], [PureEvalIntegration()]])
-def test_with_locals_enabled(sentry_init, capture_events, integrations):
-    sentry_init(with_locals=True, integrations=integrations)
+def test_include_local_variables_enabled(sentry_init, capture_events, integrations):
+    sentry_init(include_local_variables=True, integrations=integrations)
     events = capture_events()
 
     def foo():
diff --git a/tests/integrations/pymongo/__init__.py b/tests/integrations/pymongo/__init__.py
new file mode 100644
index 0000000000..91223b0630
--- /dev/null
+++ b/tests/integrations/pymongo/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("pymongo")
diff --git a/tests/integrations/pymongo/test_pymongo.py b/tests/integrations/pymongo/test_pymongo.py
new file mode 100644
index 0000000000..89701c9f3a
--- /dev/null
+++ b/tests/integrations/pymongo/test_pymongo.py
@@ -0,0 +1,424 @@
+from sentry_sdk import capture_message, start_transaction
+from sentry_sdk.consts import SPANDATA
+from sentry_sdk.integrations.pymongo import PyMongoIntegration, _strip_pii
+
+from mockupdb import MockupDB, OpQuery
+from pymongo import MongoClient
+import pytest
+
+
+@pytest.fixture(scope="session")
+def mongo_server():
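+    # MockupDB is a scripted stand-in for a MongoDB server: each autoresponds()
+    # call registers a canned reply for a matching client command.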
+    server = MockupDB(verbose=True)
+    server.autoresponds("ismaster", maxWireVersion=6)
+    server.run()
+    server.autoresponds(
+        {"find": "test_collection"}, cursor={"id": 123, "firstBatch": []}
+    )
+    # The find query format changed somewhere between PyMongo 3.1 and 3.12.
+    # This responder answers "find" queries from older PyMongo versions the
+    # same way as the autoresponder above.
+    server.autoresponds(OpQuery({"foobar": 1}), cursor={"id": 123, "firstBatch": []})
+    server.autoresponds({"insert": "test_collection"}, ok=1)
+    server.autoresponds({"insert": "erroneous"}, ok=0, errmsg="test error")
+    yield server
+    server.stop()
+
+
+@pytest.mark.parametrize("with_pii", [False, True])
+def test_transactions(sentry_init, capture_events, mongo_server, with_pii):
+    sentry_init(
+        integrations=[PyMongoIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=with_pii,
+    )
+    events = capture_events()
+
+    connection = MongoClient(mongo_server.uri)
+
+    with start_transaction():
+        list(
+            connection["test_db"]["test_collection"].find({"foobar": 1})
+        )  # force query execution
+        connection["test_db"]["test_collection"].insert_one({"foo": 2})
+        try:
+            connection["test_db"]["erroneous"].insert_many([{"bar": 3}, {"baz": 4}])
+            pytest.fail("Request should raise")
+        except Exception:
+            pass
+
+    (event,) = events
+    (find, insert_success, insert_fail) = event["spans"]
+
+    common_tags = {
+        "db.name": "test_db",
+        "db.system": "mongodb",
+        "net.peer.name": mongo_server.host,
+        "net.peer.port": str(mongo_server.port),
+    }
+    for span in find, insert_success, insert_fail:
+        assert span["data"][SPANDATA.DB_SYSTEM] == "mongodb"
+        assert span["data"][SPANDATA.DB_NAME] == "test_db"
+        assert span["data"][SPANDATA.SERVER_ADDRESS] == "localhost"
+        assert span["data"][SPANDATA.SERVER_PORT] == mongo_server.port
+        for field, value in common_tags.items():
+            assert span["tags"][field] == value
+
+    assert find["op"] == "db.query"
+    assert insert_success["op"] == "db.query"
+    assert insert_fail["op"] == "db.query"
+
+    assert find["tags"]["db.operation"] == "find"
+    assert insert_success["tags"]["db.operation"] == "insert"
+    assert insert_fail["tags"]["db.operation"] == "insert"
+
+    assert find["description"].startswith("find {")
+    assert insert_success["description"].startswith("insert {")
+    assert insert_fail["description"].startswith("insert {")
+    if with_pii:
+        assert "1" in find["description"]
+        assert "2" in insert_success["description"]
+        assert "3" in insert_fail["description"] and "4" in insert_fail["description"]
+    else:
+        # All values in filter replaced by "%s"
+        assert "1" not in find["description"]
+        # All values below the top level replaced by "%s"
+        assert "2" not in insert_success["description"]
+        assert (
+            "3" not in insert_fail["description"]
+            and "4" not in insert_fail["description"]
+        )
+
+    assert find["tags"]["status"] == "ok"
+    assert insert_success["tags"]["status"] == "ok"
+    assert insert_fail["tags"]["status"] == "internal_error"
+
+
+@pytest.mark.parametrize("with_pii", [False, True])
+def test_breadcrumbs(sentry_init, capture_events, mongo_server, with_pii):
+    sentry_init(
+        integrations=[PyMongoIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=with_pii,
+    )
+    events = capture_events()
+
+    connection = MongoClient(mongo_server.uri)
+
+    list(
+        connection["test_db"]["test_collection"].find({"foobar": 1})
+    )  # force query execution
+    capture_message("hi")
+
+    (event,) = events
+    (crumb,) = event["breadcrumbs"]["values"]
+
+    assert crumb["category"] == "query"
+    assert crumb["message"].startswith("find {")
+    if with_pii:
+        assert "1" in crumb["message"]
+    else:
+        assert "1" not in crumb["message"]
+    assert crumb["type"] == "db.query"
+    assert crumb["data"] == {
+        "db.name": "test_db",
+        "db.system": "mongodb",
+        "db.operation": "find",
+        "net.peer.name": mongo_server.host,
+        "net.peer.port": str(mongo_server.port),
+    }
+
+
+@pytest.mark.parametrize(
+    "testcase",
+    [
+        {
+            "command": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {
+                        "username": "anton2",
+                        "email": "anton@somewhere.io",
+                        "password": "c4e86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
+                        "_id": "635bc7403cb4f8a736f61cf2",
+                    }
+                ],
+            },
+            "command_stripped": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"}
+                ],
+            },
+        },
+        {
+            "command": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {
+                        "username": "indiana4",
+                        "email": "indy@jones.org",
+                        "password": "63e86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf016b",
+                        "_id": "635bc7403cb4f8a736f61cf3",
+                    }
+                ],
+            },
+            "command_stripped": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"}
+                ],
+            },
+        },
+        {
+            "command": {
+                "find": "my_collection",
+                "filter": {},
+                "limit": 1,
+                "singleBatch": True,
+            },
+            "command_stripped": {
+                "find": "my_collection",
+                "filter": {},
+                "limit": 1,
+                "singleBatch": True,
+            },
+        },
+        {
+            "command": {
+                "find": "my_collection",
+                "filter": {"username": "notthere"},
+                "limit": 1,
+                "singleBatch": True,
+            },
+            "command_stripped": {
+                "find": "my_collection",
+                "filter": {"username": "%s"},
+                "limit": 1,
+                "singleBatch": True,
+            },
+        },
+        {
+            "command": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {
+                        "username": "userx1",
+                        "email": "x@somewhere.io",
+                        "password": "ccc86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
+                        "_id": "635bc7403cb4f8a736f61cf4",
+                    },
+                    {
+                        "username": "userx2",
+                        "email": "x@somewhere.io",
+                        "password": "xxx86722fb56d946f7ddeecdae47e1c4458bf98a0a3ee5d5113111adf7bf0175",
+                        "_id": "635bc7403cb4f8a736f61cf5",
+                    },
+                ],
+            },
+            "command_stripped": {
+                "insert": "my_collection",
+                "ordered": True,
+                "documents": [
+                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"},
+                    {"username": "%s", "email": "%s", "password": "%s", "_id": "%s"},
+                ],
+            },
+        },
+        {
+            "command": {
+                "find": "my_collection",
+                "filter": {"email": "ada@lovelace.com"},
+            },
+            "command_stripped": {"find": "my_collection", "filter": {"email": "%s"}},
+        },
+        {
+            "command": {
+                "aggregate": "my_collection",
+                "pipeline": [{"$match": {}}, {"$group": {"_id": 1, "n": {"$sum": 1}}}],
+                "cursor": {},
+            },
+            "command_stripped": {
+                "aggregate": "my_collection",
+                "pipeline": [{"$match": {}}, {"$group": {"_id": 1, "n": {"$sum": 1}}}],
+                "cursor": "%s",
+            },
+        },
+        {
+            "command": {
+                "aggregate": "my_collection",
+                "pipeline": [
+                    {"$match": {"email": "x@somewhere.io"}},
+                    {"$group": {"_id": 1, "n": {"$sum": 1}}},
+                ],
+                "cursor": {},
+            },
+            "command_stripped": {
+                "aggregate": "my_collection",
+                "pipeline": [
+                    {"$match": {"email": "%s"}},
+                    {"$group": {"_id": 1, "n": {"$sum": 1}}},
+                ],
+                "cursor": "%s",
+            },
+        },
+        {
+            "command": {
+                "createIndexes": "my_collection",
+                "indexes": [{"name": "username_1", "key": [("username", 1)]}],
+            },
+            "command_stripped": {
+                "createIndexes": "my_collection",
+                "indexes": [{"name": "username_1", "key": [("username", 1)]}],
+            },
+        },
+        {
+            "command": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": [
+                    ("q", {"email": "anton@somewhere.io"}),
+                    (
+                        "u",
+                        {
+                            "email": "anton2@somewhere.io",
+                            "extra_field": "extra_content",
+                            "new": "bla",
+                        },
+                    ),
+                    ("multi", False),
+                    ("upsert", False),
+                ],
+            },
+            "command_stripped": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": "%s",
+            },
+        },
+        {
+            "command": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": [
+                    ("q", {"email": "anton2@somewhere.io"}),
+                    ("u", {"$rename": {"new": "new_field"}}),
+                    ("multi", False),
+                    ("upsert", False),
+                ],
+            },
+            "command_stripped": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": "%s",
+            },
+        },
+        {
+            "command": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": [
+                    ("q", {"email": "x@somewhere.io"}),
+                    ("u", {"$rename": {"password": "pwd"}}),
+                    ("multi", True),
+                    ("upsert", False),
+                ],
+            },
+            "command_stripped": {
+                "update": "my_collection",
+                "ordered": True,
+                "updates": "%s",
+            },
+        },
+        {
+            "command": {
+                "delete": "my_collection",
+                "ordered": True,
+                "deletes": [("q", {"username": "userx2"}), ("limit", 1)],
+            },
+            "command_stripped": {
+                "delete": "my_collection",
+                "ordered": True,
+                "deletes": "%s",
+            },
+        },
+        {
+            "command": {
+                "delete": "my_collection",
+                "ordered": True,
+                "deletes": [("q", {"email": "xplus@somewhere.io"}), ("limit", 0)],
+            },
+            "command_stripped": {
+                "delete": "my_collection",
+                "ordered": True,
+                "deletes": "%s",
+            },
+        },
+        {
+            "command": {
+                "findAndModify": "my_collection",
+                "query": {"email": "ada@lovelace.com"},
+                "new": False,
+                "remove": True,
+            },
+            "command_stripped": {
+                "findAndModify": "my_collection",
+                "query": {"email": "%s"},
+                "new": "%s",
+                "remove": "%s",
+            },
+        },
+        {
+            "command": {
+                "findAndModify": "my_collection",
+                "query": {"email": "anton2@somewhere.io"},
+                "new": False,
+                "update": {"email": "anton3@somewhere.io", "extra_field": "xxx"},
+                "upsert": False,
+            },
+            "command_stripped": {
+                "findAndModify": "my_collection",
+                "query": {"email": "%s"},
+                "new": "%s",
+                "update": {"email": "%s", "extra_field": "%s"},
+                "upsert": "%s",
+            },
+        },
+        {
+            "command": {
+                "findAndModify": "my_collection",
+                "query": {"email": "anton3@somewhere.io"},
+                "new": False,
+                "update": {"$rename": {"extra_field": "extra_field2"}},
+                "upsert": False,
+            },
+            "command_stripped": {
+                "findAndModify": "my_collection",
+                "query": {"email": "%s"},
+                "new": "%s",
+                "update": {"$rename": "%s"},
+                "upsert": "%s",
+            },
+        },
+        {
+            "command": {
+                "renameCollection": "test.my_collection",
+                "to": "test.new_collection",
+            },
+            "command_stripped": {
+                "renameCollection": "test.my_collection",
+                "to": "test.new_collection",
+            },
+        },
+        {
+            "command": {"drop": "new_collection"},
+            "command_stripped": {"drop": "new_collection"},
+        },
+    ],
+)
+def test_strip_pii(testcase):
+    assert _strip_pii(testcase["command"]) == testcase["command_stripped"]
diff --git a/tests/integrations/pyramid/__init__.py b/tests/integrations/pyramid/__init__.py
index b63de1d1d3..a77a4d54ca 100644
--- a/tests/integrations/pyramid/__init__.py
+++ b/tests/integrations/pyramid/__init__.py
@@ -1,3 +1,3 @@
 import pytest
 
-pyramid = pytest.importorskip("pyramid")
+pytest.importorskip("pyramid")
diff --git a/tests/integrations/pyramid/test_pyramid.py b/tests/integrations/pyramid/test_pyramid.py
index 9c6fd51222..6237174604 100644
--- a/tests/integrations/pyramid/test_pyramid.py
+++ b/tests/integrations/pyramid/test_pyramid.py
@@ -1,24 +1,31 @@
 import json
 import logging
-import pkg_resources
-import pytest
-
 from io import BytesIO
 
 import pyramid.testing
-
+import pytest
 from pyramid.authorization import ACLAuthorizationPolicy
 from pyramid.response import Response
+from werkzeug.test import Client
 
 from sentry_sdk import capture_message, add_breadcrumb
 from sentry_sdk.integrations.pyramid import PyramidIntegration
+from sentry_sdk.serializer import MAX_DATABAG_BREADTH
+from tests.conftest import unpack_werkzeug_response
 
-from werkzeug.test import Client
 
+try:
+    from importlib.metadata import version
 
-PYRAMID_VERSION = tuple(
-    map(int, pkg_resources.get_distribution("pyramid").version.split("."))
-)
+    PYRAMID_VERSION = tuple(map(int, version("pyramid").split(".")))
+
+except ImportError:
+    # < py3.8
+    import pkg_resources
+
+    PYRAMID_VERSION = tuple(
+        map(int, pkg_resources.get_distribution("pyramid").version.split("."))
+    )
 
 
 def hi(request):
@@ -26,12 +33,19 @@ def hi(request):
     return Response("hi")
 
 
+def hi_with_id(request):
+    capture_message("hi with id")
+    return Response("hi with id")
+
+
 @pytest.fixture
 def pyramid_config():
     config = pyramid.testing.setUp()
     try:
         config.add_route("hi", "/message")
         config.add_view(hi, route_name="hi")
+        config.add_route("hi_with_id", "/message/{message_id}")
+        config.add_view(hi_with_id, route_name="hi_with_id")
         yield config
     finally:
         pyramid.testing.tearDown()
@@ -82,20 +96,23 @@ def errors(request):
     (event,) = events
     (breadcrumb,) = event["breadcrumbs"]["values"]
     assert breadcrumb["message"] == "hi2"
-    assert event["exception"]["values"][0]["mechanism"]["type"] == "pyramid"
+    # Checking only the last value in the exceptions list,
+    # because Pyramid >= 1.9 returns a chained exception while earlier versions return a single one
+    assert event["exception"]["values"][-1]["mechanism"]["type"] == "pyramid"
+    assert event["exception"]["values"][-1]["type"] == "ZeroDivisionError"
 
 
 def test_has_context(route, get_client, sentry_init, capture_events):
     sentry_init(integrations=[PyramidIntegration()])
     events = capture_events()
 
-    @route("/message/{msg}")
+    @route("/context_message/{msg}")
     def hi2(request):
         capture_message(request.matchdict["msg"])
         return Response("hi")
 
     client = get_client()
-    client.get("/message/yoo")
+    client.get("/context_message/yoo")
 
     (event,) = events
     assert event["message"] == "yoo"
@@ -104,26 +121,38 @@ def hi2(request):
         "headers": {"Host": "localhost"},
         "method": "GET",
         "query_string": "",
-        "url": "http://localhost/message/yoo",
+        "url": "http://localhost/context_message/yoo",
     }
     assert event["transaction"] == "hi2"
 
 
 @pytest.mark.parametrize(
-    "transaction_style,expected_transaction",
-    [("route_name", "hi"), ("route_pattern", "/message")],
+    "url,transaction_style,expected_transaction,expected_source",
+    [
+        ("/message", "route_name", "hi", "component"),
+        ("/message", "route_pattern", "/message", "route"),
+        ("/message/123456", "route_name", "hi_with_id", "component"),
+        ("/message/123456", "route_pattern", "/message/{message_id}", "route"),
+    ],
 )
 def test_transaction_style(
-    sentry_init, get_client, capture_events, transaction_style, expected_transaction
+    sentry_init,
+    get_client,
+    capture_events,
+    url,
+    transaction_style,
+    expected_transaction,
+    expected_source,
 ):
     sentry_init(integrations=[PyramidIntegration(transaction_style=transaction_style)])
 
     events = capture_events()
     client = get_client()
-    client.get("/message")
+    client.get(url)
 
     (event,) = events
     assert event["transaction"] == expected_transaction
+    assert event["transaction_info"] == {"source": expected_source}
 
 
 def test_large_json_request(sentry_init, capture_events, route, get_client):
@@ -146,9 +175,9 @@ def index(request):
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"]["bar"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]["bar"]) == 512
+    assert len(event["request"]["data"]["foo"]["bar"]) == 1024
 
 
 @pytest.mark.parametrize("data", [{}, []], ids=["empty-dict", "empty-list"])
@@ -173,8 +202,33 @@ def index(request):
     assert event["request"]["data"] == data
 
 
+def test_json_not_truncated_if_max_request_body_size_is_always(
+    sentry_init, capture_events, route, get_client
+):
+    sentry_init(integrations=[PyramidIntegration()], max_request_body_size="always")
+
+    data = {
+        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
+    }
+
+    @route("/")
+    def index(request):
+        assert request.json == data
+        assert request.text == json.dumps(data)
+        capture_message("hi")
+        return Response("ok")
+
+    events = capture_events()
+
+    client = get_client()
+    client.post("/", content_type="application/json", data=json.dumps(data))
+
+    (event,) = events
+    assert event["request"]["data"] == data
+
+
 def test_files_and_form(sentry_init, capture_events, route, get_client):
-    sentry_init(integrations=[PyramidIntegration()], request_bodies="always")
+    sentry_init(integrations=[PyramidIntegration()], max_request_body_size="always")
 
     data = {"foo": "a" * 2000, "file": (BytesIO(b"hello"), "hello.txt")}
 
@@ -190,13 +244,11 @@ def index(request):
 
     (event,) = events
     assert event["_meta"]["request"]["data"]["foo"] == {
-        "": {"len": 2000, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 2000, "rem": [["!limit", "x", 1021, 1024]]}
     }
-    assert len(event["request"]["data"]["foo"]) == 512
+    assert len(event["request"]["data"]["foo"]) == 1024
 
-    assert event["_meta"]["request"]["data"]["file"] == {
-        "": {"len": 0, "rem": [["!raw", "x", 0, 0]]}
-    }
+    assert event["_meta"]["request"]["data"]["file"] == {"": {"rem": [["!raw", "x"]]}}
     assert not event["request"]["data"]["file"]
 
 
@@ -264,8 +316,8 @@ def errorhandler(exc, request):
     pyramid_config.add_view(errorhandler, context=Exception)
 
     client = get_client()
-    app_iter, status, headers = client.get("/")
-    assert b"".join(app_iter) == b"bad request"
+    app_iter, status, headers = unpack_werkzeug_response(client.get("/"))
+    assert app_iter == b"bad request"
     assert status.lower() == "500 internal server error"
 
     (error,) = errors
diff --git a/tests/integrations/quart/__init__.py b/tests/integrations/quart/__init__.py
index ea02dfb3a6..2bf976c50d 100644
--- a/tests/integrations/quart/__init__.py
+++ b/tests/integrations/quart/__init__.py
@@ -1,3 +1,3 @@
 import pytest
 
-quart = pytest.importorskip("quart")
+pytest.importorskip("quart")
diff --git a/tests/integrations/quart/test_quart.py b/tests/integrations/quart/test_quart.py
index d827b3c4aa..0f693088c9 100644
--- a/tests/integrations/quart/test_quart.py
+++ b/tests/integrations/quart/test_quart.py
@@ -1,11 +1,8 @@
-import pytest
-
-quart = pytest.importorskip("quart")
-
-from quart import Quart, Response, abort, stream_with_context
-from quart.views import View
+import json
+import threading
 
-from quart_auth import AuthManager, AuthUser, login_user
+import pytest
+import pytest_asyncio
 
 from sentry_sdk import (
     set_tag,
@@ -17,15 +14,26 @@
 from sentry_sdk.integrations.logging import LoggingIntegration
 import sentry_sdk.integrations.quart as quart_sentry
 
+from quart import Quart, Response, abort, stream_with_context
+from quart.views import View
+
+from quart_auth import AuthUser, login_user
 
-auth_manager = AuthManager()
+try:
+    from quart_auth import QuartAuth
 
+    auth_manager = QuartAuth()
+except ImportError:
+    from quart_auth import AuthManager
 
-@pytest.fixture
+    auth_manager = AuthManager()
+
+
+@pytest_asyncio.fixture
 async def app():
     app = Quart(__name__)
-    app.debug = True
-    app.config["TESTING"] = True
+    app.debug = False
+    app.config["TESTING"] = False
     app.secret_key = "haha"
 
     auth_manager.init_app(app)
@@ -35,6 +43,25 @@ async def hi():
         capture_message("hi")
         return "ok"
 
+    @app.route("/message/<message_id>")
+    async def hi_with_id(message_id):
+        capture_message("hi with id")
+        return "ok with id"
+
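+    # These endpoints report the main and handler thread ids so that
+    # test_active_thread_id below can compare them to the profiler payload.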
+    @app.get("/sync/thread_ids")
+    def _thread_ids_sync():
+        return {
+            "main": str(threading.main_thread().ident),
+            "active": str(threading.current_thread().ident),
+        }
+
+    @app.get("/async/thread_ids")
+    async def _thread_ids_async():
+        return {
+            "main": str(threading.main_thread().ident),
+            "active": str(threading.current_thread().ident),
+        }
+
     return app
 
 
@@ -63,10 +90,22 @@ async def test_has_context(sentry_init, app, capture_events):
 
 @pytest.mark.asyncio
 @pytest.mark.parametrize(
-    "transaction_style,expected_transaction", [("endpoint", "hi"), ("url", "/message")]
+    "url,transaction_style,expected_transaction,expected_source",
+    [
+        ("/message", "endpoint", "hi", "component"),
+        ("/message", "url", "/message", "route"),
+        ("/message/123456", "endpoint", "hi_with_id", "component"),
+        ("/message/123456", "url", "/message/", "route"),
+    ],
 )
 async def test_transaction_style(
-    sentry_init, app, capture_events, transaction_style, expected_transaction
+    sentry_init,
+    app,
+    capture_events,
+    url,
+    transaction_style,
+    expected_transaction,
+    expected_source,
 ):
     sentry_init(
         integrations=[
@@ -76,7 +115,7 @@ async def test_transaction_style(
     events = capture_events()
 
     client = app.test_client()
-    response = await client.get("/message")
+    response = await client.get(url)
     assert response.status_code == 200
 
     (event,) = events
@@ -84,22 +123,15 @@ async def test_transaction_style(
 
 
 @pytest.mark.asyncio
-@pytest.mark.parametrize("debug", (True, False))
-@pytest.mark.parametrize("testing", (True, False))
 async def test_errors(
     sentry_init,
     capture_exceptions,
     capture_events,
     app,
-    debug,
-    testing,
     integration_enabled_params,
 ):
     sentry_init(debug=True, **integration_enabled_params)
 
-    app.debug = debug
-    app.testing = testing
-
     @app.route("/")
     async def index():
         1 / 0
@@ -284,9 +316,6 @@ def foo():
 async def test_500(sentry_init, capture_events, app):
     sentry_init(integrations=[quart_sentry.QuartIntegration()])
 
-    app.debug = False
-    app.testing = False
-
     @app.route("/")
     async def index():
         1 / 0
@@ -310,9 +339,6 @@ async def error_handler(err):
 async def test_error_in_errorhandler(sentry_init, capture_events, app):
     sentry_init(integrations=[quart_sentry.QuartIntegration()])
 
-    app.debug = False
-    app.testing = False
-
     @app.route("/")
     async def index():
         raise ValueError()
@@ -505,3 +531,30 @@ async def dispatch_request(self):
 
     assert event["message"] == "hi"
     assert event["transaction"] == "hello_class"
+
+
+@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
+async def test_active_thread_id(sentry_init, capture_envelopes, endpoint, app):
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
+    )
+
+    envelopes = capture_envelopes()
+
+    async with app.test_client() as client:
+        response = await client.get(endpoint)
+        assert response.status_code == 200
+
+    data = json.loads(await response.get_data())
+
+    envelopes = [envelope for envelope in envelopes]
+    assert len(envelopes) == 1
+
+    profiles = [item for item in envelopes[0].items if item.type == "profile"]
+    assert len(profiles) == 1
+
+    for profile in profiles:
+        transactions = profile.payload.json["transactions"]
+        assert len(transactions) == 1
+        assert str(data["active"]) == transactions[0]["active_thread_id"]
diff --git a/tests/integrations/redis/asyncio/__init__.py b/tests/integrations/redis/asyncio/__init__.py
new file mode 100644
index 0000000000..bd93246a9a
--- /dev/null
+++ b/tests/integrations/redis/asyncio/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("fakeredis.aioredis")
diff --git a/tests/integrations/redis/asyncio/test_redis_asyncio.py b/tests/integrations/redis/asyncio/test_redis_asyncio.py
new file mode 100644
index 0000000000..4f024a2824
--- /dev/null
+++ b/tests/integrations/redis/asyncio/test_redis_asyncio.py
@@ -0,0 +1,85 @@
+import pytest
+
+from sentry_sdk import capture_message, start_transaction
+from sentry_sdk.consts import SPANDATA
+from sentry_sdk.integrations.redis import RedisIntegration
+from tests.conftest import ApproxDict
+
+from fakeredis.aioredis import FakeRedis
+
+
+@pytest.mark.asyncio
+async def test_async_basic(sentry_init, capture_events):
+    sentry_init(integrations=[RedisIntegration()])
+    events = capture_events()
+
+    connection = FakeRedis()
+
+    await connection.get("foobar")
+    capture_message("hi")
+
+    (event,) = events
+    (crumb,) = event["breadcrumbs"]["values"]
+
+    assert crumb == {
+        "category": "redis",
+        "message": "GET 'foobar'",
+        "data": {
+            "db.operation": "GET",
+            "redis.key": "foobar",
+            "redis.command": "GET",
+            "redis.is_cluster": False,
+        },
+        "timestamp": crumb["timestamp"],
+        "type": "redis",
+    }
+
+
+@pytest.mark.parametrize(
+    "is_transaction, send_default_pii, expected_first_ten",
+    [
+        (False, False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
+        (True, True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
+    ],
+)
+@pytest.mark.asyncio
+async def test_async_redis_pipeline(
+    sentry_init, capture_events, is_transaction, send_default_pii, expected_first_ten
+):
+    sentry_init(
+        integrations=[RedisIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=send_default_pii,
+    )
+    events = capture_events()
+
+    connection = FakeRedis()
+    with start_transaction():
+        pipeline = connection.pipeline(transaction=is_transaction)
+        pipeline.get("foo")
+        pipeline.set("bar", 1)
+        pipeline.set("baz", 2)
+        await pipeline.execute()
+
+    (event,) = events
+    (span,) = event["spans"]
+    assert span["op"] == "db.redis"
+    assert span["description"] == "redis.pipeline.execute"
+    assert span["data"] == ApproxDict(
+        {
+            "redis.commands": {
+                "count": 3,
+                "first_ten": expected_first_ten,
+            },
+            SPANDATA.DB_SYSTEM: "redis",
+            SPANDATA.DB_NAME: "0",
+            SPANDATA.SERVER_ADDRESS: connection.connection_pool.connection_kwargs.get(
+                "host"
+            ),
+            SPANDATA.SERVER_PORT: 6379,
+        }
+    )
+    assert span["tags"] == {
+        "redis.transaction": is_transaction,
+        "redis.is_cluster": False,
+    }
diff --git a/tests/integrations/redis/cluster/__init__.py b/tests/integrations/redis/cluster/__init__.py
new file mode 100644
index 0000000000..008b24295f
--- /dev/null
+++ b/tests/integrations/redis/cluster/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("redis.cluster")
diff --git a/tests/integrations/redis/cluster/test_redis_cluster.py b/tests/integrations/redis/cluster/test_redis_cluster.py
new file mode 100644
index 0000000000..a16d66588c
--- /dev/null
+++ b/tests/integrations/redis/cluster/test_redis_cluster.py
@@ -0,0 +1,146 @@
+import pytest
+from sentry_sdk import capture_message
+from sentry_sdk.consts import SPANDATA
+from sentry_sdk.api import start_transaction
+from sentry_sdk.integrations.redis import RedisIntegration
+from tests.conftest import ApproxDict
+
+import redis
+
+
+@pytest.fixture(autouse=True)
+def monkeypatch_rediscluster_class(reset_integrations):
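+    # Stub out the network-facing RedisCluster internals so the integration
+    # can be exercised without a live cluster.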
+    pipeline_cls = redis.cluster.ClusterPipeline
+    redis.cluster.NodesManager.initialize = lambda *_, **__: None
+    redis.RedisCluster.command = lambda *_: []
+    redis.RedisCluster.pipeline = lambda *_, **__: pipeline_cls(None, None)
+    redis.RedisCluster.get_default_node = lambda *_, **__: redis.cluster.ClusterNode(
+        "localhost", 6379
+    )
+    pipeline_cls.execute = lambda *_, **__: None
+    redis.RedisCluster.execute_command = lambda *_, **__: []
+
+
+def test_rediscluster_breadcrumb(sentry_init, capture_events):
+    sentry_init(integrations=[RedisIntegration()])
+    events = capture_events()
+
+    rc = redis.RedisCluster(host="localhost", port=6379)
+    rc.get("foobar")
+    capture_message("hi")
+
+    (event,) = events
+    crumbs = event["breadcrumbs"]["values"]
+
+    # on initializing a RedisCluster, a COMMAND call is made - this is not important for the test
+    # but must be accounted for
+    assert len(crumbs) in (1, 2)
+    assert len(crumbs) == 1 or crumbs[0]["message"] == "COMMAND"
+
+    crumb = crumbs[-1]
+
+    assert crumb == {
+        "category": "redis",
+        "message": "GET 'foobar'",
+        "data": {
+            "db.operation": "GET",
+            "redis.key": "foobar",
+            "redis.command": "GET",
+            "redis.is_cluster": True,
+        },
+        "timestamp": crumb["timestamp"],
+        "type": "redis",
+    }
+
+
+@pytest.mark.parametrize(
+    "send_default_pii, description",
+    [
+        (False, "SET 'bar' [Filtered]"),
+        (True, "SET 'bar' 1"),
+    ],
+)
+def test_rediscluster_basic(sentry_init, capture_events, send_default_pii, description):
+    sentry_init(
+        integrations=[RedisIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=send_default_pii,
+    )
+    events = capture_events()
+
+    with start_transaction():
+        rc = redis.RedisCluster(host="localhost", port=6379)
+        rc.set("bar", 1)
+
+    (event,) = events
+    spans = event["spans"]
+
+    # on initializing a RedisCluster, a COMMAND call is made - this is not important for the test
+    # but must be accounted for
+    assert len(spans) in (1, 2)
+    assert len(spans) == 1 or spans[0]["description"] == "COMMAND"
+
+    span = spans[-1]
+    assert span["op"] == "db.redis"
+    assert span["description"] == description
+    assert span["data"] == ApproxDict(
+        {
+            SPANDATA.DB_SYSTEM: "redis",
+            # ClusterNode converts localhost to 127.0.0.1
+            SPANDATA.SERVER_ADDRESS: "127.0.0.1",
+            SPANDATA.SERVER_PORT: 6379,
+        }
+    )
+    assert span["tags"] == {
+        "db.operation": "SET",
+        "redis.command": "SET",
+        "redis.is_cluster": True,
+        "redis.key": "bar",
+    }
+
+
+@pytest.mark.parametrize(
+    "send_default_pii, expected_first_ten",
+    [
+        (False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
+        (True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
+    ],
+)
+def test_rediscluster_pipeline(
+    sentry_init, capture_events, send_default_pii, expected_first_ten
+):
+    sentry_init(
+        integrations=[RedisIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=send_default_pii,
+    )
+    events = capture_events()
+
+    rc = redis.RedisCluster(host="localhost", port=6379)
+    with start_transaction():
+        pipeline = rc.pipeline()
+        pipeline.get("foo")
+        pipeline.set("bar", 1)
+        pipeline.set("baz", 2)
+        pipeline.execute()
+
+    (event,) = events
+    (span,) = event["spans"]
+    assert span["op"] == "db.redis"
+    assert span["description"] == "redis.pipeline.execute"
+    assert span["data"] == ApproxDict(
+        {
+            "redis.commands": {
+                "count": 3,
+                "first_ten": expected_first_ten,
+            },
+            SPANDATA.DB_SYSTEM: "redis",
+            # ClusterNode converts localhost to 127.0.0.1
+            SPANDATA.SERVER_ADDRESS: "127.0.0.1",
+            SPANDATA.SERVER_PORT: 6379,
+        }
+    )
+    assert span["tags"] == {
+        "redis.transaction": False,  # For Cluster, this is always False
+        "redis.is_cluster": True,
+    }
diff --git a/tests/integrations/redis/cluster_asyncio/__init__.py b/tests/integrations/redis/cluster_asyncio/__init__.py
new file mode 100644
index 0000000000..663979a4e2
--- /dev/null
+++ b/tests/integrations/redis/cluster_asyncio/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("redis.asyncio.cluster")
diff --git a/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py b/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py
new file mode 100644
index 0000000000..a6d8962afe
--- /dev/null
+++ b/tests/integrations/redis/cluster_asyncio/test_redis_cluster_asyncio.py
@@ -0,0 +1,149 @@
+import pytest
+
+from sentry_sdk import capture_message, start_transaction
+from sentry_sdk.consts import SPANDATA
+from sentry_sdk.integrations.redis import RedisIntegration
+from tests.conftest import ApproxDict
+
+from redis.asyncio import cluster
+
+
+async def fake_initialize(*_, **__):
+    return None
+
+
+async def fake_execute_command(*_, **__):
+    return []
+
+
+async def fake_execute(*_, **__):
+    return None
+
+
+@pytest.fixture(autouse=True)
+def monkeypatch_rediscluster_asyncio_class(reset_integrations):
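+    # Same idea as the sync cluster fixture: replace network-facing internals
+    # with async stubs so no real cluster is needed.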
+    pipeline_cls = cluster.ClusterPipeline
+    cluster.NodesManager.initialize = fake_initialize
+    cluster.RedisCluster.get_default_node = lambda *_, **__: cluster.ClusterNode(
+        "localhost", 6379
+    )
+    cluster.RedisCluster.pipeline = lambda self, *_, **__: pipeline_cls(self)
+    pipeline_cls.execute = fake_execute
+    cluster.RedisCluster.execute_command = fake_execute_command
+
+
+@pytest.mark.asyncio
+async def test_async_breadcrumb(sentry_init, capture_events):
+    sentry_init(integrations=[RedisIntegration()])
+    events = capture_events()
+
+    connection = cluster.RedisCluster(host="localhost", port=6379)
+
+    await connection.get("foobar")
+    capture_message("hi")
+
+    (event,) = events
+    (crumb,) = event["breadcrumbs"]["values"]
+
+    assert crumb == {
+        "category": "redis",
+        "message": "GET 'foobar'",
+        "data": ApproxDict(
+            {
+                "db.operation": "GET",
+                "redis.key": "foobar",
+                "redis.command": "GET",
+                "redis.is_cluster": True,
+            }
+        ),
+        "timestamp": crumb["timestamp"],
+        "type": "redis",
+    }
+
+
+@pytest.mark.parametrize(
+    "send_default_pii, description",
+    [
+        (False, "SET 'bar' [Filtered]"),
+        (True, "SET 'bar' 1"),
+    ],
+)
+@pytest.mark.asyncio
+async def test_async_basic(sentry_init, capture_events, send_default_pii, description):
+    sentry_init(
+        integrations=[RedisIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=send_default_pii,
+    )
+    events = capture_events()
+
+    connection = cluster.RedisCluster(host="localhost", port=6379)
+    with start_transaction():
+        await connection.set("bar", 1)
+
+    (event,) = events
+    (span,) = event["spans"]
+    assert span["op"] == "db.redis"
+    assert span["description"] == description
+    assert span["data"] == ApproxDict(
+        {
+            SPANDATA.DB_SYSTEM: "redis",
+            # ClusterNode converts localhost to 127.0.0.1
+            SPANDATA.SERVER_ADDRESS: "127.0.0.1",
+            SPANDATA.SERVER_PORT: 6379,
+        }
+    )
+    assert span["tags"] == {
+        "redis.is_cluster": True,
+        "db.operation": "SET",
+        "redis.command": "SET",
+        "redis.key": "bar",
+    }
+
+
+@pytest.mark.parametrize(
+    "send_default_pii, expected_first_ten",
+    [
+        (False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
+        (True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
+    ],
+)
+@pytest.mark.asyncio
+async def test_async_redis_pipeline(
+    sentry_init, capture_events, send_default_pii, expected_first_ten
+):
+    sentry_init(
+        integrations=[RedisIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=send_default_pii,
+    )
+    events = capture_events()
+
+    connection = cluster.RedisCluster(host="localhost", port=6379)
+    with start_transaction():
+        pipeline = connection.pipeline()
+        pipeline.get("foo")
+        pipeline.set("bar", 1)
+        pipeline.set("baz", 2)
+        await pipeline.execute()
+
+    (event,) = events
+    (span,) = event["spans"]
+    assert span["op"] == "db.redis"
+    assert span["description"] == "redis.pipeline.execute"
+    assert span["data"] == ApproxDict(
+        {
+            "redis.commands": {
+                "count": 3,
+                "first_ten": expected_first_ten,
+            },
+            SPANDATA.DB_SYSTEM: "redis",
+            # ClusterNode converts localhost to 127.0.0.1
+            SPANDATA.SERVER_ADDRESS: "127.0.0.1",
+            SPANDATA.SERVER_PORT: 6379,
+        }
+    )
+    assert span["tags"] == {
+        "redis.transaction": False,
+        "redis.is_cluster": True,
+    }
diff --git a/tests/integrations/redis/test_redis.py b/tests/integrations/redis/test_redis.py
index 3708995068..d25e630f6a 100644
--- a/tests/integrations/redis/test_redis.py
+++ b/tests/integrations/redis/test_redis.py
@@ -1,8 +1,24 @@
-from sentry_sdk import capture_message
+import pytest
+
+from sentry_sdk import capture_message, start_transaction
+from sentry_sdk.consts import SPANDATA
 from sentry_sdk.integrations.redis import RedisIntegration
 
 from fakeredis import FakeStrictRedis
 
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+
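+# A stand-in connection pool whose connection_kwargs let the integration
+# report db and server attributes without a real Redis connection.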
+MOCK_CONNECTION_POOL = mock.MagicMock()
+MOCK_CONNECTION_POOL.connection_kwargs = {
+    "host": "localhost",
+    "port": 63791,
+    "db": 1,
+}
+
 
 def test_basic(sentry_init, capture_events):
     sentry_init(integrations=[RedisIntegration()])
@@ -19,7 +35,264 @@ def test_basic(sentry_init, capture_events):
     assert crumb == {
         "category": "redis",
         "message": "GET 'foobar'",
-        "data": {"redis.key": "foobar", "redis.command": "GET"},
+        "data": {
+            "redis.key": "foobar",
+            "redis.command": "GET",
+            "redis.is_cluster": False,
+            "db.operation": "GET",
+        },
         "timestamp": crumb["timestamp"],
         "type": "redis",
     }
+
+
+@pytest.mark.parametrize(
+    "is_transaction, send_default_pii, expected_first_ten",
+    [
+        (False, False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
+        (True, True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
+    ],
+)
+def test_redis_pipeline(
+    sentry_init, capture_events, is_transaction, send_default_pii, expected_first_ten
+):
+    sentry_init(
+        integrations=[RedisIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=send_default_pii,
+    )
+    events = capture_events()
+
+    connection = FakeStrictRedis()
+    with start_transaction():
+        pipeline = connection.pipeline(transaction=is_transaction)
+        pipeline.get("foo")
+        pipeline.set("bar", 1)
+        pipeline.set("baz", 2)
+        pipeline.execute()
+
+    (event,) = events
+    (span,) = event["spans"]
+    assert span["op"] == "db.redis"
+    assert span["description"] == "redis.pipeline.execute"
+    assert span["data"][SPANDATA.DB_SYSTEM] == "redis"
+    assert span["data"]["redis.commands"] == {
+        "count": 3,
+        "first_ten": expected_first_ten,
+    }
+    assert span["tags"] == {
+        "redis.transaction": is_transaction,
+        "redis.is_cluster": False,
+    }
+
+
+def test_sensitive_data(sentry_init, capture_events):
+    # fakeredis does not support the AUTH command, so we need to mock it
+    with mock.patch(
+        "sentry_sdk.integrations.redis._COMMANDS_INCLUDING_SENSITIVE_DATA", ["get"]
+    ):
+        sentry_init(
+            integrations=[RedisIntegration()],
+            traces_sample_rate=1.0,
+            send_default_pii=True,
+        )
+        events = capture_events()
+
+        connection = FakeStrictRedis()
+        with start_transaction():
+            connection.get(
+                "this is super secret"
+            )  # because fakeredis does not support AUTH we use GET instead
+
+        (event,) = events
+        spans = event["spans"]
+        assert spans[0]["op"] == "db.redis"
+        assert spans[0]["description"] == "GET [Filtered]"
+
+
+def test_pii_data_redacted(sentry_init, capture_events):
+    sentry_init(
+        integrations=[RedisIntegration()],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    connection = FakeStrictRedis()
+    with start_transaction():
+        connection.set("somekey1", "my secret string1")
+        connection.set("somekey2", "my secret string2")
+        connection.get("somekey2")
+        connection.delete("somekey1", "somekey2")
+
+    (event,) = events
+    spans = event["spans"]
+    assert spans[0]["op"] == "db.redis"
+    assert spans[0]["description"] == "SET 'somekey1' [Filtered]"
+    assert spans[1]["description"] == "SET 'somekey2' [Filtered]"
+    assert spans[2]["description"] == "GET 'somekey2'"
+    assert spans[3]["description"] == "DEL 'somekey1' [Filtered]"
+
+
+def test_pii_data_sent(sentry_init, capture_events):
+    sentry_init(
+        integrations=[RedisIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+    )
+    events = capture_events()
+
+    connection = FakeStrictRedis()
+    with start_transaction():
+        connection.set("somekey1", "my secret string1")
+        connection.set("somekey2", "my secret string2")
+        connection.get("somekey2")
+        connection.delete("somekey1", "somekey2")
+
+    (event,) = events
+    spans = event["spans"]
+    assert spans[0]["op"] == "db.redis"
+    assert spans[0]["description"] == "SET 'somekey1' 'my secret string1'"
+    assert spans[1]["description"] == "SET 'somekey2' 'my secret string2'"
+    assert spans[2]["description"] == "GET 'somekey2'"
+    assert spans[3]["description"] == "DEL 'somekey1' 'somekey2'"
+
+
+def test_data_truncation(sentry_init, capture_events):
+    sentry_init(
+        integrations=[RedisIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+    )
+    events = capture_events()
+
+    connection = FakeStrictRedis()
+    with start_transaction():
+        long_string = "a" * 100000
+        connection.set("somekey1", long_string)
+        short_string = "b" * 10
+        connection.set("somekey2", short_string)
+
+    (event,) = events
+    spans = event["spans"]
+    assert spans[0]["op"] == "db.redis"
+    assert spans[0]["description"] == "SET 'somekey1' '%s..." % (
+        long_string[: 1024 - len("...") - len("SET 'somekey1' '")],
+    )
+    assert spans[1]["description"] == "SET 'somekey2' '%s'" % (short_string,)
+
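+# A note on the truncation arithmetic asserted above (an illustrative sketch,
+# assuming the default max_data_size of 1024): the description is cut so that
+# prefix + truncated value + "..." is exactly 1024 characters long:
+#
+#     prefix = "SET 'somekey1' '"                    # 16 characters
+#     budget = 1024 - len("...") - len(prefix)       # 1005 value characters
+#     description = prefix + value[:budget] + "..."  # 16 + 1005 + 3 == 1024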
+
+def test_data_truncation_custom(sentry_init, capture_events):
+    sentry_init(
+        integrations=[RedisIntegration(max_data_size=30)],
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+    )
+    events = capture_events()
+
+    connection = FakeStrictRedis()
+    with start_transaction():
+        long_string = "a" * 100000
+        connection.set("somekey1", long_string)
+        short_string = "b" * 10
+        connection.set("somekey2", short_string)
+
+    (event,) = events
+    spans = event["spans"]
+    assert spans[0]["op"] == "db.redis"
+    assert spans[0]["description"] == "SET 'somekey1' '%s..." % (
+        long_string[: 30 - len("...") - len("SET 'somekey1' '")],
+    )
+    assert spans[1]["description"] == "SET 'somekey2' '%s'" % (short_string,)
+
+
+def test_breadcrumbs(sentry_init, capture_events):
+    sentry_init(
+        integrations=[RedisIntegration(max_data_size=30)],
+        send_default_pii=True,
+    )
+    events = capture_events()
+
+    connection = FakeStrictRedis()
+
+    long_string = "a" * 100000
+    connection.set("somekey1", long_string)
+    short_string = "b" * 10
+    connection.set("somekey2", short_string)
+
+    capture_message("hi")
+
+    (event,) = events
+    crumbs = event["breadcrumbs"]["values"]
+
+    assert crumbs[0] == {
+        "message": "SET 'somekey1' 'aaaaaaaaaaa...",
+        "type": "redis",
+        "category": "redis",
+        "data": {
+            "db.operation": "SET",
+            "redis.is_cluster": False,
+            "redis.command": "SET",
+            "redis.key": "somekey1",
+        },
+        "timestamp": crumbs[0]["timestamp"],
+    }
+    assert crumbs[1] == {
+        "message": "SET 'somekey2' 'bbbbbbbbbb'",
+        "type": "redis",
+        "category": "redis",
+        "data": {
+            "db.operation": "SET",
+            "redis.is_cluster": False,
+            "redis.command": "SET",
+            "redis.key": "somekey2",
+        },
+        "timestamp": crumbs[1]["timestamp"],
+    }
+
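+# The same arithmetic with max_data_size=30: len("SET 'somekey1' '") == 16, so
+# 30 - 16 - len("...") == 11 characters of the value survive, producing the
+# 30-character message "SET 'somekey1' 'aaaaaaaaaaa..." asserted above.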
+
+def test_db_connection_attributes_client(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[RedisIntegration()],
+    )
+    events = capture_events()
+
+    with start_transaction():
+        connection = FakeStrictRedis(connection_pool=MOCK_CONNECTION_POOL)
+        connection.get("foobar")
+
+    (event,) = events
+    (span,) = event["spans"]
+
+    assert span["op"] == "db.redis"
+    assert span["description"] == "GET 'foobar'"
+    assert span["data"][SPANDATA.DB_SYSTEM] == "redis"
+    assert span["data"][SPANDATA.DB_NAME] == "1"
+    assert span["data"][SPANDATA.SERVER_ADDRESS] == "localhost"
+    assert span["data"][SPANDATA.SERVER_PORT] == 63791
+
+
+def test_db_connection_attributes_pipeline(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[RedisIntegration()],
+    )
+    events = capture_events()
+
+    with start_transaction():
+        connection = FakeStrictRedis(connection_pool=MOCK_CONNECTION_POOL)
+        pipeline = connection.pipeline(transaction=False)
+        pipeline.get("foo")
+        pipeline.set("bar", 1)
+        pipeline.set("baz", 2)
+        pipeline.execute()
+
+    (event,) = events
+    (span,) = event["spans"]
+
+    assert span["op"] == "db.redis"
+    assert span["description"] == "redis.pipeline.execute"
+    assert span["data"][SPANDATA.DB_SYSTEM] == "redis"
+    assert span["data"][SPANDATA.DB_NAME] == "1"
+    assert span["data"][SPANDATA.SERVER_ADDRESS] == "localhost"
+    assert span["data"][SPANDATA.SERVER_PORT] == 63791
diff --git a/tests/integrations/rediscluster/test_rediscluster.py b/tests/integrations/rediscluster/test_rediscluster.py
index 425ff13b2f..88f987758b 100644
--- a/tests/integrations/rediscluster/test_rediscluster.py
+++ b/tests/integrations/rediscluster/test_rediscluster.py
@@ -1,17 +1,43 @@
 import pytest
+
 from sentry_sdk import capture_message
+from sentry_sdk.api import start_transaction
+from sentry_sdk.consts import SPANDATA
 from sentry_sdk.integrations.redis import RedisIntegration
+from tests.conftest import ApproxDict
+
+try:
+    from unittest import mock
+except ImportError:
+    import mock
 
 import rediscluster
 
+
+MOCK_CONNECTION_POOL = mock.MagicMock()
+MOCK_CONNECTION_POOL.connection_kwargs = {
+    "host": "localhost",
+    "port": 63791,
+    "db": 1,
+}
+
+
 rediscluster_classes = [rediscluster.RedisCluster]
 
 if hasattr(rediscluster, "StrictRedisCluster"):
     rediscluster_classes.append(rediscluster.StrictRedisCluster)
 
 
-@pytest.fixture(scope="module", autouse=True)
-def monkeypatch_rediscluster_classes():
+@pytest.fixture(autouse=True)
+def monkeypatch_rediscluster_classes(reset_integrations):
+    try:
+        pipeline_cls = rediscluster.pipeline.ClusterPipeline
+    except AttributeError:
+        pipeline_cls = rediscluster.StrictClusterPipeline
+    rediscluster.RedisCluster.pipeline = lambda *_, **__: pipeline_cls(
+        connection_pool=MOCK_CONNECTION_POOL
+    )
+    pipeline_cls.execute = lambda *_, **__: None
     for cls in rediscluster_classes:
         cls.execute_command = lambda *_, **__: None
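+    # Patching pipeline() and the execute methods means these tests need no
+    # live Redis cluster; the lambdas simply swallow the calls.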
 
@@ -21,7 +47,7 @@ def test_rediscluster_basic(rediscluster_cls, sentry_init, capture_events):
     sentry_init(integrations=[RedisIntegration()])
     events = capture_events()
 
-    rc = rediscluster_cls(connection_pool=True)
+    rc = rediscluster_cls(connection_pool=MOCK_CONNECTION_POOL)
     rc.get("foobar")
     capture_message("hi")
 
@@ -31,7 +57,120 @@ def test_rediscluster_basic(rediscluster_cls, sentry_init, capture_events):
     assert crumb == {
         "category": "redis",
         "message": "GET 'foobar'",
-        "data": {"redis.key": "foobar", "redis.command": "GET"},
+        "data": ApproxDict(
+            {
+                "db.operation": "GET",
+                "redis.key": "foobar",
+                "redis.command": "GET",
+                "redis.is_cluster": True,
+            }
+        ),
         "timestamp": crumb["timestamp"],
         "type": "redis",
     }
+
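+# ApproxDict comes from tests.conftest; it presumably compares equal when its
+# own items are a subset of the other dict, so extra keys the SDK adds over
+# time do not break these assertions. A minimal sketch of the idea:
+#
+#     class ApproxDict(dict):
+#         def __eq__(self, other):
+#             return all(k in other and other[k] == v for k, v in self.items())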
+
+@pytest.mark.parametrize(
+    "send_default_pii, expected_first_ten",
+    [
+        (False, ["GET 'foo'", "SET 'bar' [Filtered]", "SET 'baz' [Filtered]"]),
+        (True, ["GET 'foo'", "SET 'bar' 1", "SET 'baz' 2"]),
+    ],
+)
+def test_rediscluster_pipeline(
+    sentry_init, capture_events, send_default_pii, expected_first_ten
+):
+    sentry_init(
+        integrations=[RedisIntegration()],
+        traces_sample_rate=1.0,
+        send_default_pii=send_default_pii,
+    )
+    events = capture_events()
+
+    rc = rediscluster.RedisCluster(connection_pool=MOCK_CONNECTION_POOL)
+    with start_transaction():
+        pipeline = rc.pipeline()
+        pipeline.get("foo")
+        pipeline.set("bar", 1)
+        pipeline.set("baz", 2)
+        pipeline.execute()
+
+    (event,) = events
+    (span,) = event["spans"]
+    assert span["op"] == "db.redis"
+    assert span["description"] == "redis.pipeline.execute"
+    assert span["data"] == ApproxDict(
+        {
+            "redis.commands": {
+                "count": 3,
+                "first_ten": expected_first_ten,
+            },
+            SPANDATA.DB_SYSTEM: "redis",
+            SPANDATA.DB_NAME: "1",
+            SPANDATA.SERVER_ADDRESS: "localhost",
+            SPANDATA.SERVER_PORT: 63791,
+        }
+    )
+    assert span["tags"] == {
+        "redis.transaction": False,  # For Cluster, this is always False
+        "redis.is_cluster": True,
+    }
+
+
+@pytest.mark.parametrize("rediscluster_cls", rediscluster_classes)
+def test_db_connection_attributes_client(sentry_init, capture_events, rediscluster_cls):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[RedisIntegration()],
+    )
+    events = capture_events()
+
+    rc = rediscluster_cls(connection_pool=MOCK_CONNECTION_POOL)
+    with start_transaction():
+        rc.get("foobar")
+
+    (event,) = events
+    (span,) = event["spans"]
+
+    assert span["data"] == ApproxDict(
+        {
+            SPANDATA.DB_SYSTEM: "redis",
+            SPANDATA.DB_NAME: "1",
+            SPANDATA.SERVER_ADDRESS: "localhost",
+            SPANDATA.SERVER_PORT: 63791,
+        }
+    )
+
+
+@pytest.mark.parametrize("rediscluster_cls", rediscluster_classes)
+def test_db_connection_attributes_pipeline(
+    sentry_init, capture_events, rediscluster_cls
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[RedisIntegration()],
+    )
+    events = capture_events()
+
+    rc = rediscluster.RedisCluster(connection_pool=MOCK_CONNECTION_POOL)
+    with start_transaction():
+        pipeline = rc.pipeline()
+        pipeline.get("foo")
+        pipeline.execute()
+
+    (event,) = events
+    (span,) = event["spans"]
+    assert span["op"] == "db.redis"
+    assert span["description"] == "redis.pipeline.execute"
+    assert span["data"] == ApproxDict(
+        {
+            "redis.commands": {
+                "count": 1,
+                "first_ten": ["GET 'foo'"],
+            },
+            SPANDATA.DB_SYSTEM: "redis",
+            SPANDATA.DB_NAME: "1",
+            SPANDATA.SERVER_ADDRESS: "localhost",
+            SPANDATA.SERVER_PORT: 63791,
+        }
+    )
diff --git a/tests/integrations/requests/__init__.py b/tests/integrations/requests/__init__.py
new file mode 100644
index 0000000000..a711908293
--- /dev/null
+++ b/tests/integrations/requests/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("requests")
diff --git a/tests/integrations/requests/test_requests.py b/tests/integrations/requests/test_requests.py
index 02c6636853..1f4dd412d7 100644
--- a/tests/integrations/requests/test_requests.py
+++ b/tests/integrations/requests/test_requests.py
@@ -1,25 +1,73 @@
-import pytest
+import requests
+import responses
 
-requests = pytest.importorskip("requests")
+import pytest
 
 from sentry_sdk import capture_message
+from sentry_sdk.consts import SPANDATA
 from sentry_sdk.integrations.stdlib import StdlibIntegration
+from tests.conftest import ApproxDict
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
 
 
+@responses.activate
 def test_crumb_capture(sentry_init, capture_events):
     sentry_init(integrations=[StdlibIntegration()])
+
+    url = "http://example.com/"
+    responses.add(responses.GET, url, status=200)
+
     events = capture_events()
 
-    response = requests.get("https://httpbin.org/status/418")
+    response = requests.get(url)
     capture_message("Testing!")
 
     (event,) = events
     (crumb,) = event["breadcrumbs"]["values"]
     assert crumb["type"] == "http"
     assert crumb["category"] == "httplib"
-    assert crumb["data"] == {
-        "url": "https://httpbin.org/status/418",
-        "method": "GET",
-        "status_code": response.status_code,
-        "reason": response.reason,
-    }
+    assert crumb["data"] == ApproxDict(
+        {
+            "url": url,
+            SPANDATA.HTTP_METHOD: "GET",
+            SPANDATA.HTTP_FRAGMENT: "",
+            SPANDATA.HTTP_QUERY: "",
+            SPANDATA.HTTP_STATUS_CODE: response.status_code,
+            "reason": response.reason,
+        }
+    )
+
+
+@pytest.mark.tests_internal_exceptions
+@responses.activate
+def test_omit_url_data_if_parsing_fails(sentry_init, capture_events):
+    sentry_init(integrations=[StdlibIntegration()])
+
+    url = "https://example.com"
+    responses.add(responses.GET, url, status=200)
+
+    events = capture_events()
+
+    with mock.patch(
+        "sentry_sdk.integrations.stdlib.parse_url",
+        side_effect=ValueError,
+    ):
+        response = requests.get(url)
+
+    capture_message("Testing!")
+
+    (event,) = events
+    assert event["breadcrumbs"]["values"][0]["data"] == ApproxDict(
+        {
+            SPANDATA.HTTP_METHOD: "GET",
+            SPANDATA.HTTP_STATUS_CODE: response.status_code,
+            "reason": response.reason,
+            # no url related data
+        }
+    )
+
+    assert "url" not in event["breadcrumbs"]["values"][0]["data"]
+    assert SPANDATA.HTTP_FRAGMENT not in event["breadcrumbs"]["values"][0]["data"]
+    assert SPANDATA.HTTP_QUERY not in event["breadcrumbs"]["values"][0]["data"]
diff --git a/tests/integrations/rq/__init__.py b/tests/integrations/rq/__init__.py
index d9714d465a..9766a19465 100644
--- a/tests/integrations/rq/__init__.py
+++ b/tests/integrations/rq/__init__.py
@@ -1,3 +1,3 @@
 import pytest
 
-rq = pytest.importorskip("rq")
+pytest.importorskip("rq")
diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py
index 651bf22248..b0d71e8f7d 100644
--- a/tests/integrations/rq/test_rq.py
+++ b/tests/integrations/rq/test_rq.py
@@ -1,6 +1,8 @@
 import pytest
 from fakeredis import FakeStrictRedis
+from sentry_sdk import configure_scope, start_transaction
 from sentry_sdk.integrations.rq import RqIntegration
+from sentry_sdk.utils import parse_version
 
 import rq
 
@@ -13,19 +15,23 @@
 @pytest.fixture(autouse=True)
 def _patch_rq_get_server_version(monkeypatch):
     """
-    Patch up RQ 1.5 to work with fakeredis.
+    Patch RQ 1.5.1 and lower to work with fakeredis.
 
     https://github.com/jamesls/fakeredis/issues/273
     """
 
     from distutils.version import StrictVersion
 
-    if tuple(map(int, rq.VERSION.split("."))) >= (1, 5):
+    if parse_version(rq.VERSION) <= (1, 5, 1):
         for k in (
             "rq.job.Job.get_redis_server_version",
             "rq.worker.Worker.get_redis_server_version",
         ):
-            monkeypatch.setattr(k, lambda _: StrictVersion("4.0.0"))
+            try:
+                monkeypatch.setattr(k, lambda _: StrictVersion("4.0.0"))
+            except AttributeError:
+                # old RQ Job/Worker doesn't have a get_redis_server_version attr
+                pass
 
 
 def crashing_job(foo):
@@ -58,13 +64,18 @@ def test_basic(sentry_init, capture_events):
     assert exception["stacktrace"]["frames"][-1]["vars"]["foo"] == "42"
 
     assert event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
-    assert event["extra"]["rq-job"] == {
-        "args": [],
-        "description": "tests.integrations.rq.test_rq.crashing_job(foo=42)",
-        "func": "tests.integrations.rq.test_rq.crashing_job",
-        "job_id": event["extra"]["rq-job"]["job_id"],
-        "kwargs": {"foo": 42},
-    }
+
+    extra = event["extra"]["rq-job"]
+    assert extra["args"] == []
+    assert extra["kwargs"] == {"foo": 42}
+    assert extra["description"] == "tests.integrations.rq.test_rq.crashing_job(foo=42)"
+    assert extra["func"] == "tests.integrations.rq.test_rq.crashing_job"
+    assert "job_id" in extra
+    assert "enqueued_at" in extra
+
+    # older versions don't persist started_at correctly
+    if parse_version(rq.VERSION) >= (0, 9):
+        assert "started_at" in extra
 
 
 def test_transport_shutdown(sentry_init, capture_events_forksafe):
@@ -88,7 +99,6 @@ def test_transport_shutdown(sentry_init, capture_events_forksafe):
 def test_transaction_with_error(
     sentry_init, capture_events, DictionaryContaining  # noqa:N803
 ):
-
     sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0)
     events = capture_events()
 
@@ -101,7 +111,7 @@ def test_transaction_with_error(
     error_event, envelope = events
 
     assert error_event["transaction"] == "tests.integrations.rq.test_rq.chew_up_shoes"
-    assert error_event["contexts"]["trace"]["op"] == "rq.task"
+    assert error_event["contexts"]["trace"]["op"] == "queue.task.rq"
     assert error_event["exception"]["values"][0]["type"] == "Exception"
     assert (
         error_event["exception"]["values"][0]["value"]
@@ -121,6 +131,71 @@ def test_transaction_with_error(
     )
 
 
+def test_error_has_trace_context_if_tracing_disabled(
+    sentry_init,
+    capture_events,
+):
+    sentry_init(integrations=[RqIntegration()])
+    events = capture_events()
+
+    queue = rq.Queue(connection=FakeStrictRedis())
+    worker = rq.SimpleWorker([queue], connection=queue.connection)
+
+    queue.enqueue(crashing_job, foo=None)
+    worker.work(burst=True)
+
+    (error_event,) = events
+
+    assert error_event["contexts"]["trace"]
+
+
+def test_tracing_enabled(
+    sentry_init,
+    capture_events,
+):
+    sentry_init(integrations=[RqIntegration()], traces_sample_rate=1.0)
+    events = capture_events()
+
+    queue = rq.Queue(connection=FakeStrictRedis())
+    worker = rq.SimpleWorker([queue], connection=queue.connection)
+
+    with start_transaction(op="rq transaction") as transaction:
+        queue.enqueue(crashing_job, foo=None)
+        worker.work(burst=True)
+
+    error_event, envelope, _ = events
+
+    assert error_event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
+    assert error_event["contexts"]["trace"]["trace_id"] == transaction.trace_id
+
+    assert envelope["contexts"]["trace"] == error_event["contexts"]["trace"]
+
+
+def test_tracing_disabled(
+    sentry_init,
+    capture_events,
+):
+    sentry_init(integrations=[RqIntegration()])
+    events = capture_events()
+
+    queue = rq.Queue(connection=FakeStrictRedis())
+    worker = rq.SimpleWorker([queue], connection=queue.connection)
+
+    with configure_scope() as scope:
+        queue.enqueue(crashing_job, foo=None)
+        worker.work(burst=True)
+
+        (error_event,) = events
+
+        assert (
+            error_event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
+        )
+        assert (
+            error_event["contexts"]["trace"]["trace_id"]
+            == scope._propagation_context["trace_id"]
+        )
+
+
 def test_transaction_no_error(
     sentry_init, capture_events, DictionaryContaining  # noqa:N803
 ):
@@ -136,7 +211,7 @@ def test_transaction_no_error(
     envelope = events[0]
 
     assert envelope["type"] == "transaction"
-    assert envelope["contexts"]["trace"]["op"] == "rq.task"
+    assert envelope["contexts"]["trace"]["op"] == "queue.task.rq"
     assert envelope["transaction"] == "tests.integrations.rq.test_rq.do_trick"
     assert envelope["extra"]["rq-job"] == DictionaryContaining(
         {
@@ -179,7 +254,7 @@ def test_traces_sampler_gets_correct_values_in_sampling_context(
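+# Note: parse_version("1.5.1") yields the integer tuple (1, 5, 1), so the
+# version comparison below is numeric. The old list-of-strings comparison was
+# wrong for double-digit components: ["1", "10"] < ["1", "5"] is True
+# lexicographically, even though 1.10 is newer than 1.5.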
 
 
 @pytest.mark.skipif(
-    rq.__version__.split(".") < ["1", "5"], reason="At least rq-1.5 required"
+    parse_version(rq.__version__) < (1, 5), reason="At least rq-1.5 required"
 )
 def test_job_with_retries(sentry_init, capture_events):
     sentry_init(integrations=[RqIntegration()])
diff --git a/tests/integrations/sanic/__init__.py b/tests/integrations/sanic/__init__.py
index 53449e2f0e..d6b67797a3 100644
--- a/tests/integrations/sanic/__init__.py
+++ b/tests/integrations/sanic/__init__.py
@@ -1,3 +1,3 @@
 import pytest
 
-sanic = pytest.importorskip("sanic")
+pytest.importorskip("sanic")
diff --git a/tests/integrations/sanic/test_sanic.py b/tests/integrations/sanic/test_sanic.py
index b91f94bfe9..b338a5e6fb 100644
--- a/tests/integrations/sanic/test_sanic.py
+++ b/tests/integrations/sanic/test_sanic.py
@@ -1,44 +1,102 @@
-import sys
-
-import random
 import asyncio
+import contextlib
+import os
+import random
+import sys
 from unittest.mock import Mock
 
 import pytest
 
 from sentry_sdk import capture_message, configure_scope
 from sentry_sdk.integrations.sanic import SanicIntegration
+from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_URL
 
 from sanic import Sanic, request, response, __version__ as SANIC_VERSION_RAW
 from sanic.response import HTTPResponse
 from sanic.exceptions import SanicException
 
+try:
+    from sanic_testing import TestManager
+except ImportError:
+    TestManager = None
+
+try:
+    from sanic_testing.reusable import ReusableClient
+except ImportError:
+    ReusableClient = None
+
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable, Container
+    from typing import Any, Optional
+
 SANIC_VERSION = tuple(map(int, SANIC_VERSION_RAW.split(".")))
+PERFORMANCE_SUPPORTED = SANIC_VERSION >= (21, 9)
 
 
 @pytest.fixture
 def app():
-    if SANIC_VERSION >= (20, 12):
-        # Build (20.12.0) adds a feature where the instance is stored in an internal class
+    if SANIC_VERSION < (19,):
+        """
+        Older Sanic versions 0.8 and 18 bind to the same fixed port which
+        creates problems when we run tests concurrently.
+        """
+        old_test_client = Sanic.test_client.__get__
+
+        def new_test_client(self):
+            client = old_test_client(self, Sanic)
+            client.port += os.getpid() % 100
+            return client
+
+        Sanic.test_client = property(new_test_client)
+
+    if SANIC_VERSION >= (20, 12) and SANIC_VERSION < (22, 6):
+        # Some versions (introduced in 20.12.0, removed again in 22.6.0) store the instance in an internal class
         # registry for later retrieval, and so add register=False to disable that
-        app = Sanic("Test", register=False)
+        sanic_app = Sanic("Test", register=False)
     else:
-        app = Sanic("Test")
+        sanic_app = Sanic("Test")
+
+    if TestManager is not None:
+        TestManager(sanic_app)
 
-    @app.route("/message")
+    @sanic_app.route("/message")
     def hi(request):
         capture_message("hi")
         return response.text("ok")
 
-    return app
+    @sanic_app.route("/message/")
+    def hi_with_id(request, message_id):
+        capture_message("hi with id")
+        return response.text("ok with id")
+
+    @sanic_app.route("/500")
+    def fivehundred(_):
+        1 / 0
+
+    return sanic_app
+
+
+def get_client(app):
+    @contextlib.contextmanager
+    def simple_client(app):
+        yield app.test_client
+
+    if ReusableClient is not None:
+        return ReusableClient(app)
+    else:
+        return simple_client(app)
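+
+# Both branches of get_client yield an object usable as
+# `with get_client(app) as client:`. ReusableClient, where available, reuses a
+# single running server across requests; the fallback simply hands out the
+# classic Sanic test client.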
 
 
 def test_request_data(sentry_init, app, capture_events):
     sentry_init(integrations=[SanicIntegration()])
     events = capture_events()
 
-    request, response = app.test_client.get("/message?foo=bar")
-    assert response.status == 200
+    c = get_client(app)
+    with c as client:
+        _, response = client.get("/message?foo=bar")
+        assert response.status == 200
 
     (event,) = events
     assert event["transaction"] == "hi"
@@ -62,6 +120,29 @@ def test_request_data(sentry_init, app, capture_events):
     assert "transaction" not in event
 
 
+@pytest.mark.parametrize(
+    "url,expected_transaction,expected_source",
+    [
+        ("/message", "hi", "component"),
+        ("/message/123456", "hi_with_id", "component"),
+    ],
+)
+def test_transaction_name(
+    sentry_init, app, capture_events, url, expected_transaction, expected_source
+):
+    sentry_init(integrations=[SanicIntegration()])
+    events = capture_events()
+
+    c = get_client(app)
+    with c as client:
+        _, response = client.get(url)
+        assert response.status == 200
+
+    (event,) = events
+    assert event["transaction"] == expected_transaction
+    assert event["transaction_info"] == {"source": expected_source}
+
+
 def test_errors(sentry_init, app, capture_events):
     sentry_init(integrations=[SanicIntegration()])
     events = capture_events()
@@ -70,8 +151,10 @@ def test_errors(sentry_init, app, capture_events):
     def myerror(request):
         raise ValueError("oh no")
 
-    request, response = app.test_client.get("/error")
-    assert response.status == 500
+    c = get_client(app)
+    with c as client:
+        _, response = client.get("/error")
+        assert response.status == 500
 
     (event,) = events
     assert event["transaction"] == "myerror"
@@ -93,8 +176,10 @@ def test_bad_request_not_captured(sentry_init, app, capture_events):
     def index(request):
         raise SanicException("...", status_code=400)
 
-    request, response = app.test_client.get("/")
-    assert response.status == 400
+    c = get_client(app)
+    with c as client:
+        _, response = client.get("/")
+        assert response.status == 400
 
     assert not events
 
@@ -111,8 +196,10 @@ def myerror(request):
     def myhandler(request, exception):
         1 / 0
 
-    request, response = app.test_client.get("/error")
-    assert response.status == 500
+    c = get_client(app)
+    with c as client:
+        _, response = client.get("/error")
+        assert response.status == 500
 
     event1, event2 = events
 
@@ -142,7 +229,6 @@ def test_concurrency(sentry_init, app):
     because that's the only way we could reproduce leakage with such a low
     amount of concurrent tasks.
     """
-
     sentry_init(integrations=[SanicIntegration()])
 
     @app.route("/context-check/")
@@ -189,7 +275,6 @@ def __init__(self, request_body):
                 def respond(self, response):
                     responses.append(response)
                     patched_response = HTTPResponse()
-                    patched_response.send = lambda end_stream: asyncio.sleep(0.001)
                     return patched_response
 
                 def __aiter__(self):
@@ -245,3 +330,116 @@ async def runner():
 
     with configure_scope() as scope:
         assert not scope._tags
+
+
+class TransactionTestConfig:
+    """
+    Data class to store configurations for each performance transaction test run, including
+    both the inputs and relevant expected results.
+    """
+
+    def __init__(
+        self,
+        integration_args,
+        url,
+        expected_status,
+        expected_transaction_name,
+        expected_source=None,
+    ):
+        # type: (Iterable[Optional[Container[int]]], str, int, Optional[str], Optional[str]) -> None
+        """
+        An expected_transaction_name of None indicates that we expect not to receive a transaction.
+        """
+        self.integration_args = integration_args
+        self.url = url
+        self.expected_status = expected_status
+        self.expected_transaction_name = expected_transaction_name
+        self.expected_source = expected_source
+
+
+@pytest.mark.skipif(
+    not PERFORMANCE_SUPPORTED, reason="Performance not supported on this Sanic version"
+)
+@pytest.mark.parametrize(
+    "test_config",
+    [
+        TransactionTestConfig(
+            # Transaction for successful page load
+            integration_args=(),
+            url="/message",
+            expected_status=200,
+            expected_transaction_name="hi",
+            expected_source=TRANSACTION_SOURCE_COMPONENT,
+        ),
+        TransactionTestConfig(
+            # Transaction still recorded when we have an internal server error
+            integration_args=(),
+            url="/500",
+            expected_status=500,
+            expected_transaction_name="fivehundred",
+            expected_source=TRANSACTION_SOURCE_COMPONENT,
+        ),
+        TransactionTestConfig(
+            # By default, no transaction when we have a 404 error
+            integration_args=(),
+            url="/404",
+            expected_status=404,
+            expected_transaction_name=None,
+        ),
+        TransactionTestConfig(
+            # With no ignored HTTP statuses, we should get transactions for 404 errors
+            integration_args=(None,),
+            url="/404",
+            expected_status=404,
+            expected_transaction_name="/404",
+            expected_source=TRANSACTION_SOURCE_URL,
+        ),
+        TransactionTestConfig(
+            # Transaction can be suppressed for other HTTP statuses, too, by passing config to the integration
+            integration_args=({200},),
+            url="/message",
+            expected_status=200,
+            expected_transaction_name=None,
+        ),
+    ],
+)
+def test_transactions(test_config, sentry_init, app, capture_events):
+    # type: (TransactionTestConfig, Any, Any, Any) -> None
+
+    # Init the SanicIntegration with the desired arguments
+    sentry_init(
+        integrations=[SanicIntegration(*test_config.integration_args)],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    # Make request to the desired URL
+    c = get_client(app)
+    with c as client:
+        _, response = client.get(test_config.url)
+        assert response.status == test_config.expected_status
+
+    # Extract the transaction events by inspecting the event types. We should have at most one transaction event.
+    transaction_events = [
+        e for e in events if "type" in e and e["type"] == "transaction"
+    ]
+    assert len(transaction_events) <= 1
+
+    # Get the only transaction event, or set to None if there are no transaction events.
+    (transaction_event, *_) = [*transaction_events, None]
+
+    # We should have no transaction event if and only if we expect no transactions
+    assert (transaction_event is None) == (
+        test_config.expected_transaction_name is None
+    )
+
+    # If a transaction was expected, ensure it is correct
+    assert (
+        transaction_event is None
+        or transaction_event["transaction"] == test_config.expected_transaction_name
+    )
+    assert (
+        transaction_event is None
+        or transaction_event["transaction_info"]["source"]
+        == test_config.expected_source
+    )
diff --git a/tests/integrations/socket/__init__.py b/tests/integrations/socket/__init__.py
new file mode 100644
index 0000000000..893069b21b
--- /dev/null
+++ b/tests/integrations/socket/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("socket")
diff --git a/tests/integrations/socket/test_socket.py b/tests/integrations/socket/test_socket.py
new file mode 100644
index 0000000000..4f93c1f2a5
--- /dev/null
+++ b/tests/integrations/socket/test_socket.py
@@ -0,0 +1,58 @@
+import socket
+
+from sentry_sdk import start_transaction
+from sentry_sdk.integrations.socket import SocketIntegration
+from tests.conftest import ApproxDict
+
+
+def test_getaddrinfo_trace(sentry_init, capture_events):
+    sentry_init(integrations=[SocketIntegration()], traces_sample_rate=1.0)
+    events = capture_events()
+
+    with start_transaction():
+        socket.getaddrinfo("example.com", 443)
+
+    (event,) = events
+    (span,) = event["spans"]
+
+    assert span["op"] == "socket.dns"
+    assert span["description"] == "example.com:443"
+    assert span["data"] == ApproxDict(
+        {
+            "host": "example.com",
+            "port": 443,
+        }
+    )
+
+
+def test_create_connection_trace(sentry_init, capture_events):
+    timeout = 10
+
+    sentry_init(integrations=[SocketIntegration()], traces_sample_rate=1.0)
+    events = capture_events()
+
+    with start_transaction():
+        socket.create_connection(("example.com", 443), timeout, None)
+
+    (event,) = events
+    (connect_span, dns_span) = event["spans"]
+    # since getaddrinfo is called inside create_connection, the event should also contain a dns span
+
+    assert connect_span["op"] == "socket.connection"
+    assert connect_span["description"] == "example.com:443"
+    assert connect_span["data"] == ApproxDict(
+        {
+            "address": ["example.com", 443],
+            "timeout": timeout,
+            "source_address": None,
+        }
+    )
+
+    assert dns_span["op"] == "socket.dns"
+    assert dns_span["description"] == "example.com:443"
+    assert dns_span["data"] == ApproxDict(
+        {
+            "host": "example.com",
+            "port": 443,
+        }
+    )
diff --git a/tests/integrations/spark/__init__.py b/tests/integrations/spark/__init__.py
new file mode 100644
index 0000000000..aa6d24a492
--- /dev/null
+++ b/tests/integrations/spark/__init__.py
@@ -0,0 +1,4 @@
+import pytest
+
+pytest.importorskip("pyspark")
+pytest.importorskip("py4j")
diff --git a/tests/integrations/spark/test_spark.py b/tests/integrations/spark/test_spark.py
index 00c0055f12..c1c111ee11 100644
--- a/tests/integrations/spark/test_spark.py
+++ b/tests/integrations/spark/test_spark.py
@@ -8,10 +8,6 @@
 
 from sentry_sdk.integrations.spark.spark_worker import SparkWorkerIntegration
 
-
-pytest.importorskip("pyspark")
-pytest.importorskip("py4j")
-
 from pyspark import SparkContext
 
 from py4j.protocol import Py4JJavaError
diff --git a/tests/integrations/sqlalchemy/__init__.py b/tests/integrations/sqlalchemy/__init__.py
index b430bf6d43..33c43a6872 100644
--- a/tests/integrations/sqlalchemy/__init__.py
+++ b/tests/integrations/sqlalchemy/__init__.py
@@ -1,3 +1,9 @@
+import os
+import sys
 import pytest
 
 pytest.importorskip("sqlalchemy")
+
+# Load `sqlalchemy_helpers` into the module search path to test that query
+# source path names are reported relative to that module. See
+# `test_query_source_with_module_in_search_path`.
+sys.path.insert(0, os.path.dirname(__file__))
diff --git a/tests/integrations/sqlalchemy/sqlalchemy_helpers/__init__.py b/tests/integrations/sqlalchemy/sqlalchemy_helpers/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/integrations/sqlalchemy/sqlalchemy_helpers/helpers.py b/tests/integrations/sqlalchemy/sqlalchemy_helpers/helpers.py
new file mode 100644
index 0000000000..ca65a88d25
--- /dev/null
+++ b/tests/integrations/sqlalchemy/sqlalchemy_helpers/helpers.py
@@ -0,0 +1,7 @@
+def add_model_to_session(model, session):
+    session.add(model)
+    session.commit()
+
+
+def query_first_model_from_session(model_klass, session):
+    return session.query(model_klass).first()
diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py
index 421a72ebae..08c8e29ec4 100644
--- a/tests/integrations/sqlalchemy/test_sqlalchemy.py
+++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py
@@ -1,15 +1,26 @@
-import sys
+import os
 import pytest
+import sys
+from datetime import datetime
 
+from sentry_sdk._compat import PY2
 from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
 from sqlalchemy.exc import IntegrityError
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import relationship, sessionmaker
+from sqlalchemy import text
 
 from sentry_sdk import capture_message, start_transaction, configure_scope
+from sentry_sdk.consts import DEFAULT_MAX_VALUE_LENGTH, SPANDATA
 from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
-from sentry_sdk.utils import json_dumps, MAX_STRING_LENGTH
 from sentry_sdk.serializer import MAX_EVENT_BYTES
+from sentry_sdk.tracing_utils import record_sql_queries
+from sentry_sdk.utils import json_dumps
+
+try:
+    from unittest import mock
+except ImportError:
+    import mock
 
 
 def test_orm_queries(sentry_init, capture_events):
@@ -74,7 +85,6 @@ class Address(Base):
     sys.version_info < (3,), reason="This sqla usage seems to be broken on Py2"
 )
 def test_transactions(sentry_init, capture_events, render_span_tree):
-
     sentry_init(
         integrations=[SqlalchemyIntegration()],
         _experiments={"record_sql_params": True},
@@ -119,6 +129,12 @@ class Address(Base):
 
     (event,) = events
 
+    for span in event["spans"]:
+        assert span["data"][SPANDATA.DB_SYSTEM] == "sqlite"
+        assert span["data"][SPANDATA.DB_NAME] == ":memory:"
+        assert SPANDATA.SERVER_ADDRESS not in span["data"]
+        assert SPANDATA.SERVER_PORT not in span["data"]
+
     assert (
         render_span_tree(event)
         == """\
@@ -139,18 +155,73 @@ class Address(Base):
     )
 
 
+@pytest.mark.skipif(
+    sys.version_info < (3,), reason="This sqla usage seems to be broken on Py2"
+)
+def test_transactions_no_engine_url(sentry_init, capture_events):
+    sentry_init(
+        integrations=[SqlalchemyIntegration()],
+        _experiments={"record_sql_params": True},
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    Base = declarative_base()  # noqa: N806
+
+    class Person(Base):
+        __tablename__ = "person"
+        id = Column(Integer, primary_key=True)
+        name = Column(String(250), nullable=False)
+
+    class Address(Base):
+        __tablename__ = "address"
+        id = Column(Integer, primary_key=True)
+        street_name = Column(String(250))
+        street_number = Column(String(250))
+        post_code = Column(String(250), nullable=False)
+        person_id = Column(Integer, ForeignKey("person.id"))
+        person = relationship(Person)
+
+    engine = create_engine("sqlite:///:memory:")
+    engine.url = None
+    Base.metadata.create_all(engine)
+
+    Session = sessionmaker(bind=engine)  # noqa: N806
+    session = Session()
+
+    with start_transaction(name="test_transaction", sampled=True):
+        with session.begin_nested():
+            session.query(Person).first()
+
+        for _ in range(2):
+            with pytest.raises(IntegrityError):
+                with session.begin_nested():
+                    session.add(Person(id=1, name="bob"))
+                    session.add(Person(id=1, name="bob"))
+
+        with session.begin_nested():
+            session.query(Person).first()
+
+    (event,) = events
+
+    for span in event["spans"]:
+        assert span["data"][SPANDATA.DB_SYSTEM] == "sqlite"
+        assert SPANDATA.DB_NAME not in span["data"]
+        assert SPANDATA.SERVER_ADDRESS not in span["data"]
+        assert SPANDATA.SERVER_PORT not in span["data"]
+
+
 def test_long_sql_query_preserved(sentry_init, capture_events):
     sentry_init(
         traces_sample_rate=1,
         integrations=[SqlalchemyIntegration()],
-        _experiments={"smart_transaction_trimming": True},
     )
     events = capture_events()
 
     engine = create_engine("sqlite:///:memory:")
     with start_transaction(name="test"):
         with engine.connect() as con:
-            con.execute(" UNION ".join("SELECT {}".format(i) for i in range(100)))
+            con.execute(text(" UNION ".join("SELECT {}".format(i) for i in range(100))))
 
     (event,) = events
     description = event["spans"][0]["description"]
@@ -158,15 +229,14 @@ def test_long_sql_query_preserved(sentry_init, capture_events):
     assert description.endswith("SELECT 98 UNION SELECT 99")
 
 
-def test_too_large_event_truncated(sentry_init, capture_events):
+def test_large_event_not_truncated(sentry_init, capture_events):
     sentry_init(
         traces_sample_rate=1,
         integrations=[SqlalchemyIntegration()],
-        _experiments={"smart_transaction_trimming": True},
     )
     events = capture_events()
 
-    long_str = "x" * (MAX_STRING_LENGTH + 10)
+    long_str = "x" * (DEFAULT_MAX_VALUE_LENGTH + 10)
 
     with configure_scope() as scope:
 
@@ -178,49 +248,415 @@ def processor(event, hint):
     engine = create_engine("sqlite:///:memory:")
     with start_transaction(name="test"):
         with engine.connect() as con:
-            for _ in range(2000):
-                con.execute(" UNION ".join("SELECT {}".format(i) for i in range(100)))
+            for _ in range(1500):
+                con.execute(
+                    text(" UNION ".join("SELECT {}".format(i) for i in range(100)))
+                )
 
     (event,) = events
 
-    # Because of attached metadata in the "_meta" key, we may send out a little
-    # bit more than MAX_EVENT_BYTES.
-    max_bytes = 1.2 * MAX_EVENT_BYTES
-    assert len(json_dumps(event)) < max_bytes
+    assert len(json_dumps(event)) > MAX_EVENT_BYTES
 
     # Some spans are discarded.
     assert len(event["spans"]) == 1000
 
-    # Some spans have their descriptions truncated. Because the test always
-    # generates the same amount of descriptions and truncation is deterministic,
-    # the number here should never change across test runs.
-    #
-    # Which exact span descriptions are truncated depends on the span durations
-    # of each SQL query and is non-deterministic.
-    assert len(event["_meta"]["spans"]) == 537
-
-    for i, span in enumerate(event["spans"]):
-        description = span["description"]
-
-        assert description.startswith("SELECT ")
-        if str(i) in event["_meta"]["spans"]:
-            # Description must have been truncated
-            assert len(description) == 10
-            assert description.endswith("...")
-        else:
-            # Description was not truncated, check for original length
-            assert len(description) == 1583
-            assert description.endswith("SELECT 98 UNION SELECT 99")
-
-    # Smoke check the meta info for one of the spans.
-    assert next(iter(event["_meta"]["spans"].values())) == {
-        "description": {"": {"len": 1583, "rem": [["!limit", "x", 7, 10]]}}
-    }
+    # Span descriptions are not truncated.
+    description = event["spans"][0]["description"]
+    assert len(description) == 1583
+    assert description.startswith("SELECT 0")
+    assert description.endswith("SELECT 98 UNION SELECT 99")
+
+    description = event["spans"][999]["description"]
+    assert len(description) == 1583
+    assert description.startswith("SELECT 0")
+    assert description.endswith("SELECT 98 UNION SELECT 99")
 
     # Smoke check that truncation of other fields has not changed.
-    assert len(event["message"]) == MAX_STRING_LENGTH
+    assert len(event["message"]) == DEFAULT_MAX_VALUE_LENGTH
 
     # The _meta for other truncated fields should be there as well.
     assert event["_meta"]["message"] == {
-        "": {"len": 522, "rem": [["!limit", "x", 509, 512]]}
+        "": {"len": 1034, "rem": [["!limit", "x", 1021, 1024]]}
+    }
+
+
+def test_engine_name_not_string(sentry_init):
+    sentry_init(
+        integrations=[SqlalchemyIntegration()],
+    )
+
+    engine = create_engine("sqlite:///:memory:")
+    engine.dialect.name = b"sqlite"
+
+    with engine.connect() as con:
+        con.execute(text("SELECT 0"))
+
+
+def test_query_source_disabled(sentry_init, capture_events):
+    sentry_options = {
+        "integrations": [SqlalchemyIntegration()],
+        "enable_tracing": True,
+        "enable_db_query_source": False,
+        "db_query_source_threshold_ms": 0,
+    }
+
+    sentry_init(**sentry_options)
+
+    events = capture_events()
+
+    with start_transaction(name="test_transaction", sampled=True):
+        Base = declarative_base()  # noqa: N806
+
+        class Person(Base):
+            __tablename__ = "person"
+            id = Column(Integer, primary_key=True)
+            name = Column(String(250), nullable=False)
+
+        engine = create_engine("sqlite:///:memory:")
+        Base.metadata.create_all(engine)
+
+        Session = sessionmaker(bind=engine)  # noqa: N806
+        session = Session()
+
+        bob = Person(name="Bob")
+        session.add(bob)
+
+        assert session.query(Person).first() == bob
+
+    (event,) = events
+
+    for span in event["spans"]:
+        if span.get("op") == "db" and span.get("description").startswith(
+            "SELECT person"
+        ):
+            data = span.get("data", {})
+
+            assert SPANDATA.CODE_LINENO not in data
+            assert SPANDATA.CODE_NAMESPACE not in data
+            assert SPANDATA.CODE_FILEPATH not in data
+            assert SPANDATA.CODE_FUNCTION not in data
+            break
+    else:
+        raise AssertionError("No db span found")
+
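+# (The `else` clause above belongs to the `for` loop: it runs only when the
+# loop finishes without hitting `break`, i.e. when no matching db span was
+# found. The same pattern recurs in the query-source tests below.)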
+
+@pytest.mark.parametrize("enable_db_query_source", [None, True])
+def test_query_source_enabled(sentry_init, capture_events, enable_db_query_source):
+    sentry_options = {
+        "integrations": [SqlalchemyIntegration()],
+        "enable_tracing": True,
+        "db_query_source_threshold_ms": 0,
     }
+    if enable_db_query_source is not None:
+        sentry_options["enable_db_query_source"] = enable_db_query_source
+
+    sentry_init(**sentry_options)
+
+    events = capture_events()
+
+    with start_transaction(name="test_transaction", sampled=True):
+        Base = declarative_base()  # noqa: N806
+
+        class Person(Base):
+            __tablename__ = "person"
+            id = Column(Integer, primary_key=True)
+            name = Column(String(250), nullable=False)
+
+        engine = create_engine("sqlite:///:memory:")
+        Base.metadata.create_all(engine)
+
+        Session = sessionmaker(bind=engine)  # noqa: N806
+        session = Session()
+
+        bob = Person(name="Bob")
+        session.add(bob)
+
+        assert session.query(Person).first() == bob
+
+    (event,) = events
+
+    for span in event["spans"]:
+        if span.get("op") == "db" and span.get("description").startswith(
+            "SELECT person"
+        ):
+            data = span.get("data", {})
+
+            assert SPANDATA.CODE_LINENO in data
+            assert SPANDATA.CODE_NAMESPACE in data
+            assert SPANDATA.CODE_FILEPATH in data
+            assert SPANDATA.CODE_FUNCTION in data
+            break
+    else:
+        raise AssertionError("No db span found")
+
+
+def test_query_source(sentry_init, capture_events):
+    sentry_init(
+        integrations=[SqlalchemyIntegration()],
+        enable_tracing=True,
+        enable_db_query_source=True,
+        db_query_source_threshold_ms=0,
+    )
+    events = capture_events()
+
+    with start_transaction(name="test_transaction", sampled=True):
+        Base = declarative_base()  # noqa: N806
+
+        class Person(Base):
+            __tablename__ = "person"
+            id = Column(Integer, primary_key=True)
+            name = Column(String(250), nullable=False)
+
+        engine = create_engine("sqlite:///:memory:")
+        Base.metadata.create_all(engine)
+
+        Session = sessionmaker(bind=engine)  # noqa: N806
+        session = Session()
+
+        bob = Person(name="Bob")
+        session.add(bob)
+
+        assert session.query(Person).first() == bob
+
+    (event,) = events
+
+    for span in event["spans"]:
+        if span.get("op") == "db" and span.get("description").startswith(
+            "SELECT person"
+        ):
+            data = span.get("data", {})
+
+            assert SPANDATA.CODE_LINENO in data
+            assert SPANDATA.CODE_NAMESPACE in data
+            assert SPANDATA.CODE_FILEPATH in data
+            assert SPANDATA.CODE_FUNCTION in data
+
+            assert type(data.get(SPANDATA.CODE_LINENO)) == int
+            assert data.get(SPANDATA.CODE_LINENO) > 0
+            assert (
+                data.get(SPANDATA.CODE_NAMESPACE)
+                == "tests.integrations.sqlalchemy.test_sqlalchemy"
+            )
+            assert data.get(SPANDATA.CODE_FILEPATH).endswith(
+                "tests/integrations/sqlalchemy/test_sqlalchemy.py"
+            )
+
+            is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
+            assert is_relative_path
+
+            assert data.get(SPANDATA.CODE_FUNCTION) == "test_query_source"
+            break
+    else:
+        raise AssertionError("No db span found")
+
+
+def test_query_source_with_module_in_search_path(sentry_init, capture_events):
+    """
+    Test that query source is relative to the path of the module it ran in
+    """
+    sentry_init(
+        integrations=[SqlalchemyIntegration()],
+        enable_tracing=True,
+        enable_db_query_source=True,
+        db_query_source_threshold_ms=0,
+    )
+    events = capture_events()
+
+    from sqlalchemy_helpers.helpers import (
+        add_model_to_session,
+        query_first_model_from_session,
+    )
+
+    with start_transaction(name="test_transaction", sampled=True):
+        Base = declarative_base()  # noqa: N806
+
+        class Person(Base):
+            __tablename__ = "person"
+            id = Column(Integer, primary_key=True)
+            name = Column(String(250), nullable=False)
+
+        engine = create_engine("sqlite:///:memory:")
+        Base.metadata.create_all(engine)
+
+        Session = sessionmaker(bind=engine)  # noqa: N806
+        session = Session()
+
+        bob = Person(name="Bob")
+
+        add_model_to_session(bob, session)
+
+        assert query_first_model_from_session(Person, session) == bob
+
+    (event,) = events
+
+    for span in event["spans"]:
+        if span.get("op") == "db" and span.get("description").startswith(
+            "SELECT person"
+        ):
+            data = span.get("data", {})
+
+            assert SPANDATA.CODE_LINENO in data
+            assert SPANDATA.CODE_NAMESPACE in data
+            assert SPANDATA.CODE_FILEPATH in data
+            assert SPANDATA.CODE_FUNCTION in data
+
+            assert type(data.get(SPANDATA.CODE_LINENO)) == int
+            assert data.get(SPANDATA.CODE_LINENO) > 0
+            if not PY2:
+                assert data.get(SPANDATA.CODE_NAMESPACE) == "sqlalchemy_helpers.helpers"
+                assert (
+                    data.get(SPANDATA.CODE_FILEPATH) == "sqlalchemy_helpers/helpers.py"
+                )
+
+            is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
+            assert is_relative_path
+
+            assert data.get(SPANDATA.CODE_FUNCTION) == "query_first_model_from_session"
+            break
+    else:
+        raise AssertionError("No db span found")
+
+
+def test_no_query_source_if_duration_too_short(sentry_init, capture_events):
+    sentry_init(
+        integrations=[SqlalchemyIntegration()],
+        enable_tracing=True,
+        enable_db_query_source=True,
+        db_query_source_threshold_ms=100,
+    )
+    events = capture_events()
+
+    with start_transaction(name="test_transaction", sampled=True):
+        Base = declarative_base()  # noqa: N806
+
+        class Person(Base):
+            __tablename__ = "person"
+            id = Column(Integer, primary_key=True)
+            name = Column(String(250), nullable=False)
+
+        engine = create_engine("sqlite:///:memory:")
+        Base.metadata.create_all(engine)
+
+        Session = sessionmaker(bind=engine)  # noqa: N806
+        session = Session()
+
+        bob = Person(name="Bob")
+        session.add(bob)
+
+        class fake_record_sql_queries:  # noqa: N801
+            def __init__(self, *args, **kwargs):
+                with record_sql_queries(*args, **kwargs) as span:
+                    self.span = span
+
+                self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0)
+                self.span.timestamp = datetime(2024, 1, 1, microsecond=99999)
+
+            def __enter__(self):
+                return self.span
+
+            def __exit__(self, type, value, traceback):
+                pass
+
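+        # The faked span lasts 99,999 microseconds (just under the 100 ms
+        # threshold), so no query source data should be attached.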
+        with mock.patch(
+            "sentry_sdk.integrations.sqlalchemy.record_sql_queries",
+            fake_record_sql_queries,
+        ):
+            assert session.query(Person).first() == bob
+
+    (event,) = events
+
+    for span in event["spans"]:
+        if span.get("op") == "db" and span.get("description").startswith(
+            "SELECT person"
+        ):
+            data = span.get("data", {})
+
+            assert SPANDATA.CODE_LINENO not in data
+            assert SPANDATA.CODE_NAMESPACE not in data
+            assert SPANDATA.CODE_FILEPATH not in data
+            assert SPANDATA.CODE_FUNCTION not in data
+
+            break
+    else:
+        raise AssertionError("No db span found")
+
+
+def test_query_source_if_duration_over_threshold(sentry_init, capture_events):
+    sentry_init(
+        integrations=[SqlalchemyIntegration()],
+        enable_tracing=True,
+        enable_db_query_source=True,
+        db_query_source_threshold_ms=100,
+    )
+    events = capture_events()
+
+    with start_transaction(name="test_transaction", sampled=True):
+        Base = declarative_base()  # noqa: N806
+
+        class Person(Base):
+            __tablename__ = "person"
+            id = Column(Integer, primary_key=True)
+            name = Column(String(250), nullable=False)
+
+        engine = create_engine("sqlite:///:memory:")
+        Base.metadata.create_all(engine)
+
+        Session = sessionmaker(bind=engine)  # noqa: N806
+        session = Session()
+
+        bob = Person(name="Bob")
+        session.add(bob)
+
+        class fake_record_sql_queries:  # noqa: N801
+            def __init__(self, *args, **kwargs):
+                with record_sql_queries(*args, **kwargs) as span:
+                    self.span = span
+
+                self.span.start_timestamp = datetime(2024, 1, 1, microsecond=0)
+                self.span.timestamp = datetime(2024, 1, 1, microsecond=101000)
+
+            def __enter__(self):
+                return self.span
+
+            def __exit__(self, type, value, traceback):
+                pass
+
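+        # Here the faked span lasts 101,000 microseconds (101 ms, over the
+        # 100 ms threshold), so query source data should be attached.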
+        with mock.patch(
+            "sentry_sdk.integrations.sqlalchemy.record_sql_queries",
+            fake_record_sql_queries,
+        ):
+            assert session.query(Person).first() == bob
+
+    (event,) = events
+
+    for span in event["spans"]:
+        if span.get("op") == "db" and span.get("description").startswith(
+            "SELECT person"
+        ):
+            data = span.get("data", {})
+
+            assert SPANDATA.CODE_LINENO in data
+            assert SPANDATA.CODE_NAMESPACE in data
+            assert SPANDATA.CODE_FILEPATH in data
+            assert SPANDATA.CODE_FUNCTION in data
+
+            assert type(data.get(SPANDATA.CODE_LINENO)) == int
+            assert data.get(SPANDATA.CODE_LINENO) > 0
+            assert (
+                data.get(SPANDATA.CODE_NAMESPACE)
+                == "tests.integrations.sqlalchemy.test_sqlalchemy"
+            )
+            assert data.get(SPANDATA.CODE_FILEPATH).endswith(
+                "tests/integrations/sqlalchemy/test_sqlalchemy.py"
+            )
+
+            is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
+            assert is_relative_path
+
+            assert (
+                data.get(SPANDATA.CODE_FUNCTION)
+                == "test_query_source_if_duration_over_threshold"
+            )
+            break
+    else:
+        raise AssertionError("No db span found")
diff --git a/tests/integrations/starlette/__init__.py b/tests/integrations/starlette/__init__.py
new file mode 100644
index 0000000000..c89ddf99a8
--- /dev/null
+++ b/tests/integrations/starlette/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("starlette")
diff --git a/tests/integrations/starlette/photo.jpg b/tests/integrations/starlette/photo.jpg
new file mode 100644
index 0000000000..52fbeef721
Binary files /dev/null and b/tests/integrations/starlette/photo.jpg differ
diff --git a/tests/integrations/starlette/templates/trace_meta.html b/tests/integrations/starlette/templates/trace_meta.html
new file mode 100644
index 0000000000..139fd16101
--- /dev/null
+++ b/tests/integrations/starlette/templates/trace_meta.html
@@ -0,0 +1 @@
+{{ sentry_trace_meta }}
diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py
new file mode 100644
index 0000000000..202f8b53de
--- /dev/null
+++ b/tests/integrations/starlette/test_starlette.py
@@ -0,0 +1,1116 @@
+import asyncio
+import base64
+import functools
+import json
+import logging
+import os
+import re
+import threading
+
+import pytest
+
+from sentry_sdk import last_event_id, capture_exception
+from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+from sentry_sdk.utils import parse_version
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+from sentry_sdk import capture_message
+from sentry_sdk.integrations.starlette import (
+    StarletteIntegration,
+    StarletteRequestExtractor,
+)
+
+import starlette
+from starlette.authentication import (
+    AuthCredentials,
+    AuthenticationBackend,
+    AuthenticationError,
+    SimpleUser,
+)
+from starlette.middleware import Middleware
+from starlette.middleware.authentication import AuthenticationMiddleware
+from starlette.middleware.trustedhost import TrustedHostMiddleware
+from starlette.testclient import TestClient
+
+
+STARLETTE_VERSION = parse_version(starlette.__version__)
+
+PICTURE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "photo.jpg")
+
+BODY_JSON = {"some": "json", "for": "testing", "nested": {"numbers": 123}}
+
+BODY_FORM = """--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="username"\r\n\r\nJane\r\n--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="password"\r\n\r\nhello123\r\n--fd721ef49ea403a6\r\nContent-Disposition: form-data; name="photo"; filename="photo.jpg"\r\nContent-Type: image/jpg\r\nContent-Transfer-Encoding: base64\r\n\r\n{{image_data}}\r\n--fd721ef49ea403a6--\r\n""".replace(
+    "{{image_data}}", str(base64.b64encode(open(PICTURE, "rb").read()))
+)
+
+FORM_RECEIVE_MESSAGES = [
+    {"type": "http.request", "body": BODY_FORM.encode("utf-8")},
+    {"type": "http.disconnect"},
+]
+
+JSON_RECEIVE_MESSAGES = [
+    {"type": "http.request", "body": json.dumps(BODY_JSON).encode("utf-8")},
+    {"type": "http.disconnect"},
+]
+
+PARSED_FORM = starlette.datastructures.FormData(
+    [
+        ("username", "Jane"),
+        ("password", "hello123"),
+        (
+            "photo",
+            starlette.datastructures.UploadFile(
+                filename="photo.jpg",
+                file=open(PICTURE, "rb"),
+            ),
+        ),
+    ]
+)
+
+# Dummy ASGI scope for creating mock Starlette requests
+SCOPE = {
+    "client": ("172.29.0.10", 34784),
+    "headers": [
+        [b"host", b"example.com"],
+        [b"user-agent", b"Mozilla/5.0 Gecko/20100101 Firefox/60.0"],
+        [b"content-type", b"application/json"],
+        [b"accept-language", b"en-US,en;q=0.5"],
+        [b"accept-encoding", b"gzip, deflate, br"],
+        [b"upgrade-insecure-requests", b"1"],
+        [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
+    ],
+    "http_version": "0.0",
+    "method": "GET",
+    "path": "/path",
+    "query_string": b"qs=hello",
+    "scheme": "http",
+    "server": ("172.28.0.10", 8000),
+    "type": "http",
+}
+
+
+async def _mock_receive(msg):
+    return msg
+
+
+def starlette_app_factory(middleware=None, debug=True):
+    template_dir = os.path.join(
+        os.getcwd(), "tests", "integrations", "starlette", "templates"
+    )
+    templates = Jinja2Templates(directory=template_dir)
+
+    async def _homepage(request):
+        1 / 0
+        return starlette.responses.JSONResponse({"status": "ok"})
+
+    async def _custom_error(request):
+        raise Exception("Too Hot")
+
+    async def _message(request):
+        capture_message("hi")
+        return starlette.responses.JSONResponse({"status": "ok"})
+
+    async def _message_with_id(request):
+        capture_message("hi")
+        return starlette.responses.JSONResponse({"status": "ok"})
+
+    def _thread_ids_sync(request):
+        return starlette.responses.JSONResponse(
+            {
+                "main": threading.main_thread().ident,
+                "active": threading.current_thread().ident,
+            }
+        )
+
+    async def _thread_ids_async(request):
+        return starlette.responses.JSONResponse(
+            {
+                "main": threading.main_thread().ident,
+                "active": threading.current_thread().ident,
+            }
+        )
+
+    async def _render_template(request):
+        hub = Hub.current
+        capture_message(hub.get_traceparent() + "\n" + hub.get_baggage())
+
+        template_context = {
+            "request": request,
+            "msg": "Hello Template World!",
+        }
+        return templates.TemplateResponse("trace_meta.html", template_context)
+
+    app = starlette.applications.Starlette(
+        debug=debug,
+        routes=[
+            starlette.routing.Route("/some_url", _homepage),
+            starlette.routing.Route("/custom_error", _custom_error),
+            starlette.routing.Route("/message", _message),
+            starlette.routing.Route("/message/{message_id}", _message_with_id),
+            starlette.routing.Route("/sync/thread_ids", _thread_ids_sync),
+            starlette.routing.Route("/async/thread_ids", _thread_ids_async),
+            starlette.routing.Route("/render_template", _render_template),
+        ],
+        middleware=middleware,
+    )
+
+    return app
+
+
+def async_return(result):
+    f = asyncio.Future()
+    f.set_result(result)
+    return f
+
+
+class BasicAuthBackend(AuthenticationBackend):
+    async def authenticate(self, conn):
+        if "Authorization" not in conn.headers:
+            return
+
+        auth = conn.headers["Authorization"]
+        try:
+            scheme, credentials = auth.split()
+            if scheme.lower() != "basic":
+                return
+            decoded = base64.b64decode(credentials).decode("ascii")
+        except (ValueError, UnicodeDecodeError):
+            raise AuthenticationError("Invalid basic auth credentials")
+
+        username, _, password = decoded.partition(":")
+
+        # TODO: You'd want to verify the username and password here.
+
+        return AuthCredentials(["authenticated"]), SimpleUser(username)
+
+
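+# Async iterator that yields a string one byte at a time, emulating a
+# streaming ASGI request body.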
+class AsyncIterator:
+    def __init__(self, data):
+        self.iter = iter(bytes(data, "utf-8"))
+
+    def __aiter__(self):
+        return self
+
+    async def __anext__(self):
+        try:
+            return bytes([next(self.iter)])
+        except StopIteration:
+            raise StopAsyncIteration
+
+
+class SampleMiddleware:
+    def __init__(self, app):
+        self.app = app
+
+    async def __call__(self, scope, receive, send):
+        # only handle http requests
+        if scope["type"] != "http":
+            await self.app(scope, receive, send)
+            return
+
+        async def do_stuff(message):
+            if message["type"] == "http.response.start":
+                # do something here.
+                pass
+
+            await send(message)
+
+        await self.app(scope, receive, do_stuff)
+
+
+class SampleReceiveSendMiddleware:
+    def __init__(self, app):
+        self.app = app
+
+    async def __call__(self, scope, receive, send):
+        message = await receive()
+        assert message
+        assert message["type"] == "http.request"
+
+        send_output = await send({"type": "something-unimportant"})
+        assert send_output is None
+
+        await self.app(scope, receive, send)
+
+
+class SamplePartialReceiveSendMiddleware:
+    def __init__(self, app):
+        self.app = app
+
+    async def __call__(self, scope, receive, send):
+        message = await receive()
+        assert message
+        assert message["type"] == "http.request"
+
+        send_output = await send({"type": "something-unimportant"})
+        assert send_output is None
+
+        async def my_receive(*args, **kwargs):
+            pass
+
+        async def my_send(*args, **kwargs):
+            pass
+
+        partial_receive = functools.partial(my_receive)
+        partial_send = functools.partial(my_send)
+
+        await self.app(scope, partial_receive, partial_send)
+
+
+@pytest.mark.asyncio
+async def test_starlettrequestextractor_content_length(sentry_init):
+    scope = SCOPE.copy()
+    scope["headers"] = [
+        [b"content-length", str(len(json.dumps(BODY_JSON))).encode()],
+    ]
+    starlette_request = starlette.requests.Request(scope)
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    assert await extractor.content_length() == len(json.dumps(BODY_JSON))
+
+
+@pytest.mark.asyncio
+async def test_starlettrequestextractor_cookies(sentry_init):
+    starlette_request = starlette.requests.Request(SCOPE)
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    assert extractor.cookies() == {
+        "tasty_cookie": "strawberry",
+        "yummy_cookie": "choco",
+    }
+
+
+@pytest.mark.asyncio
+async def test_starlettrequestextractor_json(sentry_init):
+    starlette_request = starlette.requests.Request(SCOPE)
+
+    # Mock the async `_receive()` (this approach works on Python 3.7+)
+    side_effect = [_mock_receive(msg) for msg in JSON_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
+
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    assert extractor.is_json()
+    assert await extractor.json() == BODY_JSON
+
+
+@pytest.mark.asyncio
+async def test_starlettrequestextractor_form(sentry_init):
+    scope = SCOPE.copy()
+    scope["headers"] = [
+        [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"],
+    ]
+    # TODO add test for content-type: "application/x-www-form-urlencoded"
+
+    starlette_request = starlette.requests.Request(scope)
+
+    # Mock the async `_receive()` (this approach works on Python 3.7+)
+    side_effect = [_mock_receive(msg) for msg in FORM_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
+
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    form_data = await extractor.form()
+    assert form_data.keys() == PARSED_FORM.keys()
+    assert form_data["username"] == PARSED_FORM["username"]
+    assert form_data["password"] == PARSED_FORM["password"]
+    assert form_data["photo"].filename == PARSED_FORM["photo"].filename
+
+    # Make sure we can still read the body
+    # after already reading it with extractor.form() above.
+    body = await extractor.request.body()
+    assert body
+
+
+@pytest.mark.asyncio
+async def test_starlettrequestextractor_body_consumed_twice(
+    sentry_init, capture_events
+):
+    """
+    Starlette caches the request data when you read it via `request.json()`
+    or `request.body()`, but it does NOT cache it when using `request.form()`.
+    This creates an edge case: the Sentry Starlette integration reads the body
+    using `.form()`, but the user then wants to read it using `.body()`, and
+    the underlying stream can neither be consumed twice nor served from a cache.
+
+    We have fixed this in `StarletteRequestExtractor.form()` by consuming the
+    body first with `.body()` (to put it into the `_body` cache) and then
+    consuming it with `.form()`.
+
+    If this behavior changes in Starlette and `request.form()` also starts
+    caching the body, this test will fail.
+
+    See also https://github.com/encode/starlette/discussions/1933
+    """
+    scope = SCOPE.copy()
+    scope["headers"] = [
+        [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"],
+    ]
+
+    starlette_request = starlette.requests.Request(scope)
+
+    # Mock the async `_receive()` (this approach works on Python 3.7+)
+    side_effect = [_mock_receive(msg) for msg in FORM_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
+
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    await extractor.request.form()
+
+    with pytest.raises(RuntimeError):
+        await extractor.request.body()
+
+
+@pytest.mark.asyncio
+async def test_starlettrequestextractor_extract_request_info_too_big(sentry_init):
+    sentry_init(
+        send_default_pii=True,
+        integrations=[StarletteIntegration()],
+    )
+    scope = SCOPE.copy()
+    scope["headers"] = [
+        [b"content-type", b"multipart/form-data; boundary=fd721ef49ea403a6"],
+        [b"content-length", str(len(BODY_FORM)).encode()],
+        [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
+    ]
+    starlette_request = starlette.requests.Request(scope)
+
+    # Mock the async `_receive()` (this approach works on Python 3.7+)
+    side_effect = [_mock_receive(msg) for msg in FORM_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
+
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    request_info = await extractor.extract_request_info()
+
+    assert request_info
+    assert request_info["cookies"] == {
+        "tasty_cookie": "strawberry",
+        "yummy_cookie": "choco",
+    }
+    # Because the request is too big, only the AnnotatedValue is extracted.
+    assert request_info["data"].metadata == {"rem": [["!config", "x"]]}
+
+
+@pytest.mark.asyncio
+async def test_starlettrequestextractor_extract_request_info(sentry_init):
+    sentry_init(
+        send_default_pii=True,
+        integrations=[StarletteIntegration()],
+    )
+    scope = SCOPE.copy()
+    scope["headers"] = [
+        [b"content-type", b"application/json"],
+        [b"content-length", str(len(json.dumps(BODY_JSON))).encode()],
+        [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
+    ]
+
+    starlette_request = starlette.requests.Request(scope)
+
+    # Mock the async `_receive()` (this approach works on Python 3.7+)
+    side_effect = [_mock_receive(msg) for msg in JSON_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
+
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    request_info = await extractor.extract_request_info()
+
+    assert request_info
+    assert request_info["cookies"] == {
+        "tasty_cookie": "strawberry",
+        "yummy_cookie": "choco",
+    }
+    assert request_info["data"] == BODY_JSON
+
+
+@pytest.mark.asyncio
+async def test_starlettrequestextractor_extract_request_info_no_pii(sentry_init):
+    sentry_init(
+        send_default_pii=False,
+        integrations=[StarletteIntegration()],
+    )
+    scope = SCOPE.copy()
+    scope["headers"] = [
+        [b"content-type", b"application/json"],
+        [b"content-length", str(len(json.dumps(BODY_JSON))).encode()],
+        [b"cookie", b"yummy_cookie=choco; tasty_cookie=strawberry"],
+    ]
+
+    starlette_request = starlette.requests.Request(scope)
+
+    # Mock the async `_receive()` (this approach works on Python 3.7+)
+    side_effect = [_mock_receive(msg) for msg in JSON_RECEIVE_MESSAGES]
+    starlette_request._receive = mock.Mock(side_effect=side_effect)
+
+    extractor = StarletteRequestExtractor(starlette_request)
+
+    request_info = await extractor.extract_request_info()
+
+    assert request_info
+    assert "cookies" not in request_info
+    assert request_info["data"] == BODY_JSON
+
+
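+# transaction_style "url" reports the matched route pattern, while "endpoint"
+# reports the qualified name of the handler function.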
+@pytest.mark.parametrize(
+    "url,transaction_style,expected_transaction,expected_source",
+    [
+        (
+            "/message",
+            "url",
+            "/message",
+            "route",
+        ),
+        (
+            "/message",
+            "endpoint",
+            "tests.integrations.starlette.test_starlette.starlette_app_factory.._message",
+            "component",
+        ),
+        (
+            "/message/123456",
+            "url",
+            "/message/{message_id}",
+            "route",
+        ),
+        (
+            "/message/123456",
+            "endpoint",
+            "tests.integrations.starlette.test_starlette.starlette_app_factory.._message_with_id",
+            "component",
+        ),
+    ],
+)
+def test_transaction_style(
+    sentry_init,
+    capture_events,
+    url,
+    transaction_style,
+    expected_transaction,
+    expected_source,
+):
+    sentry_init(
+        integrations=[StarletteIntegration(transaction_style=transaction_style)],
+    )
+    starlette_app = starlette_app_factory()
+
+    events = capture_events()
+
+    client = TestClient(starlette_app)
+    client.get(url)
+
+    (event,) = events
+    assert event["transaction"] == expected_transaction
+    assert event["transaction_info"] == {"source": expected_source}
+
+
+@pytest.mark.parametrize(
+    "test_url,expected_error,expected_message",
+    [
+        ("/some_url", ZeroDivisionError, "division by zero"),
+        ("/custom_error", Exception, "Too Hot"),
+    ],
+)
+def test_catch_exceptions(
+    sentry_init,
+    capture_exceptions,
+    capture_events,
+    test_url,
+    expected_error,
+    expected_message,
+):
+    sentry_init(integrations=[StarletteIntegration()])
+    starlette_app = starlette_app_factory()
+    exceptions = capture_exceptions()
+    events = capture_events()
+
+    client = TestClient(starlette_app)
+    try:
+        client.get(test_url)
+    except Exception:
+        pass
+
+    (exc,) = exceptions
+    assert isinstance(exc, expected_error)
+    assert str(exc) == expected_message
+
+    (event,) = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "starlette"
+
+
+def test_user_information_error(sentry_init, capture_events):
+    sentry_init(
+        send_default_pii=True,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(
+        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
+    )
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/custom_error", auth=("Gabriela", "hello123"))
+    except Exception:
+        pass
+
+    (event,) = events
+    user = event.get("user", None)
+    assert user
+    assert "username" in user
+    assert user["username"] == "Gabriela"
+
+
+def test_user_information_error_no_pii(sentry_init, capture_events):
+    sentry_init(
+        send_default_pii=False,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(
+        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
+    )
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/custom_error", auth=("Gabriela", "hello123"))
+    except Exception:
+        pass
+
+    (event,) = events
+    assert "user" not in event
+
+
+def test_user_information_transaction(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(
+        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
+    )
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    client.get("/message", auth=("Gabriela", "hello123"))
+
+    (_, transaction_event) = events
+    user = transaction_event.get("user", None)
+    assert user
+    assert "username" in user
+    assert user["username"] == "Gabriela"
+
+
+def test_user_information_transaction_no_pii(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        send_default_pii=False,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(
+        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
+    )
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    client.get("/message", auth=("Gabriela", "hello123"))
+
+    (_, transaction_event) = events
+    assert "user" not in transaction_event
+
+
+def test_middleware_spans(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(
+        middleware=[Middleware(AuthenticationMiddleware, backend=BasicAuthBackend())]
+    )
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/message", auth=("Gabriela", "hello123"))
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = [
+        "ServerErrorMiddleware",
+        "AuthenticationMiddleware",
+        "ExceptionMiddleware",
+    ]
+
+    idx = 0
+    for span in transaction_event["spans"]:
+        if span["op"] == "middleware.starlette":
+            assert span["description"] == expected[idx]
+            assert span["tags"]["starlette.middleware_name"] == expected[idx]
+            idx += 1
+
+
+def test_middleware_callback_spans(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(middleware=[Middleware(SampleMiddleware)])
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/message", auth=("Gabriela", "hello123"))
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = [
+        {
+            "op": "middleware.starlette",
+            "description": "ServerErrorMiddleware",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette",
+            "description": "SampleMiddleware",
+            "tags": {"starlette.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlette",
+            "description": "ExceptionMiddleware",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "SampleMiddleware.__call__..do_stuff",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "ServerErrorMiddleware.__call__.._send",
+            "tags": {"starlette.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "SampleMiddleware.__call__..do_stuff",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "ServerErrorMiddleware.__call__.._send",
+            "tags": {"starlette.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+    ]
+
+    idx = 0
+    for span in transaction_event["spans"]:
+        assert span["op"] == expected[idx]["op"]
+        assert span["description"] == expected[idx]["description"]
+        assert span["tags"] == expected[idx]["tags"]
+        idx += 1
+
+
+def test_middleware_receive_send(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(
+        middleware=[Middleware(SampleReceiveSendMiddleware)]
+    )
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        # NOTE: the assert statements checking
+        # for correct behaviour are in `SampleReceiveSendMiddleware`!
+        client.get("/message", auth=("Gabriela", "hello123"))
+    except Exception:
+        pass
+
+
+def test_middleware_partial_receive_send(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarletteIntegration()],
+    )
+    starlette_app = starlette_app_factory(
+        middleware=[Middleware(SamplePartialReceiveSendMiddleware)]
+    )
+    events = capture_events()
+
+    client = TestClient(starlette_app, raise_server_exceptions=False)
+    try:
+        client.get("/message", auth=("Gabriela", "hello123"))
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = [
+        {
+            "op": "middleware.starlette",
+            "description": "ServerErrorMiddleware",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette",
+            "description": "SamplePartialReceiveSendMiddleware",
+            "tags": {"starlette.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.receive",
+            "description": (
+                "_ASGIAdapter.send..receive"
+                if STARLETTE_VERSION < (0, 21)
+                else "_TestClientTransport.handle_request..receive"
+            ),
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "ServerErrorMiddleware.__call__.._send",
+            "tags": {"starlette.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
+            "tags": {"starlette.middleware_name": "ServerErrorMiddleware"},
+        },
+        {
+            "op": "middleware.starlette",
+            "description": "ExceptionMiddleware",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "functools.partial(.my_send at ",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+        {
+            "op": "middleware.starlette.send",
+            "description": "functools.partial(.my_send at ",
+            "tags": {"starlette.middleware_name": "ExceptionMiddleware"},
+        },
+    ]
+
+    idx = 0
+    for span in transaction_event["spans"]:
+        assert span["op"] == expected[idx]["op"]
+        assert span["description"].startswith(expected[idx]["description"])
+        assert span["tags"] == expected[idx]["tags"]
+        idx += 1
+
+
+def test_last_event_id(sentry_init, capture_events):
+    sentry_init(
+        integrations=[StarletteIntegration()],
+    )
+    events = capture_events()
+
+    def handler(request, exc):
+        capture_exception(exc)
+        return starlette.responses.PlainTextResponse(last_event_id(), status_code=500)
+
+    app = starlette_app_factory(debug=False)
+    app.add_exception_handler(500, handler)
+
+    client = TestClient(SentryAsgiMiddleware(app), raise_server_exceptions=False)
+    response = client.get("/custom_error")
+    assert response.status_code == 500
+
+    event = events[0]
+    assert response.content.strip().decode("ascii") == event["event_id"]
+    (exception,) = event["exception"]["values"]
+    assert exception["type"] == "Exception"
+    assert exception["value"] == "Too Hot"
+
+
+def test_legacy_setup(
+    sentry_init,
+    capture_events,
+):
+    # Check that behaviour does not change
+    # if the user just adds the new Integration
+    # and forgets to remove SentryAsgiMiddleware
+    sentry_init()
+    app = starlette_app_factory()
+    asgi_app = SentryAsgiMiddleware(app)
+
+    events = capture_events()
+
+    client = TestClient(asgi_app)
+    client.get("/message/123456")
+
+    (event,) = events
+    assert event["transaction"] == "/message/{message_id}"
+
+
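+# PROFILE_MINIMUM_SAMPLES is patched to 0 so that even a single short request
+# produces a profile envelope.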
+@pytest.mark.parametrize("endpoint", ["/sync/thread_ids", "/async/thread_ids"])
+@mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0)
+def test_active_thread_id(sentry_init, capture_envelopes, teardown_profiling, endpoint):
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
+    )
+    app = starlette_app_factory()
+    asgi_app = SentryAsgiMiddleware(app)
+
+    envelopes = capture_envelopes()
+
+    client = TestClient(asgi_app)
+    response = client.get(endpoint)
+    assert response.status_code == 200
+
+    data = json.loads(response.content)
+
+    envelopes = list(envelopes)
+    assert len(envelopes) == 1
+
+    profiles = [item for item in envelopes[0].items if item.type == "profile"]
+    assert len(profiles) == 1
+
+    for profile in profiles:
+        transactions = profile.payload.json["transactions"]
+        assert len(transactions) == 1
+        assert str(data["active"]) == transactions[0]["active_thread_id"]
+
+
+def test_original_request_not_scrubbed(sentry_init, capture_events):
+    sentry_init(integrations=[StarletteIntegration()])
+
+    events = capture_events()
+
+    async def _error(request):
+        logging.critical("Oh no!")
+        assert request.headers["Authorization"] == "Bearer ohno"
+        assert await request.json() == {"password": "ohno"}
+        return starlette.responses.JSONResponse({"status": "Oh no!"})
+
+    app = starlette.applications.Starlette(
+        routes=[
+            starlette.routing.Route("/error", _error, methods=["POST"]),
+        ],
+    )
+
+    client = TestClient(app)
+    client.post(
+        "/error",
+        json={"password": "ohno"},
+        headers={"Authorization": "Bearer ohno"},
+    )
+
+    event = events[0]
+    assert event["request"]["data"] == {"password": "[Filtered]"}
+    assert event["request"]["headers"]["authorization"] == "[Filtered]"
+
+
+@pytest.mark.skipif(STARLETTE_VERSION < (0, 24), reason="Requires Starlette >= 0.24")
+def test_template_tracing_meta(sentry_init, capture_events):
+    sentry_init(
+        auto_enabling_integrations=False,  # Make sure that the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
+        integrations=[StarletteIntegration()],
+    )
+    events = capture_events()
+
+    app = starlette_app_factory()
+
+    client = TestClient(app)
+    response = client.get("/render_template")
+    assert response.status_code == 200
+
+    rendered_meta = response.text
+    traceparent, baggage = events[0]["message"].split("\n")
+    assert traceparent != ""
+    assert baggage != ""
+
+    match = re.match(
+        r'^<meta name="sentry-trace" content="([^\"]*)"><meta name="baggage" content="([^\"]*)">',
+        rendered_meta,
+    )
+    assert match is not None
+    assert match.group(1) == traceparent
+
+    # Baggage entries may be rendered in any order, so compare them as sorted lists.
+    rendered_baggage = match.group(2)
+    assert sorted(rendered_baggage.split(",")) == sorted(baggage.split(","))
+
+
+@pytest.mark.parametrize(
+    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
+    [
+        (
+            "/message/123456",
+            "endpoint",
+            "tests.integrations.starlette.test_starlette.starlette_app_factory.._message_with_id",
+            "component",
+        ),
+        (
+            "/message/123456",
+            "url",
+            "/message/{message_id}",
+            "route",
+        ),
+    ],
+)
+def test_transaction_name(
+    sentry_init,
+    request_url,
+    transaction_style,
+    expected_transaction_name,
+    expected_transaction_source,
+    capture_envelopes,
+):
+    """
+    Tests that the transaction name is something meaningful.
+    """
+    sentry_init(
+        auto_enabling_integrations=False,  # Make sure that the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
+        integrations=[StarletteIntegration(transaction_style=transaction_style)],
+        traces_sample_rate=1.0,
+        debug=True,
+    )
+
+    envelopes = capture_envelopes()
+
+    app = starlette_app_factory()
+    client = TestClient(app)
+    client.get(request_url)
+
+    (_, transaction_envelope) = envelopes
+    transaction_event = transaction_envelope.get_transaction_event()
+
+    assert transaction_event["transaction"] == expected_transaction_name
+    assert (
+        transaction_event["transaction_info"]["source"] == expected_transaction_source
+    )
+
+
+@pytest.mark.parametrize(
+    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
+    [
+        (
+            "/message/123456",
+            "endpoint",
+            "http://testserver/message/123456",
+            "url",
+        ),
+        (
+            "/message/123456",
+            "url",
+            "http://testserver/message/123456",
+            "url",
+        ),
+    ],
+)
+def test_transaction_name_in_traces_sampler(
+    sentry_init,
+    request_url,
+    transaction_style,
+    expected_transaction_name,
+    expected_transaction_source,
+):
+    """
+    Tests that a custom traces_sampler has a meaningful transaction name.
+    In this case the URL or endpoint, because we do not have the route yet.
+    """
+
+    def dummy_traces_sampler(sampling_context):
+        assert (
+            sampling_context["transaction_context"]["name"] == expected_transaction_name
+        )
+        assert (
+            sampling_context["transaction_context"]["source"]
+            == expected_transaction_source
+        )
+
+    sentry_init(
+        auto_enabling_integrations=False,  # Make sure that the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
+        integrations=[StarletteIntegration(transaction_style=transaction_style)],
+        traces_sampler=dummy_traces_sampler,
+        traces_sample_rate=1.0,
+        debug=True,
+    )
+
+    app = starlette_app_factory()
+    client = TestClient(app)
+    client.get(request_url)
+
+
+@pytest.mark.parametrize(
+    "request_url,transaction_style,expected_transaction_name,expected_transaction_source",
+    [
+        (
+            "/message/123456",
+            "endpoint",
+            "starlette.middleware.trustedhost.TrustedHostMiddleware",
+            "component",
+        ),
+        (
+            "/message/123456",
+            "url",
+            "http://testserver/message/123456",
+            "url",
+        ),
+    ],
+)
+def test_transaction_name_in_middleware(
+    sentry_init,
+    request_url,
+    transaction_style,
+    expected_transaction_name,
+    expected_transaction_source,
+    capture_envelopes,
+):
+    """
+    Tests that the transaction name is something meaningful.
+    """
+    sentry_init(
+        auto_enabling_integrations=False,  # Make sure that the httpx integration is not added, because it adds tracing information to the Starlette test client's requests.
+        integrations=[
+            StarletteIntegration(transaction_style=transaction_style),
+        ],
+        traces_sample_rate=1.0,
+        debug=True,
+    )
+
+    envelopes = capture_envelopes()
+
+    middleware = [
+        Middleware(
+            TrustedHostMiddleware,
+            allowed_hosts=["example.com", "*.example.com"],
+        ),
+    ]
+
+    app = starlette_app_factory(middleware=middleware)
+    client = TestClient(app)
+    client.get(request_url)
+
+    (transaction_envelope,) = envelopes
+    transaction_event = transaction_envelope.get_transaction_event()
+
+    assert transaction_event["contexts"]["response"]["status_code"] == 400
+    assert transaction_event["transaction"] == expected_transaction_name
+    assert (
+        transaction_event["transaction_info"]["source"] == expected_transaction_source
+    )
diff --git a/tests/integrations/starlite/__init__.py b/tests/integrations/starlite/__init__.py
new file mode 100644
index 0000000000..4c1037671d
--- /dev/null
+++ b/tests/integrations/starlite/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("starlite")
diff --git a/tests/integrations/starlite/test_starlite.py b/tests/integrations/starlite/test_starlite.py
new file mode 100644
index 0000000000..0412133f5e
--- /dev/null
+++ b/tests/integrations/starlite/test_starlite.py
@@ -0,0 +1,317 @@
+import functools
+
+import pytest
+
+from sentry_sdk import capture_exception, capture_message, last_event_id
+from sentry_sdk.integrations.starlite import StarliteIntegration
+
+from typing import Any, Dict
+
+import starlite
+from starlite import AbstractMiddleware, LoggingConfig, Starlite, get, Controller
+from starlite.middleware import LoggingMiddlewareConfig, RateLimitConfig
+from starlite.middleware.session.memory_backend import MemoryBackendConfig
+from starlite.status_codes import HTTP_500_INTERNAL_SERVER_ERROR
+from starlite.testing import TestClient
+
+
+class SampleMiddleware(AbstractMiddleware):
+    async def __call__(self, scope, receive, send) -> None:
+        async def do_stuff(message):
+            if message["type"] == "http.response.start":
+                # do something here.
+                pass
+            await send(message)
+
+        await self.app(scope, receive, do_stuff)
+
+
+class SampleReceiveSendMiddleware(AbstractMiddleware):
+    async def __call__(self, scope, receive, send):
+        message = await receive()
+        assert message
+        assert message["type"] == "http.request"
+
+        send_output = await send({"type": "something-unimportant"})
+        assert send_output is None
+
+        await self.app(scope, receive, send)
+
+
+class SamplePartialReceiveSendMiddleware(AbstractMiddleware):
+    async def __call__(self, scope, receive, send):
+        message = await receive()
+        assert message
+        assert message["type"] == "http.request"
+
+        send_output = await send({"type": "something-unimportant"})
+        assert send_output is None
+
+        async def my_receive(*args, **kwargs):
+            pass
+
+        async def my_send(*args, **kwargs):
+            pass
+
+        partial_receive = functools.partial(my_receive)
+        partial_send = functools.partial(my_send)
+
+        await self.app(scope, partial_receive, partial_send)
+
+
+def starlite_app_factory(middleware=None, debug=True, exception_handlers=None):
+    class MyController(Controller):
+        path = "/controller"
+
+        @get("/error")
+        async def controller_error(self) -> None:
+            raise Exception("Whoa")
+
+    @get("/some_url")
+    async def homepage_handler() -> Dict[str, Any]:
+        1 / 0
+        return {"status": "ok"}
+
+    @get("/custom_error", name="custom_name")
+    async def custom_error() -> Any:
+        raise Exception("Too Hot")
+
+    @get("/message")
+    async def message() -> Dict[str, Any]:
+        capture_message("hi")
+        return {"status": "ok"}
+
+    @get("/message/{message_id:str}")
+    async def message_with_id() -> Dict[str, Any]:
+        capture_message("hi")
+        return {"status": "ok"}
+
+    logging_config = LoggingConfig()
+
+    app = Starlite(
+        route_handlers=[
+            homepage_handler,
+            custom_error,
+            message,
+            message_with_id,
+            MyController,
+        ],
+        debug=debug,
+        middleware=middleware,
+        logging_config=logging_config,
+        exception_handlers=exception_handlers,
+    )
+
+    return app
+
+
+@pytest.mark.parametrize(
+    "test_url,expected_error,expected_message,expected_tx_name",
+    [
+        (
+            "/some_url",
+            ZeroDivisionError,
+            "division by zero",
+            "tests.integrations.starlite.test_starlite.starlite_app_factory..homepage_handler",
+        ),
+        (
+            "/custom_error",
+            Exception,
+            "Too Hot",
+            "custom_name",
+        ),
+        (
+            "/controller/error",
+            Exception,
+            "Whoa",
+            "partial(.MyController.controller_error>)",
+        ),
+    ],
+)
+def test_catch_exceptions(
+    sentry_init,
+    capture_exceptions,
+    capture_events,
+    test_url,
+    expected_error,
+    expected_message,
+    expected_tx_name,
+):
+    sentry_init(integrations=[StarliteIntegration()])
+    starlite_app = starlite_app_factory()
+    exceptions = capture_exceptions()
+    events = capture_events()
+
+    client = TestClient(starlite_app)
+    try:
+        client.get(test_url)
+    except Exception:
+        pass
+
+    (exc,) = exceptions
+    assert isinstance(exc, expected_error)
+    assert str(exc) == expected_message
+
+    (event,) = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "starlite"
+    assert event["transaction"] == expected_tx_name
+
+
+def test_middleware_spans(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarliteIntegration()],
+    )
+
+    logging_config = LoggingMiddlewareConfig()
+    session_config = MemoryBackendConfig()
+    rate_limit_config = RateLimitConfig(rate_limit=("hour", 5))
+
+    starlite_app = starlite_app_factory(
+        middleware=[
+            session_config.middleware,
+            logging_config.middleware,
+            rate_limit_config.middleware,
+        ]
+    )
+    events = capture_events()
+
+    client = TestClient(
+        starlite_app, raise_server_exceptions=False, base_url="http://testserver.local"
+    )
+    try:
+        client.get("/message")
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = ["SessionMiddleware", "LoggingMiddleware", "RateLimitMiddleware"]
+
+    idx = 0
+    for span in transaction_event["spans"]:
+        if span["op"] == "middleware.starlite":
+            assert span["description"] == expected[idx]
+            assert span["tags"]["starlite.middleware_name"] == expected[idx]
+            idx += 1
+
+
+def test_middleware_callback_spans(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarliteIntegration()],
+    )
+    starlite_app = starlite_app_factory(middleware=[SampleMiddleware])
+    events = capture_events()
+
+    client = TestClient(starlite_app, raise_server_exceptions=False)
+    try:
+        client.get("/message")
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = [
+        {
+            "op": "middleware.starlite",
+            "description": "SampleMiddleware",
+            "tags": {"starlite.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlite.send",
+            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
+            "tags": {"starlite.middleware_name": "SampleMiddleware"},
+        },
+        {
+            "op": "middleware.starlite.send",
+            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
+            "tags": {"starlite.middleware_name": "SampleMiddleware"},
+        },
+    ]
+    for idx, span in enumerate(transaction_event["spans"]):
+        assert span["op"] == expected[idx]["op"]
+        assert span["description"] == expected[idx]["description"]
+        assert span["tags"] == expected[idx]["tags"]
+
+
+def test_middleware_receive_send(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarliteIntegration()],
+    )
+    starlite_app = starlite_app_factory(middleware=[SampleReceiveSendMiddleware])
+
+    client = TestClient(starlite_app, raise_server_exceptions=False)
+    try:
+        # NOTE: the assert statements checking
+        # for correct behaviour are in `SampleReceiveSendMiddleware`!
+        client.get("/message")
+    except Exception:
+        pass
+
+
+def test_middleware_partial_receive_send(sentry_init, capture_events):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[StarliteIntegration()],
+    )
+    starlite_app = starlite_app_factory(
+        middleware=[SamplePartialReceiveSendMiddleware]
+    )
+    events = capture_events()
+
+    client = TestClient(starlite_app, raise_server_exceptions=False)
+    try:
+        client.get("/message")
+    except Exception:
+        pass
+
+    (_, transaction_event) = events
+
+    expected = [
+        {
+            "op": "middleware.starlite",
+            "description": "SamplePartialReceiveSendMiddleware",
+            "tags": {"starlite.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+        {
+            "op": "middleware.starlite.receive",
+            "description": "TestClientTransport.create_receive..receive",
+            "tags": {"starlite.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+        {
+            "op": "middleware.starlite.send",
+            "description": "SentryAsgiMiddleware._run_app.._sentry_wrapped_send",
+            "tags": {"starlite.middleware_name": "SamplePartialReceiveSendMiddleware"},
+        },
+    ]
+
+    for idx, span in enumerate(transaction_event["spans"]):
+        assert span["op"] == expected[idx]["op"]
+        assert span["description"].startswith(expected[idx]["description"])
+        assert span["tags"] == expected[idx]["tags"]
+
+
+def test_last_event_id(sentry_init, capture_events):
+    sentry_init(
+        integrations=[StarliteIntegration()],
+    )
+    events = capture_events()
+
+    def handler(request, exc):
+        capture_exception(exc)
+        return starlite.response.Response(last_event_id(), status_code=500)
+
+    app = starlite_app_factory(
+        debug=False, exception_handlers={HTTP_500_INTERNAL_SERVER_ERROR: handler}
+    )
+
+    client = TestClient(app, raise_server_exceptions=False)
+    response = client.get("/custom_error")
+    assert response.status_code == 500
+    event = events[-1]
+    assert response.content.strip().decode("ascii").strip('"') == event["event_id"]
+    (exception,) = event["exception"]["values"]
+    assert exception["type"] == "Exception"
+    assert exception["value"] == "Too Hot"
diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py
index c90f9eb891..6055b86ab8 100644
--- a/tests/integrations/stdlib/test_httplib.py
+++ b/tests/integrations/stdlib/test_httplib.py
@@ -1,5 +1,4 @@
-import platform
-import sys
+import random
 
 import pytest
 
@@ -12,39 +11,51 @@
 
 try:
     # py2
-    from httplib import HTTPSConnection
+    from httplib import HTTPConnection, HTTPSConnection
 except ImportError:
     # py3
-    from http.client import HTTPSConnection
+    from http.client import HTTPConnection, HTTPSConnection
 
 try:
     from unittest import mock  # python 3.3 and above
 except ImportError:
     import mock  # python < 3.3
 
+
 from sentry_sdk import capture_message, start_transaction
+from sentry_sdk.consts import MATCH_ALL, SPANDATA
+from sentry_sdk.tracing import Transaction
 from sentry_sdk.integrations.stdlib import StdlibIntegration
 
+from tests.conftest import ApproxDict, create_mock_http_server
+
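+# Spin up a local mock HTTP server once per module so these tests no longer
+# depend on an external service such as httpbin.org.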
+PORT = create_mock_http_server()
+
 
 def test_crumb_capture(sentry_init, capture_events):
     sentry_init(integrations=[StdlibIntegration()])
     events = capture_events()
 
-    url = "https://httpbin.org/status/200"
-    response = urlopen(url)
-    assert response.getcode() == 200
+    url = "http://localhost:{}/some/random/url".format(PORT)
+    urlopen(url)
+
     capture_message("Testing!")
 
     (event,) = events
     (crumb,) = event["breadcrumbs"]["values"]
+
     assert crumb["type"] == "http"
     assert crumb["category"] == "httplib"
-    assert crumb["data"] == {
-        "url": url,
-        "method": "GET",
-        "status_code": 200,
-        "reason": "OK",
-    }
+    assert crumb["data"] == ApproxDict(
+        {
+            "url": url,
+            SPANDATA.HTTP_METHOD: "GET",
+            SPANDATA.HTTP_STATUS_CODE: 200,
+            "reason": "OK",
+            SPANDATA.HTTP_FRAGMENT: "",
+            SPANDATA.HTTP_QUERY: "",
+        }
+    )
 
 
 def test_crumb_capture_hint(sentry_init, capture_events):
@@ -55,25 +66,36 @@ def before_breadcrumb(crumb, hint):
     sentry_init(integrations=[StdlibIntegration()], before_breadcrumb=before_breadcrumb)
     events = capture_events()
 
-    url = "https://httpbin.org/status/200"
-    response = urlopen(url)
-    assert response.getcode() == 200
+    url = "http://localhost:{}/some/random/url".format(PORT)
+    urlopen(url)
+
     capture_message("Testing!")
 
     (event,) = events
     (crumb,) = event["breadcrumbs"]["values"]
     assert crumb["type"] == "http"
     assert crumb["category"] == "httplib"
-    assert crumb["data"] == {
-        "url": url,
-        "method": "GET",
-        "status_code": 200,
-        "reason": "OK",
-        "extra": "foo",
-    }
+    assert crumb["data"] == ApproxDict(
+        {
+            "url": url,
+            SPANDATA.HTTP_METHOD: "GET",
+            SPANDATA.HTTP_STATUS_CODE: 200,
+            "reason": "OK",
+            "extra": "foo",
+            SPANDATA.HTTP_FRAGMENT: "",
+            SPANDATA.HTTP_QUERY: "",
+        }
+    )
+
+
+def test_empty_realurl(sentry_init):
+    """
+    Ensure that after calling sentry_sdk.init you can still call
+    putrequest() with a None url.
+    """
 
-    if platform.python_implementation() != "PyPy":
-        assert sys.getrefcount(response) == 2
+    sentry_init(dsn="")
+    HTTPConnection("example.com", port=443).putrequest("POST", None)
 
 
 def test_httplib_misuse(sentry_init, capture_events, request):
@@ -89,19 +111,19 @@ def test_httplib_misuse(sentry_init, capture_events, request):
     sentry_init()
     events = capture_events()
 
-    conn = HTTPSConnection("httpbin.org", 443)
+    conn = HTTPConnection("localhost", PORT)
 
     # make sure we release the resource, even if the test fails
     request.addfinalizer(conn.close)
 
-    conn.request("GET", "/anything/foo")
+    conn.request("GET", "/200")
 
-    with pytest.raises(Exception):
+    with pytest.raises(Exception):  # noqa: B017
         # This raises an exception, because we didn't call `getresponse` for
         # the previous request yet.
         #
         # This call should not affect our breadcrumb.
-        conn.request("POST", "/anything/bar")
+        conn.request("POST", "/200")
 
     response = conn.getresponse()
     assert response._method == "GET"
@@ -113,17 +135,19 @@ def test_httplib_misuse(sentry_init, capture_events, request):
 
     assert crumb["type"] == "http"
     assert crumb["category"] == "httplib"
-    assert crumb["data"] == {
-        "url": "https://httpbin.org/anything/foo",
-        "method": "GET",
-        "status_code": 200,
-        "reason": "OK",
-    }
-
-
-def test_outgoing_trace_headers(
-    sentry_init, monkeypatch, StringContaining  # noqa: N803
-):
+    assert crumb["data"] == ApproxDict(
+        {
+            "url": "http://localhost:{}/200".format(PORT),
+            SPANDATA.HTTP_METHOD: "GET",
+            SPANDATA.HTTP_STATUS_CODE: 200,
+            "reason": "OK",
+            SPANDATA.HTTP_FRAGMENT: "",
+            SPANDATA.HTTP_QUERY: "",
+        }
+    )
+
+
+def test_outgoing_trace_headers(sentry_init, monkeypatch):
     # HTTPSConnection.send is passed a string containing (among other things)
     # the headers on the request. Mock it so we can check the headers, and also
     # so it doesn't try to actually talk to the internet.
@@ -132,22 +156,194 @@ def test_outgoing_trace_headers(
 
     sentry_init(traces_sample_rate=1.0)
 
+    headers = {}
+    headers["baggage"] = (
+        "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
+        "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
+        "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;"
+    )
+
+    transaction = Transaction.continue_from_headers(headers)
+
     with start_transaction(
+        transaction=transaction,
         name="/interactions/other-dogs/new-dog",
         op="greeting.sniff",
         trace_id="12312012123120121231201212312012",
     ) as transaction:
+        HTTPSConnection("www.squirrelchasers.com").request("GET", "/top-chasers")
+
+        (request_str,) = mock_send.call_args[0]
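+        # Reconstruct a header dict from the raw request bytes passed to
+        # send(), skipping the request line.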
+        request_headers = {}
+        for line in request_str.decode("utf-8").split("\r\n")[1:]:
+            if line:
+                key, val = line.split(": ")
+                request_headers[key] = val
+
+        request_span = transaction._span_recorder.spans[-1]
+        expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format(
+            trace_id=transaction.trace_id,
+            parent_span_id=request_span.span_id,
+            sampled=1,
+        )
+        assert request_headers["sentry-trace"] == expected_sentry_trace
+
+        expected_outgoing_baggage_items = [
+            "sentry-trace_id=771a43a4192642f0b136d5159a501700",
+            "sentry-public_key=49d0f7386ad645858ae85020e393bef3",
+            "sentry-sample_rate=0.01337",
+            "sentry-user_id=Am%C3%A9lie",
+        ]
+
+        assert sorted(request_headers["baggage"].split(",")) == sorted(
+            expected_outgoing_baggage_items
+        )
+
+
+def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch):
+    # HTTPSConnection.send is passed a string containing (among other things)
+    # the headers on the request. Mock it so we can check the headers, and also
+    # so it doesn't try to actually talk to the internet.
+    mock_send = mock.Mock()
+    monkeypatch.setattr(HTTPSConnection, "send", mock_send)
+
+    # make sure transaction is always sampled
+    monkeypatch.setattr(random, "random", lambda: 0.1)
+
+    sentry_init(traces_sample_rate=0.5, release="foo")
+    transaction = Transaction.continue_from_headers({})
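+    # No incoming headers: this SDK is the head of the trace and must
+    # generate the sentry-trace and baggage headers itself.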
 
+    with start_transaction(transaction=transaction, name="Head SDK tx") as transaction:
         HTTPSConnection("www.squirrelchasers.com").request("GET", "/top-chasers")
 
+        (request_str,) = mock_send.call_args[0]
+        request_headers = {}
+        for line in request_str.decode("utf-8").split("\r\n")[1:]:
+            if line:
+                key, val = line.split(": ")
+                request_headers[key] = val
+
         request_span = transaction._span_recorder.spans[-1]
+        expected_sentry_trace = "{trace_id}-{parent_span_id}-{sampled}".format(
+            trace_id=transaction.trace_id,
+            parent_span_id=request_span.span_id,
+            sampled=1,
+        )
+        assert request_headers["sentry-trace"] == expected_sentry_trace
+
+        expected_outgoing_baggage_items = [
+            "sentry-trace_id=%s" % transaction.trace_id,
+            "sentry-sample_rate=0.5",
+            "sentry-sampled=%s" % "true" if transaction.sampled else "false",
+            "sentry-release=foo",
+            "sentry-environment=production",
+        ]
+
+        assert sorted(request_headers["baggage"].split(",")) == sorted(
+            expected_outgoing_baggage_items
+        )
+
+
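+# Targets are matched against the full outgoing URL, so the trailing slash
+# matters: "https://example.com/" does not match a request with an empty
+# path, while "https://example.com" matches both cases.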
+@pytest.mark.parametrize(
+    "trace_propagation_targets,host,path,trace_propagated",
+    [
+        [
+            [],
+            "example.com",
+            "/",
+            False,
+        ],
+        [
+            None,
+            "example.com",
+            "/",
+            False,
+        ],
+        [
+            [MATCH_ALL],
+            "example.com",
+            "/",
+            True,
+        ],
+        [
+            ["https://example.com/"],
+            "example.com",
+            "/",
+            True,
+        ],
+        [
+            ["https://example.com/"],
+            "example.com",
+            "",
+            False,
+        ],
+        [
+            ["https://example.com"],
+            "example.com",
+            "",
+            True,
+        ],
+        [
+            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
+            "example.net",
+            "",
+            False,
+        ],
+        [
+            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
+            "good.example.net",
+            "",
+            True,
+        ],
+        [
+            ["https://example.com", r"https?:\/\/[\w\-]+(\.[\w\-]+)+\.net"],
+            "good.example.net",
+            "/some/thing",
+            True,
+        ],
+    ],
+)
+def test_option_trace_propagation_targets(
+    sentry_init, monkeypatch, trace_propagation_targets, host, path, trace_propagated
+):
+    # HTTPSConnection.send is passed a string containing (among other things)
+    # the headers on the request. Mock it so we can check the headers, and also
+    # so it doesn't try to actually talk to the internet.
+    mock_send = mock.Mock()
+    monkeypatch.setattr(HTTPSConnection, "send", mock_send)
+
+    sentry_init(
+        trace_propagation_targets=trace_propagation_targets,
+        traces_sample_rate=1.0,
+    )
 
-        expected_sentry_trace = (
-            "sentry-trace: {trace_id}-{parent_span_id}-{sampled}".format(
-                trace_id=transaction.trace_id,
-                parent_span_id=request_span.span_id,
-                sampled=1,
-            )
+    headers = {
+        "baggage": (
+            "sentry-trace_id=771a43a4192642f0b136d5159a501700, "
+            "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
         )
+    }
 
-        mock_send.assert_called_with(StringContaining(expected_sentry_trace))
+    transaction = Transaction.continue_from_headers(headers)
+
+    with start_transaction(
+        transaction=transaction,
+        name="/interactions/other-dogs/new-dog",
+        op="greeting.sniff",
+        trace_id="12312012123120121231201212312012",
+    ) as transaction:
+        HTTPSConnection(host).request("GET", path)
+
+        (request_str,) = mock_send.call_args[0]
+        request_headers = {}
+        for line in request_str.decode("utf-8").split("\r\n")[1:]:
+            if line:
+                key, val = line.split(": ")
+                request_headers[key] = val
+
+        if trace_propagated:
+            assert "sentry-trace" in request_headers
+            assert "baggage" in request_headers
+        else:
+            assert "sentry-trace" not in request_headers
+            assert "baggage" not in request_headers
diff --git a/tests/integrations/stdlib/test_subprocess.py b/tests/integrations/stdlib/test_subprocess.py
index 31da043ac3..00f417c2f3 100644
--- a/tests/integrations/stdlib/test_subprocess.py
+++ b/tests/integrations/stdlib/test_subprocess.py
@@ -8,6 +8,7 @@
 from sentry_sdk import capture_message, start_transaction
 from sentry_sdk._compat import PY2
 from sentry_sdk.integrations.stdlib import StdlibIntegration
+from tests.conftest import ApproxDict
 
 
 if PY2:
@@ -125,7 +126,7 @@ def test_subprocess_basic(
 
     assert message_event["message"] == "hi"
 
-    data = {"subprocess.cwd": os.getcwd()} if with_cwd else {}
+    data = ApproxDict({"subprocess.cwd": os.getcwd()} if with_cwd else {})
 
     (crumb,) = message_event["breadcrumbs"]["values"]
     assert crumb == {
@@ -179,6 +180,19 @@ def test_subprocess_basic(
         assert sys.executable + " -c" in subprocess_init_span["description"]
 
 
+def test_subprocess_empty_env(sentry_init, monkeypatch):
+    monkeypatch.setenv("TEST_MARKER", "should_not_be_seen")
+    sentry_init(integrations=[StdlibIntegration()], traces_sample_rate=1.0)
+    with start_transaction(name="foo"):
+        args = [
+            sys.executable,
+            "-c",
+            "import os; print(os.environ.get('TEST_MARKER', None))",
+        ]
+        output = subprocess.check_output(args, env={}, universal_newlines=True)
+    assert "should_not_be_seen" not in output
+
+
 def test_subprocess_invalid_args(sentry_init):
     sentry_init(integrations=[StdlibIntegration()])
 
diff --git a/tests/integrations/strawberry/__init__.py b/tests/integrations/strawberry/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/integrations/strawberry/test_strawberry_py3.py b/tests/integrations/strawberry/test_strawberry_py3.py
new file mode 100644
index 0000000000..e84c5f6fa5
--- /dev/null
+++ b/tests/integrations/strawberry/test_strawberry_py3.py
@@ -0,0 +1,629 @@
+import pytest
+
+strawberry = pytest.importorskip("strawberry")
+pytest.importorskip("fastapi")
+pytest.importorskip("flask")
+
+from unittest import mock
+
+from fastapi import FastAPI
+from fastapi.testclient import TestClient
+from flask import Flask
+from strawberry.extensions.tracing import (
+    SentryTracingExtension,
+    SentryTracingExtensionSync,
+)
+from strawberry.fastapi import GraphQLRouter
+from strawberry.flask.views import GraphQLView
+
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations.fastapi import FastApiIntegration
+from sentry_sdk.integrations.flask import FlaskIntegration
+from sentry_sdk.integrations.starlette import StarletteIntegration
+from sentry_sdk.integrations.strawberry import (
+    StrawberryIntegration,
+    SentryAsyncExtension,
+    SentrySyncExtension,
+)
+from tests.conftest import ApproxDict
+
+
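+# Run each test against both an async app (FastAPI/Starlette) and a sync app
+# (Flask), since the integration ships separate async and sync extensions.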
+parameterize_strawberry_test = pytest.mark.parametrize(
+    "client_factory,async_execution,framework_integrations",
+    (
+        (
+            "async_app_client_factory",
+            True,
+            [FastApiIntegration(), StarletteIntegration()],
+        ),
+        ("sync_app_client_factory", False, [FlaskIntegration()]),
+    ),
+)
+
+
+@strawberry.type
+class Query:
+    @strawberry.field
+    def hello(self) -> str:
+        return "Hello World"
+
+    @strawberry.field
+    def error(self) -> int:
+        return 1 / 0
+
+
+@strawberry.type
+class Mutation:
+    @strawberry.mutation
+    def change(self, attribute: str) -> str:
+        return attribute
+
+
+@pytest.fixture
+def async_app_client_factory():
+    def create_app(schema):
+        async_app = FastAPI()
+        async_app.include_router(GraphQLRouter(schema), prefix="/graphql")
+        return TestClient(async_app)
+
+    return create_app
+
+
+@pytest.fixture
+def sync_app_client_factory():
+    def create_app(schema):
+        sync_app = Flask(__name__)
+        sync_app.add_url_rule(
+            "/graphql",
+            view_func=GraphQLView.as_view("graphql_view", schema=schema),
+        )
+        return sync_app.test_client()
+
+    return create_app
+
+
+def test_async_execution_uses_async_extension(sentry_init):
+    sentry_init(integrations=[StrawberryIntegration(async_execution=True)])
+
+    with mock.patch(
+        "sentry_sdk.integrations.strawberry._get_installed_modules",
+        return_value={"flask": "2.3.3"},
+    ):
+        # actual installed modules should not matter, the explicit option takes
+        # precedence
+        schema = strawberry.Schema(Query)
+        assert SentryAsyncExtension in schema.extensions
+
+
+def test_sync_execution_uses_sync_extension(sentry_init):
+    sentry_init(integrations=[StrawberryIntegration(async_execution=False)])
+
+    with mock.patch(
+        "sentry_sdk.integrations.strawberry._get_installed_modules",
+        return_value={"fastapi": "0.103.1", "starlette": "0.27.0"},
+    ):
+        # actual installed modules should not matter, the explicit option takes
+        # precedence
+        schema = strawberry.Schema(Query)
+        assert SentrySyncExtension in schema.extensions
+
+
+def test_infer_execution_type_from_installed_packages_async(sentry_init):
+    sentry_init(integrations=[StrawberryIntegration()])
+
+    with mock.patch(
+        "sentry_sdk.integrations.strawberry._get_installed_modules",
+        return_value={"fastapi": "0.103.1", "starlette": "0.27.0"},
+    ):
+        schema = strawberry.Schema(Query)
+        assert SentryAsyncExtension in schema.extensions
+
+
+def test_infer_execution_type_from_installed_packages_sync(sentry_init):
+    sentry_init(integrations=[StrawberryIntegration()])
+
+    with mock.patch(
+        "sentry_sdk.integrations.strawberry._get_installed_modules",
+        return_value={"flask": "2.3.3"},
+    ):
+        schema = strawberry.Schema(Query)
+        assert SentrySyncExtension in schema.extensions
+
+
+def test_replace_existing_sentry_async_extension(sentry_init):
+    sentry_init(integrations=[StrawberryIntegration()])
+
+    schema = strawberry.Schema(Query, extensions=[SentryTracingExtension])
+    assert SentryTracingExtension not in schema.extensions
+    assert SentrySyncExtension not in schema.extensions
+    assert SentryAsyncExtension in schema.extensions
+
+
+def test_replace_existing_sentry_sync_extension(sentry_init):
+    sentry_init(integrations=[StrawberryIntegration()])
+
+    schema = strawberry.Schema(Query, extensions=[SentryTracingExtensionSync])
+    assert SentryTracingExtensionSync not in schema.extensions
+    assert SentryAsyncExtension not in schema.extensions
+    assert SentrySyncExtension in schema.extensions
+
+
+@parameterize_strawberry_test
+def test_capture_request_if_available_and_send_pii_is_on(
+    request,
+    sentry_init,
+    capture_events,
+    client_factory,
+    async_execution,
+    framework_integrations,
+):
+    sentry_init(
+        send_default_pii=True,
+        integrations=[
+            StrawberryIntegration(async_execution=async_execution),
+        ]
+        + framework_integrations,
+    )
+    events = capture_events()
+
+    schema = strawberry.Schema(Query)
+
+    client_factory = request.getfixturevalue(client_factory)
+    client = client_factory(schema)
+
+    query = "query ErrorQuery { error }"
+    client.post("/graphql", json={"query": query, "operationName": "ErrorQuery"})
+
+    assert len(events) == 1
+
+    (error_event,) = events
+
+    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "strawberry"
+    assert error_event["request"]["api_target"] == "graphql"
+    assert error_event["request"]["data"] == {
+        "query": query,
+        "operationName": "ErrorQuery",
+    }
+    assert error_event["contexts"]["response"] == {
+        "data": {
+            "data": None,
+            "errors": [
+                {
+                    "message": "division by zero",
+                    "locations": [{"line": 1, "column": 20}],
+                    "path": ["error"],
+                }
+            ],
+        }
+    }
+    assert len(error_event["breadcrumbs"]["values"]) == 1
+    assert error_event["breadcrumbs"]["values"][0]["category"] == "graphql.operation"
+    assert error_event["breadcrumbs"]["values"][0]["data"] == {
+        "operation_name": "ErrorQuery",
+        "operation_type": "query",
+    }
+
+
+@parameterize_strawberry_test
+def test_do_not_capture_request_if_send_pii_is_off(
+    request,
+    sentry_init,
+    capture_events,
+    client_factory,
+    async_execution,
+    framework_integrations,
+):
+    sentry_init(
+        integrations=[
+            StrawberryIntegration(async_execution=async_execution),
+        ]
+        + framework_integrations,
+    )
+    events = capture_events()
+
+    schema = strawberry.Schema(Query)
+
+    client_factory = request.getfixturevalue(client_factory)
+    client = client_factory(schema)
+
+    query = "query ErrorQuery { error }"
+    client.post("/graphql", json={"query": query, "operationName": "ErrorQuery"})
+
+    assert len(events) == 1
+
+    (error_event,) = events
+    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "strawberry"
+    assert "data" not in error_event["request"]
+    assert "response" not in error_event["contexts"]
+
+    assert len(error_event["breadcrumbs"]["values"]) == 1
+    assert error_event["breadcrumbs"]["values"][0]["category"] == "graphql.operation"
+    assert error_event["breadcrumbs"]["values"][0]["data"] == {
+        "operation_name": "ErrorQuery",
+        "operation_type": "query",
+    }
+
+
+@parameterize_strawberry_test
+def test_breadcrumb_no_operation_name(
+    request,
+    sentry_init,
+    capture_events,
+    client_factory,
+    async_execution,
+    framework_integrations,
+):
+    sentry_init(
+        integrations=[
+            StrawberryIntegration(async_execution=async_execution),
+        ]
+        + framework_integrations,
+    )
+    events = capture_events()
+
+    schema = strawberry.Schema(Query)
+
+    client_factory = request.getfixturevalue(client_factory)
+    client = client_factory(schema)
+
+    query = "{ error }"
+    client.post("/graphql", json={"query": query})
+
+    assert len(events) == 1
+
+    (error_event,) = events
+
+    assert len(error_event["breadcrumbs"]["values"]) == 1
+    assert error_event["breadcrumbs"]["values"][0]["category"] == "graphql.operation"
+    assert error_event["breadcrumbs"]["values"][0]["data"] == {
+        "operation_name": None,
+        "operation_type": "query",
+    }
+
+
+@parameterize_strawberry_test
+def test_capture_transaction_on_error(
+    request,
+    sentry_init,
+    capture_events,
+    client_factory,
+    async_execution,
+    framework_integrations,
+):
+    sentry_init(
+        send_default_pii=True,
+        integrations=[
+            StrawberryIntegration(async_execution=async_execution),
+        ]
+        + framework_integrations,
+        traces_sample_rate=1,
+    )
+    events = capture_events()
+
+    schema = strawberry.Schema(Query)
+
+    client_factory = request.getfixturevalue(client_factory)
+    client = client_factory(schema)
+
+    query = "query ErrorQuery { error }"
+    client.post("/graphql", json={"query": query, "operationName": "ErrorQuery"})
+
+    assert len(events) == 2
+    (_, transaction_event) = events
+
+    if async_execution:
+        assert transaction_event["transaction"] == "/graphql"
+    else:
+        assert transaction_event["transaction"] == "graphql_view"
+
+    assert transaction_event["spans"]
+
+    query_spans = [
+        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_QUERY
+    ]
+    assert len(query_spans) == 1, "exactly one query span expected"
+    query_span = query_spans[0]
+    assert query_span["description"] == "query ErrorQuery"
+    assert query_span["data"]["graphql.operation.type"] == "query"
+    assert query_span["data"]["graphql.operation.name"] == "ErrorQuery"
+    assert query_span["data"]["graphql.document"] == query
+    assert query_span["data"]["graphql.resource_name"]
+
+    parse_spans = [
+        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_PARSE
+    ]
+    assert len(parse_spans) == 1, "exactly one parse span expected"
+    parse_span = parse_spans[0]
+    assert parse_span["parent_span_id"] == query_span["span_id"]
+    assert parse_span["description"] == "parsing"
+
+    validate_spans = [
+        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_VALIDATE
+    ]
+    assert len(validate_spans) == 1, "exactly one validate span expected"
+    validate_span = validate_spans[0]
+    assert validate_span["parent_span_id"] == query_span["span_id"]
+    assert validate_span["description"] == "validation"
+
+    resolve_spans = [
+        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_RESOLVE
+    ]
+    assert len(resolve_spans) == 1, "exactly one resolve span expected"
+    resolve_span = resolve_spans[0]
+    assert resolve_span["parent_span_id"] == query_span["span_id"]
+    assert resolve_span["description"] == "resolving Query.error"
+    assert resolve_span["data"] == ApproxDict(
+        {
+            "graphql.field_name": "error",
+            "graphql.parent_type": "Query",
+            "graphql.field_path": "Query.error",
+            "graphql.path": "error",
+        }
+    )
+
+
+@parameterize_strawberry_test
+def test_capture_transaction_on_success(
+    request,
+    sentry_init,
+    capture_events,
+    client_factory,
+    async_execution,
+    framework_integrations,
+):
+    sentry_init(
+        integrations=[
+            StrawberryIntegration(async_execution=async_execution),
+        ]
+        + framework_integrations,
+        traces_sample_rate=1,
+    )
+    events = capture_events()
+
+    schema = strawberry.Schema(Query)
+
+    client_factory = request.getfixturevalue(client_factory)
+    client = client_factory(schema)
+
+    query = "query GreetingQuery { hello }"
+    client.post("/graphql", json={"query": query, "operationName": "GreetingQuery"})
+
+    assert len(events) == 1
+    (transaction_event,) = events
+
+    if async_execution:
+        assert transaction_event["transaction"] == "/graphql"
+    else:
+        assert transaction_event["transaction"] == "graphql_view"
+
+    assert transaction_event["spans"]
+
+    query_spans = [
+        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_QUERY
+    ]
+    assert len(query_spans) == 1, "exactly one query span expected"
+    query_span = query_spans[0]
+    assert query_span["description"] == "query GreetingQuery"
+    assert query_span["data"]["graphql.operation.type"] == "query"
+    assert query_span["data"]["graphql.operation.name"] == "GreetingQuery"
+    assert query_span["data"]["graphql.document"] == query
+    assert query_span["data"]["graphql.resource_name"]
+
+    parse_spans = [
+        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_PARSE
+    ]
+    assert len(parse_spans) == 1, "exactly one parse span expected"
+    parse_span = parse_spans[0]
+    assert parse_span["parent_span_id"] == query_span["span_id"]
+    assert parse_span["description"] == "parsing"
+
+    validate_spans = [
+        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_VALIDATE
+    ]
+    assert len(validate_spans) == 1, "exactly one validate span expected"
+    validate_span = validate_spans[0]
+    assert validate_span["parent_span_id"] == query_span["span_id"]
+    assert validate_span["description"] == "validation"
+
+    resolve_spans = [
+        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_RESOLVE
+    ]
+    assert len(resolve_spans) == 1, "exactly one resolve span expected"
+    resolve_span = resolve_spans[0]
+    assert resolve_span["parent_span_id"] == query_span["span_id"]
+    assert resolve_span["description"] == "resolving Query.hello"
+    assert resolve_span["data"] == ApproxDict(
+        {
+            "graphql.field_name": "hello",
+            "graphql.parent_type": "Query",
+            "graphql.field_path": "Query.hello",
+            "graphql.path": "hello",
+        }
+    )
+
+
+@parameterize_strawberry_test
+def test_transaction_no_operation_name(
+    request,
+    sentry_init,
+    capture_events,
+    client_factory,
+    async_execution,
+    framework_integrations,
+):
+    sentry_init(
+        integrations=[
+            StrawberryIntegration(async_execution=async_execution),
+        ]
+        + framework_integrations,
+        traces_sample_rate=1,
+    )
+    events = capture_events()
+
+    schema = strawberry.Schema(Query)
+
+    client_factory = request.getfixturevalue(client_factory)
+    client = client_factory(schema)
+
+    query = "{ hello }"
+    client.post("/graphql", json={"query": query})
+
+    assert len(events) == 1
+    (transaction_event,) = events
+
+    if async_execution:
+        assert transaction_event["transaction"] == "/graphql"
+    else:
+        assert transaction_event["transaction"] == "graphql_view"
+
+    assert transaction_event["spans"]
+
+    query_spans = [
+        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_QUERY
+    ]
+    assert len(query_spans) == 1, "exactly one query span expected"
+    query_span = query_spans[0]
+    assert query_span["description"] == "query"
+    assert query_span["data"]["graphql.operation.type"] == "query"
+    assert query_span["data"]["graphql.operation.name"] is None
+    assert query_span["data"]["graphql.document"] == query
+    assert query_span["data"]["graphql.resource_name"]
+
+    parse_spans = [
+        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_PARSE
+    ]
+    assert len(parse_spans) == 1, "exactly one parse span expected"
+    parse_span = parse_spans[0]
+    assert parse_span["parent_span_id"] == query_span["span_id"]
+    assert parse_span["description"] == "parsing"
+
+    validate_spans = [
+        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_VALIDATE
+    ]
+    assert len(validate_spans) == 1, "exactly one validate span expected"
+    validate_span = validate_spans[0]
+    assert validate_span["parent_span_id"] == query_span["span_id"]
+    assert validate_span["description"] == "validation"
+
+    resolve_spans = [
+        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_RESOLVE
+    ]
+    assert len(resolve_spans) == 1, "exactly one resolve span expected"
+    resolve_span = resolve_spans[0]
+    assert resolve_span["parent_span_id"] == query_span["span_id"]
+    assert resolve_span["description"] == "resolving Query.hello"
+    assert resolve_span["data"] == ApproxDict(
+        {
+            "graphql.field_name": "hello",
+            "graphql.parent_type": "Query",
+            "graphql.field_path": "Query.hello",
+            "graphql.path": "hello",
+        }
+    )
+
+
+@parameterize_strawberry_test
+def test_transaction_mutation(
+    request,
+    sentry_init,
+    capture_events,
+    client_factory,
+    async_execution,
+    framework_integrations,
+):
+    sentry_init(
+        integrations=[
+            StrawberryIntegration(async_execution=async_execution),
+        ]
+        + framework_integrations,
+        traces_sample_rate=1,
+    )
+    events = capture_events()
+
+    schema = strawberry.Schema(Query, mutation=Mutation)
+
+    client_factory = request.getfixturevalue(client_factory)
+    client = client_factory(schema)
+
+    query = 'mutation Change { change(attribute: "something") }'
+    client.post("/graphql", json={"query": query})
+
+    assert len(events) == 1
+    (transaction_event,) = events
+
+    if async_execution:
+        assert transaction_event["transaction"] == "/graphql"
+    else:
+        assert transaction_event["transaction"] == "graphql_view"
+
+    assert transaction_event["spans"]
+
+    query_spans = [
+        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_MUTATION
+    ]
+    assert len(query_spans) == 1, "exactly one mutation span expected"
+    query_span = query_spans[0]
+    assert query_span["description"] == "mutation"
+    assert query_span["data"]["graphql.operation.type"] == "mutation"
+    assert query_span["data"]["graphql.operation.name"] is None
+    assert query_span["data"]["graphql.document"] == query
+    assert query_span["data"]["graphql.resource_name"]
+
+    parse_spans = [
+        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_PARSE
+    ]
+    assert len(parse_spans) == 1, "exactly one parse span expected"
+    parse_span = parse_spans[0]
+    assert parse_span["parent_span_id"] == query_span["span_id"]
+    assert parse_span["description"] == "parsing"
+
+    validate_spans = [
+        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_VALIDATE
+    ]
+    assert len(validate_spans) == 1, "exactly one validate span expected"
+    validate_span = validate_spans[0]
+    assert validate_span["parent_span_id"] == query_span["span_id"]
+    assert validate_span["description"] == "validation"
+
+    resolve_spans = [
+        span for span in transaction_event["spans"] if span["op"] == OP.GRAPHQL_RESOLVE
+    ]
+    assert len(resolve_spans) == 1, "exactly one resolve span expected"
+    resolve_span = resolve_spans[0]
+    assert resolve_span["parent_span_id"] == query_span["span_id"]
+    assert resolve_span["description"] == "resolving Mutation.change"
+    assert resolve_span["data"] == ApproxDict(
+        {
+            "graphql.field_name": "change",
+            "graphql.parent_type": "Mutation",
+            "graphql.field_path": "Mutation.change",
+            "graphql.path": "change",
+        }
+    )
+
+
+@parameterize_strawberry_test
+def test_handle_none_query_gracefully(
+    request,
+    sentry_init,
+    capture_events,
+    client_factory,
+    async_execution,
+    framework_integrations,
+):
+    sentry_init(
+        integrations=[
+            StrawberryIntegration(async_execution=async_execution),
+        ]
+        + framework_integrations,
+    )
+    events = capture_events()
+
+    schema = strawberry.Schema(Query)
+
+    client_factory = request.getfixturevalue(client_factory)
+    client = client_factory(schema)
+
+    client.post("/graphql", json={})
+
+    assert len(events) == 0, "expected no events to be sent to Sentry"
diff --git a/tests/integrations/threading/test_threading.py b/tests/integrations/threading/test_threading.py
index 67b79e2080..97f480f155 100644
--- a/tests/integrations/threading/test_threading.py
+++ b/tests/integrations/threading/test_threading.py
@@ -1,12 +1,21 @@
 import gc
-
+import sys
 from threading import Thread
 
+try:
+    from concurrent import futures
+except ImportError:
+    futures = None
+
 import pytest
 
+import sentry_sdk
 from sentry_sdk import configure_scope, capture_message
 from sentry_sdk.integrations.threading import ThreadingIntegration
 
+original_start = Thread.start
+original_run = Thread.run
+
 
 @pytest.mark.forked
 @pytest.mark.parametrize("integrations", [[ThreadingIntegration()], []])
@@ -26,7 +35,8 @@ def crash():
 
         (exception,) = event["exception"]["values"]
         assert exception["type"] == "ZeroDivisionError"
-        assert exception["mechanism"] == {"type": "threading", "handled": False}
+        assert exception["mechanism"]["type"] == "threading"
+        assert not exception["mechanism"]["handled"]
     else:
         assert not events
 
@@ -60,7 +70,8 @@ def stage2():
     (exception,) = event["exception"]["values"]
 
     assert exception["type"] == "ZeroDivisionError"
-    assert exception["mechanism"] == {"type": "threading", "handled": False}
+    assert exception["mechanism"]["type"] == "threading"
+    assert not exception["mechanism"]["handled"]
 
     if propagate_hub:
         assert event["tags"]["stage1"] == "true"
@@ -68,6 +79,42 @@ def stage2():
         assert "stage1" not in event.get("tags", {})
 
 
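+# With propagate_hub=True the ThreadingIntegration should hand the current Hub
+# to worker threads, so spans started inside the pool land on the surrounding
+# transaction and share one trace_id; with propagate_hub=False they are lost.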
+@pytest.mark.skipif(
+    futures is None,
+    reason="ThreadPool was added in 3.2",
+)
+@pytest.mark.parametrize("propagate_hub", (True, False))
+def test_propagates_threadpool_hub(sentry_init, capture_events, propagate_hub):
+    sentry_init(
+        traces_sample_rate=1.0,
+        integrations=[ThreadingIntegration(propagate_hub=propagate_hub)],
+    )
+    events = capture_events()
+
+    def double(number):
+        with sentry_sdk.start_span(op="task", description=str(number)):
+            return number * 2
+
+    with sentry_sdk.start_transaction(name="test_handles_threadpool"):
+        with futures.ThreadPoolExecutor(max_workers=1) as executor:
+            tasks = [executor.submit(double, number) for number in [1, 2, 3, 4]]
+            for future in futures.as_completed(tasks):
+                print("Getting future value!", future.result())
+
+    sentry_sdk.flush()
+
+    if propagate_hub:
+        assert len(events) == 1
+        (event,) = events
+        assert event["spans"][0]["trace_id"] == event["spans"][1]["trace_id"]
+        assert event["spans"][1]["trace_id"] == event["spans"][2]["trace_id"]
+        assert event["spans"][2]["trace_id"] == event["spans"][3]["trace_id"]
+        assert event["spans"][3]["trace_id"] == event["spans"][0]["trace_id"]
+    else:
+        (event,) = events
+        assert len(event["spans"]) == 0
+
+
 def test_circular_references(sentry_init, request):
     sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])
 
@@ -84,7 +131,8 @@ def run(self):
     t.join()
     del t
 
-    assert not gc.collect()
+    unreachable_objects = gc.collect()
+    assert unreachable_objects == 0
 
 
 @pytest.mark.forked
@@ -114,3 +162,47 @@ def run(self):
     for event in events:
         (exception,) = event["exception"]["values"]
         assert exception["type"] == "ZeroDivisionError"
+
+
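+# The integration wraps Thread.start/Thread.run; these tests check that the
+# wrappers preserve the original __name__/__qualname__ (e.g. via
+# functools.wraps or equivalent -- an assumption about the implementation).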
+@pytest.mark.skipif(sys.version_info < (3, 2), reason="no __qualname__ in older python")
+def test_wrapper_attributes(sentry_init):
+    sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])
+
+    def target():
+        assert t.run.__name__ == "run"
+        assert t.run.__qualname__ == original_run.__qualname__
+
+    t = Thread(target=target)
+    t.start()
+    t.join()
+
+    assert Thread.start.__name__ == "start"
+    assert Thread.start.__qualname__ == original_start.__qualname__
+    assert t.start.__name__ == "start"
+    assert t.start.__qualname__ == original_start.__qualname__
+
+    assert Thread.run.__name__ == "run"
+    assert Thread.run.__qualname__ == original_run.__qualname__
+    assert t.run.__name__ == "run"
+    assert t.run.__qualname__ == original_run.__qualname__
+
+
+@pytest.mark.skipif(
+    sys.version_info > (2, 7),
+    reason="simpler test for py2.7 without py3 only __qualname__",
+)
+def test_wrapper_attributes_no_qualname(sentry_init):
+    sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])
+
+    def target():
+        assert t.run.__name__ == "run"
+
+    t = Thread(target=target)
+    t.start()
+    t.join()
+
+    assert Thread.start.__name__ == "start"
+    assert t.start.__name__ == "start"
+
+    assert Thread.run.__name__ == "run"
+    assert t.run.__name__ == "run"
diff --git a/tests/integrations/tornado/__init__.py b/tests/integrations/tornado/__init__.py
index a6ccd8a4ec..ac8479dcd7 100644
--- a/tests/integrations/tornado/__init__.py
+++ b/tests/integrations/tornado/__init__.py
@@ -1,3 +1,3 @@
 import pytest
 
-tornado = pytest.importorskip("tornado")
+pytest.importorskip("tornado")
diff --git a/tests/integrations/tornado/test_tornado.py b/tests/integrations/tornado/test_tornado.py
index 1c5137f2b2..2160154933 100644
--- a/tests/integrations/tornado/test_tornado.py
+++ b/tests/integrations/tornado/test_tornado.py
@@ -2,7 +2,7 @@
 
 import pytest
 
-from sentry_sdk import configure_scope, start_transaction
+from sentry_sdk import configure_scope, start_transaction, capture_message
 from sentry_sdk.integrations.tornado import TornadoIntegration
 
 from tornado.web import RequestHandler, Application, HTTPError
@@ -46,6 +46,12 @@ def post(self):
         1 / 0
 
 
+class CrashingWithMessageHandler(RequestHandler):
+    def get(self):
+        capture_message("hi")
+        1 / 0
+
+
 class HelloHandler(RequestHandler):
     async def get(self):
         with configure_scope() as scope:
@@ -96,6 +102,7 @@ def test_basic(tornado_testcase, sentry_init, capture_events):
         event["transaction"]
         == "tests.integrations.tornado.test_tornado.CrashingHandler.get"
     )
+    assert event["transaction_info"] == {"source": "component"}
 
     with configure_scope() as scope:
         assert not scope._tags
@@ -129,6 +136,9 @@ def test_transactions(tornado_testcase, sentry_init, capture_events, handler, co
 
     assert client_tx["type"] == "transaction"
     assert client_tx["transaction"] == "client"
+    assert client_tx["transaction_info"] == {
+        "source": "custom"
+    }  # because this is just the start_transaction() above.
 
     if server_error is not None:
         assert server_error["exception"]["values"][0]["type"] == "ZeroDivisionError"
@@ -136,6 +146,7 @@ def test_transactions(tornado_testcase, sentry_init, capture_events, handler, co
             server_error["transaction"]
             == "tests.integrations.tornado.test_tornado.CrashingHandler.post"
         )
+        assert server_error["transaction_info"] == {"source": "component"}
 
     if code == 200:
         assert (
@@ -148,6 +159,7 @@ def test_transactions(tornado_testcase, sentry_init, capture_events, handler, co
             == "tests.integrations.tornado.test_tornado.CrashingHandler.post"
         )
 
+    assert server_tx["transaction_info"] == {"source": "component"}
     assert server_tx["type"] == "transaction"
 
     request = server_tx["request"]
@@ -286,3 +298,145 @@ def post(self):
     assert exception["value"] == "[]"
     assert event
     assert event["request"]["data"] == {"foo": {"bar": 42}}
+
+
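+# The four tests below cover the trace-context matrix: new trace vs. incoming
+# 'sentry-trace' header, each with performance monitoring on and off. In every
+# case the captured message and error should carry the same trace_id.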
+def test_error_has_new_trace_context_performance_enabled(
+    tornado_testcase, sentry_init, capture_events
+):
+    """
+    Check that a 'trace' context is added to errors and transactions when performance monitoring is enabled.
+    """
+    sentry_init(
+        integrations=[TornadoIntegration()],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    client = tornado_testcase(Application([(r"/hi", CrashingWithMessageHandler)]))
+    client.fetch("/hi")
+
+    (msg_event, error_event, transaction_event) = events
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert "trace" in transaction_event["contexts"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+def test_error_has_new_trace_context_performance_disabled(
+    tornado_testcase, sentry_init, capture_events
+):
+    """
+    Check that a 'trace' context is added to errors when performance monitoring is disabled (no transaction is sent).
+    """
+    sentry_init(
+        integrations=[TornadoIntegration()],
+        traces_sample_rate=None,  # this is the default, just added for clarity
+    )
+    events = capture_events()
+
+    client = tornado_testcase(Application([(r"/hi", CrashingWithMessageHandler)]))
+    client.fetch("/hi")
+
+    (msg_event, error_event) = events
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+def test_error_has_existing_trace_context_performance_enabled(
+    tornado_testcase, sentry_init, capture_events
+):
+    """
+    Check that a 'trace' context is added to errors and transactions
+    from the incoming 'sentry-trace' header when performance monitoring is enabled.
+    """
+    sentry_init(
+        integrations=[TornadoIntegration()],
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+
+    trace_id = "471a43a4192642f0b136d5159a501701"
+    parent_span_id = "6e8f22c393e68f19"
+    parent_sampled = 1
+    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
+
+    headers = {"sentry-trace": sentry_trace_header}
+
+    client = tornado_testcase(Application([(r"/hi", CrashingWithMessageHandler)]))
+    client.fetch("/hi", headers=headers)
+
+    (msg_event, error_event, transaction_event) = events
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert "trace" in transaction_event["contexts"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+        == "471a43a4192642f0b136d5159a501701"
+    )
+
+
+def test_error_has_existing_trace_context_performance_disabled(
+    tornado_testcase, sentry_init, capture_events
+):
+    """
+    Check that a 'trace' context is added to errors
+    from the incoming 'sentry-trace' header when performance monitoring is disabled (no transaction is sent).
+    """
+    sentry_init(
+        integrations=[TornadoIntegration()],
+        traces_sample_rate=None,  # this is the default, just added for clarity
+    )
+    events = capture_events()
+
+    trace_id = "471a43a4192642f0b136d5159a501701"
+    parent_span_id = "6e8f22c393e68f19"
+    parent_sampled = 1
+    sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
+
+    headers = {"sentry-trace": sentry_trace_header}
+
+    client = tornado_testcase(Application([(r"/hi", CrashingWithMessageHandler)]))
+    client.fetch("/hi", headers=headers)
+
+    (msg_event, error_event) = events
+
+    assert "trace" in msg_event["contexts"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert "trace" in error_event["contexts"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == "471a43a4192642f0b136d5159a501701"
+    )
diff --git a/tests/integrations/trytond/__init__.py b/tests/integrations/trytond/__init__.py
new file mode 100644
index 0000000000..897ed4ab6c
--- /dev/null
+++ b/tests/integrations/trytond/__init__.py
@@ -0,0 +1,3 @@
+import pytest
+
+pytest.importorskip("trytond")
diff --git a/tests/integrations/trytond/test_trytond.py b/tests/integrations/trytond/test_trytond.py
index 055f7926eb..30479ca14a 100644
--- a/tests/integrations/trytond/test_trytond.py
+++ b/tests/integrations/trytond/test_trytond.py
@@ -1,10 +1,8 @@
-import pytest
-
-pytest.importorskip("trytond")
-
 import json
 import unittest.mock
 
+import pytest
+
 import trytond
 from trytond.exceptions import TrytonException as TrytondBaseException
 from trytond.exceptions import UserError as TrytondUserError
@@ -124,8 +122,14 @@ def _(app, request, e):
     )
 
     (event,) = events
-    (content, status, headers) = response
-    data = json.loads(next(content))
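+    # Depending on the trytond/werkzeug version (an assumption based on the two
+    # branches below), the handler returns either a Response object or a legacy
+    # (content, status, headers) tuple.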
+    if hasattr(response, "status"):
+        status = response.status
+        data = json.loads(response.get_data())
+        headers = response.headers
+    else:
+        (content, status, headers) = response
+        data = json.loads(next(content))
+
     assert status == "200 OK"
     assert headers.get("Content-Type") == "application/json"
     assert data == dict(id=42, error=["UserError", [event["event_id"], "foo", None]])
diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py
index 66cc1a1de7..0b76bf6887 100644
--- a/tests/integrations/wsgi/test_wsgi.py
+++ b/tests/integrations/wsgi/test_wsgi.py
@@ -1,7 +1,11 @@
+import sys
+
 from werkzeug.test import Client
+
 import pytest
 
 import sentry_sdk
+from sentry_sdk import capture_message
 from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
 from collections import Counter
 
@@ -122,21 +126,23 @@ def test_transaction_with_error(
     sentry_init, crashing_app, capture_events, DictionaryContaining  # noqa:N803
 ):
     def dogpark(environ, start_response):
-        raise Exception("Fetch aborted. The ball was not returned.")
+        raise ValueError("Fetch aborted. The ball was not returned.")
 
     sentry_init(send_default_pii=True, traces_sample_rate=1.0)
     app = SentryWsgiMiddleware(dogpark)
     client = Client(app)
     events = capture_events()
 
-    with pytest.raises(Exception):
+    with pytest.raises(ValueError):
         client.get("http://dogs.are.great/sit/stay/rollover/")
 
     error_event, envelope = events
 
     assert error_event["transaction"] == "generic WSGI request"
     assert error_event["contexts"]["trace"]["op"] == "http.server"
-    assert error_event["exception"]["values"][0]["type"] == "Exception"
+    assert error_event["exception"]["values"][0]["type"] == "ValueError"
+    assert error_event["exception"]["values"][0]["mechanism"]["type"] == "wsgi"
+    assert error_event["exception"]["values"][0]["mechanism"]["handled"] is False
     assert (
         error_event["exception"]["values"][0]["value"]
         == "Fetch aborted. The ball was not returned."
@@ -177,8 +183,139 @@ def dogpark(environ, start_response):
     )
 
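+# As in the tornado tests, errors and messages should share a trace context
+# with the active transaction (or, presumably, with the scope's propagation
+# context when tracing is disabled).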
 
+def test_has_trace_if_performance_enabled(
+    sentry_init,
+    capture_events,
+):
+    def dogpark(environ, start_response):
+        capture_message("Attempting to fetch the ball")
+        raise ValueError("Fetch aborted. The ball was not returned.")
+
+    sentry_init(traces_sample_rate=1.0)
+    app = SentryWsgiMiddleware(dogpark)
+    client = Client(app)
+    events = capture_events()
+
+    with pytest.raises(ValueError):
+        client.get("http://dogs.are.great/sit/stay/rollover/")
+
+    msg_event, error_event, transaction_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert transaction_event["contexts"]["trace"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert (
+        msg_event["contexts"]["trace"]["trace_id"]
+        == error_event["contexts"]["trace"]["trace_id"]
+        == transaction_event["contexts"]["trace"]["trace_id"]
+    )
+
+
+def test_has_trace_if_performance_disabled(
+    sentry_init,
+    capture_events,
+):
+    def dogpark(environ, start_response):
+        capture_message("Attempting to fetch the ball")
+        raise ValueError("Fetch aborted. The ball was not returned.")
+
+    sentry_init()
+    app = SentryWsgiMiddleware(dogpark)
+    client = Client(app)
+    events = capture_events()
+
+    with pytest.raises(ValueError):
+        client.get("http://dogs.are.great/sit/stay/rollover/")
+
+    msg_event, error_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+
+def test_trace_from_headers_if_performance_enabled(
+    sentry_init,
+    capture_events,
+):
+    def dogpark(environ, start_response):
+        capture_message("Attempting to fetch the ball")
+        raise ValueError("Fetch aborted. The ball was not returned.")
+
+    sentry_init(traces_sample_rate=1.0)
+    app = SentryWsgiMiddleware(dogpark)
+    client = Client(app)
+    events = capture_events()
+
+    trace_id = "582b43a4192642f0b136d5159a501701"
+    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
+
+    with pytest.raises(ValueError):
+        client.get(
+            "http://dogs.are.great/sit/stay/rollover/",
+            headers={"sentry-trace": sentry_trace_header},
+        )
+
+    msg_event, error_event, transaction_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+
+    assert transaction_event["contexts"]["trace"]
+    assert "trace_id" in transaction_event["contexts"]["trace"]
+
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
+    assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id
+
+
+def test_trace_from_headers_if_performance_disabled(
+    sentry_init,
+    capture_events,
+):
+    def dogpark(environ, start_response):
+        capture_message("Attempting to fetch the ball")
+        raise ValueError("Fetch aborted. The ball was not returned.")
+
+    sentry_init()
+    app = SentryWsgiMiddleware(dogpark)
+    client = Client(app)
+    events = capture_events()
+
+    trace_id = "582b43a4192642f0b136d5159a501701"
+    sentry_trace_header = "{}-{}-{}".format(trace_id, "6e8f22c393e68f19", 1)
+
+    with pytest.raises(ValueError):
+        client.get(
+            "http://dogs.are.great/sit/stay/rollover/",
+            headers={"sentry-trace": sentry_trace_header},
+        )
+
+    msg_event, error_event = events
+
+    assert msg_event["contexts"]["trace"]
+    assert "trace_id" in msg_event["contexts"]["trace"]
+    assert msg_event["contexts"]["trace"]["trace_id"] == trace_id
+
+    assert error_event["contexts"]["trace"]
+    assert "trace_id" in error_event["contexts"]["trace"]
+    assert error_event["contexts"]["trace"]["trace_id"] == trace_id
+
+
 def test_traces_sampler_gets_correct_values_in_sampling_context(
-    sentry_init, DictionaryContaining, ObjectDescribedBy  # noqa:N803
+    sentry_init,
+    DictionaryContaining,  # noqa:N803
 ):
     def app(environ, start_response):
         start_response("200 OK", [])
@@ -279,3 +416,33 @@ def sample_app(environ, start_response):
     assert session_aggregates[0]["exited"] == 2
     assert session_aggregates[0]["crashed"] == 1
     assert len(session_aggregates) == 1
+
+
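+# PROFILE_MINIMUM_SAMPLES is patched to 0 so that even this near-instant
+# request is enough for the profiler to emit a profile envelope item.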
+@pytest.mark.skipif(
+    sys.version_info < (3, 3), reason="Profiling is only supported in Python >= 3.3"
+)
+@mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0)
+def test_profile_sent(
+    sentry_init,
+    capture_envelopes,
+    teardown_profiling,
+):
+    def test_app(environ, start_response):
+        start_response("200 OK", [])
+        return ["Go get the ball! Good dog!"]
+
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
+    )
+    app = SentryWsgiMiddleware(test_app)
+    envelopes = capture_envelopes()
+
+    client = Client(app)
+    client.get("/")
+
+    envelopes = list(envelopes)
+    assert len(envelopes) == 1
+
+    profiles = [item for item in envelopes[0].items if item.type == "profile"]
+    assert len(profiles) == 1
diff --git a/tests/test_api.py b/tests/test_api.py
new file mode 100644
index 0000000000..1adb9095f0
--- /dev/null
+++ b/tests/test_api.py
@@ -0,0 +1,115 @@
+from sentry_sdk import (
+    configure_scope,
+    continue_trace,
+    get_baggage,
+    get_current_span,
+    get_traceparent,
+    start_transaction,
+)
+from sentry_sdk.hub import Hub
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+
+def test_get_current_span():
+    fake_hub = mock.MagicMock()
+    fake_hub.scope = mock.MagicMock()
+
+    fake_hub.scope.span = mock.MagicMock()
+    assert get_current_span(fake_hub) == fake_hub.scope.span
+
+    fake_hub.scope.span = None
+    assert get_current_span(fake_hub) is None
+
+
+def test_get_current_span_default_hub(sentry_init):
+    sentry_init()
+
+    assert get_current_span() is None
+
+    with configure_scope() as scope:
+        fake_span = mock.MagicMock()
+        scope.span = fake_span
+
+        assert get_current_span() == fake_span
+
+
+def test_get_current_span_default_hub_with_transaction(sentry_init):
+    sentry_init()
+
+    assert get_current_span() is None
+
+    with start_transaction() as new_transaction:
+        assert get_current_span() == new_transaction
+
+
+def test_traceparent_with_tracing_enabled(sentry_init):
+    sentry_init(traces_sample_rate=1.0)
+
+    with start_transaction() as transaction:
+        expected_traceparent = "%s-%s-1" % (
+            transaction.trace_id,
+            transaction.span_id,
+        )
+        assert get_traceparent() == expected_traceparent
+
+
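+# Without tracing there is no active span, so the traceparent is expected to
+# come from the scope's propagation context and to omit the sampled flag
+# (only "<trace_id>-<span_id>").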
+def test_traceparent_with_tracing_disabled(sentry_init):
+    sentry_init()
+
+    propagation_context = Hub.current.scope._propagation_context
+    expected_traceparent = "%s-%s" % (
+        propagation_context["trace_id"],
+        propagation_context["span_id"],
+    )
+    assert get_traceparent() == expected_traceparent
+
+
+def test_baggage_with_tracing_disabled(sentry_init):
+    sentry_init(release="1.0.0", environment="dev")
+    propagation_context = Hub.current.scope._propagation_context
+    expected_baggage = (
+        "sentry-trace_id={},sentry-environment=dev,sentry-release=1.0.0".format(
+            propagation_context["trace_id"]
+        )
+    )
+    # order not guaranteed in older python versions
+    assert sorted(get_baggage().split(",")) == sorted(expected_baggage.split(","))
+
+
+def test_baggage_with_tracing_enabled(sentry_init):
+    sentry_init(traces_sample_rate=1.0, release="1.0.0", environment="dev")
+    with start_transaction() as transaction:
+        expected_baggage = "sentry-trace_id={},sentry-environment=dev,sentry-release=1.0.0,sentry-sample_rate=1.0,sentry-sampled={}".format(
+            transaction.trace_id, "true" if transaction.sampled else "false"
+        )
+        # order not guaranteed in older python versions
+        assert sorted(get_baggage().split(",")) == sorted(expected_baggage.split(","))
+
+
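+# continue_trace() parses the incoming 'sentry-trace' and 'baggage' headers
+# into a Transaction and, as asserted below, records them on the scope's
+# propagation context (the baggage becoming the dynamic sampling context).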
+def test_continue_trace(sentry_init):
+    sentry_init()
+
+    trace_id = "471a43a4192642f0b136d5159a501701"
+    parent_span_id = "6e8f22c393e68f19"
+    parent_sampled = 1
+    transaction = continue_trace(
+        {
+            "sentry-trace": "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled),
+            "baggage": "sentry-trace_id=566e3688a61d4bc888951642d6f14a19",
+        },
+        name="some name",
+    )
+    with start_transaction(transaction):
+        assert transaction.name == "some name"
+
+        propagation_context = Hub.current.scope._propagation_context
+        assert propagation_context["trace_id"] == transaction.trace_id == trace_id
+        assert propagation_context["parent_span_id"] == parent_span_id
+        assert propagation_context["parent_sampled"] == parent_sampled
+        assert propagation_context["dynamic_sampling_context"] == {
+            "trace_id": "566e3688a61d4bc888951642d6f14a19"
+        }
diff --git a/tests/test_basics.py b/tests/test_basics.py
index e9ae6465c9..26dad73274 100644
--- a/tests/test_basics.py
+++ b/tests/test_basics.py
@@ -1,9 +1,12 @@
+import logging
 import os
 import sys
-import logging
+import time
 
 import pytest
 
+from tests.conftest import patch_start_tracing_child
+
 from sentry_sdk import (
     Client,
     push_scope,
@@ -16,14 +19,50 @@
     last_event_id,
     Hub,
 )
-
-from sentry_sdk._compat import reraise
-from sentry_sdk.integrations import _AUTO_ENABLING_INTEGRATIONS
+from sentry_sdk._compat import reraise, PY2
+from sentry_sdk.integrations import (
+    _AUTO_ENABLING_INTEGRATIONS,
+    Integration,
+    setup_integrations,
+)
 from sentry_sdk.integrations.logging import LoggingIntegration
+from sentry_sdk.integrations.redis import RedisIntegration
 from sentry_sdk.scope import (  # noqa: F401
     add_global_event_processor,
     global_event_processors,
 )
+from sentry_sdk.utils import get_sdk_name
+from sentry_sdk.tracing_utils import has_tracing_enabled
+
+
+def _redis_installed():  # type: () -> bool
+    """
+    Determines whether Redis is installed.
+    """
+    try:
+        import redis  # noqa: F401
+    except ImportError:
+        return False
+
+    return True
+
+
+class NoOpIntegration(Integration):
+    """
+    A simple no-op integration for testing purposes.
+    """
+
+    identifier = "noop"
+
+    @staticmethod
+    def setup_once():  # type: () -> None
+        pass
+
+    def __eq__(self, __value):  # type: (object) -> bool
+        """
+        All instances of NoOpIntegration should be considered equal to each other.
+        """
+        return type(__value) == type(self)
 
 
 def test_processors(sentry_init, capture_events):
@@ -50,14 +89,16 @@ def error_processor(event, exc_info):
 
 def test_auto_enabling_integrations_catches_import_error(sentry_init, caplog):
     caplog.set_level(logging.DEBUG)
-    REDIS = 10  # noqa: N806
+    redis_index = _AUTO_ENABLING_INTEGRATIONS.index(
+        "sentry_sdk.integrations.redis.RedisIntegration"
+    )
 
     sentry_init(auto_enabling_integrations=True, debug=True)
 
     for import_string in _AUTO_ENABLING_INTEGRATIONS:
-        # Ignore redis in the test case, because it is installed as a
-        # dependency for running tests, and therefore always enabled.
-        if _AUTO_ENABLING_INTEGRATIONS[REDIS] == import_string:
+        # Ignore redis in the test case, because it does not raise a DidNotEnable
+        # exception on import; rather, it raises the exception upon enabling.
+        if _AUTO_ENABLING_INTEGRATIONS[redis_index] == import_string:
             continue
 
         assert any(
@@ -65,7 +106,7 @@ def test_auto_enabling_integrations_catches_import_error(sentry_init, caplog):
                 "Did not import default integration {}:".format(import_string)
             )
             for record in caplog.records
-        )
+        ), "Problem with checking auto enabling {}".format(import_string)
 
 
 def test_event_id(sentry_init, capture_events):
@@ -90,7 +131,93 @@ def test_event_id(sentry_init, capture_events):
     assert Hub.current.last_event_id() == event_id
 
 
-def test_option_callback(sentry_init, capture_events, monkeypatch):
+def test_generic_mechanism(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    try:
+        raise ValueError("aha!")
+    except Exception:
+        capture_exception()
+
+    (event,) = events
+    assert event["exception"]["values"][0]["mechanism"]["type"] == "generic"
+    assert event["exception"]["values"][0]["mechanism"]["handled"]
+
+
+def test_option_before_send(sentry_init, capture_events):
+    def before_send(event, hint):
+        event["extra"] = {"before_send_called": True}
+        return event
+
+    def do_this():
+        try:
+            raise ValueError("aha!")
+        except Exception:
+            capture_exception()
+
+    sentry_init(before_send=before_send)
+    events = capture_events()
+
+    do_this()
+
+    (event,) = events
+    assert event["extra"] == {"before_send_called": True}
+
+
+def test_option_before_send_discard(sentry_init, capture_events):
+    def before_send_discard(event, hint):
+        return None
+
+    def do_this():
+        try:
+            raise ValueError("aha!")
+        except Exception:
+            capture_exception()
+
+    sentry_init(before_send=before_send_discard)
+    events = capture_events()
+
+    do_this()
+
+    assert len(events) == 0
+
+
+def test_option_before_send_transaction(sentry_init, capture_events):
+    def before_send_transaction(event, hint):
+        assert event["type"] == "transaction"
+        event["extra"] = {"before_send_transaction_called": True}
+        return event
+
+    sentry_init(
+        before_send_transaction=before_send_transaction,
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+    transaction = start_transaction(name="foo")
+    transaction.finish()
+
+    (event,) = events
+    assert event["transaction"] == "foo"
+    assert event["extra"] == {"before_send_transaction_called": True}
+
+
+def test_option_before_send_transaction_discard(sentry_init, capture_events):
+    def before_send_transaction_discard(event, hint):
+        return None
+
+    sentry_init(
+        before_send_transaction=before_send_transaction_discard,
+        traces_sample_rate=1.0,
+    )
+    events = capture_events()
+    transaction = start_transaction(name="foo")
+    transaction.finish()
+
+    assert len(events) == 0
+
+
+def test_option_before_breadcrumb(sentry_init, capture_events, monkeypatch):
     drop_events = False
     drop_breadcrumbs = False
     reports = []
@@ -142,6 +269,32 @@ def do_this():
     assert crumb["type"] == "default"
 
 
+@pytest.mark.parametrize(
+    "enable_tracing, traces_sample_rate, tracing_enabled, updated_traces_sample_rate",
+    [
+        (None, None, False, None),
+        (False, 0.0, False, 0.0),
+        (False, 1.0, False, 1.0),
+        (None, 1.0, True, 1.0),
+        (True, 1.0, True, 1.0),
+        (None, 0.0, True, 0.0),  # tracing is configured but effectively turned off
+        (True, 0.0, True, 0.0),  # tracing is configured but effectively turned off
+        (True, None, True, 1.0),
+    ],
+)
+def test_option_enable_tracing(
+    sentry_init,
+    enable_tracing,
+    traces_sample_rate,
+    tracing_enabled,
+    updated_traces_sample_rate,
+):
+    sentry_init(enable_tracing=enable_tracing, traces_sample_rate=traces_sample_rate)
+    options = Hub.current.client.options
+    assert has_tracing_enabled(options) is tracing_enabled
+    assert options["traces_sample_rate"] == updated_traces_sample_rate
+
+
 def test_breadcrumb_arguments(sentry_init, capture_events):
     assert_hint = {"bar": 42}
 
@@ -437,3 +590,207 @@ def foo(event, hint):
     assert reports == [("event_processor", "error"), ("event_processor", "transaction")]
 
     global_event_processors.pop()
+
+
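+# get_sdk_name() picks the SDK name from the highest-precedence framework
+# integration that is installed; integrations without a dedicated name fall
+# back to plain "sentry.python". The precedence order is implied by the
+# "precedence of frameworks" cases below.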
+@pytest.mark.parametrize(
+    "installed_integrations, expected_name",
+    [
+        # integrations with own name
+        (["django"], "sentry.python.django"),
+        (["flask"], "sentry.python.flask"),
+        (["fastapi"], "sentry.python.fastapi"),
+        (["bottle"], "sentry.python.bottle"),
+        (["falcon"], "sentry.python.falcon"),
+        (["quart"], "sentry.python.quart"),
+        (["sanic"], "sentry.python.sanic"),
+        (["starlette"], "sentry.python.starlette"),
+        (["chalice"], "sentry.python.chalice"),
+        (["serverless"], "sentry.python.serverless"),
+        (["pyramid"], "sentry.python.pyramid"),
+        (["tornado"], "sentry.python.tornado"),
+        (["aiohttp"], "sentry.python.aiohttp"),
+        (["aws_lambda"], "sentry.python.aws_lambda"),
+        (["gcp"], "sentry.python.gcp"),
+        (["beam"], "sentry.python.beam"),
+        (["asgi"], "sentry.python.asgi"),
+        (["wsgi"], "sentry.python.wsgi"),
+        # integrations without name
+        (["argv"], "sentry.python"),
+        (["atexit"], "sentry.python"),
+        (["boto3"], "sentry.python"),
+        (["celery"], "sentry.python"),
+        (["dedupe"], "sentry.python"),
+        (["excepthook"], "sentry.python"),
+        (["executing"], "sentry.python"),
+        (["modules"], "sentry.python"),
+        (["pure_eval"], "sentry.python"),
+        (["redis"], "sentry.python"),
+        (["rq"], "sentry.python"),
+        (["sqlalchemy"], "sentry.python"),
+        (["stdlib"], "sentry.python"),
+        (["threading"], "sentry.python"),
+        (["trytond"], "sentry.python"),
+        (["logging"], "sentry.python"),
+        (["gnu_backtrace"], "sentry.python"),
+        (["httpx"], "sentry.python"),
+        # precedence of frameworks
+        (["flask", "django", "celery"], "sentry.python.django"),
+        (["fastapi", "flask", "redis"], "sentry.python.flask"),
+        (["bottle", "fastapi", "httpx"], "sentry.python.fastapi"),
+        (["falcon", "bottle", "logging"], "sentry.python.bottle"),
+        (["quart", "falcon", "gnu_backtrace"], "sentry.python.falcon"),
+        (["sanic", "quart", "sqlalchemy"], "sentry.python.quart"),
+        (["starlette", "sanic", "rq"], "sentry.python.sanic"),
+        (["chalice", "starlette", "modules"], "sentry.python.starlette"),
+        (["serverless", "chalice", "pure_eval"], "sentry.python.chalice"),
+        (["pyramid", "serverless", "modules"], "sentry.python.serverless"),
+        (["tornado", "pyramid", "executing"], "sentry.python.pyramid"),
+        (["aiohttp", "tornado", "dedupe"], "sentry.python.tornado"),
+        (["aws_lambda", "aiohttp", "boto3"], "sentry.python.aiohttp"),
+        (["gcp", "aws_lambda", "atexit"], "sentry.python.aws_lambda"),
+        (["beam", "gcp", "argv"], "sentry.python.gcp"),
+        (["asgi", "beam", "stdtlib"], "sentry.python.beam"),
+        (["wsgi", "asgi", "boto3"], "sentry.python.asgi"),
+        (["wsgi", "celery", "redis"], "sentry.python.wsgi"),
+    ],
+)
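+# get_sdk_name returns the most specific name available: the first installed
+# integration in the SDK's precedence order that has a name of its own wins,
+# and everything else falls back to the generic "sentry.python", e.g.
+#   get_sdk_name(["flask", "django", "celery"])  # -> "sentry.python.django"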
+def test_get_sdk_name(installed_integrations, expected_name):
+    assert get_sdk_name(installed_integrations) == expected_name
+
+
+def _hello_world(word):
+    return "Hello, {}".format(word)
+
+
+def test_functions_to_trace(sentry_init, capture_events):
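+    # Each "qualified_name" is the dotted import path of a callable; the SDK
+    # wraps those callables at init time so every call shows up as a span.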
+    functions_to_trace = [
+        {"qualified_name": "tests.test_basics._hello_world"},
+        {"qualified_name": "time.sleep"},
+    ]
+
+    sentry_init(
+        traces_sample_rate=1.0,
+        functions_to_trace=functions_to_trace,
+    )
+
+    events = capture_events()
+
+    with start_transaction(name="something"):
+        time.sleep(0)
+
+        for word in ["World", "You"]:
+            _hello_world(word)
+
+    assert len(events) == 1
+
+    (event,) = events
+
+    assert len(event["spans"]) == 3
+    assert event["spans"][0]["description"] == "time.sleep"
+    assert event["spans"][1]["description"] == "tests.test_basics._hello_world"
+    assert event["spans"][2]["description"] == "tests.test_basics._hello_world"
+
+
+class WorldGreeter:
+    def __init__(self, word):
+        self.word = word
+
+    def greet(self, new_word=None):
+        return "Hello, {}".format(new_word if new_word else self.word)
+
+
+def test_functions_to_trace_with_class(sentry_init, capture_events):
+    functions_to_trace = [
+        {"qualified_name": "tests.test_basics.WorldGreeter.greet"},
+    ]
+
+    sentry_init(
+        traces_sample_rate=1.0,
+        functions_to_trace=functions_to_trace,
+    )
+
+    events = capture_events()
+
+    with start_transaction(name="something"):
+        wg = WorldGreeter("World")
+        wg.greet()
+        wg.greet("You")
+
+    assert len(events) == 1
+
+    (event,) = events
+
+    assert len(event["spans"]) == 2
+    assert event["spans"][0]["description"] == "tests.test_basics.WorldGreeter.greet"
+    assert event["spans"][1]["description"] == "tests.test_basics.WorldGreeter.greet"
+
+
+@pytest.mark.skipif(_redis_installed(), reason="skipping because redis is installed")
+def test_redis_disabled_when_not_installed(sentry_init):
+    sentry_init()
+
+    assert Hub.current.get_integration(RedisIntegration) is None
+
+
+def test_multiple_setup_integrations_calls():
+    first_call_return = setup_integrations([NoOpIntegration()], with_defaults=False)
+    assert first_call_return == {NoOpIntegration.identifier: NoOpIntegration()}
+
+    second_call_return = setup_integrations([NoOpIntegration()], with_defaults=False)
+    assert second_call_return == {NoOpIntegration.identifier: NoOpIntegration()}
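+
+    # These dict comparisons presumably rely on NoOpIntegration instances
+    # comparing equal to one another (i.e. the test helper defines __eq__).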
+
+
+class TracingTestClass:
+    @staticmethod
+    def static(arg):
+        return arg
+
+    @classmethod
+    def class_(cls, arg):
+        return cls, arg
+
+
+def test_staticmethod_tracing(sentry_init):
+    test_staticmethod_name = "tests.test_basics.TracingTestClass.static"
+    if not PY2:
+        # Skip this check on Python 2, where __qualname__ does not exist.
+        # Skipping is okay: on Python 3 the assertion would still fail if anything were wrong.
+        assert (
+            ".".join(
+                [
+                    TracingTestClass.static.__module__,
+                    TracingTestClass.static.__qualname__,
+                ]
+            )
+            == test_staticmethod_name
+        ), "The test static method was moved or renamed. Please update the name accordingly"
+
+    sentry_init(functions_to_trace=[{"qualified_name": test_staticmethod_name}])
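+
+    # Whether invoked on the class or on an instance, the traced static
+    # method must produce exactly one child span per call.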
+
+    for instance_or_class in (TracingTestClass, TracingTestClass()):
+        with patch_start_tracing_child() as fake_start_child:
+            assert instance_or_class.static(1) == 1
+            assert fake_start_child.call_count == 1
+
+
+def test_classmethod_tracing(sentry_init):
+    test_classmethod_name = "tests.test_basics.TracingTestClass.class_"
+    if not PY2:
+        # Skip this check on Python 2, where __qualname__ does not exist.
+        # Skipping is okay: on Python 3 the assertion would still fail if anything were wrong.
+        assert (
+            ".".join(
+                [
+                    TracingTestClass.class_.__module__,
+                    TracingTestClass.class_.__qualname__,
+                ]
+            )
+            == test_classmethod_name
+        ), "The test class method was moved or renamed. Please update the name accordingly"
+
+    sentry_init(functions_to_trace=[{"qualified_name": test_classmethod_name}])
+
+    for instance_or_class in (TracingTestClass, TracingTestClass()):
+        with patch_start_tracing_child() as fake_start_child:
+            assert instance_or_class.class_(1) == (TracingTestClass, 1)
+            assert fake_start_child.call_count == 1
diff --git a/tests/test_client.py b/tests/test_client.py
index ffdb831e39..0954a8c5e8 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -5,8 +5,8 @@
 import subprocess
 import sys
 import time
-
 from textwrap import dedent
+
 from sentry_sdk import (
     Hub,
     Client,
@@ -20,10 +20,22 @@
 )
 from sentry_sdk.integrations.executing import ExecutingIntegration
 from sentry_sdk.transport import Transport
-from sentry_sdk._compat import reraise, text_type, PY2
+from sentry_sdk._compat import text_type, PY2
 from sentry_sdk.utils import HAS_CHAINED_EXCEPTIONS
+from sentry_sdk.utils import logger
 from sentry_sdk.serializer import MAX_DATABAG_BREADTH
-from sentry_sdk.consts import DEFAULT_MAX_BREADCRUMBS
+from sentry_sdk.consts import DEFAULT_MAX_BREADCRUMBS, DEFAULT_MAX_VALUE_LENGTH
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+    from typing import Any, Optional, Union
+    from sentry_sdk._types import Event
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
 
 if PY2:
     # Importing ABCs from collections is deprecated, and will stop working in 3.8
@@ -35,13 +47,13 @@
     from collections.abc import Mapping
 
 
-class EventCaptured(Exception):
+class EventCapturedError(Exception):
     pass
 
 
 class _TestTransport(Transport):
     def capture_event(self, event):
-        raise EventCaptured(event)
+        raise EventCapturedError(event)
 
 
 def test_transport_option(monkeypatch):
@@ -227,6 +239,16 @@ def test_transport_option(monkeypatch):
             "arg_https_proxy": "https://localhost/123",
             "expected_proxy_scheme": "https",
         },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "env_http_proxy": None,
+            "env_https_proxy": None,
+            "env_no_proxy": "sentry.io,example.com",
+            "arg_http_proxy": None,
+            "arg_https_proxy": "https://localhost/123",
+            "expected_proxy_scheme": "https",
+            "arg_proxy_headers": {"Test-Header": "foo-bar"},
+        },
     ],
 )
 def test_proxy(monkeypatch, testcase):
@@ -236,17 +258,97 @@ def test_proxy(monkeypatch, testcase):
         monkeypatch.setenv("HTTPS_PROXY", testcase["env_https_proxy"])
     if testcase.get("env_no_proxy") is not None:
         monkeypatch.setenv("NO_PROXY", testcase["env_no_proxy"])
+
     kwargs = {}
+
     if testcase["arg_http_proxy"] is not None:
         kwargs["http_proxy"] = testcase["arg_http_proxy"]
     if testcase["arg_https_proxy"] is not None:
         kwargs["https_proxy"] = testcase["arg_https_proxy"]
+    if testcase.get("arg_proxy_headers") is not None:
+        kwargs["proxy_headers"] = testcase["arg_proxy_headers"]
+
     client = Client(testcase["dsn"], **kwargs)
+
     if testcase["expected_proxy_scheme"] is None:
         assert client.transport._pool.proxy is None
     else:
         assert client.transport._pool.proxy.scheme == testcase["expected_proxy_scheme"]
 
+        if testcase.get("arg_proxy_headers") is not None:
+            assert client.transport._pool.proxy_headers == testcase["arg_proxy_headers"]
+
+
+@pytest.mark.parametrize(
+    "testcase",
+    [
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "arg_http_proxy": "http://localhost/123",
+            "arg_https_proxy": None,
+            "expected_proxy_class": "",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "arg_http_proxy": "socks4a://localhost/123",
+            "arg_https_proxy": None,
+            "expected_proxy_class": "",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "arg_http_proxy": "socks4://localhost/123",
+            "arg_https_proxy": None,
+            "expected_proxy_class": "",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "arg_http_proxy": "socks5h://localhost/123",
+            "arg_https_proxy": None,
+            "expected_proxy_class": "",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "arg_http_proxy": "socks5://localhost/123",
+            "arg_https_proxy": None,
+            "expected_proxy_class": "",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "arg_http_proxy": None,
+            "arg_https_proxy": "socks4a://localhost/123",
+            "expected_proxy_class": "",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "arg_http_proxy": None,
+            "arg_https_proxy": "socks4://localhost/123",
+            "expected_proxy_class": "",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "arg_http_proxy": None,
+            "arg_https_proxy": "socks5h://localhost/123",
+            "expected_proxy_class": "",
+        },
+        {
+            "dsn": "https://foo@sentry.io/123",
+            "arg_http_proxy": None,
+            "arg_https_proxy": "socks5://localhost/123",
+            "expected_proxy_class": "",
+        },
+    ],
+)
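+# SOCKS support comes from urllib3's optional SOCKS extra: a socks4/socks4a/
+# socks5/socks5h proxy URL should make the client build its connection pool
+# from urllib3.contrib.socks, which in turn requires PySocks.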
+def test_socks_proxy(testcase):
+    kwargs = {}
+
+    if testcase["arg_http_proxy"] is not None:
+        kwargs["http_proxy"] = testcase["arg_http_proxy"]
+    if testcase["arg_https_proxy"] is not None:
+        kwargs["https_proxy"] = testcase["arg_https_proxy"]
+
+    client = Client(testcase["dsn"], **kwargs)
+    assert str(type(client.transport._pool)) == testcase["expected_proxy_class"]
+
 
 def test_simple_transport(sentry_init):
     events = []
@@ -256,28 +358,85 @@ def test_simple_transport(sentry_init):
 
 
 def test_ignore_errors(sentry_init, capture_events):
-    class MyDivisionError(ZeroDivisionError):
-        pass
+    with mock.patch(
+        "sentry_sdk.scope.Scope._capture_internal_exception"
+    ) as mock_capture_internal_exception:
 
-    def raise_it(exc_info):
-        reraise(*exc_info)
+        class MyDivisionError(ZeroDivisionError):
+            pass
 
-    sentry_init(ignore_errors=[ZeroDivisionError], transport=_TestTransport())
-    Hub.current._capture_internal_exception = raise_it
+        sentry_init(ignore_errors=[ZeroDivisionError], transport=_TestTransport())
+
+        def e(exc):
+            try:
+                raise exc
+            except Exception:
+                capture_exception()
+
+        e(ZeroDivisionError())
+        e(MyDivisionError())
+        e(ValueError())
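+
+        # The ZeroDivisionError and its subclass are ignored. Only the
+        # ValueError reaches _TestTransport, whose capture_event raises
+        # EventCapturedError, which the SDK swallows and records via
+        # Scope._capture_internal_exception (asserted below).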
+
+        assert mock_capture_internal_exception.call_count == 1
+        assert mock_capture_internal_exception.call_args[0][0][0] == EventCapturedError
 
-    def e(exc):
-        try:
-            raise exc
-        except Exception:
-            capture_exception()
 
-    e(ZeroDivisionError())
-    e(MyDivisionError())
-    pytest.raises(EventCaptured, lambda: e(ValueError()))
+def test_with_locals_deprecation_enabled(sentry_init):
+    with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
+        sentry_init(with_locals=True)
 
+        client = Hub.current.client
+        assert "with_locals" not in client.options
+        assert "include_local_variables" in client.options
+        assert client.options["include_local_variables"]
 
-def test_with_locals_enabled(sentry_init, capture_events):
-    sentry_init(with_locals=True)
+        fake_warning.assert_called_once_with(
+            "Deprecated: The option 'with_locals' was renamed to 'include_local_variables'. Please use 'include_local_variables'. The option 'with_locals' will be removed in the future."
+        )
+
+
+def test_with_locals_deprecation_disabled(sentry_init):
+    with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
+        sentry_init(with_locals=False)
+
+        client = Hub.current.client
+        assert "with_locals" not in client.options
+        assert "include_local_variables" in client.options
+        assert not client.options["include_local_variables"]
+
+        fake_warning.assert_called_once_with(
+            "Deprecated: The option 'with_locals' was renamed to 'include_local_variables'. Please use 'include_local_variables'. The option 'with_locals' will be removed in the future."
+        )
+
+
+def test_include_local_variables_deprecation(sentry_init):
+    with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
+        sentry_init(include_local_variables=False)
+
+        client = Hub.current.client
+        assert "with_locals" not in client.options
+        assert "include_local_variables" in client.options
+        assert not client.options["include_local_variables"]
+
+        fake_warning.assert_not_called()
+
+
+def test_request_bodies_deprecation(sentry_init):
+    with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
+        sentry_init(request_bodies="small")
+
+        client = Hub.current.client
+        assert "request_bodies" not in client.options
+        assert "max_request_body_size" in client.options
+        assert client.options["max_request_body_size"] == "small"
+
+        fake_warning.assert_called_once_with(
+            "Deprecated: The option 'request_bodies' was renamed to 'max_request_body_size'. Please use 'max_request_body_size'. The option 'request_bodies' will be removed in the future."
+        )
+
+
+def test_include_local_variables_enabled(sentry_init, capture_events):
+    sentry_init(include_local_variables=True)
     events = capture_events()
     try:
         1 / 0
@@ -292,8 +451,8 @@ def test_with_locals_enabled(sentry_init, capture_events):
     )
 
 
-def test_with_locals_disabled(sentry_init, capture_events):
-    sentry_init(with_locals=False)
+def test_include_local_variables_disabled(sentry_init, capture_events):
+    sentry_init(include_local_variables=False)
     events = capture_events()
     try:
         1 / 0
@@ -308,6 +467,38 @@ def test_with_locals_disabled(sentry_init, capture_events):
     )
 
 
+def test_include_source_context_enabled(sentry_init, capture_events):
+    sentry_init(include_source_context=True)
+    events = capture_events()
+    try:
+        1 / 0
+    except Exception:
+        capture_exception()
+
+    (event,) = events
+
+    frame = event["exception"]["values"][0]["stacktrace"]["frames"][0]
+    assert "post_context" in frame
+    assert "pre_context" in frame
+    assert "context_line" in frame
+
+
+def test_include_source_context_disabled(sentry_init, capture_events):
+    sentry_init(include_source_context=False)
+    events = capture_events()
+    try:
+        1 / 0
+    except Exception:
+        capture_exception()
+
+    (event,) = events
+
+    frame = event["exception"]["values"][0]["stacktrace"]["frames"][0]
+    assert "post_context" not in frame
+    assert "pre_context" not in frame
+    assert "context_line" not in frame
+
+
 @pytest.mark.parametrize("integrations", [[], [ExecutingIntegration()]])
 def test_function_names(sentry_init, capture_events, integrations):
     sentry_init(integrations=integrations)
@@ -357,7 +548,7 @@ def bar():
 
 
 def test_attach_stacktrace_enabled_no_locals(sentry_init, capture_events):
-    sentry_init(attach_stacktrace=True, with_locals=False)
+    sentry_init(attach_stacktrace=True, include_local_variables=False)
     events = capture_events()
 
     def foo():
@@ -386,7 +577,6 @@ def test_attach_stacktrace_in_app(sentry_init, capture_events):
     pytest_frames = [f for f in frames if f["module"].startswith("_pytest")]
     assert pytest_frames
     assert all(f["in_app"] is False for f in pytest_frames)
-    assert any(f["in_app"] for f in frames)
 
 
 def test_attach_stacktrace_disabled(sentry_init, capture_events):
@@ -400,8 +590,8 @@ def test_attach_stacktrace_disabled(sentry_init, capture_events):
 
 def test_capture_event_works(sentry_init):
     sentry_init(transport=_TestTransport())
-    pytest.raises(EventCaptured, lambda: capture_event({}))
-    pytest.raises(EventCaptured, lambda: capture_event({}))
+    pytest.raises(EventCapturedError, lambda: capture_event({}))
+    pytest.raises(EventCapturedError, lambda: capture_event({}))
 
 
 @pytest.mark.parametrize("num_messages", [10, 20])
@@ -744,10 +934,10 @@ def test_errno_errors(sentry_init, capture_events):
     sentry_init()
     events = capture_events()
 
-    class Foo(Exception):
+    class FooError(Exception):
         errno = 69
 
-    capture_exception(Foo())
+    capture_exception(FooError())
 
     (event,) = events
 
@@ -830,7 +1020,7 @@ def test_init_string_types(dsn, sentry_init):
     )
 
 
-def test_envelope_types():
+def test_sending_events_with_tracing():
     """
     Tests for calling the right transport method (capture_event vs
     capture_envelope) from the SDK client for different data types.
@@ -846,8 +1036,56 @@ def capture_envelope(self, envelope):
         def capture_event(self, event):
             events.append(event)
 
-    with Hub(Client(traces_sample_rate=1.0, transport=CustomTransport())):
-        event_id = capture_message("hello")
+    with Hub(Client(enable_tracing=True, transport=CustomTransport())):
+        try:
+            1 / 0
+        except Exception:
+            event_id = capture_exception()
+
+        # Assert error events get passed in via capture_envelope
+        assert not events
+        envelope = envelopes.pop()
+        (item,) = envelope.items
+        assert item.data_category == "error"
+        assert item.headers.get("type") == "event"
+        assert item.get_event()["event_id"] == event_id
+
+        with start_transaction(name="foo"):
+            pass
+
+        # Assert transactions get passed in via capture_envelope
+        assert not events
+        envelope = envelopes.pop()
+
+        (item,) = envelope.items
+        assert item.data_category == "transaction"
+        assert item.headers.get("type") == "transaction"
+
+    assert not envelopes
+    assert not events
+
+
+def test_sending_events_with_no_tracing():
+    """
+    Tests for calling the right transport method (capture_event vs
+    capture_envelope) from the SDK client for different data types.
+    """
+
+    envelopes = []
+    events = []
+
+    class CustomTransport(Transport):
+        def capture_envelope(self, envelope):
+            envelopes.append(envelope)
+
+        def capture_event(self, event):
+            events.append(event)
+
+    with Hub(Client(enable_tracing=False, transport=CustomTransport())):
+        try:
+            1 / 0
+        except Exception:
+            event_id = capture_exception()
 
         # Assert error events get passed in via capture_event
         assert not envelopes
@@ -861,11 +1099,7 @@ def capture_event(self, event):
 
         # Assert transactions get passed in via capture_envelope
         assert not events
-        envelope = envelopes.pop()
-
-        (item,) = envelope.items
-        assert item.data_category == "transaction"
-        assert item.headers.get("type") == "transaction"
+        assert not envelopes
 
     assert not envelopes
     assert not events
@@ -893,3 +1127,232 @@ def test_multiple_positional_args(sentry_init):
     with pytest.raises(TypeError) as exinfo:
         sentry_init(1, None)
     assert "Only single positional argument is expected" in str(exinfo.value)
+
+
+@pytest.mark.parametrize(
+    "sdk_options, expected_data_length",
+    [
+        ({}, DEFAULT_MAX_VALUE_LENGTH),
+        ({"max_value_length": 1800}, 1800),
+    ],
+)
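+# A 2000-character message should come back truncated to the configured
+# limit: DEFAULT_MAX_VALUE_LENGTH unless max_value_length overrides it.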
+def test_max_value_length_option(
+    sentry_init, capture_events, sdk_options, expected_data_length
+):
+    sentry_init(sdk_options)
+    events = capture_events()
+
+    capture_message("a" * 2000)
+
+    assert len(events[0]["message"]) == expected_data_length
+
+
+@pytest.mark.parametrize(
+    "client_option,env_var_value,debug_output_expected",
+    [
+        (None, "", False),
+        (None, "t", True),
+        (None, "1", True),
+        (None, "True", True),
+        (None, "true", True),
+        (None, "f", False),
+        (None, "0", False),
+        (None, "False", False),
+        (None, "false", False),
+        (None, "xxx", False),
+        (True, "", True),
+        (True, "t", True),
+        (True, "1", True),
+        (True, "True", True),
+        (True, "true", True),
+        (True, "f", True),
+        (True, "0", True),
+        (True, "False", True),
+        (True, "false", True),
+        (True, "xxx", True),
+        (False, "", False),
+        (False, "t", False),
+        (False, "1", False),
+        (False, "True", False),
+        (False, "true", False),
+        (False, "f", False),
+        (False, "0", False),
+        (False, "False", False),
+        (False, "false", False),
+        (False, "xxx", False),
+    ],
+)
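+# An explicit debug= argument always wins; only when it is omitted does the
+# SENTRY_DEBUG environment variable apply, and per the table above only the
+# values "t", "1", "True" and "true" count as enabling it.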
+@pytest.mark.tests_internal_exceptions
+def test_debug_option(
+    sentry_init,
+    monkeypatch,
+    caplog,
+    client_option,
+    env_var_value,
+    debug_output_expected,
+):
+    monkeypatch.setenv("SENTRY_DEBUG", env_var_value)
+
+    if client_option is None:
+        sentry_init()
+    else:
+        sentry_init(debug=client_option)
+
+    Hub.current._capture_internal_exception(
+        (ValueError, ValueError("something is wrong"), None)
+    )
+    if debug_output_expected:
+        assert "something is wrong" in caplog.text
+    else:
+        assert "something is wrong" not in caplog.text
+
+
+class IssuesSamplerTestConfig:
+    def __init__(
+        self,
+        expected_events,
+        sampler_function=None,
+        sample_rate=None,
+        exception_to_raise=Exception,
+    ):
+        # type: (int, Optional[Callable[[Event], Union[float, bool]]], Optional[float], type[Exception]) -> None
+        self.sampler_function_mock = (
+            None
+            if sampler_function is None
+            else mock.MagicMock(side_effect=sampler_function)
+        )
+        self.expected_events = expected_events
+        self.sample_rate = sample_rate
+        self.exception_to_raise = exception_to_raise
+
+    def init_sdk(self, sentry_init):
+        # type: (Callable[[*Any], None]) -> None
+        sentry_init(
+            error_sampler=self.sampler_function_mock, sample_rate=self.sample_rate
+        )
+
+    def raise_exception(self):
+        # type: () -> None
+        raise self.exception_to_raise()
+
+
+@mock.patch("sentry_sdk.client.random.random", return_value=0.618)
+@pytest.mark.parametrize(
+    "test_config",
+    (
+        # Baseline test with error_sampler only, both floats and bools
+        IssuesSamplerTestConfig(sampler_function=lambda *_: 1.0, expected_events=1),
+        IssuesSamplerTestConfig(sampler_function=lambda *_: 0.7, expected_events=1),
+        IssuesSamplerTestConfig(sampler_function=lambda *_: 0.6, expected_events=0),
+        IssuesSamplerTestConfig(sampler_function=lambda *_: 0.0, expected_events=0),
+        IssuesSamplerTestConfig(sampler_function=lambda *_: True, expected_events=1),
+        IssuesSamplerTestConfig(sampler_function=lambda *_: False, expected_events=0),
+        # Baseline test with sample_rate only
+        IssuesSamplerTestConfig(sample_rate=1.0, expected_events=1),
+        IssuesSamplerTestConfig(sample_rate=0.7, expected_events=1),
+        IssuesSamplerTestConfig(sample_rate=0.6, expected_events=0),
+        IssuesSamplerTestConfig(sample_rate=0.0, expected_events=0),
+        # error_sampler takes precedence over sample_rate
+        IssuesSamplerTestConfig(
+            sampler_function=lambda *_: 1.0, sample_rate=0.0, expected_events=1
+        ),
+        IssuesSamplerTestConfig(
+            sampler_function=lambda *_: 0.0, sample_rate=1.0, expected_events=0
+        ),
+        # Different sample rates based on exception, retrieved both from event and hint
+        IssuesSamplerTestConfig(
+            sampler_function=lambda event, _: {
+                "ZeroDivisionError": 1.0,
+                "AttributeError": 0.0,
+            }[event["exception"]["values"][0]["type"]],
+            exception_to_raise=ZeroDivisionError,
+            expected_events=1,
+        ),
+        IssuesSamplerTestConfig(
+            sampler_function=lambda event, _: {
+                "ZeroDivisionError": 1.0,
+                "AttributeError": 0.0,
+            }[event["exception"]["values"][0]["type"]],
+            exception_to_raise=AttributeError,
+            expected_events=0,
+        ),
+        IssuesSamplerTestConfig(
+            sampler_function=lambda _, hint: {
+                ZeroDivisionError: 1.0,
+                AttributeError: 0.0,
+            }[hint["exc_info"][0]],
+            exception_to_raise=ZeroDivisionError,
+            expected_events=1,
+        ),
+        IssuesSamplerTestConfig(
+            sampler_function=lambda _, hint: {
+                ZeroDivisionError: 1.0,
+                AttributeError: 0.0,
+            }[hint["exc_info"][0]],
+            exception_to_raise=AttributeError,
+            expected_events=0,
+        ),
+        # If sampler returns invalid value, we should still send the event
+        IssuesSamplerTestConfig(
+            sampler_function=lambda *_: "This is an invalid return value for the sampler",
+            expected_events=1,
+        ),
+    ),
+)
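+# random.random() is patched to return 0.618, so sample rates of 0.7 and
+# above keep the event while rates of 0.6 and below drop it.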
+def test_error_sampler(_, sentry_init, capture_events, test_config):
+    test_config.init_sdk(sentry_init)
+
+    events = capture_events()
+
+    try:
+        test_config.raise_exception()
+    except Exception:
+        capture_exception()
+
+    assert len(events) == test_config.expected_events
+
+    if test_config.sampler_function_mock is not None:
+        assert test_config.sampler_function_mock.call_count == 1
+
+        # Ensure two arguments (the event and hint) were passed to the sampler function
+        assert len(test_config.sampler_function_mock.call_args[0]) == 2
+
+
+@pytest.mark.forked
+@pytest.mark.parametrize(
+    "opt,missing_flags",
+    [
+        # lazy mode with enable-threads, no warning
+        [{"enable-threads": True, "lazy-apps": True}, []],
+        [{"enable-threads": "true", "lazy-apps": b"1"}, []],
+        # preforking mode with enable-threads and py-call-uwsgi-fork-hooks, no warning
+        [{"enable-threads": True, "py-call-uwsgi-fork-hooks": True}, []],
+        [{"enable-threads": b"true", "py-call-uwsgi-fork-hooks": b"on"}, []],
+        # lazy mode, no enable-threads, warning
+        [{"lazy-apps": True}, ["--enable-threads"]],
+        [{"enable-threads": b"false", "lazy-apps": True}, ["--enable-threads"]],
+        [{"enable-threads": b"0", "lazy": True}, ["--enable-threads"]],
+        # preforking mode, no enable-threads or py-call-uwsgi-fork-hooks, warning
+        [{}, ["--enable-threads", "--py-call-uwsgi-fork-hooks"]],
+        [{"processes": b"2"}, ["--enable-threads", "--py-call-uwsgi-fork-hooks"]],
+        [{"enable-threads": True}, ["--py-call-uwsgi-fork-hooks"]],
+        [{"enable-threads": b"1"}, ["--py-call-uwsgi-fork-hooks"]],
+        [
+            {"enable-threads": b"false"},
+            ["--enable-threads", "--py-call-uwsgi-fork-hooks"],
+        ],
+        [{"py-call-uwsgi-fork-hooks": True}, ["--enable-threads"]],
+    ],
+)
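+# Profiling under uWSGI needs threads enabled and, in preforking mode,
+# py-call-uwsgi-fork-hooks; each case lists the flags whose absence should
+# trigger exactly one warning naming them.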
+def test_uwsgi_warnings(sentry_init, recwarn, opt, missing_flags):
+    uwsgi = mock.MagicMock()
+    uwsgi.opt = opt
+    with mock.patch.dict("sys.modules", uwsgi=uwsgi):
+        sentry_init(profiles_sample_rate=1.0)
+        if missing_flags:
+            assert len(recwarn) == 1
+            record = recwarn.pop()
+            for flag in missing_flags:
+                assert flag in str(record.message)
+        else:
+            assert not recwarn
diff --git a/tests/test_conftest.py b/tests/test_conftest.py
index 8a2d4cee24..1b006ed12e 100644
--- a/tests/test_conftest.py
+++ b/tests/test_conftest.py
@@ -24,7 +24,6 @@
 def test_string_containing(
     test_string, expected_result, StringContaining  # noqa: N803
 ):
-
     assert (test_string == StringContaining("dogs")) is expected_result
 
 
@@ -49,7 +48,6 @@ def test_string_containing(
 def test_dictionary_containing(
     test_dict, expected_result, DictionaryContaining  # noqa: N803
 ):
-
     assert (
         test_dict == DictionaryContaining({"dogs": "yes", "cats": "maybe"})
     ) is expected_result
@@ -98,7 +96,6 @@ def test_object_described_by(
     attrs_only_result,
     ObjectDescribedBy,  # noqa: N803
 ):
-
     assert (
         test_obj == ObjectDescribedBy(type=Dog, attrs={"name": "Maisey", "age": 7})
     ) is type_and_attrs_result
diff --git a/tests/test_envelope.py b/tests/test_envelope.py
index 582fe6236f..a8b3ac11f4 100644
--- a/tests/test_envelope.py
+++ b/tests/test_envelope.py
@@ -1,16 +1,8 @@
 from sentry_sdk.envelope import Envelope
 from sentry_sdk.session import Session
 from sentry_sdk import capture_event
-from sentry_sdk.tracing_utils import compute_tracestate_value
 import sentry_sdk.client
 
-import pytest
-
-try:
-    from unittest import mock  # python 3.3 and above
-except ImportError:
-    import mock  # python < 3.3
-
 
 def generate_transaction_item():
     return {
@@ -26,16 +18,15 @@ def generate_transaction_item():
                 "parent_span_id": None,
                 "description": "",
                 "op": "greeting.sniff",
-                "tracestate": compute_tracestate_value(
-                    {
-                        "trace_id": "12312012123120121231201212312012",
-                        "environment": "dogpark",
-                        "release": "off.leash.park",
-                        "public_key": "dogsarebadatkeepingsecrets",
-                        "user": {"id": 12312013, "segment": "bigs"},
-                        "transaction": "/interactions/other-dogs/new-dog",
-                    }
-                ),
+                "dynamic_sampling_context": {
+                    "trace_id": "12312012123120121231201212312012",
+                    "sample_rate": "1.0",
+                    "environment": "dogpark",
+                    "release": "off.leash.park",
+                    "public_key": "dogsarebadatkeepingsecrets",
+                    "user_segment": "bigs",
+                    "transaction": "/interactions/other-dogs/new-dog",
+                },
             }
         },
         "spans": [
@@ -88,25 +79,16 @@ def test_add_and_get_session():
             assert item.payload.json == expected.to_json()
 
 
-# TODO (kmclb) remove this parameterization once tracestate is a real feature
-@pytest.mark.parametrize("tracestate_enabled", [True, False])
-def test_envelope_headers(
-    sentry_init, capture_envelopes, monkeypatch, tracestate_enabled
-):
+def test_envelope_headers(sentry_init, capture_envelopes, monkeypatch):
     monkeypatch.setattr(
         sentry_sdk.client,
         "format_timestamp",
         lambda x: "2012-11-21T12:31:12.415908Z",
     )
 
-    monkeypatch.setattr(
-        sentry_sdk.client,
-        "has_tracestate_enabled",
-        mock.Mock(return_value=tracestate_enabled),
-    )
-
     sentry_init(
         dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
+        traces_sample_rate=1.0,
     )
     envelopes = capture_envelopes()
 
@@ -114,24 +96,19 @@ def test_envelope_headers(
 
     assert len(envelopes) == 1
 
-    if tracestate_enabled:
-        assert envelopes[0].headers == {
-            "event_id": "15210411201320122115110420122013",
-            "sent_at": "2012-11-21T12:31:12.415908Z",
-            "trace": {
-                "trace_id": "12312012123120121231201212312012",
-                "environment": "dogpark",
-                "release": "off.leash.park",
-                "public_key": "dogsarebadatkeepingsecrets",
-                "user": {"id": 12312013, "segment": "bigs"},
-                "transaction": "/interactions/other-dogs/new-dog",
-            },
-        }
-    else:
-        assert envelopes[0].headers == {
-            "event_id": "15210411201320122115110420122013",
-            "sent_at": "2012-11-21T12:31:12.415908Z",
-        }
+    assert envelopes[0].headers == {
+        "event_id": "15210411201320122115110420122013",
+        "sent_at": "2012-11-21T12:31:12.415908Z",
+        "trace": {
+            "trace_id": "12312012123120121231201212312012",
+            "sample_rate": "1.0",
+            "environment": "dogpark",
+            "release": "off.leash.park",
+            "public_key": "dogsarebadatkeepingsecrets",
+            "user_segment": "bigs",
+            "transaction": "/interactions/other-dogs/new-dog",
+        },
+    }
 
 
 def test_envelope_with_sized_items():
@@ -141,15 +118,15 @@ def test_envelope_with_sized_items():
     """
     envelope_raw = (
         b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc"}\n'
-        + b'{"type":"type1","length":4 }\n1234\n'
-        + b'{"type":"type2","length":4 }\nabcd\n'
-        + b'{"type":"type3","length":0}\n\n'
-        + b'{"type":"type4","length":4 }\nab12\n'
+        b'{"type":"type1","length":4 }\n1234\n'
+        b'{"type":"type2","length":4 }\nabcd\n'
+        b'{"type":"type3","length":0}\n\n'
+        b'{"type":"type4","length":4 }\nab12\n'
     )
     envelope_raw_eof_terminated = envelope_raw[:-1]
 
-    for envelope_raw in (envelope_raw, envelope_raw_eof_terminated):
-        actual = Envelope.deserialize(envelope_raw)
+    for envelope in (envelope_raw, envelope_raw_eof_terminated):
+        actual = Envelope.deserialize(envelope)
 
         items = [item for item in actual]
 
@@ -177,15 +154,15 @@ def test_envelope_with_implicitly_sized_items():
     """
     envelope_raw = (
         b'{"event_id":"9ec79c33ec9942ab8353589fcb2e04dc"}\n'
-        + b'{"type":"type1"}\n1234\n'
-        + b'{"type":"type2"}\nabcd\n'
-        + b'{"type":"type3"}\n\n'
-        + b'{"type":"type4"}\nab12\n'
+        b'{"type":"type1"}\n1234\n'
+        b'{"type":"type2"}\nabcd\n'
+        b'{"type":"type3"}\n\n'
+        b'{"type":"type4"}\nab12\n'
     )
     envelope_raw_eof_terminated = envelope_raw[:-1]
 
-    for envelope_raw in (envelope_raw, envelope_raw_eof_terminated):
-        actual = Envelope.deserialize(envelope_raw)
+    for envelope in (envelope_raw, envelope_raw_eof_terminated):
+        actual = Envelope.deserialize(envelope)
         assert actual.headers["event_id"] == "9ec79c33ec9942ab8353589fcb2e04dc"
 
         items = [item for item in actual]
diff --git a/tests/test_exceptiongroup.py b/tests/test_exceptiongroup.py
new file mode 100644
index 0000000000..4c7afc58eb
--- /dev/null
+++ b/tests/test_exceptiongroup.py
@@ -0,0 +1,308 @@
+import sys
+import pytest
+
+from sentry_sdk.utils import event_from_exception
+
+
+try:
+    # Python 3.11
+    from builtins import ExceptionGroup  # type: ignore
+except ImportError:
+    # Python 3.10 and below
+    ExceptionGroup = None
+
+
+minimum_python_311 = pytest.mark.skipif(
+    sys.version_info < (3, 11), reason="ExceptionGroup tests need Python >= 3.11"
+)
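+
+# The SDK flattens a nested ExceptionGroup into a flat list of exception
+# values; each entry's "mechanism" carries exception_id/parent_id and a
+# "source" attribute path, so the original tree shape stays recoverable.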
+
+
+@minimum_python_311
+def test_exceptiongroup():
+    exception_group = None
+
+    try:
+        try:
+            raise RuntimeError("something")
+        except RuntimeError:
+            raise ExceptionGroup(
+                "nested",
+                [
+                    ValueError(654),
+                    ExceptionGroup(
+                        "imports",
+                        [
+                            ImportError("no_such_module"),
+                            ModuleNotFoundError("another_module"),
+                        ],
+                    ),
+                    TypeError("int"),
+                ],
+            )
+    except ExceptionGroup as e:
+        exception_group = e
+
+    (event, _) = event_from_exception(
+        exception_group,
+        client_options={
+            "include_local_variables": True,
+            "include_source_context": True,
+            "max_value_length": 1024,
+        },
+        mechanism={"type": "test_suite", "handled": False},
+    )
+
+    values = event["exception"]["values"]
+
+    # For this test the stacktrace and the module is not important
+    for x in values:
+        if "stacktrace" in x:
+            del x["stacktrace"]
+        if "module" in x:
+            del x["module"]
+
+    expected_values = [
+        {
+            "mechanism": {
+                "exception_id": 6,
+                "handled": False,
+                "parent_id": 0,
+                "source": "exceptions[2]",
+                "type": "chained",
+            },
+            "type": "TypeError",
+            "value": "int",
+        },
+        {
+            "mechanism": {
+                "exception_id": 5,
+                "handled": False,
+                "parent_id": 3,
+                "source": "exceptions[1]",
+                "type": "chained",
+            },
+            "type": "ModuleNotFoundError",
+            "value": "another_module",
+        },
+        {
+            "mechanism": {
+                "exception_id": 4,
+                "handled": False,
+                "parent_id": 3,
+                "source": "exceptions[0]",
+                "type": "chained",
+            },
+            "type": "ImportError",
+            "value": "no_such_module",
+        },
+        {
+            "mechanism": {
+                "exception_id": 3,
+                "handled": False,
+                "is_exception_group": True,
+                "parent_id": 0,
+                "source": "exceptions[1]",
+                "type": "chained",
+            },
+            "type": "ExceptionGroup",
+            "value": "imports",
+        },
+        {
+            "mechanism": {
+                "exception_id": 2,
+                "handled": False,
+                "parent_id": 0,
+                "source": "exceptions[0]",
+                "type": "chained",
+            },
+            "type": "ValueError",
+            "value": "654",
+        },
+        {
+            "mechanism": {
+                "exception_id": 1,
+                "handled": False,
+                "parent_id": 0,
+                "source": "__context__",
+                "type": "chained",
+            },
+            "type": "RuntimeError",
+            "value": "something",
+        },
+        {
+            "mechanism": {
+                "exception_id": 0,
+                "handled": False,
+                "is_exception_group": True,
+                "type": "test_suite",
+            },
+            "type": "ExceptionGroup",
+            "value": "nested",
+        },
+    ]
+
+    assert values == expected_values
+
+
+@minimum_python_311
+def test_exceptiongroup_simple():
+    exception_group = None
+
+    try:
+        raise ExceptionGroup(
+            "simple",
+            [
+                RuntimeError("something strange's going on"),
+            ],
+        )
+    except ExceptionGroup as e:
+        exception_group = e
+
+    (event, _) = event_from_exception(
+        exception_group,
+        client_options={
+            "include_local_variables": True,
+            "include_source_context": True,
+            "max_value_length": 1024,
+        },
+        mechanism={"type": "test_suite", "handled": False},
+    )
+
+    exception_values = event["exception"]["values"]
+
+    assert len(exception_values) == 2
+
+    assert exception_values[0]["type"] == "RuntimeError"
+    assert exception_values[0]["value"] == "something strange's going on"
+    assert exception_values[0]["mechanism"] == {
+        "type": "chained",
+        "handled": False,
+        "exception_id": 1,
+        "source": "exceptions[0]",
+        "parent_id": 0,
+    }
+
+    assert exception_values[1]["type"] == "ExceptionGroup"
+    assert exception_values[1]["value"] == "simple"
+    assert exception_values[1]["mechanism"] == {
+        "type": "test_suite",
+        "handled": False,
+        "exception_id": 0,
+        "is_exception_group": True,
+    }
+    frame = exception_values[1]["stacktrace"]["frames"][0]
+    assert frame["module"] == "tests.test_exceptiongroup"
+    assert frame["context_line"] == "        raise ExceptionGroup("
+
+
+@minimum_python_311
+def test_exception_chain_cause():
+    exception_chain_cause = ValueError("Exception with cause")
+    exception_chain_cause.__context__ = TypeError("Exception in __context__")
+    exception_chain_cause.__cause__ = TypeError(
+        "Exception in __cause__"
+    )  # this implicitly sets exception_chain_cause.__suppress_context__=True
+
+    (event, _) = event_from_exception(
+        exception_chain_cause,
+        client_options={
+            "include_local_variables": True,
+            "include_source_context": True,
+            "max_value_length": 1024,
+        },
+        mechanism={"type": "test_suite", "handled": False},
+    )
+
+    expected_exception_values = [
+        {
+            "mechanism": {
+                "handled": False,
+                "type": "test_suite",
+            },
+            "module": None,
+            "type": "TypeError",
+            "value": "Exception in __cause__",
+        },
+        {
+            "mechanism": {
+                "handled": False,
+                "type": "test_suite",
+            },
+            "module": None,
+            "type": "ValueError",
+            "value": "Exception with cause",
+        },
+    ]
+
+    exception_values = event["exception"]["values"]
+    assert exception_values == expected_exception_values
+
+
+@minimum_python_311
+def test_exception_chain_context():
+    exception_chain_context = ValueError("Exception with context")
+    exception_chain_context.__context__ = TypeError("Exception in __context__")
+
+    (event, _) = event_from_exception(
+        exception_chain_context,
+        client_options={
+            "include_local_variables": True,
+            "include_source_context": True,
+            "max_value_length": 1024,
+        },
+        mechanism={"type": "test_suite", "handled": False},
+    )
+
+    expected_exception_values = [
+        {
+            "mechanism": {
+                "handled": False,
+                "type": "test_suite",
+            },
+            "module": None,
+            "type": "TypeError",
+            "value": "Exception in __context__",
+        },
+        {
+            "mechanism": {
+                "handled": False,
+                "type": "test_suite",
+            },
+            "module": None,
+            "type": "ValueError",
+            "value": "Exception with context",
+        },
+    ]
+
+    exception_values = event["exception"]["values"]
+    assert exception_values == expected_exception_values
+
+
+@minimum_python_311
+def test_simple_exception():
+    simple_exception = ValueError("A simple exception")
+
+    (event, _) = event_from_exception(
+        simple_exception,
+        client_options={
+            "include_local_variables": True,
+            "include_source_context": True,
+            "max_value_length": 1024,
+        },
+        mechanism={"type": "test_suite", "handled": False},
+    )
+
+    expected_exception_values = [
+        {
+            "mechanism": {
+                "handled": False,
+                "type": "test_suite",
+            },
+            "module": None,
+            "type": "ValueError",
+            "value": "A simple exception",
+        },
+    ]
+
+    exception_values = event["exception"]["values"]
+    assert exception_values == expected_exception_values
diff --git a/tests/test_lru_cache.py b/tests/test_lru_cache.py
new file mode 100644
index 0000000000..5343e76169
--- /dev/null
+++ b/tests/test_lru_cache.py
@@ -0,0 +1,37 @@
+import pytest
+
+from sentry_sdk._lru_cache import LRUCache
+
+
+@pytest.mark.parametrize("max_size", [-10, -1, 0])
+def test_illegal_size(max_size):
+    with pytest.raises(AssertionError):
+        LRUCache(max_size=max_size)
+
+
+def test_simple_set_get():
+    cache = LRUCache(1)
+    assert cache.get(1) is None
+    cache.set(1, 1)
+    assert cache.get(1) == 1
+
+
+def test_overwrite():
+    cache = LRUCache(1)
+    assert cache.get(1) is None
+    cache.set(1, 1)
+    assert cache.get(1) == 1
+    cache.set(1, 2)
+    assert cache.get(1) == 2
+
+
+def test_cache_eviction():
+    cache = LRUCache(3)
+    cache.set(1, 1)
+    cache.set(2, 2)
+    cache.set(3, 3)
+    assert cache.get(1) == 1
+    assert cache.get(2) == 2
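+    # Reading keys 1 and 2 refreshes their recency, leaving key 3 as the
+    # least recently used entry; inserting a fourth value must evict it.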
+    cache.set(4, 4)
+    assert cache.get(3) is None
+    assert cache.get(4) == 4
diff --git a/tests/test_metrics.py b/tests/test_metrics.py
new file mode 100644
index 0000000000..741935615d
--- /dev/null
+++ b/tests/test_metrics.py
@@ -0,0 +1,974 @@
+# coding: utf-8
+import pytest
+import sys
+import time
+import linecache
+
+from sentry_sdk import Hub, metrics, push_scope, start_transaction
+from sentry_sdk.tracing import TRANSACTION_SOURCE_ROUTE
+from sentry_sdk.envelope import parse_json
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+try:
+    import gevent
+except ImportError:
+    gevent = None
+
+
+minimum_python_37_with_gevent = pytest.mark.skipif(
+    gevent and sys.version_info < (3, 7),
+    reason="Require Python 3.7 or higher with gevent",
+)
+
+
+def parse_metrics(bytes):
+    rv = []
+    for line in bytes.splitlines():
+        pieces = line.decode("utf-8").split("|")
+        payload = pieces[0].split(":")
+        name = payload[0]
+        values = payload[1:]
+        ty = pieces[1]
+        ts = None
+        tags = {}
+        for piece in pieces[2:]:
+            if piece[0] == "#":
+                for pair in piece[1:].split(","):
+                    k, v = pair.split(":", 1)
+                    old = tags.get(k)
+                    if old is not None:
+                        if isinstance(old, list):
+                            old.append(v)
+                        else:
+                            tags[k] = [old, v]
+                    else:
+                        tags[k] = v
+            elif piece[0] == "T":
+                ts = int(piece[1:])
+            else:
+                raise ValueError("unknown piece %r" % (piece,))
+        rv.append((ts, name, ty, values, tags))
+    # sort by timestamp, name, and each entry's own tags (x[4])
+    rv.sort(key=lambda x: (x[0], x[1], tuple(sorted(x[4].items()))))
+    return rv
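+
+# For reference, a single statsd line in the format parse_metrics() consumes
+# (illustrative values only):
+#   b"foobar@none:3.0|c|#foo:bar,blub:blah|T1712345678"
+# parses to (1712345678, "foobar@none", "c", ["3.0"], {"foo": "bar", "blub": "blah"}).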
+
+
+@minimum_python_37_with_gevent
+@pytest.mark.forked
+def test_increment(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
+    sentry_init(
+        release="fun-release",
+        environment="not-fun-env",
+        _experiments={"enable_metrics": True, "metric_code_locations": True},
+    )
+    ts = time.time()
+    envelopes = capture_envelopes()
+
+    metrics.increment("foobar", 1.0, tags={"foo": "bar", "blub": "blah"}, timestamp=ts)
+    # python specific alias
+    metrics.incr("foobar", 2.0, tags={"foo": "bar", "blub": "blah"}, timestamp=ts)
+    Hub.current.flush()
+
+    (envelope,) = envelopes
+    statsd_item, meta_item = envelope.items
+
+    assert statsd_item.headers["type"] == "statsd"
+    m = parse_metrics(statsd_item.payload.get_bytes())
+
+    assert len(m) == 1
+    assert m[0][1] == "foobar@none"
+    assert m[0][2] == "c"
+    assert m[0][3] == ["3.0"]
+    assert m[0][4] == {
+        "blub": "blah",
+        "foo": "bar",
+        "release": "fun-release",
+        "environment": "not-fun-env",
+    }
+
+    assert meta_item.headers["type"] == "metric_meta"
+    assert parse_json(meta_item.payload.get_bytes()) == {
+        "timestamp": mock.ANY,
+        "mapping": {
+            "c:foobar@none": [
+                {
+                    "type": "location",
+                    "filename": "tests/test_metrics.py",
+                    "abs_path": __file__,
+                    "function": sys._getframe().f_code.co_name,
+                    "module": __name__,
+                    "lineno": mock.ANY,
+                    "pre_context": mock.ANY,
+                    "context_line": mock.ANY,
+                    "post_context": mock.ANY,
+                }
+            ]
+        },
+    }
+
+
+@minimum_python_37_with_gevent
+@pytest.mark.forked
+def test_timing(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
+    sentry_init(
+        release="fun-release@1.0.0",
+        environment="not-fun-env",
+        _experiments={"enable_metrics": True, "metric_code_locations": True},
+    )
+    ts = time.time()
+    envelopes = capture_envelopes()
+
+    with metrics.timing("whatever", tags={"blub": "blah"}, timestamp=ts):
+        time.sleep(0.1)
+    Hub.current.flush()
+
+    (envelope,) = envelopes
+    statsd_item, meta_item = envelope.items
+
+    assert statsd_item.headers["type"] == "statsd"
+    m = parse_metrics(statsd_item.payload.get_bytes())
+
+    assert len(m) == 1
+    assert m[0][1] == "whatever@second"
+    assert m[0][2] == "d"
+    assert len(m[0][3]) == 1
+    assert float(m[0][3][0]) >= 0.1
+    assert m[0][4] == {
+        "blub": "blah",
+        "release": "fun-release@1.0.0",
+        "environment": "not-fun-env",
+    }
+
+    assert meta_item.headers["type"] == "metric_meta"
+    json = parse_json(meta_item.payload.get_bytes())
+    assert json == {
+        "timestamp": mock.ANY,
+        "mapping": {
+            "d:whatever@second": [
+                {
+                    "type": "location",
+                    "filename": "tests/test_metrics.py",
+                    "abs_path": __file__,
+                    "function": sys._getframe().f_code.co_name,
+                    "module": __name__,
+                    "lineno": mock.ANY,
+                    "pre_context": mock.ANY,
+                    "context_line": mock.ANY,
+                    "post_context": mock.ANY,
+                }
+            ]
+        },
+    }
+
+    loc = json["mapping"]["d:whatever@second"][0]
+    line = linecache.getline(loc["abs_path"], loc["lineno"])
+    assert (
+        line.strip()
+        == 'with metrics.timing("whatever", tags={"blub": "blah"}, timestamp=ts):'
+    )
+
+
+@minimum_python_37_with_gevent
+@pytest.mark.forked
+def test_timing_decorator(
+    sentry_init, capture_envelopes, maybe_monkeypatched_threading
+):
+    sentry_init(
+        release="fun-release@1.0.0",
+        environment="not-fun-env",
+        _experiments={"enable_metrics": True, "metric_code_locations": True},
+    )
+    envelopes = capture_envelopes()
+
+    @metrics.timing("whatever-1", tags={"x": "y"})
+    def amazing():
+        time.sleep(0.1)
+        return 42
+
+    @metrics.timing("whatever-2", tags={"x": "y"}, unit="nanosecond")
+    def amazing_nano():
+        time.sleep(0.01)
+        return 23
+
+    assert amazing() == 42
+    assert amazing_nano() == 23
+    Hub.current.flush()
+
+    (envelope,) = envelopes
+    statsd_item, meta_item = envelope.items
+
+    assert statsd_item.headers["type"] == "statsd"
+    m = parse_metrics(statsd_item.payload.get_bytes())
+
+    assert len(m) == 2
+    assert m[0][1] == "whatever-1@second"
+    assert m[0][2] == "d"
+    assert len(m[0][3]) == 1
+    assert float(m[0][3][0]) >= 0.1
+    assert m[0][4] == {
+        "x": "y",
+        "release": "fun-release@1.0.0",
+        "environment": "not-fun-env",
+    }
+
+    assert m[1][1] == "whatever-2@nanosecond"
+    assert m[1][2] == "d"
+    assert len(m[1][3]) == 1
+    assert float(m[1][3][0]) >= 10000000.0
+    assert m[1][4] == {
+        "x": "y",
+        "release": "fun-release@1.0.0",
+        "environment": "not-fun-env",
+    }
+
+    assert meta_item.headers["type"] == "metric_meta"
+    json = parse_json(meta_item.payload.get_bytes())
+    assert json == {
+        "timestamp": mock.ANY,
+        "mapping": {
+            "d:whatever-1@second": [
+                {
+                    "type": "location",
+                    "filename": "tests/test_metrics.py",
+                    "abs_path": __file__,
+                    "function": sys._getframe().f_code.co_name,
+                    "module": __name__,
+                    "lineno": mock.ANY,
+                    "pre_context": mock.ANY,
+                    "context_line": mock.ANY,
+                    "post_context": mock.ANY,
+                }
+            ],
+            "d:whatever-2@nanosecond": [
+                {
+                    "type": "location",
+                    "filename": "tests/test_metrics.py",
+                    "abs_path": __file__,
+                    "function": sys._getframe().f_code.co_name,
+                    "module": __name__,
+                    "lineno": mock.ANY,
+                    "pre_context": mock.ANY,
+                    "context_line": mock.ANY,
+                    "post_context": mock.ANY,
+                }
+            ],
+        },
+    }
+
+    # XXX: this is not the best location. It would probably be better to
+    # report the location in the function, but that is quite a bit trickier
+    # to do since we report from outside the function, so we really only
+    # see the call site.
+    loc = json["mapping"]["d:whatever-1@second"][0]
+    line = linecache.getline(loc["abs_path"], loc["lineno"])
+    assert line.strip() == "assert amazing() == 42"
+
+
+@minimum_python_37_with_gevent
+@pytest.mark.forked
+def test_timing_basic(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
+    sentry_init(
+        release="fun-release@1.0.0",
+        environment="not-fun-env",
+        _experiments={"enable_metrics": True, "metric_code_locations": True},
+    )
+    ts = time.time()
+    envelopes = capture_envelopes()
+
+    metrics.timing("timing", 1.0, tags={"a": "b"}, timestamp=ts)
+    metrics.timing("timing", 2.0, tags={"a": "b"}, timestamp=ts)
+    metrics.timing("timing", 2.0, tags={"a": "b"}, timestamp=ts)
+    metrics.timing("timing", 3.0, tags={"a": "b"}, timestamp=ts)
+    Hub.current.flush()
+
+    (envelope,) = envelopes
+    statsd_item, meta_item = envelope.items
+
+    assert statsd_item.headers["type"] == "statsd"
+    m = parse_metrics(statsd_item.payload.get_bytes())
+
+    assert len(m) == 1
+    assert m[0][1] == "timing@second"
+    assert m[0][2] == "d"
+    assert len(m[0][3]) == 4
+    assert sorted(map(float, m[0][3])) == [1.0, 2.0, 2.0, 3.0]
+    assert m[0][4] == {
+        "a": "b",
+        "release": "fun-release@1.0.0",
+        "environment": "not-fun-env",
+    }
+
+    assert meta_item.headers["type"] == "metric_meta"
+    assert parse_json(meta_item.payload.get_bytes()) == {
+        "timestamp": mock.ANY,
+        "mapping": {
+            "d:timing@second": [
+                {
+                    "type": "location",
+                    "filename": "tests/test_metrics.py",
+                    "abs_path": __file__,
+                    "function": sys._getframe().f_code.co_name,
+                    "module": __name__,
+                    "lineno": mock.ANY,
+                    "pre_context": mock.ANY,
+                    "context_line": mock.ANY,
+                    "post_context": mock.ANY,
+                }
+            ]
+        },
+    }
+
+
+@minimum_python_37_with_gevent
+@pytest.mark.forked
+def test_distribution(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
+    sentry_init(
+        release="fun-release@1.0.0",
+        environment="not-fun-env",
+        _experiments={"enable_metrics": True, "metric_code_locations": True},
+    )
+    ts = time.time()
+    envelopes = capture_envelopes()
+
+    metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts)
+    metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts)
+    metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts)
+    metrics.distribution("dist", 3.0, tags={"a": "b"}, timestamp=ts)
+    Hub.current.flush()
+
+    (envelope,) = envelopes
+    statsd_item, meta_item = envelope.items
+
+    assert statsd_item.headers["type"] == "statsd"
+    m = parse_metrics(statsd_item.payload.get_bytes())
+
+    assert len(m) == 1
+    assert m[0][1] == "dist@none"
+    assert m[0][2] == "d"
+    assert len(m[0][3]) == 4
+    assert sorted(map(float, m[0][3])) == [1.0, 2.0, 2.0, 3.0]
+    assert m[0][4] == {
+        "a": "b",
+        "release": "fun-release@1.0.0",
+        "environment": "not-fun-env",
+    }
+
+    assert meta_item.headers["type"] == "metric_meta"
+    json = parse_json(meta_item.payload.get_bytes())
+    assert json == {
+        "timestamp": mock.ANY,
+        "mapping": {
+            "d:dist@none": [
+                {
+                    "type": "location",
+                    "filename": "tests/test_metrics.py",
+                    "abs_path": __file__,
+                    "function": sys._getframe().f_code.co_name,
+                    "module": __name__,
+                    "lineno": mock.ANY,
+                    "pre_context": mock.ANY,
+                    "context_line": mock.ANY,
+                    "post_context": mock.ANY,
+                }
+            ]
+        },
+    }
+
+    loc = json["mapping"]["d:dist@none"][0]
+    line = linecache.getline(loc["abs_path"], loc["lineno"])
+    assert (
+        line.strip()
+        == 'metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts)'
+    )
+
+
+@minimum_python_37_with_gevent
+@pytest.mark.forked
+def test_set(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
+    sentry_init(
+        release="fun-release@1.0.0",
+        environment="not-fun-env",
+        _experiments={"enable_metrics": True, "metric_code_locations": True},
+    )
+    ts = time.time()
+    envelopes = capture_envelopes()
+
+    metrics.set("my-set", "peter", tags={"magic": "puff"}, timestamp=ts)
+    metrics.set("my-set", "paul", tags={"magic": "puff"}, timestamp=ts)
+    metrics.set("my-set", "mary", tags={"magic": "puff"}, timestamp=ts)
+    Hub.current.flush()
+
+    (envelope,) = envelopes
+    statsd_item, meta_item = envelope.items
+
+    assert statsd_item.headers["type"] == "statsd"
+    m = parse_metrics(statsd_item.payload.get_bytes())
+
+    assert len(m) == 1
+    assert m[0][1] == "my-set@none"
+    assert m[0][2] == "s"
+    assert len(m[0][3]) == 3
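+    # string set members are emitted as 32-bit hashes (likely crc32), not raw values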
+    assert sorted(map(int, m[0][3])) == [354582103, 2513273657, 3329318813]
+    assert m[0][4] == {
+        "magic": "puff",
+        "release": "fun-release@1.0.0",
+        "environment": "not-fun-env",
+    }
+
+    assert meta_item.headers["type"] == "metric_meta"
+    assert parse_json(meta_item.payload.get_bytes()) == {
+        "timestamp": mock.ANY,
+        "mapping": {
+            "s:my-set@none": [
+                {
+                    "type": "location",
+                    "filename": "tests/test_metrics.py",
+                    "abs_path": __file__,
+                    "function": sys._getframe().f_code.co_name,
+                    "module": __name__,
+                    "lineno": mock.ANY,
+                    "pre_context": mock.ANY,
+                    "context_line": mock.ANY,
+                    "post_context": mock.ANY,
+                }
+            ]
+        },
+    }
+
+
+@minimum_python_37_with_gevent
+@pytest.mark.forked
+def test_gauge(sentry_init, capture_envelopes, maybe_monkeypatched_threading):
+    sentry_init(
+        release="fun-release@1.0.0",
+        environment="not-fun-env",
+        _experiments={"enable_metrics": True, "metric_code_locations": False},
+    )
+    ts = time.time()
+    envelopes = capture_envelopes()
+
+    metrics.gauge("my-gauge", 10.0, tags={"x": "y"}, timestamp=ts)
+    metrics.gauge("my-gauge", 20.0, tags={"x": "y"}, timestamp=ts)
+    metrics.gauge("my-gauge", 30.0, tags={"x": "y"}, timestamp=ts)
+    Hub.current.flush()
+
+    (envelope,) = envelopes
+
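+    # with metric_code_locations disabled, no metric_meta item is attached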
+    assert len(envelope.items) == 1
+    assert envelope.items[0].headers["type"] == "statsd"
+    m = parse_metrics(envelope.items[0].payload.get_bytes())
+
+    assert len(m) == 1
+    assert m[0][1] == "my-gauge@none"
+    assert m[0][2] == "g"
+    assert len(m[0][3]) == 5
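+    # gauges flush as five values: [last, min, max, sum, count]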
+    assert list(map(float, m[0][3])) == [30.0, 10.0, 30.0, 60.0, 3.0]
+    assert m[0][4] == {
+        "x": "y",
+        "release": "fun-release@1.0.0",
+        "environment": "not-fun-env",
+    }
+
+
+@minimum_python_37_with_gevent
+@pytest.mark.forked
+def test_multiple(sentry_init, capture_envelopes):
+    sentry_init(
+        release="fun-release@1.0.0",
+        environment="not-fun-env",
+        _experiments={"enable_metrics": True, "metric_code_locations": False},
+    )
+    ts = time.time()
+    envelopes = capture_envelopes()
+
+    metrics.gauge("my-gauge", 10.0, tags={"x": "y"}, timestamp=ts)
+    metrics.gauge("my-gauge", 20.0, tags={"x": "y"}, timestamp=ts)
+    metrics.gauge("my-gauge", 30.0, tags={"x": "y"}, timestamp=ts)
+    for _ in range(10):
+        metrics.increment("counter-1", 1.0, timestamp=ts)
+    metrics.increment("counter-2", 1.0, timestamp=ts)
+
+    Hub.current.flush()
+
+    (envelope,) = envelopes
+
+    assert len(envelope.items) == 1
+    assert envelope.items[0].headers["type"] == "statsd"
+    m = parse_metrics(envelope.items[0].payload.get_bytes())
+
+    assert len(m) == 3
+
+    assert m[0][1] == "counter-1@none"
+    assert m[0][2] == "c"
+    assert list(map(float, m[0][3])) == [10.0]
+    assert m[0][4] == {
+        "release": "fun-release@1.0.0",
+        "environment": "not-fun-env",
+    }
+
+    assert m[1][1] == "counter-2@none"
+    assert m[1][2] == "c"
+    assert list(map(float, m[1][3])) == [1.0]
+    assert m[1][4] == {
+        "release": "fun-release@1.0.0",
+        "environment": "not-fun-env",
+    }
+
+    assert m[2][1] == "my-gauge@none"
+    assert m[2][2] == "g"
+    assert len(m[2][3]) == 5
+    assert list(map(float, m[2][3])) == [30.0, 10.0, 30.0, 60.0, 3.0]
+    assert m[2][4] == {
+        "x": "y",
+        "release": "fun-release@1.0.0",
+        "environment": "not-fun-env",
+    }
+
+
+@minimum_python_37_with_gevent
+@pytest.mark.forked
+def test_transaction_name(
+    sentry_init, capture_envelopes, maybe_monkeypatched_threading
+):
+    sentry_init(
+        release="fun-release@1.0.0",
+        environment="not-fun-env",
+        _experiments={"enable_metrics": True, "metric_code_locations": False},
+    )
+    ts = time.time()
+    envelopes = capture_envelopes()
+
+    with push_scope() as scope:
+        scope.set_transaction_name("/user/{user_id}", source="route")
+        metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts)
+        metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts)
+        metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts)
+        metrics.distribution("dist", 3.0, tags={"a": "b"}, timestamp=ts)
+
+    Hub.current.flush()
+
+    (envelope,) = envelopes
+
+    assert len(envelope.items) == 1
+    assert envelope.items[0].headers["type"] == "statsd"
+    m = parse_metrics(envelope.items[0].payload.get_bytes())
+
+    assert len(m) == 1
+    assert m[0][1] == "dist@none"
+    assert m[0][2] == "d"
+    assert len(m[0][3]) == 4
+    assert sorted(map(float, m[0][3])) == [1.0, 2.0, 2.0, 3.0]
+    assert m[0][4] == {
+        "a": "b",
+        "transaction": "/user/{user_id}",
+        "release": "fun-release@1.0.0",
+        "environment": "not-fun-env",
+    }
+
+
+@minimum_python_37_with_gevent
+@pytest.mark.forked
+def test_metric_summaries(
+    sentry_init, capture_envelopes, maybe_monkeypatched_threading
+):
+    sentry_init(
+        release="fun-release@1.0.0",
+        environment="not-fun-env",
+        enable_tracing=True,
+    )
+    ts = time.time()
+    envelopes = capture_envelopes()
+
+    with start_transaction(
+        op="stuff", name="/foo", source=TRANSACTION_SOURCE_ROUTE
+    ) as transaction:
+        metrics.increment("root-counter", timestamp=ts)
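+        # timing() used as a context manager measures the block and opens a span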
+        with metrics.timing("my-timer-metric", tags={"a": "b"}, timestamp=ts):
+            for x in range(10):
+                metrics.distribution("my-dist", float(x), timestamp=ts)
+
+    Hub.current.flush()
+
+    (transaction, envelope) = envelopes
+
+    # Metrics Emission
+    assert envelope.items[0].headers["type"] == "statsd"
+    m = parse_metrics(envelope.items[0].payload.get_bytes())
+
+    assert len(m) == 3
+
+    assert m[0][1] == "my-dist@none"
+    assert m[0][2] == "d"
+    assert len(m[0][3]) == 10
+    assert sorted(m[0][3]) == list(map(str, map(float, range(10))))
+    assert m[0][4] == {
+        "transaction": "/foo",
+        "release": "fun-release@1.0.0",
+        "environment": "not-fun-env",
+    }
+
+    assert m[1][1] == "my-timer-metric@second"
+    assert m[1][2] == "d"
+    assert len(m[1][3]) == 1
+    assert m[1][4] == {
+        "a": "b",
+        "transaction": "/foo",
+        "release": "fun-release@1.0.0",
+        "environment": "not-fun-env",
+    }
+
+    assert m[2][1] == "root-counter@none"
+    assert m[2][2] == "c"
+    assert m[2][3] == ["1.0"]
+    assert m[2][4] == {
+        "transaction": "/foo",
+        "release": "fun-release@1.0.0",
+        "environment": "not-fun-env",
+    }
+
+    # Measurement Attachment
+    t = transaction.items[0].get_transaction_event()
+
+    assert t["_metrics_summary"] == {
+        "c:root-counter@none": [
+            {
+                "count": 1,
+                "min": 1.0,
+                "max": 1.0,
+                "sum": 1.0,
+                "tags": {
+                    "transaction": "/foo",
+                    "release": "fun-release@1.0.0",
+                    "environment": "not-fun-env",
+                },
+            }
+        ]
+    }
+
+    assert t["spans"][0]["_metrics_summary"]["d:my-dist@none"] == [
+        {
+            "count": 10,
+            "min": 0.0,
+            "max": 9.0,
+            "sum": 45.0,
+            "tags": {
+                "environment": "not-fun-env",
+                "release": "fun-release@1.0.0",
+                "transaction": "/foo",
+            },
+        }
+    ]
+
+    assert t["spans"][0]["tags"] == {"a": "b"}
+    (timer,) = t["spans"][0]["_metrics_summary"]["d:my-timer-metric@second"]
+    assert timer["count"] == 1
+    assert timer["max"] == timer["min"] == timer["sum"]
+    assert timer["sum"] > 0
+    assert timer["tags"] == {
+        "a": "b",
+        "environment": "not-fun-env",
+        "release": "fun-release@1.0.0",
+        "transaction": "/foo",
+    }
+
+
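+# Name normalization replaces invalid name characters with "_" and strips
+# invalid characters from units entirely.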
+@minimum_python_37_with_gevent
+@pytest.mark.forked
+@pytest.mark.parametrize(
+    "metric_name,metric_unit,expected_name",
+    [
+        ("first-metric", "nano-second", "first-metric@nanosecond"),
+        ("another_metric?", "nano second", "another_metric_@nanosecond"),
+        (
+            "metric",
+            "nanosecond",
+            "metric@nanosecond",
+        ),
+        (
+            "my.amaze.metric I guess",
+            "nano|\nsecond",
+            "my.amaze.metric_I_guess@nanosecond",
+        ),
+        # fmt: off
+        (u"métríc", u"nanöseconď", u"m_tr_c@nansecon"),
+        # fmt: on
+    ],
+)
+def test_metric_name_normalization(
+    sentry_init,
+    capture_envelopes,
+    metric_name,
+    metric_unit,
+    expected_name,
+    maybe_monkeypatched_threading,
+):
+    sentry_init(
+        _experiments={"enable_metrics": True, "metric_code_locations": False},
+    )
+    envelopes = capture_envelopes()
+
+    metrics.distribution(metric_name, 1.0, unit=metric_unit)
+
+    Hub.current.flush()
+
+    (envelope,) = envelopes
+
+    assert len(envelope.items) == 1
+    assert envelope.items[0].headers["type"] == "statsd"
+
+    parsed_metrics = parse_metrics(envelope.items[0].payload.get_bytes())
+    assert len(parsed_metrics) == 1
+
+    name = parsed_metrics[0][1]
+    assert name == expected_name
+
+
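+# Tag keys have invalid characters stripped, while tag values are preserved
+# with control characters, "|", and "," escaped.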
+@minimum_python_37_with_gevent
+@pytest.mark.forked
+@pytest.mark.parametrize(
+    "metric_tag,expected_tag",
+    [
+        ({"f-oo|bar": "%$foo/"}, {"f-oobar": "%$foo/"}),
+        ({"foo$.$.$bar": "blah{}"}, {"foo..bar": "blah{}"}),
+        # fmt: off
+        ({u"foö-bar": u"snöwmän"}, {u"fo-bar": u"snöwmän"},),
+        # fmt: on
+        ({"route": "GET /foo"}, {"route": "GET /foo"}),
+        ({"__bar__": "this | or , that"}, {"__bar__": "this \\u{7c} or \\u{2c} that"}),
+        ({"foo/": "hello!\n\r\t\\"}, {"foo/": "hello!\\n\\r\\t\\\\"}),
+    ],
+)
+def test_metric_tag_normalization(
+    sentry_init,
+    capture_envelopes,
+    metric_tag,
+    expected_tag,
+    maybe_monkeypatched_threading,
+):
+    sentry_init(
+        _experiments={"enable_metrics": True, "metric_code_locations": False},
+    )
+    envelopes = capture_envelopes()
+
+    metrics.distribution("a", 1.0, tags=metric_tag)
+
+    Hub.current.flush()
+
+    (envelope,) = envelopes
+
+    assert len(envelope.items) == 1
+    assert envelope.items[0].headers["type"] == "statsd"
+
+    parsed_metrics = parse_metrics(envelope.items[0].payload.get_bytes())
+    assert len(parsed_metrics) == 1
+
+    tags = parsed_metrics[0][4]
+
+    expected_tag_key, expected_tag_value = expected_tag.popitem()
+    assert expected_tag_key in tags
+    assert tags[expected_tag_key] == expected_tag_value
+
+
+@minimum_python_37_with_gevent
+@pytest.mark.forked
+def test_before_emit_metric(
+    sentry_init, capture_envelopes, maybe_monkeypatched_threading
+):
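+    # before_emit can veto a metric by returning False and may mutate tags in place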
+    def before_emit(key, value, unit, tags):
+        if key == "removed-metric" or value == 47 or unit == "unsupported":
+            return False
+
+        tags["extra"] = "foo"
+        del tags["release"]
+        # this better be a noop!
+        metrics.increment("shitty-recursion")
+        return True
+
+    sentry_init(
+        release="fun-release@1.0.0",
+        environment="not-fun-env",
+        _experiments={
+            "enable_metrics": True,
+            "metric_code_locations": False,
+            "before_emit_metric": before_emit,
+        },
+    )
+    envelopes = capture_envelopes()
+
+    metrics.increment("removed-metric", 1.0)
+    metrics.increment("another-removed-metric", 47)
+    metrics.increment("yet-another-removed-metric", 1.0, unit="unsupported")
+    metrics.increment("actual-metric", 1.0)
+    Hub.current.flush()
+
+    (envelope,) = envelopes
+
+    assert len(envelope.items) == 1
+    assert envelope.items[0].headers["type"] == "statsd"
+    m = parse_metrics(envelope.items[0].payload.get_bytes())
+
+    assert len(m) == 1
+    assert m[0][1] == "actual-metric@none"
+    assert m[0][3] == ["1.0"]
+    assert m[0][4] == {
+        "extra": "foo",
+        "environment": "not-fun-env",
+    }
+
+
+@minimum_python_37_with_gevent
+@pytest.mark.forked
+def test_aggregator_flush(
+    sentry_init, capture_envelopes, maybe_monkeypatched_threading
+):
+    sentry_init(
+        release="fun-release@1.0.0",
+        environment="not-fun-env",
+        _experiments={
+            "enable_metrics": True,
+        },
+    )
+    envelopes = capture_envelopes()
+
+    metrics.increment("a-metric", 1.0)
+    Hub.current.flush()
+
+    assert len(envelopes) == 1
+    assert Hub.current.client.metrics_aggregator.buckets == {}
+
+
+@minimum_python_37_with_gevent
+@pytest.mark.forked
+def test_tag_serialization(
+    sentry_init, capture_envelopes, maybe_monkeypatched_threading
+):
+    sentry_init(
+        release="fun-release",
+        environment="not-fun-env",
+        _experiments={"enable_metrics": True, "metric_code_locations": False},
+    )
+    envelopes = capture_envelopes()
+
+    metrics.increment(
+        "counter",
+        tags={
+            "no-value": None,
+            "an-int": 42,
+            "a-float": 23.0,
+            "a-string": "blah",
+            "more-than-one": [1, "zwei", "3.0", None],
+        },
+    )
+    Hub.current.flush()
+
+    (envelope,) = envelopes
+
+    assert len(envelope.items) == 1
+    assert envelope.items[0].headers["type"] == "statsd"
+    m = parse_metrics(envelope.items[0].payload.get_bytes())
+
+    assert len(m) == 1
+    assert m[0][4] == {
+        "an-int": "42",
+        "a-float": "23.0",
+        "a-string": "blah",
+        "more-than-one": ["1", "3.0", "zwei"],
+        "release": "fun-release",
+        "environment": "not-fun-env",
+    }
+
+
+@minimum_python_37_with_gevent
+@pytest.mark.forked
+def test_flush_recursion_protection(
+    sentry_init, capture_envelopes, monkeypatch, maybe_monkeypatched_threading
+):
+    sentry_init(
+        release="fun-release",
+        environment="not-fun-env",
+        _experiments={"enable_metrics": True},
+    )
+    envelopes = capture_envelopes()
+    test_client = Hub.current.client
+
+    real_capture_envelope = test_client.transport.capture_envelope
+
+    def bad_capture_envelope(*args, **kwargs):
+        metrics.increment("bad-metric")
+        return real_capture_envelope(*args, **kwargs)
+
+    monkeypatch.setattr(test_client.transport, "capture_envelope", bad_capture_envelope)
+
+    metrics.increment("counter")
+
+    # flush twice to see the inner metric
+    Hub.current.flush()
+    Hub.current.flush()
+
+    (envelope,) = envelopes
+    m = parse_metrics(envelope.items[0].payload.get_bytes())
+    assert len(m) == 1
+    assert m[0][1] == "counter@none"
+
+
+@minimum_python_37_with_gevent
+@pytest.mark.forked
+def test_flush_recursion_protection_background_flush(
+    sentry_init, capture_envelopes, monkeypatch, maybe_monkeypatched_threading
+):
+    monkeypatch.setattr(metrics.MetricsAggregator, "FLUSHER_SLEEP_TIME", 0.01)
+    sentry_init(
+        release="fun-release",
+        environment="not-fun-env",
+        _experiments={"enable_metrics": True},
+    )
+    envelopes = capture_envelopes()
+    test_client = Hub.current.client
+
+    real_capture_envelope = test_client.transport.capture_envelope
+
+    def bad_capture_envelope(*args, **kwargs):
+        metrics.increment("bad-metric")
+        return real_capture_envelope(*args, **kwargs)
+
+    monkeypatch.setattr(test_client.transport, "capture_envelope", bad_capture_envelope)
+
+    metrics.increment("counter")
+
+    # flush via sleep and flag
+    Hub.current.client.metrics_aggregator._force_flush = True
+    time.sleep(0.5)
+
+    (envelope,) = envelopes
+    m = parse_metrics(envelope.items[0].payload.get_bytes())
+    assert len(m) == 1
+    assert m[0][1] == "counter@none"
+
+
+@pytest.mark.skipif(
+    not gevent or sys.version_info >= (3, 7),
+    reason="Python 3.6 or lower and gevent required",
+)
+@pytest.mark.forked
+def test_disable_metrics_for_old_python_with_gevent(
+    sentry_init, capture_envelopes, maybe_monkeypatched_threading
+):
+    if maybe_monkeypatched_threading != "greenlet":
+        pytest.skip("Test specifically for gevent/greenlet")
+
+    sentry_init(
+        release="fun-release",
+        environment="not-fun-env",
+        _experiments={"enable_metrics": True},
+    )
+    envelopes = capture_envelopes()
+
+    metrics.incr("counter")
+
+    Hub.current.flush()
+
+    assert Hub.current.client.metrics_aggregator is None
+    assert not envelopes
diff --git a/tests/test_monitor.py b/tests/test_monitor.py
new file mode 100644
index 0000000000..42d600ebbb
--- /dev/null
+++ b/tests/test_monitor.py
@@ -0,0 +1,104 @@
+import random
+
+from sentry_sdk import Hub, start_transaction
+from sentry_sdk.transport import Transport
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+
+class HealthyTestTransport(Transport):
+    def _send_event(self, event):
+        pass
+
+    def _send_envelope(self, envelope):
+        pass
+
+    def is_healthy(self):
+        return True
+
+
+class UnhealthyTestTransport(HealthyTestTransport):
+    def is_healthy(self):
+        return False
+
+
+def test_no_monitor_if_disabled(sentry_init):
+    sentry_init(
+        transport=HealthyTestTransport(),
+        enable_backpressure_handling=False,
+    )
+
+    assert Hub.current.client.monitor is None
+
+
+def test_monitor_if_enabled(sentry_init):
+    sentry_init(transport=HealthyTestTransport())
+
+    monitor = Hub.current.client.monitor
+    assert monitor is not None
+    assert monitor._thread is None
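+    # the monitor thread is started lazily on the first health check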
+
+    assert monitor.is_healthy() is True
+    assert monitor.downsample_factor == 0
+    assert monitor._thread is not None
+    assert monitor._thread.name == "sentry.monitor"
+
+
+def test_monitor_unhealthy(sentry_init):
+    sentry_init(transport=UnhealthyTestTransport())
+
+    monitor = Hub.current.client.monitor
+    monitor.interval = 0.1
+
+    assert monitor.is_healthy() is True
+
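+    # each failing health check bumps the downsample factor by one, capped at 10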
+    for i in range(15):
+        monitor.run()
+        assert monitor.is_healthy() is False
+        assert monitor.downsample_factor == (i + 1 if i < 10 else 10)
+
+
+def test_transaction_uses_downsampled_rate(
+    sentry_init, capture_client_reports, monkeypatch
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        transport=UnhealthyTestTransport(),
+    )
+
+    reports = capture_client_reports()
+
+    monitor = Hub.current.client.monitor
+    monitor.interval = 0.1
+
+    # make sure rng doesn't sample
+    monkeypatch.setattr(random, "random", lambda: 0.9)
+
+    assert monitor.is_healthy() is True
+    monitor.run()
+    assert monitor.is_healthy() is False
+    assert monitor.downsample_factor == 1
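+    # a downsample factor of 1 halves the effective sample rate: 1.0 -> 0.5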
+
+    with start_transaction(name="foobar") as transaction:
+        assert transaction.sampled is False
+        assert transaction.sample_rate == 0.5
+
+    assert reports == [("backpressure", "transaction")]
+
+
+def test_monitor_no_thread_on_shutdown_no_errors(sentry_init):
+    sentry_init(transport=HealthyTestTransport())
+
+    # make it seem like the interpreter is shutting down
+    with mock.patch(
+        "threading.Thread.start",
+        side_effect=RuntimeError("can't create new thread at interpreter shutdown"),
+    ):
+        monitor = Hub.current.client.monitor
+        assert monitor is not None
+        assert monitor._thread is None
+        monitor.run()
+        assert monitor._thread is None
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
new file mode 100644
index 0000000000..495dd3f300
--- /dev/null
+++ b/tests/test_profiler.py
@@ -0,0 +1,843 @@
+import inspect
+import os
+import sys
+import threading
+import time
+
+import pytest
+
+from collections import defaultdict
+from sentry_sdk import start_transaction
+from sentry_sdk.profiler import (
+    GeventScheduler,
+    Profile,
+    Scheduler,
+    ThreadScheduler,
+    extract_frame,
+    extract_stack,
+    frame_id,
+    get_frame_name,
+    setup_profiler,
+)
+from sentry_sdk.tracing import Transaction
+from sentry_sdk._lru_cache import LRUCache
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+try:
+    import gevent
+except ImportError:
+    gevent = None
+
+
+def requires_python_version(major, minor, reason=None):
+    if reason is None:
+        reason = "Requires Python {}.{}".format(major, minor)
+    return pytest.mark.skipif(sys.version_info < (major, minor), reason=reason)
+
+
+requires_gevent = pytest.mark.skipif(gevent is None, reason="gevent not enabled")
+
+
+def process_test_sample(sample):
+    # insert a mock hashable for the stack
+    return [(tid, (stack, stack)) for tid, stack in sample]
+
+
+def non_experimental_options(mode=None, sample_rate=None):
+    return {"profiler_mode": mode, "profiles_sample_rate": sample_rate}
+
+
+def experimental_options(mode=None, sample_rate=None):
+    return {
+        "_experiments": {"profiler_mode": mode, "profiles_sample_rate": sample_rate}
+    }
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    "mode",
+    [
+        pytest.param("foo"),
+        pytest.param(
+            "gevent",
+            marks=pytest.mark.skipif(gevent is not None, reason="gevent not enabled"),
+        ),
+    ],
+)
+@pytest.mark.parametrize(
+    "make_options",
+    [
+        pytest.param(experimental_options, id="experiment"),
+        pytest.param(non_experimental_options, id="non experimental"),
+    ],
+)
+def test_profiler_invalid_mode(mode, make_options, teardown_profiling):
+    with pytest.raises(ValueError):
+        setup_profiler(make_options(mode))
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    "mode",
+    [
+        pytest.param("thread"),
+        pytest.param("sleep"),
+        pytest.param("gevent", marks=requires_gevent),
+    ],
+)
+@pytest.mark.parametrize(
+    "make_options",
+    [
+        pytest.param(experimental_options, id="experiment"),
+        pytest.param(non_experimental_options, id="non experimental"),
+    ],
+)
+def test_profiler_valid_mode(mode, make_options, teardown_profiling):
+    # should not raise any exceptions
+    setup_profiler(make_options(mode))
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    "make_options",
+    [
+        pytest.param(experimental_options, id="experiment"),
+        pytest.param(non_experimental_options, id="non experimental"),
+    ],
+)
+def test_profiler_setup_twice(make_options, teardown_profiling):
+    # setting up the first time should return True to indicate success
+    assert setup_profiler(make_options())
+    # setting up the second time should return False to indicate no-op
+    assert not setup_profiler(make_options())
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    "mode",
+    [
+        pytest.param("thread"),
+        pytest.param("gevent", marks=requires_gevent),
+    ],
+)
+@pytest.mark.parametrize(
+    ("profiles_sample_rate", "profile_count"),
+    [
+        pytest.param(1.00, 1, id="profiler sampled at 1.00"),
+        pytest.param(0.75, 1, id="profiler sampled at 0.75"),
+        pytest.param(0.25, 0, id="profiler sampled at 0.25"),
+        pytest.param(0.00, 0, id="profiler sampled at 0.00"),
+        pytest.param(None, 0, id="profiler not enabled"),
+    ],
+)
+@pytest.mark.parametrize(
+    "make_options",
+    [
+        pytest.param(experimental_options, id="experiment"),
+        pytest.param(non_experimental_options, id="non experimental"),
+    ],
+)
+@mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0)
+def test_profiles_sample_rate(
+    sentry_init,
+    capture_envelopes,
+    capture_client_reports,
+    teardown_profiling,
+    profiles_sample_rate,
+    profile_count,
+    make_options,
+    mode,
+):
+    options = make_options(mode=mode, sample_rate=profiles_sample_rate)
+    sentry_init(
+        traces_sample_rate=1.0,
+        profiler_mode=options.get("profiler_mode"),
+        profiles_sample_rate=options.get("profiles_sample_rate"),
+        _experiments=options.get("_experiments", {}),
+    )
+
+    envelopes = capture_envelopes()
+    reports = capture_client_reports()
+
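+    # random() is pinned at 0.5, so rates above 0.5 sample and lower rates do not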
+    with mock.patch("sentry_sdk.profiler.random.random", return_value=0.5):
+        with start_transaction(name="profiling"):
+            pass
+
+    items = defaultdict(list)
+    for envelope in envelopes:
+        for item in envelope.items:
+            items[item.type].append(item)
+
+    assert len(items["transaction"]) == 1
+    assert len(items["profile"]) == profile_count
+    if profiles_sample_rate is None or profiles_sample_rate == 0:
+        assert reports == []
+    elif profile_count:
+        assert reports == []
+    else:
+        assert reports == [("sample_rate", "profile")]
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    "mode",
+    [
+        pytest.param("thread"),
+        pytest.param("gevent", marks=requires_gevent),
+    ],
+)
+@pytest.mark.parametrize(
+    ("profiles_sampler", "profile_count"),
+    [
+        pytest.param(lambda _: 1.00, 1, id="profiler sampled at 1.00"),
+        pytest.param(lambda _: 0.75, 1, id="profiler sampled at 0.75"),
+        pytest.param(lambda _: 0.25, 0, id="profiler sampled at 0.25"),
+        pytest.param(lambda _: 0.00, 0, id="profiler sampled at 0.00"),
+        pytest.param(lambda _: None, 0, id="profiler not enabled"),
+        pytest.param(
+            lambda ctx: 1 if ctx["transaction_context"]["name"] == "profiling" else 0,
+            1,
+            id="profiler sampled for transaction name",
+        ),
+        pytest.param(
+            lambda ctx: 0 if ctx["transaction_context"]["name"] == "profiling" else 1,
+            0,
+            id="profiler not sampled for transaction name",
+        ),
+        pytest.param(
+            lambda _: "1", 0, id="profiler not sampled because string sample rate"
+        ),
+        pytest.param(lambda _: True, 1, id="profiler sampled at True"),
+        pytest.param(lambda _: False, 0, id="profiler sampled at False"),
+    ],
+)
+@mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0)
+def test_profiles_sampler(
+    sentry_init,
+    capture_envelopes,
+    capture_client_reports,
+    teardown_profiling,
+    profiles_sampler,
+    profile_count,
+    mode,
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        profiles_sampler=profiles_sampler,
+    )
+
+    envelopes = capture_envelopes()
+    reports = capture_client_reports()
+
+    with mock.patch("sentry_sdk.profiler.random.random", return_value=0.5):
+        with start_transaction(name="profiling"):
+            pass
+
+    items = defaultdict(list)
+    for envelope in envelopes:
+        for item in envelope.items:
+            items[item.type].append(item)
+
+    assert len(items["transaction"]) == 1
+    assert len(items["profile"]) == profile_count
+    if profile_count:
+        assert reports == []
+    else:
+        assert reports == [("sample_rate", "profile")]
+
+
+@requires_python_version(3, 3)
+def test_minimum_unique_samples_required(
+    sentry_init,
+    capture_envelopes,
+    capture_client_reports,
+    teardown_profiling,
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
+    )
+
+    envelopes = capture_envelopes()
+    reports = capture_client_reports()
+
+    with start_transaction(name="profiling"):
+        pass
+
+    items = defaultdict(list)
+    for envelope in envelopes:
+        for item in envelope.items:
+            items[item.type].append(item)
+
+    assert len(items["transaction"]) == 1
+    # because we don't leave any time for the profiler to
+    # take any samples, it should not be sent
+    assert len(items["profile"]) == 0
+    assert reports == [("insufficient_data", "profile")]
+
+
+@pytest.mark.forked
+@requires_python_version(3, 3)
+def test_profile_captured(
+    sentry_init,
+    capture_envelopes,
+    teardown_profiling,
+):
+    sentry_init(
+        traces_sample_rate=1.0,
+        _experiments={"profiles_sample_rate": 1.0},
+    )
+
+    envelopes = capture_envelopes()
+
+    with start_transaction(name="profiling"):
+        time.sleep(0.05)
+
+    items = defaultdict(list)
+    for envelope in envelopes:
+        for item in envelope.items:
+            items[item.type].append(item)
+
+    assert len(items["transaction"]) == 1
+    assert len(items["profile"]) == 1
+
+
+def get_frame(depth=1):
+    """
+    This function is not exactly true to its name. Depending on
+    how it is called, the true depth of the stack can be deeper
+    than the argument implies.
+    """
+    if depth <= 0:
+        raise ValueError("only positive integers allowed")
+    if depth > 1:
+        return get_frame(depth=depth - 1)
+    return inspect.currentframe()
+
+
+class GetFrameBase:
+    def inherited_instance_method(self):
+        return inspect.currentframe()
+
+    def inherited_instance_method_wrapped(self):
+        def wrapped():
+            return inspect.currentframe()
+
+        return wrapped
+
+    @classmethod
+    def inherited_class_method(cls):
+        return inspect.currentframe()
+
+    @classmethod
+    def inherited_class_method_wrapped(cls):
+        def wrapped():
+            return inspect.currentframe()
+
+        return wrapped
+
+    @staticmethod
+    def inherited_static_method():
+        return inspect.currentframe()
+
+
+class GetFrame(GetFrameBase):
+    def instance_method(self):
+        return inspect.currentframe()
+
+    def instance_method_wrapped(self):
+        def wrapped():
+            return inspect.currentframe()
+
+        return wrapped
+
+    @classmethod
+    def class_method(cls):
+        return inspect.currentframe()
+
+    @classmethod
+    def class_method_wrapped(cls):
+        def wrapped():
+            return inspect.currentframe()
+
+        return wrapped
+
+    @staticmethod
+    def static_method():
+        return inspect.currentframe()
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    ("frame", "frame_name"),
+    [
+        pytest.param(
+            get_frame(),
+            "get_frame",
+            id="function",
+        ),
+        pytest.param(
+            (lambda: inspect.currentframe())(),
+            "",
+            id="lambda",
+        ),
+        pytest.param(
+            GetFrame().instance_method(),
+            "GetFrame.instance_method",
+            id="instance_method",
+        ),
+        pytest.param(
+            GetFrame().instance_method_wrapped()(),
+            (
+                "wrapped"
+                if sys.version_info < (3, 11)
+                else "GetFrame.instance_method_wrapped..wrapped"
+            ),
+            id="instance_method_wrapped",
+        ),
+        pytest.param(
+            GetFrame().class_method(),
+            "GetFrame.class_method",
+            id="class_method",
+        ),
+        pytest.param(
+            GetFrame().class_method_wrapped()(),
+            (
+                "wrapped"
+                if sys.version_info < (3, 11)
+                else "GetFrame.class_method_wrapped..wrapped"
+            ),
+            id="class_method_wrapped",
+        ),
+        pytest.param(
+            GetFrame().static_method(),
+            "static_method" if sys.version_info < (3, 11) else "GetFrame.static_method",
+            id="static_method",
+        ),
+        pytest.param(
+            GetFrame().inherited_instance_method(),
+            "GetFrameBase.inherited_instance_method",
+            id="inherited_instance_method",
+        ),
+        pytest.param(
+            GetFrame().inherited_instance_method_wrapped()(),
+            (
+                "wrapped"
+                if sys.version_info < (3, 11)
+                else "GetFrameBase.inherited_instance_method_wrapped..wrapped"
+            ),
+            id="instance_method_wrapped",
+        ),
+        pytest.param(
+            GetFrame().inherited_class_method(),
+            "GetFrameBase.inherited_class_method",
+            id="inherited_class_method",
+        ),
+        pytest.param(
+            GetFrame().inherited_class_method_wrapped()(),
+            (
+                "wrapped"
+                if sys.version_info < (3, 11)
+                else "GetFrameBase.inherited_class_method_wrapped..wrapped"
+            ),
+            id="inherited_class_method_wrapped",
+        ),
+        pytest.param(
+            GetFrame().inherited_static_method(),
+            (
+                "inherited_static_method"
+                if sys.version_info < (3, 11)
+                else "GetFrameBase.inherited_static_method"
+            ),
+            id="inherited_static_method",
+        ),
+    ],
+)
+def test_get_frame_name(frame, frame_name):
+    assert get_frame_name(frame) == frame_name
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    ("get_frame", "function"),
+    [
+        pytest.param(lambda: get_frame(depth=1), "get_frame", id="simple"),
+    ],
+)
+def test_extract_frame(get_frame, function):
+    cwd = os.getcwd()
+    frame = get_frame()
+    extracted_frame = extract_frame(frame_id(frame), frame, cwd)
+
+    # the abs_path should be equal to the normalized path of the co_filename
+    assert extracted_frame["abs_path"] == os.path.normpath(frame.f_code.co_filename)
+
+    # the module should be pulled from this test module
+    assert extracted_frame["module"] == __name__
+
+    # the filename should be the file starting after the cwd
+    assert extracted_frame["filename"] == __file__[len(cwd) + 1 :]
+
+    assert extracted_frame["function"] == function
+
+    # the lineno will shift over time as this file is modified so just check
+    # that it is an int
+    assert isinstance(extracted_frame["lineno"], int)
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    ("depth", "max_stack_depth", "actual_depth"),
+    [
+        pytest.param(1, 128, 1, id="less than"),
+        pytest.param(256, 128, 128, id="greater than"),
+        pytest.param(128, 128, 128, id="equals"),
+    ],
+)
+def test_extract_stack_with_max_depth(depth, max_stack_depth, actual_depth):
+    # introduce a lambda that we'll be looking for in the stack
+    frame = (lambda: get_frame(depth=depth))()
+
+    # plus 1 because we introduced a lambda intentionally that we'll
+    # look for in the final stack to make sure it's in the right position
+    base_stack_depth = len(inspect.stack()) + 1
+
+    # increase the max_depth by the `base_stack_depth` to account
+    # for the extra frames pytest will add
+    _, frame_ids, frames = extract_stack(
+        frame,
+        LRUCache(max_size=1),
+        max_stack_depth=max_stack_depth + base_stack_depth,
+        cwd=os.getcwd(),
+    )
+    assert len(frame_ids) == base_stack_depth + actual_depth
+    assert len(frames) == base_stack_depth + actual_depth
+
+    for i in range(actual_depth):
+        assert frames[i]["function"] == "get_frame", i
+
+    # index 0 contains the innermost frame on the stack, so the lambda
+    # should be at index `actual_depth`
+    if sys.version_info >= (3, 11):
+        assert (
+            frames[actual_depth]["function"]
+            == "test_extract_stack_with_max_depth.."
+        ), actual_depth
+    else:
+        assert frames[actual_depth]["function"] == "", actual_depth
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    ("frame", "depth"),
+    [(get_frame(depth=1), len(inspect.stack()))],
+)
+def test_extract_stack_with_cache(frame, depth):
+    # make sure cache has enough room or this test will fail
+    cache = LRUCache(max_size=depth)
+    cwd = os.getcwd()
+    _, _, frames1 = extract_stack(frame, cache, cwd=cwd)
+    _, _, frames2 = extract_stack(frame, cache, cwd=cwd)
+
+    assert len(frames1) > 0
+    assert len(frames2) > 0
+    assert len(frames1) == len(frames2)
+    for i, (frame1, frame2) in enumerate(zip(frames1, frames2)):
+        # DO NOT use `==` for the assertion here since we are
+        # testing for identity, and using `==` would test for
+        # equality, which would always pass since we're extracting
+        # the same stack.
+        assert frame1 is frame2, i
+
+
+def get_scheduler_threads(scheduler):
+    return [thread for thread in threading.enumerate() if thread.name == scheduler.name]
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    ("scheduler_class",),
+    [
+        pytest.param(ThreadScheduler, id="thread scheduler"),
+        pytest.param(
+            GeventScheduler,
+            marks=[
+                requires_gevent,
+                pytest.mark.skip(
+                    reason="cannot find this thread via threading.enumerate()"
+                ),
+            ],
+            id="gevent scheduler",
+        ),
+    ],
+)
+def test_thread_scheduler_single_background_thread(scheduler_class):
+    scheduler = scheduler_class(frequency=1000)
+
+    # not yet set up, no scheduler threads yet
+    assert len(get_scheduler_threads(scheduler)) == 0
+
+    scheduler.setup()
+
+    # set up but no profiles started, so still no threads
+    assert len(get_scheduler_threads(scheduler)) == 0
+
+    scheduler.ensure_running()
+
+    # the scheduler will always start 1 thread
+    assert len(get_scheduler_threads(scheduler)) == 1
+
+    scheduler.ensure_running()
+
+    # the scheduler still only has 1 thread
+    assert len(get_scheduler_threads(scheduler)) == 1
+
+    scheduler.teardown()
+
+    # once finished, the thread should stop
+    assert len(get_scheduler_threads(scheduler)) == 0
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    ("scheduler_class",),
+    [
+        pytest.param(ThreadScheduler, id="thread scheduler"),
+        pytest.param(
+            GeventScheduler,
+            marks=[
+                requires_gevent,
+                pytest.mark.skip(
+                    reason="cannot find this thread via threading.enumerate()"
+                ),
+            ],
+            id="gevent scheduler",
+        ),
+    ],
+)
+def test_thread_scheduler_no_thread_on_shutdown(scheduler_class):
+    scheduler = scheduler_class(frequency=1000)
+
+    # not yet set up, no scheduler threads yet
+    assert len(get_scheduler_threads(scheduler)) == 0
+
+    scheduler.setup()
+
+    # set up but no profiles started, so still no threads
+    assert len(get_scheduler_threads(scheduler)) == 0
+
+    # mock RuntimeError as if the 3.12 interpreter were shutting down
+    with mock.patch(
+        "threading.Thread.start",
+        side_effect=RuntimeError("can't create new thread at interpreter shutdown"),
+    ):
+        scheduler.ensure_running()
+
+    assert scheduler.running is False
+
+    # still no thread
+    assert len(get_scheduler_threads(scheduler)) == 0
+
+    scheduler.teardown()
+
+    assert len(get_scheduler_threads(scheduler)) == 0
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    ("scheduler_class",),
+    [
+        pytest.param(ThreadScheduler, id="thread scheduler"),
+        pytest.param(GeventScheduler, marks=requires_gevent, id="gevent scheduler"),
+    ],
+)
+@mock.patch("sentry_sdk.profiler.MAX_PROFILE_DURATION_NS", 1)
+def test_max_profile_duration_reached(scheduler_class):
+    sample = [
+        (
+            "1",
+            extract_stack(
+                get_frame(),
+                LRUCache(max_size=1),
+                cwd=os.getcwd(),
+            ),
+        ),
+    ]
+
+    with scheduler_class(frequency=1000) as scheduler:
+        transaction = Transaction(sampled=True)
+        with Profile(transaction, scheduler=scheduler) as profile:
+            # profile just started, it's active
+            assert profile.active
+
+            # write a sample at the start time, so still active
+            profile.write(profile.start_ns + 0, sample)
+            assert profile.active
+
+            # write a sample at max time, so still active
+            profile.write(profile.start_ns + 1, sample)
+            assert profile.active
+
+            # write a sample PAST the max time, so now inactive
+            profile.write(profile.start_ns + 2, sample)
+            assert not profile.active
+
+
+class NoopScheduler(Scheduler):
+    def setup(self):
+        # type: () -> None
+        pass
+
+    def teardown(self):
+        # type: () -> None
+        pass
+
+    def ensure_running(self):
+        # type: () -> None
+        pass
+
+
+current_thread = threading.current_thread()
+thread_metadata = {
+    str(current_thread.ident): {
+        "name": str(current_thread.name),
+    },
+}
+
+
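+# extract_stack returns a (stack_id, frame_ids, frames) triple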
+sample_stacks = [
+    extract_stack(
+        get_frame(),
+        LRUCache(max_size=1),
+        max_stack_depth=1,
+        cwd=os.getcwd(),
+    ),
+    extract_stack(
+        get_frame(),
+        LRUCache(max_size=1),
+        max_stack_depth=2,
+        cwd=os.getcwd(),
+    ),
+]
+
+
+@requires_python_version(3, 3)
+@pytest.mark.parametrize(
+    ("samples", "expected"),
+    [
+        pytest.param(
+            [],
+            {
+                "frames": [],
+                "samples": [],
+                "stacks": [],
+                "thread_metadata": thread_metadata,
+            },
+            id="empty",
+        ),
+        pytest.param(
+            [(6, [("1", sample_stacks[0])])],
+            {
+                "frames": [],
+                "samples": [],
+                "stacks": [],
+                "thread_metadata": thread_metadata,
+            },
+            id="single sample out of range",
+        ),
+        pytest.param(
+            [(0, [("1", sample_stacks[0])])],
+            {
+                "frames": [sample_stacks[0][2][0]],
+                "samples": [
+                    {
+                        "elapsed_since_start_ns": "0",
+                        "thread_id": "1",
+                        "stack_id": 0,
+                    },
+                ],
+                "stacks": [[0]],
+                "thread_metadata": thread_metadata,
+            },
+            id="single sample in range",
+        ),
+        pytest.param(
+            [
+                (0, [("1", sample_stacks[0])]),
+                (1, [("1", sample_stacks[0])]),
+            ],
+            {
+                "frames": [sample_stacks[0][2][0]],
+                "samples": [
+                    {
+                        "elapsed_since_start_ns": "0",
+                        "thread_id": "1",
+                        "stack_id": 0,
+                    },
+                    {
+                        "elapsed_since_start_ns": "1",
+                        "thread_id": "1",
+                        "stack_id": 0,
+                    },
+                ],
+                "stacks": [[0]],
+                "thread_metadata": thread_metadata,
+            },
+            id="two identical stacks",
+        ),
+        pytest.param(
+            [
+                (0, [("1", sample_stacks[0])]),
+                (1, [("1", sample_stacks[1])]),
+            ],
+            {
+                "frames": [
+                    sample_stacks[0][2][0],
+                    sample_stacks[1][2][0],
+                ],
+                "samples": [
+                    {
+                        "elapsed_since_start_ns": "0",
+                        "thread_id": "1",
+                        "stack_id": 0,
+                    },
+                    {
+                        "elapsed_since_start_ns": "1",
+                        "thread_id": "1",
+                        "stack_id": 1,
+                    },
+                ],
+                "stacks": [[0], [1, 0]],
+                "thread_metadata": thread_metadata,
+            },
+            id="two different stacks",
+        ),
+    ],
+)
+@mock.patch("sentry_sdk.profiler.MAX_PROFILE_DURATION_NS", 5)
+def test_profile_processing(
+    DictionaryContaining,  # noqa: N803
+    samples,
+    expected,
+):
+    with NoopScheduler(frequency=1000) as scheduler:
+        transaction = Transaction(sampled=True)
+        with Profile(transaction, scheduler=scheduler) as profile:
+            for ts, sample in samples:
+                # force the sample to be written at a time relative to the
+                # start of the profile
+                now = profile.start_ns + ts
+                profile.write(now, sample)
+
+            processed = profile.process()
+
+            assert processed["thread_metadata"] == DictionaryContaining(
+                expected["thread_metadata"]
+            )
+            assert processed["frames"] == expected["frames"]
+            assert processed["stacks"] == expected["stacks"]
+            assert processed["samples"] == expected["samples"]
diff --git a/tests/test_scope.py b/tests/test_scope.py
index d90a89f490..8bdd46e02f 100644
--- a/tests/test_scope.py
+++ b/tests/test_scope.py
@@ -1,7 +1,14 @@
 import copy
+import os
+import pytest
 from sentry_sdk import capture_exception
 from sentry_sdk.scope import Scope
 
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
 
 def test_copying():
     s1 = Scope()
@@ -62,3 +69,91 @@ def test_common_args():
     assert s2._extras == {"k": "v", "foo": "bar"}
     assert s2._tags == {"a": "b", "x": "y"}
     assert s2._contexts == {"os": {"name": "Blafasel"}, "device": {"a": "b"}}
+
+
+BAGGAGE_VALUE = (
+    "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
+    "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
+    "sentry-user_id=Am%C3%A9lie, other-vendor-value-2=foo;bar;"
+)
+
+SENTRY_TRACE_VALUE = "771a43a4192642f0b136d5159a501700-1234567890abcdef-1"
+
+
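+# SENTRY_USE_ENVIRONMENT gates reading trace data from the environment: unset or
+# empty still enables it; only an explicit falsy value like "no" disables it.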
+@pytest.mark.parametrize(
+    "env,excepted_value",
+    [
+        (
+            {
+                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
+            },
+            {
+                "sentry-trace": SENTRY_TRACE_VALUE,
+            },
+        ),
+        (
+            {
+                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
+            },
+            {
+                "baggage": BAGGAGE_VALUE,
+            },
+        ),
+        (
+            {
+                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
+                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
+            },
+            {
+                "sentry-trace": SENTRY_TRACE_VALUE,
+                "baggage": BAGGAGE_VALUE,
+            },
+        ),
+        (
+            {
+                "SENTRY_USE_ENVIRONMENT": "",
+                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
+                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
+            },
+            {
+                "sentry-trace": SENTRY_TRACE_VALUE,
+                "baggage": BAGGAGE_VALUE,
+            },
+        ),
+        (
+            {
+                "SENTRY_USE_ENVIRONMENT": "True",
+                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
+                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
+            },
+            {
+                "sentry-trace": SENTRY_TRACE_VALUE,
+                "baggage": BAGGAGE_VALUE,
+            },
+        ),
+        (
+            {
+                "SENTRY_USE_ENVIRONMENT": "no",
+                "SENTRY_TRACE": SENTRY_TRACE_VALUE,
+                "SENTRY_BAGGAGE": BAGGAGE_VALUE,
+            },
+            None,
+        ),
+        (
+            {
+                "SENTRY_USE_ENVIRONMENT": "True",
+                "MY_OTHER_VALUE": "asdf",
+                "SENTRY_RELEASE": "1.0.0",
+            },
+            None,
+        ),
+    ],
+)
+def test_load_trace_data_from_env(env, expected_value):
+    new_env = os.environ.copy()
+    new_env.update(env)
+
+    with mock.patch.dict(os.environ, new_env):
+        s = Scope()
+        incoming_trace_data = s._load_trace_data_from_env()
+        assert incoming_trace_data == expected_value
diff --git a/tests/test_scrubber.py b/tests/test_scrubber.py
new file mode 100644
index 0000000000..2c4bd3aa90
--- /dev/null
+++ b/tests/test_scrubber.py
@@ -0,0 +1,189 @@
+import sys
+import logging
+
+from sentry_sdk import capture_exception, capture_event, start_transaction, start_span
+from sentry_sdk.utils import event_from_exception
+from sentry_sdk.scrubber import EventScrubber
+from tests.conftest import ApproxDict
+
+
+logger = logging.getLogger(__name__)
+logger.setLevel(logging.DEBUG)
+
+
+def test_request_scrubbing(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    try:
+        1 / 0
+    except ZeroDivisionError:
+        ev, _hint = event_from_exception(sys.exc_info())
+
+        ev["request"] = {
+            "headers": {
+                "COOKIE": "secret",
+                "authorization": "Bearer bla",
+                "ORIGIN": "google.com",
+            },
+            "cookies": {
+                "sessionid": "secret",
+                "foo": "bar",
+            },
+            "data": {
+                "token": "secret",
+                "foo": "bar",
+            },
+        }
+
+        capture_event(ev)
+
+    (event,) = events
+
+    assert event["request"] == {
+        "headers": {
+            "COOKIE": "[Filtered]",
+            "authorization": "[Filtered]",
+            "ORIGIN": "google.com",
+        },
+        "cookies": {"sessionid": "[Filtered]", "foo": "bar"},
+        "data": {"token": "[Filtered]", "foo": "bar"},
+    }
+
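+    # the "rem" annotations record what was scrubbed: ["!config", "s"] appears
+    # to mean the value was substituted ("s") by the built-in "!config" rule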
+    assert event["_meta"]["request"] == {
+        "headers": {
+            "COOKIE": {"": {"rem": [["!config", "s"]]}},
+            "authorization": {"": {"rem": [["!config", "s"]]}},
+        },
+        "cookies": {"sessionid": {"": {"rem": [["!config", "s"]]}}},
+        "data": {"token": {"": {"rem": [["!config", "s"]]}}},
+    }
+
+
+def test_stack_var_scrubbing(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    try:
+        password = "supersecret"  # noqa
+        api_key = "1231231231"  # noqa
+        safe = "keepthis"  # noqa
+        1 / 0
+    except ZeroDivisionError:
+        capture_exception()
+
+    (event,) = events
+
+    frames = event["exception"]["values"][0]["stacktrace"]["frames"]
+    (frame,) = frames
+    assert frame["vars"]["password"] == "[Filtered]"
+    assert frame["vars"]["api_key"] == "[Filtered]"
+    assert frame["vars"]["safe"] == "'keepthis'"
+
+    meta = event["_meta"]["exception"]["values"]["0"]["stacktrace"]["frames"]["0"][
+        "vars"
+    ]
+    assert meta == {
+        "password": {"": {"rem": [["!config", "s"]]}},
+        "api_key": {"": {"rem": [["!config", "s"]]}},
+    }
+
+
+def test_breadcrumb_extra_scrubbing(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    logger.info("bread", extra=dict(foo=42, password="secret"))
+    logger.critical("whoops", extra=dict(bar=69, auth="secret"))
+
+    (event,) = events
+
+    assert event["extra"]["bar"] == 69
+    assert event["extra"]["auth"] == "[Filtered]"
+
+    assert event["breadcrumbs"]["values"][0]["data"] == {
+        "foo": 42,
+        "password": "[Filtered]",
+    }
+
+    assert event["_meta"]["extra"]["auth"] == {"": {"rem": [["!config", "s"]]}}
+    assert event["_meta"]["breadcrumbs"] == {
+        "values": {"0": {"data": {"password": {"": {"rem": [["!config", "s"]]}}}}}
+    }
+
+
+def test_span_data_scrubbing(sentry_init, capture_events):
+    sentry_init(traces_sample_rate=1.0)
+    events = capture_events()
+
+    with start_transaction(name="hi"):
+        with start_span(op="foo", description="bar") as span:
+            span.set_data("password", "secret")
+            span.set_data("datafoo", "databar")
+
+    (event,) = events
+    assert event["spans"][0]["data"] == ApproxDict(
+        {"password": "[Filtered]", "datafoo": "databar"}
+    )
+    assert event["_meta"]["spans"] == {
+        "0": {"data": {"password": {"": {"rem": [["!config", "s"]]}}}}
+    }
+
+
+def test_custom_denylist(sentry_init, capture_events):
+    sentry_init(event_scrubber=EventScrubber(denylist=["my_sensitive_var"]))
+    events = capture_events()
+
+    try:
+        my_sensitive_var = "secret"  # noqa
+        safe = "keepthis"  # noqa
+        1 / 0
+    except ZeroDivisionError:
+        capture_exception()
+
+    (event,) = events
+
+    frames = event["exception"]["values"][0]["stacktrace"]["frames"]
+    (frame,) = frames
+    assert frame["vars"]["my_sensitive_var"] == "[Filtered]"
+    assert frame["vars"]["safe"] == "'keepthis'"
+
+    meta = event["_meta"]["exception"]["values"]["0"]["stacktrace"]["frames"]["0"][
+        "vars"
+    ]
+    assert meta == {
+        "my_sensitive_var": {"": {"rem": [["!config", "s"]]}},
+    }
+
+
+def test_scrubbing_doesnt_affect_local_vars(sentry_init, capture_events):
+    sentry_init()
+    events = capture_events()
+
+    try:
+        password = "cat123"
+        1 / 0
+    except ZeroDivisionError:
+        capture_exception()
+
+    (event,) = events
+
+    frames = event["exception"]["values"][0]["stacktrace"]["frames"]
+    (frame,) = frames
+    assert frame["vars"]["password"] == "[Filtered]"
+    assert password == "cat123"
+
+
+def test_recursive_event_scrubber(sentry_init, capture_events):
+    sentry_init(event_scrubber=EventScrubber(recursive=True))
+    events = capture_events()
+    complex_structure = {
+        "deep": {
+            "deeper": [{"deepest": {"password": "my_darkest_secret"}}],
+        },
+    }
+
+    capture_event({"extra": complex_structure})
+
+    (event,) = events
+    assert event["extra"]["deep"]["deeper"][0]["deepest"]["password"] == "'[Filtered]'"
diff --git a/tests/test_serializer.py b/tests/test_serializer.py
index f5ecc7560e..ddc65c9b3e 100644
--- a/tests/test_serializer.py
+++ b/tests/test_serializer.py
@@ -1,7 +1,8 @@
+import re
 import sys
 import pytest
 
-from sentry_sdk.serializer import serialize
+from sentry_sdk.serializer import MAX_DATABAG_BREADTH, MAX_DATABAG_DEPTH, serialize
 
 try:
     from hypothesis import given
@@ -39,14 +40,24 @@ def inner(message, **kwargs):
 
 @pytest.fixture
 def extra_normalizer(validate_event_schema):
-    def inner(message, **kwargs):
-        event = serialize({"extra": {"foo": message}}, **kwargs)
+    def inner(extra, **kwargs):
+        event = serialize({"extra": {"foo": extra}}, **kwargs)
         validate_event_schema(event)
         return event["extra"]["foo"]
 
     return inner
 
 
+@pytest.fixture
+def body_normalizer(validate_event_schema):
+    def inner(body, **kwargs):
+        event = serialize({"request": {"data": body}}, **kwargs)
+        validate_event_schema(event)
+        return event["request"]["data"]
+
+    return inner
+
+
 def test_bytes_serialization_decode(message_normalizer):
     binary = b"abc123\x80\xf0\x9f\x8d\x95"
     result = message_normalizer(binary, should_repr_strings=False)
@@ -62,6 +73,27 @@ def test_bytes_serialization_repr(message_normalizer):
     assert result == r"b'abc123\x80\xf0\x9f\x8d\x95'"
 
 
+def test_bytearray_serialization_decode(message_normalizer):
+    binary = bytearray(b"abc123\x80\xf0\x9f\x8d\x95")
+    result = message_normalizer(binary, should_repr_strings=False)
+    # fmt: off
+    assert result == u"abc123\ufffd\U0001f355"
+    # fmt: on
+
+
+@pytest.mark.xfail(sys.version_info < (3,), reason="Known safe_repr bugs in Py2.7")
+def test_bytearray_serialization_repr(message_normalizer):
+    binary = bytearray(b"abc123\x80\xf0\x9f\x8d\x95")
+    result = message_normalizer(binary, should_repr_strings=True)
+    assert result == r"bytearray(b'abc123\x80\xf0\x9f\x8d\x95')"
+
+
+def test_memoryview_serialization_repr(message_normalizer):
+    binary = memoryview(b"abc123\x80\xf0\x9f\x8d\x95")
+    result = message_normalizer(binary, should_repr_strings=False)
+    assert re.match(r"^$", result)
+
+
 def test_serialize_sets(extra_normalizer):
     result = extra_normalizer({1, 2, 3})
     assert result == [1, 2, 3]
@@ -86,3 +118,46 @@ def test_custom_mapping_doesnt_mess_with_mock(extra_normalizer):
     m = mock.Mock()
     extra_normalizer(m)
     assert len(m.mock_calls) == 0
+
+
+def test_trim_databag_breadth(body_normalizer):
+    data = {
+        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
+    }
+
+    result = body_normalizer(data)
+
+    assert len(result) == MAX_DATABAG_BREADTH
+    for key, value in result.items():
+        assert data.get(key) == value
+
+
+def test_no_trimming_if_max_request_body_size_is_always(body_normalizer):
+    data = {
+        "key{}".format(i): "value{}".format(i) for i in range(MAX_DATABAG_BREADTH + 10)
+    }
+    curr = data
+    for _ in range(MAX_DATABAG_DEPTH + 5):
+        curr["nested"] = {}
+        curr = curr["nested"]
+
+    result = body_normalizer(data, max_request_body_size="always")
+
+    assert result == data
+
+
+def test_max_value_length_default(body_normalizer):
+    data = {"key": "a" * 2000}
+
+    result = body_normalizer(data)
+
+    assert len(result["key"]) == 1024  # fallback max length
+
+
+def test_max_value_length(body_normalizer):
+    data = {"key": "a" * 2000}
+
+    max_value_length = 1800
+    result = body_normalizer(data, max_value_length=max_value_length)
+
+    assert len(result["key"]) == max_value_length
diff --git a/tests/test_sessions.py b/tests/test_sessions.py
index 09b42b70a4..311aa53966 100644
--- a/tests/test_sessions.py
+++ b/tests/test_sessions.py
@@ -3,6 +3,11 @@
 from sentry_sdk import Hub
 from sentry_sdk.sessions import auto_session_tracking
 
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
 
 def sorted_aggregates(item):
     aggregates = item["aggregates"]
@@ -119,3 +124,32 @@ def test_aggregates_explicitly_disabled_session_tracking_request_mode(
     assert len(aggregates) == 1
     assert aggregates[0]["exited"] == 1
     assert "errored" not in aggregates[0]
+
+
+def test_no_thread_on_shutdown_no_errors(sentry_init):
+    sentry_init(
+        release="fun-release",
+        environment="not-fun-env",
+    )
+
+    hub = Hub.current
+
+    # make it seem like the interpreter is shutting down
+    with mock.patch(
+        "threading.Thread.start",
+        side_effect=RuntimeError("can't create new thread at interpreter shutdown"),
+    ):
+        with auto_session_tracking(session_mode="request"):
+            with sentry_sdk.push_scope():
+                try:
+                    raise Exception("all is wrong")
+                except Exception:
+                    sentry_sdk.capture_exception()
+
+        with auto_session_tracking(session_mode="request"):
+            pass
+
+        hub.start_session(session_mode="request")
+        hub.end_session()
+
+        sentry_sdk.flush()
diff --git a/tests/test_spotlight.py b/tests/test_spotlight.py
new file mode 100644
index 0000000000..f0ab4664e0
--- /dev/null
+++ b/tests/test_spotlight.py
@@ -0,0 +1,56 @@
+import pytest
+
+from sentry_sdk import Hub, capture_exception
+
+
+@pytest.fixture
+def capture_spotlight_envelopes(monkeypatch):
+    def inner():
+        envelopes = []
+        test_spotlight = Hub.current.client.spotlight
+        old_capture_envelope = test_spotlight.capture_envelope
+
+        def append_envelope(envelope):
+            envelopes.append(envelope)
+            return old_capture_envelope(envelope)
+
+        monkeypatch.setattr(test_spotlight, "capture_envelope", append_envelope)
+        return envelopes
+
+    return inner
+
+
+def test_spotlight_off_by_default(sentry_init):
+    sentry_init()
+    assert Hub.current.client.spotlight is None
+
+
+def test_spotlight_default_url(sentry_init):
+    sentry_init(spotlight=True)
+
+    spotlight = Hub.current.client.spotlight
+    assert spotlight is not None
+    assert spotlight.url == "http://localhost:8969/stream"
+
+
+def test_spotlight_custom_url(sentry_init):
+    sentry_init(spotlight="http://foobar@test.com/132")
+
+    spotlight = Hub.current.client.spotlight
+    assert spotlight is not None
+    assert spotlight.url == "http://foobar@test.com/132"
+
+
+def test_spotlight_envelope(sentry_init, capture_spotlight_envelopes):
+    sentry_init(spotlight=True)
+    envelopes = capture_spotlight_envelopes()
+
+    try:
+        raise ValueError("aha!")
+    except Exception:
+        capture_exception()
+
+    (envelope,) = envelopes
+    payload = envelope.items[0].payload.json
+
+    assert payload["exception"]["values"][0]["value"] == "aha!"
diff --git a/tests/test_transport.py b/tests/test_transport.py
index a837182f6d..8848ad471e 100644
--- a/tests/test_transport.py
+++ b/tests/test_transport.py
@@ -3,22 +3,26 @@
 import pickle
 import gzip
 import io
-
+import socket
+from collections import namedtuple
 from datetime import datetime, timedelta
 
 import pytest
-from collections import namedtuple
-from werkzeug.wrappers import Request, Response
-
 from pytest_localserver.http import WSGIServer
+from werkzeug.wrappers import Request, Response
 
 from sentry_sdk import Hub, Client, add_breadcrumb, capture_message, Scope
-from sentry_sdk.transport import _parse_rate_limits
-from sentry_sdk.envelope import Envelope, parse_json
+from sentry_sdk._compat import datetime_utcnow
+from sentry_sdk.transport import KEEP_ALIVE_SOCKET_OPTIONS, _parse_rate_limits
+from sentry_sdk.envelope import Envelope, Item, parse_json
 from sentry_sdk.integrations.logging import LoggingIntegration
 
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
 
-CapturedData = namedtuple("CapturedData", ["path", "event", "envelope"])
+CapturedData = namedtuple("CapturedData", ["path", "event", "envelope", "compressed"])
 
 
 class CapturingServer(WSGIServer):
@@ -42,15 +46,25 @@ def __call__(self, environ, start_response):
         """
         request = Request(environ)
         event = envelope = None
+        if request.headers.get("content-encoding") == "gzip":
+            rdr = gzip.GzipFile(fileobj=io.BytesIO(request.data))
+            compressed = True
+        else:
+            rdr = io.BytesIO(request.data)
+            compressed = False
+
         if request.mimetype == "application/json":
-            event = parse_json(gzip.GzipFile(fileobj=io.BytesIO(request.data)).read())
+            event = parse_json(rdr.read())
         else:
-            envelope = Envelope.deserialize_from(
-                gzip.GzipFile(fileobj=io.BytesIO(request.data))
-            )
+            envelope = Envelope.deserialize_from(rdr)
 
         self.captured.append(
-            CapturedData(path=request.path, event=event, envelope=envelope)
+            CapturedData(
+                path=request.path,
+                event=event,
+                envelope=envelope,
+                compressed=compressed,
+            )
         )
 
         response = Response(status=self.code)
@@ -81,6 +95,7 @@ def inner(**kwargs):
 @pytest.mark.parametrize("debug", (True, False))
 @pytest.mark.parametrize("client_flush_method", ["close", "flush"])
 @pytest.mark.parametrize("use_pickle", (True, False))
+@pytest.mark.parametrize("compressionlevel", (0, 9))
 def test_transport_works(
     capturing_server,
     request,
@@ -90,10 +105,16 @@ def test_transport_works(
     make_client,
     client_flush_method,
     use_pickle,
+    compressionlevel,
     maybe_monkeypatched_threading,
 ):
     caplog.set_level(logging.DEBUG)
-    client = make_client(debug=debug)
+    client = make_client(
+        debug=debug,
+        _experiments={
+            "transport_zlib_compression_level": compressionlevel,
+        },
+    )
 
     if use_pickle:
         client = pickle.loads(pickle.dumps(client))
@@ -101,7 +122,7 @@ def test_transport_works(
     Hub.current.bind_client(client)
     request.addfinalizer(lambda: Hub.current.bind_client(None))
 
-    add_breadcrumb(level="info", message="i like bread", timestamp=datetime.utcnow())
+    add_breadcrumb(level="info", message="i like bread", timestamp=datetime_utcnow())
     capture_message("löl")
 
     getattr(client, client_flush_method)()
@@ -109,10 +130,103 @@ def test_transport_works(
     out, err = capsys.readouterr()
     assert not err and not out
     assert capturing_server.captured
+    assert capturing_server.captured[0].compressed == (compressionlevel > 0)
 
     assert any("Sending event" in record.msg for record in caplog.records) == debug
 
 
+@pytest.mark.parametrize(
+    "num_pools,expected_num_pools",
+    (
+        (None, 2),
+        (2, 2),
+        (10, 10),
+    ),
+)
+def test_transport_num_pools(make_client, num_pools, expected_num_pools):
+    _experiments = {}
+    if num_pools is not None:
+        _experiments["transport_num_pools"] = num_pools
+
+    client = make_client(_experiments=_experiments)
+
+    options = client.transport._get_pool_options([])
+    assert options["num_pools"] == expected_num_pools
+
+
+def test_socket_options(make_client):
+    socket_options = [
+        (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+        (socket.SOL_TCP, socket.TCP_KEEPINTVL, 10),
+        (socket.SOL_TCP, socket.TCP_KEEPCNT, 6),
+    ]
+
+    client = make_client(socket_options=socket_options)
+
+    options = client.transport._get_pool_options([])
+    assert options["socket_options"] == socket_options
+
+
+def test_keep_alive_true(make_client):
+    client = make_client(keep_alive=True)
+
+    options = client.transport._get_pool_options([])
+    assert options["socket_options"] == KEEP_ALIVE_SOCKET_OPTIONS
+
+
+def test_keep_alive_off_by_default(make_client):
+    client = make_client()
+    options = client.transport._get_pool_options([])
+    assert "socket_options" not in options
+
+
+def test_socket_options_override_keep_alive(make_client):
+    socket_options = [
+        (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+        (socket.SOL_TCP, socket.TCP_KEEPINTVL, 10),
+        (socket.SOL_TCP, socket.TCP_KEEPCNT, 6),
+    ]
+
+    client = make_client(socket_options=socket_options, keep_alive=False)
+
+    options = client.transport._get_pool_options([])
+    assert options["socket_options"] == socket_options
+
+
+def test_socket_options_merge_with_keep_alive(make_client):
+    socket_options = [
+        (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 42),
+        (socket.SOL_TCP, socket.TCP_KEEPINTVL, 42),
+    ]
+
+    client = make_client(socket_options=socket_options, keep_alive=True)
+
+    options = client.transport._get_pool_options([])
+    try:
+        assert options["socket_options"] == [
+            (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 42),
+            (socket.SOL_TCP, socket.TCP_KEEPINTVL, 42),
+            (socket.SOL_TCP, socket.TCP_KEEPIDLE, 45),
+            (socket.SOL_TCP, socket.TCP_KEEPCNT, 6),
+        ]
+    except AttributeError:
+        assert options["socket_options"] == [
+            (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 42),
+            (socket.SOL_TCP, socket.TCP_KEEPINTVL, 42),
+            (socket.SOL_TCP, socket.TCP_KEEPCNT, 6),
+        ]
+
+
+def test_socket_options_override_defaults(make_client):
+    # Setting socket_options to [] is not the same as leaving it unset: it
+    # means the user wants to disable urllib3's socket option defaults, so
+    # the empty list must be passed through rather than ignored.
+    client = make_client(socket_options=[])
+
+    options = client.transport._get_pool_options([])
+    assert options["socket_options"] == []
+
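+# For context, a hedged sketch of where the asserted options end up: the
+# transport builds its urllib3 pool from _get_pool_options, roughly
+#
+#     import urllib3
+#     urllib3.PoolManager(num_pools=2, socket_options=KEEP_ALIVE_SOCKET_OPTIONS)
+#
+# (the exact wiring lives in sentry_sdk.transport).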
+
 def test_transport_infinite_loop(capturing_server, request, make_client):
     client = make_client(
         debug=True,
@@ -127,6 +241,21 @@ def test_transport_infinite_loop(capturing_server, request, make_client):
     assert len(capturing_server.captured) == 1
 
 
+def test_transport_no_thread_on_shutdown_no_errors(capturing_server, make_client):
+    client = make_client()
+
+    # make it seem like the interpreter is shutting down
+    with mock.patch(
+        "threading.Thread.start",
+        side_effect=RuntimeError("can't create new thread at interpreter shutdown"),
+    ):
+        with Hub(client):
+            capture_message("hi")
+
+    # nothing exploded but also no events can be sent anymore
+    assert len(capturing_server.captured) == 0
+
+
 NOW = datetime(2014, 6, 2)
 
 
@@ -162,7 +291,7 @@ def test_parse_rate_limits(input, expected):
     assert dict(_parse_rate_limits(input, now=NOW)) == expected
 
 
-def test_simple_rate_limits(capturing_server, capsys, caplog, make_client):
+def test_simple_rate_limits(capturing_server, make_client):
     client = make_client()
     capturing_server.respond_with(code=429, headers={"Retry-After": "4"})
 
@@ -184,7 +313,7 @@ def test_simple_rate_limits(capturing_server, capsys, caplog, make_client):
 
 @pytest.mark.parametrize("response_code", [200, 429])
 def test_data_category_limits(
-    capturing_server, capsys, caplog, response_code, make_client, monkeypatch
+    capturing_server, response_code, make_client, monkeypatch
 ):
     client = make_client(send_client_reports=False)
 
@@ -231,7 +360,7 @@ def record_lost_event(reason, data_category=None, item=None):
 
 @pytest.mark.parametrize("response_code", [200, 429])
 def test_data_category_limits_reporting(
-    capturing_server, capsys, caplog, response_code, make_client, monkeypatch
+    capturing_server, response_code, make_client, monkeypatch
 ):
     client = make_client(send_client_reports=True)
 
@@ -314,7 +443,7 @@ def intercepting_fetch(*args, **kwargs):
 
 @pytest.mark.parametrize("response_code", [200, 429])
 def test_complex_limits_without_data_category(
-    capturing_server, capsys, caplog, response_code, make_client
+    capturing_server, response_code, make_client
 ):
     client = make_client()
     capturing_server.respond_with(
@@ -337,3 +466,114 @@ def test_complex_limits_without_data_category(
     client.flush()
 
     assert len(capturing_server.captured) == 0
+
+
+@pytest.mark.parametrize("response_code", [200, 429])
+def test_metric_bucket_limits(capturing_server, response_code, make_client):
+    client = make_client()
+    capturing_server.respond_with(
+        code=response_code,
+        headers={
+            "X-Sentry-Rate-Limits": "4711:metric_bucket:organization:quota_exceeded:custom"
+        },
+    )
+
+    envelope = Envelope()
+    envelope.add_item(Item(payload=b"{}", type="statsd"))
+    client.transport.capture_envelope(envelope)
+    client.flush()
+
+    assert len(capturing_server.captured) == 1
+    assert capturing_server.captured[0].path == "/api/132/envelope/"
+    capturing_server.clear_captured()
+
+    assert set(client.transport._disabled_until) == set(["metric_bucket"])
+
+    client.transport.capture_envelope(envelope)
+    client.capture_event({"type": "transaction"})
+    client.flush()
+
+    assert len(capturing_server.captured) == 2
+
+    envelope = capturing_server.captured[0].envelope
+    assert envelope.items[0].type == "transaction"
+    envelope = capturing_server.captured[1].envelope
+    assert envelope.items[0].type == "client_report"
+    report = parse_json(envelope.items[0].get_bytes())
+    assert report["discarded_events"] == [
+        {"category": "metric_bucket", "reason": "ratelimit_backoff", "quantity": 1},
+    ]
+
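+# The X-Sentry-Rate-Limits value above parses, roughly, as
+#   <retry_after_seconds>:<categories>:<scope>:<reason>[:<namespaces>]
+# so "4711:metric_bucket:organization:quota_exceeded:custom" backs off the
+# metric_bucket category for 4711 seconds, but only for the "custom" metric
+# namespace -- a limit scoped to another namespace (the "foo" case below)
+# leaves statsd items untouched.
+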
+
+@pytest.mark.parametrize("response_code", [200, 429])
+def test_metric_bucket_limits_with_namespace(
+    capturing_server, response_code, make_client
+):
+    client = make_client()
+    capturing_server.respond_with(
+        code=response_code,
+        headers={
+            "X-Sentry-Rate-Limits": "4711:metric_bucket:organization:quota_exceeded:foo"
+        },
+    )
+
+    envelope = Envelope()
+    envelope.add_item(Item(payload=b"{}", type="statsd"))
+    client.transport.capture_envelope(envelope)
+    client.flush()
+
+    assert len(capturing_server.captured) == 1
+    assert capturing_server.captured[0].path == "/api/132/envelope/"
+    capturing_server.clear_captured()
+
+    assert set(client.transport._disabled_until) == set([])
+
+    client.transport.capture_envelope(envelope)
+    client.capture_event({"type": "transaction"})
+    client.flush()
+
+    assert len(capturing_server.captured) == 2
+
+    envelope = capturing_server.captured[0].envelope
+    assert envelope.items[0].type == "statsd"
+    envelope = capturing_server.captured[1].envelope
+    assert envelope.items[0].type == "transaction"
+
+
+@pytest.mark.parametrize("response_code", [200, 429])
+def test_metric_bucket_limits_with_all_namespaces(
+    capturing_server, response_code, make_client
+):
+    client = make_client()
+    capturing_server.respond_with(
+        code=response_code,
+        headers={
+            "X-Sentry-Rate-Limits": "4711:metric_bucket:organization:quota_exceeded"
+        },
+    )
+
+    envelope = Envelope()
+    envelope.add_item(Item(payload=b"{}", type="statsd"))
+    client.transport.capture_envelope(envelope)
+    client.flush()
+
+    assert len(capturing_server.captured) == 1
+    assert capturing_server.captured[0].path == "/api/132/envelope/"
+    capturing_server.clear_captured()
+
+    assert set(client.transport._disabled_until) == set(["metric_bucket"])
+
+    client.transport.capture_envelope(envelope)
+    client.capture_event({"type": "transaction"})
+    client.flush()
+
+    assert len(capturing_server.captured) == 2
+
+    envelope = capturing_server.captured[0].envelope
+    assert envelope.items[0].type == "transaction"
+    envelope = capturing_server.captured[1].envelope
+    assert envelope.items[0].type == "client_report"
+    report = parse_json(envelope.items[0].get_bytes())
+    assert report["discarded_events"] == [
+        {"category": "metric_bucket", "reason": "ratelimit_backoff", "quantity": 1},
+    ]
diff --git a/tests/test_types.py b/tests/test_types.py
new file mode 100644
index 0000000000..bef6aaa59e
--- /dev/null
+++ b/tests/test_types.py
@@ -0,0 +1,28 @@
+import sys
+
+import pytest
+from sentry_sdk.types import Event, Hint
+
+
+@pytest.mark.skipif(
+    sys.version_info < (3, 10),
+    reason="Type hinting with `|` is available in Python 3.10+",
+)
+def test_event_or_none_runtime():
+    """
+    Ensures that the `Event` type's runtime value supports the `|` operation with `None`.
+    This test is needed to ensure that using an `Event | None` type hint (e.g. for
+    `before_send`'s return value) does not raise a TypeError at runtime.
+    """
+    Event | None
+
+
+@pytest.mark.skipif(
+    sys.version_info < (3, 10),
+    reason="Type hinting with `|` is available in Python 3.10+",
+)
+def test_hint_or_none_runtime():
+    """
+    Analogue to `test_event_or_none_runtime`, but for the `Hint` type.
+    """
+    Hint | None
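+
+
+# The guard works because `X | Y` at runtime calls type.__or__; the tests
+# simply assert that evaluating `Event | None` (as a user's annotation for
+# e.g. before_send would) doesn't raise, assuming Event and Hint resolve to
+# runtime classes rather than bare typing constructs.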
diff --git a/tests/test_utils.py b/tests/test_utils.py
new file mode 100644
index 0000000000..812718599c
--- /dev/null
+++ b/tests/test_utils.py
@@ -0,0 +1,753 @@
+import pytest
+import re
+import sys
+import threading
+from datetime import timedelta
+
+from sentry_sdk._compat import duration_in_milliseconds
+from sentry_sdk._queue import Queue
+from sentry_sdk.utils import (
+    Components,
+    Dsn,
+    get_current_thread_meta,
+    get_default_release,
+    get_error_message,
+    get_git_revision,
+    is_valid_sample_rate,
+    logger,
+    match_regex_list,
+    parse_url,
+    parse_version,
+    safe_str,
+    sanitize_url,
+    serialize_frame,
+    is_sentry_url,
+    _get_installed_modules,
+    _generate_installed_modules,
+)
+
+import sentry_sdk
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+try:
+    import gevent
+except ImportError:
+    gevent = None
+
+try:
+    # Python 3
+    FileNotFoundError
+except NameError:
+    # Python 2
+    FileNotFoundError = IOError
+
+
+def _normalize_distribution_name(name):
+    # type: (str) -> str
+    """Normalize distribution name according to PEP-0503.
+
+    See:
+    https://peps.python.org/pep-0503/#normalized-names
+    for more details.
+    """
+    return re.sub(r"[-_.]+", "-", name).lower()
+
+
+@pytest.mark.parametrize(
+    ("url", "expected_result"),
+    [
+        ("http://localhost:8000", "http://localhost:8000"),
+        ("http://example.com", "http://example.com"),
+        ("https://example.com", "https://example.com"),
+        (
+            "example.com?token=abc&sessionid=123&save=true",
+            "example.com?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+        ),
+        (
+            "http://example.com?token=abc&sessionid=123&save=true",
+            "http://example.com?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+        ),
+        (
+            "https://example.com?token=abc&sessionid=123&save=true",
+            "https://example.com?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+        ),
+        (
+            "http://localhost:8000/?token=abc&sessionid=123&save=true",
+            "http://localhost:8000/?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+        ),
+        (
+            "ftp://username:password@ftp.example.com:9876/bla/blub#foo",
+            "ftp://[Filtered]:[Filtered]@ftp.example.com:9876/bla/blub#foo",
+        ),
+        (
+            "https://username:password@example.com/bla/blub?token=abc&sessionid=123&save=true#fragment",
+            "https://[Filtered]:[Filtered]@example.com/bla/blub?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]#fragment",
+        ),
+        ("bla/blub/foo", "bla/blub/foo"),
+        ("/bla/blub/foo/", "/bla/blub/foo/"),
+        (
+            "bla/blub/foo?token=abc&sessionid=123&save=true",
+            "bla/blub/foo?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+        ),
+        (
+            "/bla/blub/foo/?token=abc&sessionid=123&save=true",
+            "/bla/blub/foo/?token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+        ),
+    ],
+)
+def test_sanitize_url(url, expected_result):
+    # sort parts because old Python versions (<3.6) don't preserve order
+    sanitized_url = sanitize_url(url)
+    parts = sorted(re.split(r"\&|\?|\#", sanitized_url))
+    expected_parts = sorted(re.split(r"\&|\?|\#", expected_result))
+
+    assert parts == expected_parts
+
+
+@pytest.mark.parametrize(
+    ("url", "expected_result"),
+    [
+        (
+            "http://localhost:8000",
+            Components(
+                scheme="http", netloc="localhost:8000", path="", query="", fragment=""
+            ),
+        ),
+        (
+            "http://example.com",
+            Components(
+                scheme="http", netloc="example.com", path="", query="", fragment=""
+            ),
+        ),
+        (
+            "https://example.com",
+            Components(
+                scheme="https", netloc="example.com", path="", query="", fragment=""
+            ),
+        ),
+        (
+            "example.com?token=abc&sessionid=123&save=true",
+            Components(
+                scheme="",
+                netloc="",
+                path="example.com",
+                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+                fragment="",
+            ),
+        ),
+        (
+            "http://example.com?token=abc&sessionid=123&save=true",
+            Components(
+                scheme="http",
+                netloc="example.com",
+                path="",
+                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+                fragment="",
+            ),
+        ),
+        (
+            "https://example.com?token=abc&sessionid=123&save=true",
+            Components(
+                scheme="https",
+                netloc="example.com",
+                path="",
+                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+                fragment="",
+            ),
+        ),
+        (
+            "http://localhost:8000/?token=abc&sessionid=123&save=true",
+            Components(
+                scheme="http",
+                netloc="localhost:8000",
+                path="/",
+                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+                fragment="",
+            ),
+        ),
+        (
+            "ftp://username:password@ftp.example.com:9876/bla/blub#foo",
+            Components(
+                scheme="ftp",
+                netloc="[Filtered]:[Filtered]@ftp.example.com:9876",
+                path="/bla/blub",
+                query="",
+                fragment="foo",
+            ),
+        ),
+        (
+            "https://username:password@example.com/bla/blub?token=abc&sessionid=123&save=true#fragment",
+            Components(
+                scheme="https",
+                netloc="[Filtered]:[Filtered]@example.com",
+                path="/bla/blub",
+                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+                fragment="fragment",
+            ),
+        ),
+        (
+            "bla/blub/foo",
+            Components(
+                scheme="", netloc="", path="bla/blub/foo", query="", fragment=""
+            ),
+        ),
+        (
+            "bla/blub/foo?token=abc&sessionid=123&save=true",
+            Components(
+                scheme="",
+                netloc="",
+                path="bla/blub/foo",
+                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+                fragment="",
+            ),
+        ),
+        (
+            "/bla/blub/foo/?token=abc&sessionid=123&save=true",
+            Components(
+                scheme="",
+                netloc="",
+                path="/bla/blub/foo/",
+                query="token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+                fragment="",
+            ),
+        ),
+    ],
+)
+def test_sanitize_url_and_split(url, expected_result):
+    sanitized_url = sanitize_url(url, split=True)
+    # sort query because old Python versions (<3.6) don't preserve order
+    query = sorted(sanitized_url.query.split("&"))
+    expected_query = sorted(expected_result.query.split("&"))
+
+    assert sanitized_url.scheme == expected_result.scheme
+    assert sanitized_url.netloc == expected_result.netloc
+    assert query == expected_query
+    assert sanitized_url.path == expected_result.path
+    assert sanitized_url.fragment == expected_result.fragment
+
+
+@pytest.mark.parametrize(
+    ("url", "sanitize", "expected_url", "expected_query", "expected_fragment"),
+    [
+        # Test with sanitize=True
+        (
+            "https://example.com",
+            True,
+            "https://example.com",
+            "",
+            "",
+        ),
+        (
+            "example.com?token=abc&sessionid=123&save=true",
+            True,
+            "example.com",
+            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+            "",
+        ),
+        (
+            "https://example.com?token=abc&sessionid=123&save=true",
+            True,
+            "https://example.com",
+            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+            "",
+        ),
+        (
+            "https://username:password@example.com/bla/blub?token=abc&sessionid=123&save=true#fragment",
+            True,
+            "https://[Filtered]:[Filtered]@example.com/bla/blub",
+            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+            "fragment",
+        ),
+        (
+            "bla/blub/foo",
+            True,
+            "bla/blub/foo",
+            "",
+            "",
+        ),
+        (
+            "/bla/blub/foo/#baz",
+            True,
+            "/bla/blub/foo/",
+            "",
+            "baz",
+        ),
+        (
+            "bla/blub/foo?token=abc&sessionid=123&save=true",
+            True,
+            "bla/blub/foo",
+            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+            "",
+        ),
+        (
+            "/bla/blub/foo/?token=abc&sessionid=123&save=true",
+            True,
+            "/bla/blub/foo/",
+            "token=[Filtered]&sessionid=[Filtered]&save=[Filtered]",
+            "",
+        ),
+        # Test with sanitize=False
+        (
+            "https://example.com",
+            False,
+            "https://example.com",
+            "",
+            "",
+        ),
+        (
+            "example.com?token=abc&sessionid=123&save=true",
+            False,
+            "example.com",
+            "token=abc&sessionid=123&save=true",
+            "",
+        ),
+        (
+            "https://example.com?token=abc&sessionid=123&save=true",
+            False,
+            "https://example.com",
+            "token=abc&sessionid=123&save=true",
+            "",
+        ),
+        (
+            "https://username:password@example.com/bla/blub?token=abc&sessionid=123&save=true#fragment",
+            False,
+            "https://[Filtered]:[Filtered]@example.com/bla/blub",
+            "token=abc&sessionid=123&save=true",
+            "fragment",
+        ),
+        (
+            "bla/blub/foo",
+            False,
+            "bla/blub/foo",
+            "",
+            "",
+        ),
+        (
+            "/bla/blub/foo/#baz",
+            False,
+            "/bla/blub/foo/",
+            "",
+            "baz",
+        ),
+        (
+            "bla/blub/foo?token=abc&sessionid=123&save=true",
+            False,
+            "bla/blub/foo",
+            "token=abc&sessionid=123&save=true",
+            "",
+        ),
+        (
+            "/bla/blub/foo/?token=abc&sessionid=123&save=true",
+            False,
+            "/bla/blub/foo/",
+            "token=abc&sessionid=123&save=true",
+            "",
+        ),
+    ],
+)
+def test_parse_url(url, sanitize, expected_url, expected_query, expected_fragment):
+    assert parse_url(url, sanitize=sanitize).url == expected_url
+    assert parse_url(url, sanitize=sanitize).fragment == expected_fragment
+
+    # sort parts because old Python versions (<3.6) don't preserve order
+    sanitized_query = parse_url(url, sanitize=sanitize).query
+    query_parts = sorted(re.split(r"\&|\?|\#", sanitized_query))
+    expected_query_parts = sorted(re.split(r"\&|\?|\#", expected_query))
+
+    assert query_parts == expected_query_parts
+
+
+@pytest.mark.parametrize(
+    "rate",
+    [0.0, 0.1231, 1.0, True, False],
+)
+def test_accepts_valid_sample_rate(rate):
+    with mock.patch.object(logger, "warning", mock.Mock()):
+        result = is_valid_sample_rate(rate, source="Testing")
+        assert logger.warning.called is False
+        assert result is True
+
+
+@pytest.mark.parametrize(
+    "rate",
+    [
+        "dogs are great",  # wrong type
+        (0, 1),  # wrong type
+        {"Maisey": "Charllie"},  # wrong type
+        [True, True],  # wrong type
+        {0.2012},  # wrong type
+        float("NaN"),  # wrong type
+        None,  # wrong type
+        -1.121,  # wrong value
+        1.231,  # wrong value
+    ],
+)
+def test_warns_on_invalid_sample_rate(rate, StringContaining):  # noqa: N803
+    with mock.patch.object(logger, "warning", mock.Mock()):
+        result = is_valid_sample_rate(rate, source="Testing")
+        logger.warning.assert_any_call(StringContaining("Given sample rate is invalid"))
+        assert result is False
+
+
+@pytest.mark.parametrize(
+    "include_source_context",
+    [True, False],
+)
+def test_include_source_context_when_serializing_frame(include_source_context):
+    frame = sys._getframe()
+    result = serialize_frame(frame, include_source_context=include_source_context)
+
+    assert ("pre_context" in result) == include_source_context
+    assert ("context_line" in result) == include_source_context
+    assert ("post_context" in result) == include_source_context
+
+
+@pytest.mark.parametrize(
+    "item,regex_list,expected_result",
+    [
+        ["", [], False],
+        [None, [], False],
+        ["", None, False],
+        [None, None, False],
+        ["some-string", [], False],
+        ["some-string", None, False],
+        ["some-string", ["some-string"], True],
+        ["some-string", ["some"], False],
+        ["some-string", ["some$"], False],  # same as above
+        ["some-string", ["some.*"], True],
+        ["some-string", ["Some"], False],  # we do case sensitive matching
+        ["some-string", [".*string$"], True],
+    ],
+)
+def test_match_regex_list(item, regex_list, expected_result):
+    assert match_regex_list(item, regex_list) == expected_result
+
+
+@pytest.mark.parametrize(
+    "version,expected_result",
+    [
+        ["3.5.15", (3, 5, 15)],
+        ["2.0.9", (2, 0, 9)],
+        ["2.0.0", (2, 0, 0)],
+        ["0.6.0", (0, 6, 0)],
+        ["2.0.0.post1", (2, 0, 0)],
+        ["2.0.0rc3", (2, 0, 0)],
+        ["2.0.0rc2", (2, 0, 0)],
+        ["2.0.0rc1", (2, 0, 0)],
+        ["2.0.0b4", (2, 0, 0)],
+        ["2.0.0b3", (2, 0, 0)],
+        ["2.0.0b2", (2, 0, 0)],
+        ["2.0.0b1", (2, 0, 0)],
+        ["0.6beta3", (0, 6)],
+        ["0.6beta2", (0, 6)],
+        ["0.6beta1", (0, 6)],
+        ["0.4.2b", (0, 4, 2)],
+        ["0.4.2a", (0, 4, 2)],
+        ["0.0.1", (0, 0, 1)],
+        ["0.0.0", (0, 0, 0)],
+        ["1", (1,)],
+        ["1.0", (1, 0)],
+        ["1.0.0", (1, 0, 0)],
+        [" 1.0.0 ", (1, 0, 0)],
+        ["  1.0.0   ", (1, 0, 0)],
+        ["x1.0.0", None],
+        ["1.0.0x", None],
+        ["x1.0.0x", None],
+    ],
+)
+def test_parse_version(version, expected_result):
+    assert parse_version(version) == expected_result
+
+
+@pytest.fixture
+def mock_hub_with_dsn_netloc():
+    """
+    Returns a mocked hub with a DSN netloc of "abcd1234.ingest.sentry.io".
+    """
+
+    mock_hub = mock.Mock(spec=sentry_sdk.Hub)
+    mock_hub.client = mock.Mock(spec=sentry_sdk.Client)
+    mock_hub.client.transport = mock.Mock(spec=sentry_sdk.Transport)
+    mock_hub.client.transport.parsed_dsn = mock.Mock(spec=Dsn)
+
+    mock_hub.client.transport.parsed_dsn.netloc = "abcd1234.ingest.sentry.io"
+
+    return mock_hub
+
+
+@pytest.mark.parametrize(
+    ["test_url", "is_sentry_url_expected"],
+    [
+        ["https://asdf@abcd1234.ingest.sentry.io/123456789", True],
+        ["https://asdf@abcd1234.ingest.notsentry.io/123456789", False],
+    ],
+)
+def test_is_sentry_url_true(test_url, is_sentry_url_expected, mock_hub_with_dsn_netloc):
+    ret_val = is_sentry_url(mock_hub_with_dsn_netloc, test_url)
+
+    assert ret_val == is_sentry_url_expected
+
+
+def test_is_sentry_url_no_client():
+    hub = mock.Mock()
+    hub.client = None
+
+    test_url = "https://asdf@abcd1234.ingest.sentry.io/123456789"
+
+    ret_val = is_sentry_url(hub, test_url)
+
+    assert not ret_val
+
+
+@pytest.mark.parametrize(
+    "error,expected_result",
+    [
+        ["", lambda x: safe_str(x)],
+        ["some-string", lambda _: "some-string"],
+    ],
+)
+def test_get_error_message(error, expected_result):
+    with pytest.raises(BaseException) as exc_value:
+        exc_value.message = error
+        raise Exception
+    assert get_error_message(exc_value) == expected_result(exc_value)
+
+    with pytest.raises(BaseException) as exc_value:
+        exc_value.detail = error
+        raise Exception
+    assert get_error_message(exc_value) == expected_result(exc_value)
+
+
+def test_installed_modules():
+    try:
+        from importlib.metadata import distributions, version
+
+        importlib_available = True
+    except ImportError:
+        importlib_available = False
+
+    try:
+        import pkg_resources
+
+        pkg_resources_available = True
+    except ImportError:
+        pkg_resources_available = False
+
+    installed_distributions = {
+        _normalize_distribution_name(dist): version
+        for dist, version in _generate_installed_modules()
+    }
+
+    if importlib_available:
+        importlib_distributions = {
+            _normalize_distribution_name(dist.metadata["Name"]): version(
+                dist.metadata["Name"]
+            )
+            for dist in distributions()
+            if dist.metadata["Name"] is not None
+            and version(dist.metadata["Name"]) is not None
+        }
+        assert installed_distributions == importlib_distributions
+
+    elif pkg_resources_available:
+        pkg_resources_distributions = {
+            _normalize_distribution_name(dist.key): dist.version
+            for dist in pkg_resources.working_set
+        }
+        assert installed_distributions == pkg_resources_distributions
+    else:
+        pytest.fail("Neither importlib nor pkg_resources is available")
+
+
+def test_installed_modules_caching():
+    mock_generate_installed_modules = mock.Mock()
+    mock_generate_installed_modules.return_value = {"package": "1.0.0"}
+    with mock.patch("sentry_sdk.utils._installed_modules", None):
+        with mock.patch(
+            "sentry_sdk.utils._generate_installed_modules",
+            mock_generate_installed_modules,
+        ):
+            _get_installed_modules()
+            assert mock_generate_installed_modules.called
+            mock_generate_installed_modules.reset_mock()
+
+            _get_installed_modules()
+            mock_generate_installed_modules.assert_not_called()
+
+
+def test_devnull_inaccessible():
+    with mock.patch("sentry_sdk.utils.open", side_effect=OSError("oh no")):
+        revision = get_git_revision()
+
+    assert revision is None
+
+
+def test_devnull_not_found():
+    with mock.patch("sentry_sdk.utils.open", side_effect=FileNotFoundError("oh no")):
+        revision = get_git_revision()
+
+    assert revision is None
+
+
+def test_default_release():
+    release = get_default_release()
+    assert release is not None
+
+
+def test_default_release_empty_string():
+    with mock.patch("sentry_sdk.utils.get_git_revision", return_value=""):
+        release = get_default_release()
+
+    assert release is None
+
+
+@pytest.mark.parametrize(
+    "timedelta,expected_milliseconds",
+    [
+        [timedelta(milliseconds=132), 132.0],
+        [timedelta(hours=1, milliseconds=132), float(60 * 60 * 1000 + 132)],
+        [timedelta(days=10), float(10 * 24 * 60 * 60 * 1000)],
+        [timedelta(microseconds=100), 0.1],
+    ],
+)
+def test_duration_in_milliseconds(timedelta, expected_milliseconds):
+    assert duration_in_milliseconds(timedelta) == expected_milliseconds
+
+
+def test_get_current_thread_meta_explicit_thread():
+    results = Queue(maxsize=1)
+
+    def target1():
+        pass
+
+    def target2():
+        results.put(get_current_thread_meta(thread1))
+
+    thread1 = threading.Thread(target=target1)
+    thread1.start()
+
+    thread2 = threading.Thread(target=target2)
+    thread2.start()
+
+    thread2.join()
+    thread1.join()
+
+    assert (thread1.ident, thread1.name) == results.get(timeout=1)
+
+
+@pytest.mark.skipif(
+    sys.version_info < (3, 4), reason="threading.main_thread() not available"
+)
+def test_get_current_thread_meta_bad_explicit_thread():
+    thread = "fake thread"
+
+    main_thread = threading.main_thread()
+
+    assert (main_thread.ident, main_thread.name) == get_current_thread_meta(thread)
+
+
+@pytest.mark.skipif(gevent is None, reason="gevent not enabled")
+def test_get_current_thread_meta_gevent_in_thread():
+    results = Queue(maxsize=1)
+
+    def target():
+        with mock.patch("sentry_sdk.utils.is_gevent", side_effect=[True]):
+            job = gevent.spawn(get_current_thread_meta)
+            job.join()
+            results.put(job.value)
+
+    thread = threading.Thread(target=target)
+    thread.start()
+    thread.join()
+    assert (thread.ident, None) == results.get(timeout=1)
+
+
+@pytest.mark.skipif(gevent is None, reason="gevent not enabled")
+def test_get_current_thread_meta_gevent_in_thread_failed_to_get_hub():
+    results = Queue(maxsize=1)
+
+    def target():
+        with mock.patch("sentry_sdk.utils.is_gevent", side_effect=[True]):
+            with mock.patch(
+                "sentry_sdk.utils.get_gevent_hub", side_effect=["fake hub"]
+            ):
+                job = gevent.spawn(get_current_thread_meta)
+                job.join()
+                results.put(job.value)
+
+    thread = threading.Thread(target=target)
+    thread.start()
+    thread.join()
+    assert (thread.ident, thread.name) == results.get(timeout=1)
+
+
+def test_get_current_thread_meta_running_thread():
+    results = Queue(maxsize=1)
+
+    def target():
+        results.put(get_current_thread_meta())
+
+    thread = threading.Thread(target=target)
+    thread.start()
+    thread.join()
+    assert (thread.ident, thread.name) == results.get(timeout=1)
+
+
+@pytest.mark.skipif(
+    sys.version_info < (3, 4), reason="threading.main_thread() not available"
+)
+def test_get_current_thread_meta_bad_running_thread():
+    results = Queue(maxsize=1)
+
+    def target():
+        with mock.patch("threading.current_thread", side_effect=["fake thread"]):
+            results.put(get_current_thread_meta())
+
+    thread = threading.Thread(target=target)
+    thread.start()
+    thread.join()
+
+    main_thread = threading.main_thread()
+    assert (main_thread.ident, main_thread.name) == results.get(timeout=1)
+
+
+@pytest.mark.skipif(
+    sys.version_info < (3, 4), reason="threading.main_thread() not available"
+)
+def test_get_current_thread_meta_main_thread():
+    results = Queue(maxsize=1)
+
+    def target():
+        # mock that somehow the current thread doesn't exist
+        with mock.patch("threading.current_thread", side_effect=[None]):
+            results.put(get_current_thread_meta())
+
+    main_thread = threading.main_thread()
+
+    thread = threading.Thread(target=target)
+    thread.start()
+    thread.join()
+    assert (main_thread.ident, main_thread.name) == results.get(timeout=1)
+
+
+@pytest.mark.skipif(
+    sys.version_info < (3, 4), reason="threading.main_thread() not available"
+)
+def test_get_current_thread_meta_failed_to_get_main_thread():
+    results = Queue(maxsize=1)
+
+    def target():
+        with mock.patch("threading.current_thread", side_effect=["fake thread"]):
+            with mock.patch("threading.current_thread", side_effect=["fake thread"]):
+                results.put(get_current_thread_meta())
+
+    main_thread = threading.main_thread()
+
+    thread = threading.Thread(target=target)
+    thread.start()
+    thread.join()
+    assert (main_thread.ident, main_thread.name) == results.get(timeout=1)
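+
+
+# Taken together, the tests above pin get_current_thread_meta's fallback
+# chain, roughly: explicit thread -> gevent hub (when is_gevent()) ->
+# threading.current_thread() -> threading.main_thread(); the implementation
+# itself lives in sentry_sdk.utils.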
diff --git a/tests/tracing/test_baggage.py b/tests/tracing/test_baggage.py
new file mode 100644
index 0000000000..fa856e0af4
--- /dev/null
+++ b/tests/tracing/test_baggage.py
@@ -0,0 +1,80 @@
+# coding: utf-8
+from sentry_sdk.tracing_utils import Baggage
+
+
+def test_third_party_baggage():
+    header = "other-vendor-value-1=foo;bar;baz, other-vendor-value-2=foo;bar;"
+    baggage = Baggage.from_incoming_header(header)
+
+    assert baggage.mutable
+    assert baggage.sentry_items == {}
+    assert sorted(baggage.third_party_items.split(",")) == sorted(
+        "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;".split(",")
+    )
+
+    assert baggage.dynamic_sampling_context() == {}
+    assert baggage.serialize() == ""
+    assert sorted(baggage.serialize(include_third_party=True).split(",")) == sorted(
+        "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;".split(",")
+    )
+
+
+def test_mixed_baggage():
+    header = (
+        "other-vendor-value-1=foo;bar;baz, sentry-trace_id=771a43a4192642f0b136d5159a501700, "
+        "sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337, "
+        "sentry-user_id=Am%C3%A9lie, sentry-foo=bar, other-vendor-value-2=foo;bar;"
+    )
+
+    baggage = Baggage.from_incoming_header(header)
+
+    assert not baggage.mutable
+
+    assert baggage.sentry_items == {
+        "public_key": "49d0f7386ad645858ae85020e393bef3",
+        "trace_id": "771a43a4192642f0b136d5159a501700",
+        "user_id": "Amélie",
+        "sample_rate": "0.01337",
+        "foo": "bar",
+    }
+
+    assert (
+        baggage.third_party_items
+        == "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;"
+    )
+
+    assert baggage.dynamic_sampling_context() == {
+        "public_key": "49d0f7386ad645858ae85020e393bef3",
+        "trace_id": "771a43a4192642f0b136d5159a501700",
+        "user_id": "Amélie",
+        "sample_rate": "0.01337",
+        "foo": "bar",
+    }
+
+    assert sorted(baggage.serialize().split(",")) == sorted(
+        (
+            "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
+            "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
+            "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie,"
+            "sentry-foo=bar"
+        ).split(",")
+    )
+
+    assert sorted(baggage.serialize(include_third_party=True).split(",")) == sorted(
+        (
+            "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
+            "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
+            "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie,sentry-foo=bar,"
+            "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;"
+        ).split(",")
+    )
+
+
+def test_malformed_baggage():
+    header = ","
+
+    baggage = Baggage.from_incoming_header(header)
+
+    assert baggage.sentry_items == {}
+    assert baggage.third_party_items == ""
+    assert baggage.mutable
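+
+
+# Per the W3C Baggage format these headers are comma-separated key=value
+# pairs; the SDK claims keys with the "sentry-" prefix (percent-decoding
+# values, e.g. Am%C3%A9lie -> Amélie) and echoes everything else through
+# third_party_items untouched.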
diff --git a/tests/tracing/test_decorator_async_py3.py b/tests/tracing/test_decorator_async_py3.py
new file mode 100644
index 0000000000..401180ad39
--- /dev/null
+++ b/tests/tracing/test_decorator_async_py3.py
@@ -0,0 +1,49 @@
+from unittest import mock
+import pytest
+import sys
+
+from tests.conftest import patch_start_tracing_child
+
+from sentry_sdk.tracing_utils_py3 import (
+    start_child_span_decorator as start_child_span_decorator_py3,
+)
+from sentry_sdk.utils import logger
+
+if sys.version_info < (3, 6):
+    pytest.skip("Async decorator only works on Python 3.6+", allow_module_level=True)
+
+
+async def my_async_example_function():
+    return "return_of_async_function"
+
+
+@pytest.mark.asyncio
+async def test_trace_decorator_async_py3():
+    with patch_start_tracing_child() as fake_start_child:
+        result = await my_async_example_function()
+        fake_start_child.assert_not_called()
+        assert result == "return_of_async_function"
+
+        result2 = await start_child_span_decorator_py3(my_async_example_function)()
+        fake_start_child.assert_called_once_with(
+            op="function",
+            description="test_decorator_async_py3.my_async_example_function",
+        )
+        assert result2 == "return_of_async_function"
+
+
+@pytest.mark.asyncio
+async def test_trace_decorator_async_py3_no_trx():
+    with patch_start_tracing_child(fake_transaction_is_none=True):
+        with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
+            result = await my_async_example_function()
+            fake_warning.assert_not_called()
+            assert result == "return_of_async_function"
+
+            result2 = await start_child_span_decorator_py3(my_async_example_function)()
+            fake_warning.assert_called_once_with(
+                "Can not create a child span for %s. "
+                "Please start a Sentry transaction before calling this function.",
+                "test_decorator_async_py3.my_async_example_function",
+            )
+            assert result2 == "return_of_async_function"
diff --git a/tests/tracing/test_decorator_sync.py b/tests/tracing/test_decorator_sync.py
new file mode 100644
index 0000000000..6d7be8b8f9
--- /dev/null
+++ b/tests/tracing/test_decorator_sync.py
@@ -0,0 +1,48 @@
+from sentry_sdk._compat import PY2
+
+if PY2:
+    from sentry_sdk.tracing_utils_py2 import start_child_span_decorator
+else:
+    from sentry_sdk.tracing_utils_py3 import start_child_span_decorator
+
+from sentry_sdk.utils import logger
+
+from tests.conftest import patch_start_tracing_child
+
+try:
+    from unittest import mock  # python 3.3 and above
+except ImportError:
+    import mock  # python < 3.3
+
+
+def my_example_function():
+    return "return_of_sync_function"
+
+
+def test_trace_decorator():
+    with patch_start_tracing_child() as fake_start_child:
+        result = my_example_function()
+        fake_start_child.assert_not_called()
+        assert result == "return_of_sync_function"
+
+        result2 = start_child_span_decorator(my_example_function)()
+        fake_start_child.assert_called_once_with(
+            op="function", description="test_decorator_sync.my_example_function"
+        )
+        assert result2 == "return_of_sync_function"
+
+
+def test_trace_decorator_no_trx():
+    with patch_start_tracing_child(fake_transaction_is_none=True):
+        with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
+            result = my_example_function()
+            fake_warning.assert_not_called()
+            assert result == "return_of_sync_function"
+
+            result2 = start_child_span_decorator(my_example_function)()
+            fake_warning.assert_called_once_with(
+                "Can not create a child span for %s. "
+                "Please start a Sentry transaction before calling this function.",
+                "test_decorator_sync.my_example_function",
+            )
+            assert result2 == "return_of_sync_function"
diff --git a/tests/tracing/test_http_headers.py b/tests/tracing/test_http_headers.py
index 3db967b24b..443bb163e8 100644
--- a/tests/tracing/test_http_headers.py
+++ b/tests/tracing/test_http_headers.py
@@ -1,16 +1,7 @@
-import json
-
 import pytest
 
-import sentry_sdk
-from sentry_sdk.tracing import Transaction, Span
-from sentry_sdk.tracing_utils import (
-    compute_tracestate_value,
-    extract_sentrytrace_data,
-    extract_tracestate_data,
-    reinflate_tracestate,
-)
-from sentry_sdk.utils import from_base64, to_base64
+from sentry_sdk.tracing import Transaction
+from sentry_sdk.tracing_utils import extract_sentrytrace_data
 
 
 try:
@@ -19,142 +10,8 @@
     import mock  # python < 3.3
 
 
-def test_tracestate_computation(sentry_init):
-    sentry_init(
-        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
-        environment="dogpark",
-        release="off.leash.park",
-    )
-
-    sentry_sdk.set_user({"id": 12312013, "segment": "bigs"})
-
-    transaction = Transaction(
-        name="/interactions/other-dogs/new-dog",
-        op="greeting.sniff",
-        trace_id="12312012123120121231201212312012",
-    )
-
-    # force lazy computation to create a value
-    transaction.to_tracestate()
-
-    computed_value = transaction._sentry_tracestate.replace("sentry=", "")
-    # we have to decode and reinflate the data because we can guarantee that the
-    # order of the entries in the jsonified dict will be the same here as when
-    # the tracestate is computed
-    reinflated_trace_data = json.loads(from_base64(computed_value))
-
-    assert reinflated_trace_data == {
-        "trace_id": "12312012123120121231201212312012",
-        "environment": "dogpark",
-        "release": "off.leash.park",
-        "public_key": "dogsarebadatkeepingsecrets",
-        "user": {"id": 12312013, "segment": "bigs"},
-        "transaction": "/interactions/other-dogs/new-dog",
-    }
-
-
-def test_doesnt_add_new_tracestate_to_transaction_when_none_given(sentry_init):
-    sentry_init(
-        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
-        environment="dogpark",
-        release="off.leash.park",
-    )
-
-    transaction = Transaction(
-        name="/interactions/other-dogs/new-dog",
-        op="greeting.sniff",
-        # sentry_tracestate=< value would be passed here >
-    )
-
-    assert transaction._sentry_tracestate is None
-
-
-def test_adds_tracestate_to_transaction_when_to_traceparent_called(sentry_init):
-    sentry_init(
-        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
-        environment="dogpark",
-        release="off.leash.park",
-    )
-
-    transaction = Transaction(
-        name="/interactions/other-dogs/new-dog",
-        op="greeting.sniff",
-    )
-
-    # no inherited tracestate, and none created in Transaction constructor
-    assert transaction._sentry_tracestate is None
-
-    transaction.to_tracestate()
-
-    assert transaction._sentry_tracestate is not None
-
-
-def test_adds_tracestate_to_transaction_when_getting_trace_context(sentry_init):
-    sentry_init(
-        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
-        environment="dogpark",
-        release="off.leash.park",
-    )
-
-    transaction = Transaction(
-        name="/interactions/other-dogs/new-dog",
-        op="greeting.sniff",
-    )
-
-    # no inherited tracestate, and none created in Transaction constructor
-    assert transaction._sentry_tracestate is None
-
-    transaction.get_trace_context()
-
-    assert transaction._sentry_tracestate is not None
-
-
-@pytest.mark.parametrize(
-    "set_by", ["inheritance", "to_tracestate", "get_trace_context"]
-)
-def test_tracestate_is_immutable_once_set(sentry_init, monkeypatch, set_by):
-    monkeypatch.setattr(
-        sentry_sdk.tracing,
-        "compute_tracestate_entry",
-        mock.Mock(return_value="sentry=doGsaREgReaT"),
-    )
-
-    sentry_init(
-        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
-        environment="dogpark",
-        release="off.leash.park",
-    )
-
-    # for each scenario, get to the point where tracestate has been set
-    if set_by == "inheritance":
-        transaction = Transaction(
-            name="/interactions/other-dogs/new-dog",
-            op="greeting.sniff",
-            sentry_tracestate=("sentry=doGsaREgReaT"),
-        )
-    else:
-        transaction = Transaction(
-            name="/interactions/other-dogs/new-dog",
-            op="greeting.sniff",
-        )
-
-        if set_by == "to_tracestate":
-            transaction.to_tracestate()
-        if set_by == "get_trace_context":
-            transaction.get_trace_context()
-
-    assert transaction._sentry_tracestate == "sentry=doGsaREgReaT"
-
-    # user data would be included in tracestate if it were recomputed at this point
-    sentry_sdk.set_user({"id": 12312013, "segment": "bigs"})
-
-    # value hasn't changed
-    assert transaction._sentry_tracestate == "sentry=doGsaREgReaT"
-
-
 @pytest.mark.parametrize("sampled", [True, False, None])
-def test_to_traceparent(sentry_init, sampled):
-
+def test_to_traceparent(sampled):
     transaction = Transaction(
         name="/interactions/other-dogs/new-dog",
         op="greeting.sniff",
@@ -164,56 +21,13 @@ def test_to_traceparent(sentry_init, sampled):
 
     traceparent = transaction.to_traceparent()
 
-    trace_id, parent_span_id, parent_sampled = traceparent.split("-")
-    assert trace_id == "12312012123120121231201212312012"
-    assert parent_span_id == transaction.span_id
-    assert parent_sampled == (
-        "1" if sampled is True else "0" if sampled is False else ""
-    )
-
-
-def test_to_tracestate(sentry_init):
-    sentry_init(
-        dsn="https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
-        environment="dogpark",
-        release="off.leash.park",
-    )
-
-    # it correctly uses the value from the transaction itself or the span's
-    # containing transaction
-    transaction_no_third_party = Transaction(
-        trace_id="12312012123120121231201212312012",
-        sentry_tracestate="sentry=doGsaREgReaT",
-    )
-    non_orphan_span = Span()
-    non_orphan_span._containing_transaction = transaction_no_third_party
-    assert transaction_no_third_party.to_tracestate() == "sentry=doGsaREgReaT"
-    assert non_orphan_span.to_tracestate() == "sentry=doGsaREgReaT"
-
-    # it combines sentry and third-party values correctly
-    transaction_with_third_party = Transaction(
-        trace_id="12312012123120121231201212312012",
-        sentry_tracestate="sentry=doGsaREgReaT",
-        third_party_tracestate="maisey=silly",
-    )
-    assert (
-        transaction_with_third_party.to_tracestate()
-        == "sentry=doGsaREgReaT,maisey=silly"
-    )
-
-    # it computes a tracestate from scratch for orphan transactions
-    orphan_span = Span(
-        trace_id="12312012123120121231201212312012",
-    )
-    assert orphan_span._containing_transaction is None
-    assert orphan_span.to_tracestate() == "sentry=" + compute_tracestate_value(
-        {
-            "trace_id": "12312012123120121231201212312012",
-            "environment": "dogpark",
-            "release": "off.leash.park",
-            "public_key": "dogsarebadatkeepingsecrets",
-        }
-    )
+    parts = traceparent.split("-")
+    assert parts[0] == "12312012123120121231201212312012"  # trace_id
+    assert parts[1] == transaction.span_id  # parent_span_id
+    if sampled is None:
+        assert len(parts) == 2
+    else:
+        assert parts[2] == ("1" if sampled is True else "0")  # sampled
 
 
 @pytest.mark.parametrize("sampling_decision", [True, False])
@@ -228,78 +42,12 @@ def test_sentrytrace_extraction(sampling_decision):
     }
 
 
-@pytest.mark.parametrize(
-    ("incoming_header", "expected_sentry_value", "expected_third_party"),
-    [
-        # sentry only
-        ("sentry=doGsaREgReaT", "sentry=doGsaREgReaT", None),
-        # sentry only, invalid (`!` isn't a valid base64 character)
-        ("sentry=doGsaREgReaT!", None, None),
-        # stuff before
-        ("maisey=silly,sentry=doGsaREgReaT", "sentry=doGsaREgReaT", "maisey=silly"),
-        # stuff after
-        ("sentry=doGsaREgReaT,maisey=silly", "sentry=doGsaREgReaT", "maisey=silly"),
-        # stuff before and after
-        (
-            "charlie=goofy,sentry=doGsaREgReaT,maisey=silly",
-            "sentry=doGsaREgReaT",
-            "charlie=goofy,maisey=silly",
-        ),
-        # multiple before
-        (
-            "charlie=goofy,maisey=silly,sentry=doGsaREgReaT",
-            "sentry=doGsaREgReaT",
-            "charlie=goofy,maisey=silly",
-        ),
-        # multiple after
-        (
-            "sentry=doGsaREgReaT,charlie=goofy,maisey=silly",
-            "sentry=doGsaREgReaT",
-            "charlie=goofy,maisey=silly",
-        ),
-        # multiple before and after
-        (
-            "charlie=goofy,maisey=silly,sentry=doGsaREgReaT,bodhi=floppy,cory=loyal",
-            "sentry=doGsaREgReaT",
-            "charlie=goofy,maisey=silly,bodhi=floppy,cory=loyal",
-        ),
-        # only third-party data
-        ("maisey=silly", None, "maisey=silly"),
-        # invalid third-party data, valid sentry data
-        ("maisey_is_silly,sentry=doGsaREgReaT", "sentry=doGsaREgReaT", None),
-        # valid third-party data, invalid sentry data
-        ("maisey=silly,sentry=doGsaREgReaT!", None, "maisey=silly"),
-        # nothing valid at all
-        ("maisey_is_silly,sentry=doGsaREgReaT!", None, None),
-    ],
-)
-def test_tracestate_extraction(
-    incoming_header, expected_sentry_value, expected_third_party
-):
-    assert extract_tracestate_data(incoming_header) == {
-        "sentry_tracestate": expected_sentry_value,
-        "third_party_tracestate": expected_third_party,
-    }
-
-
-# TODO (kmclb) remove this parameterization once tracestate is a real feature
-@pytest.mark.parametrize("tracestate_enabled", [True, False])
-def test_iter_headers(sentry_init, monkeypatch, tracestate_enabled):
+def test_iter_headers(monkeypatch):
     monkeypatch.setattr(
         Transaction,
         "to_traceparent",
         mock.Mock(return_value="12312012123120121231201212312012-0415201309082013-0"),
     )
-    monkeypatch.setattr(
-        Transaction,
-        "to_tracestate",
-        mock.Mock(return_value="sentry=doGsaREgReaT,charlie=goofy"),
-    )
-    monkeypatch.setattr(
-        sentry_sdk.tracing,
-        "has_tracestate_enabled",
-        mock.Mock(return_value=tracestate_enabled),
-    )
 
     transaction = Transaction(
         name="/interactions/other-dogs/new-dog",
@@ -310,23 +58,3 @@ def test_iter_headers(sentry_init, monkeypatch, tracestate_enabled):
     assert (
         headers["sentry-trace"] == "12312012123120121231201212312012-0415201309082013-0"
     )
-    if tracestate_enabled:
-        assert "tracestate" in headers
-        assert headers["tracestate"] == "sentry=doGsaREgReaT,charlie=goofy"
-    else:
-        assert "tracestate" not in headers
-
-
-@pytest.mark.parametrize(
-    "data",
-    [  # comes out with no trailing `=`
-        {"name": "Maisey", "birthday": "12/31/12"},
-        # comes out with one trailing `=`
-        {"dogs": "yes", "cats": "maybe"},
-        # comes out with two trailing `=`
-        {"name": "Charlie", "birthday": "11/21/12"},
-    ],
-)
-def test_tracestate_reinflation(data):
-    encoded_tracestate = to_base64(json.dumps(data)).strip("=")
-    assert reinflate_tracestate(encoded_tracestate) == data
diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py
index 486651c754..0fe8117c8e 100644
--- a/tests/tracing/test_integration_tests.py
+++ b/tests/tracing/test_integration_tests.py
@@ -1,7 +1,9 @@
+# coding: utf-8
 import weakref
 import gc
-
+import re
 import pytest
+import random
 
 from sentry_sdk import (
     capture_message,
@@ -32,6 +34,9 @@ def test_basic(sentry_init, capture_events, sample_rate):
         assert len(events) == 1
         event = events[0]
 
+        assert event["transaction"] == "hi"
+        assert event["transaction_info"]["source"] == "custom"
+
         span1, span2 = event["spans"]
         parent_span = event
         assert span1["tags"]["status"] == "internal_error"
@@ -49,25 +54,28 @@ def test_basic(sentry_init, capture_events, sample_rate):
 
 @pytest.mark.parametrize("sampled", [True, False, None])
 @pytest.mark.parametrize("sample_rate", [0.0, 1.0])
-def test_continue_from_headers(sentry_init, capture_events, sampled, sample_rate):
+def test_continue_from_headers(sentry_init, capture_envelopes, sampled, sample_rate):
     """
     Ensure data is actually passed along via headers, and that they are read
     correctly.
     """
     sentry_init(traces_sample_rate=sample_rate)
-    events = capture_events()
+    envelopes = capture_envelopes()
 
     # make a parent transaction (normally this would be in a different service)
-    with start_transaction(
-        name="hi", sampled=True if sample_rate == 0 else None
-    ) as parent_transaction:
+    with start_transaction(name="hi", sampled=True if sample_rate == 0 else None):
         with start_span() as old_span:
             old_span.sampled = sampled
             headers = dict(Hub.current.iter_trace_propagation_headers(old_span))
-            tracestate = parent_transaction._sentry_tracestate
-
-    # child transaction, to prove that we can read 'sentry-trace' and
-    # `tracestate` header data correctly
+            headers["baggage"] = (
+                "other-vendor-value-1=foo;bar;baz, "
+                "sentry-trace_id=771a43a4192642f0b136d5159a501700, "
+                "sentry-public_key=49d0f7386ad645858ae85020e393bef3, "
+                "sentry-sample_rate=0.01337, sentry-user_id=Amelie, "
+                "other-vendor-value-2=foo;bar;"
+            )
+
+    # child transaction, to prove that we can read 'sentry-trace' header data correctly
     child_transaction = Transaction.continue_from_headers(headers, name="WRONG")
     assert child_transaction is not None
     assert child_transaction.parent_sampled == sampled
@@ -75,7 +83,16 @@ def test_continue_from_headers(sentry_init, capture_events, sampled, sample_rate
     assert child_transaction.same_process_as_parent is False
     assert child_transaction.parent_span_id == old_span.span_id
     assert child_transaction.span_id != old_span.span_id
-    assert child_transaction._sentry_tracestate == tracestate
+
+    baggage = child_transaction._baggage
+    assert baggage
+    assert not baggage.mutable
+    assert baggage.sentry_items == {
+        "public_key": "49d0f7386ad645858ae85020e393bef3",
+        "trace_id": "771a43a4192642f0b136d5159a501700",
+        "user_id": "Amelie",
+        "sample_rate": "0.01337",
+    }
 
     # add child transaction to the scope, to show that the captured message will
     # be tagged with the trace id (since it happens while the transaction is
@@ -89,23 +106,93 @@ def test_continue_from_headers(sentry_init, capture_events, sampled, sample_rate
 
     # in this case the child transaction won't be captured
     if sampled is False or (sample_rate == 0 and sampled is None):
-        trace1, message = events
+        trace1, message = envelopes
+        message_payload = message.get_event()
+        trace1_payload = trace1.get_transaction_event()
 
-        assert trace1["transaction"] == "hi"
+        assert trace1_payload["transaction"] == "hi"
     else:
-        trace1, message, trace2 = events
+        trace1, message, trace2 = envelopes
+        trace1_payload = trace1.get_transaction_event()
+        message_payload = message.get_event()
+        trace2_payload = trace2.get_transaction_event()
 
-        assert trace1["transaction"] == "hi"
-        assert trace2["transaction"] == "ho"
+        assert trace1_payload["transaction"] == "hi"
+        assert trace2_payload["transaction"] == "ho"
 
         assert (
-            trace1["contexts"]["trace"]["trace_id"]
-            == trace2["contexts"]["trace"]["trace_id"]
+            trace1_payload["contexts"]["trace"]["trace_id"]
+            == trace2_payload["contexts"]["trace"]["trace_id"]
             == child_transaction.trace_id
-            == message["contexts"]["trace"]["trace_id"]
+            == message_payload["contexts"]["trace"]["trace_id"]
         )
 
-    assert message["message"] == "hello"
+        assert trace2.headers["trace"] == baggage.dynamic_sampling_context()
+        assert trace2.headers["trace"] == {
+            "public_key": "49d0f7386ad645858ae85020e393bef3",
+            "trace_id": "771a43a4192642f0b136d5159a501700",
+            "user_id": "Amelie",
+            "sample_rate": "0.01337",
+        }
+
+    assert message_payload["message"] == "hello"
+
+
+@pytest.mark.parametrize("sample_rate", [0.5, 1.0])
+def test_dynamic_sampling_head_sdk_creates_dsc(
+    sentry_init, capture_envelopes, sample_rate, monkeypatch
+):
+    sentry_init(traces_sample_rate=sample_rate, release="foo")
+    envelopes = capture_envelopes()
+
+    # make sure transaction is sampled for both cases
+    monkeypatch.setattr(random, "random", lambda: 0.1)
+
+    transaction = Transaction.continue_from_headers({}, name="Head SDK tx")
+
+    # will create empty mutable baggage
+    baggage = transaction._baggage
+    assert baggage
+    assert baggage.mutable
+    assert baggage.sentry_items == {}
+    assert baggage.third_party_items == ""
+
+    with start_transaction(transaction):
+        with start_span(op="foo", description="foodesc"):
+            pass
+
+    # finish will create a new baggage entry
+    baggage = transaction._baggage
+    trace_id = transaction.trace_id
+
+    assert baggage
+    assert not baggage.mutable
+    assert baggage.third_party_items == ""
+    assert baggage.sentry_items == {
+        "environment": "production",
+        "release": "foo",
+        "sample_rate": str(sample_rate),
+        "sampled": "true" if transaction.sampled else "false",
+        "transaction": "Head SDK tx",
+        "trace_id": trace_id,
+    }
+
+    expected_baggage = (
+        "sentry-environment=production,sentry-release=foo,sentry-sample_rate=%s,sentry-transaction=Head%%20SDK%%20tx,sentry-trace_id=%s,sentry-sampled=%s"
+        % (sample_rate, trace_id, "true" if transaction.sampled else "false")
+    )
+    assert sorted(baggage.serialize().split(",")) == sorted(expected_baggage.split(","))
+
+    (envelope,) = envelopes
+    assert envelope.headers["trace"] == baggage.dynamic_sampling_context()
+    assert envelope.headers["trace"] == {
+        "environment": "production",
+        "release": "foo",
+        "sample_rate": str(sample_rate),
+        "sampled": "true" if transaction.sampled else "false",
+        "transaction": "Head SDK tx",
+        "trace_id": trace_id,
+    }
 
 
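
The two tests above exercise both directions of the baggage protocol: incoming sentry-* entries are collected into an immutable dynamic sampling context, while a head SDK that receives no baggage populates one itself on finish. A rough sketch of the incoming split, assuming a hypothetical parse_baggage helper rather than the SDK's Baggage class:

def parse_baggage(header):
    # sentry-* entries feed the dynamic sampling context; anything else is
    # third-party data that must be carried along untouched.
    sentry_items, third_party = {}, []
    for item in header.split(","):
        key, _, value = item.strip().partition("=")
        if key.startswith("sentry-"):
            sentry_items[key[len("sentry-"):]] = value
        else:
            third_party.append(item.strip())
    return sentry_items, third_party

sentry_items, _ = parse_baggage(
    "other-vendor-value-1=foo;bar;baz, "
    "sentry-trace_id=771a43a4192642f0b136d5159a501700, "
    "sentry-sample_rate=0.01337"
)
assert sentry_items == {
    "trace_id": "771a43a4192642f0b136d5159a501700",
    "sample_rate": "0.01337",
}
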
 @pytest.mark.parametrize(
@@ -167,3 +254,27 @@ def capture_event(self, event):
             pass
 
     assert len(events) == 1
+
+
+def test_trace_propagation_meta_head_sdk(sentry_init):
+    sentry_init(traces_sample_rate=1.0, release="foo")
+
+    transaction = Transaction.continue_from_headers({}, name="Head SDK tx")
+    meta = None
+    span = None
+
+    with start_transaction(transaction):
+        with start_span(op="foo", description="foodesc") as current_span:
+            span = current_span
+            meta = Hub.current.trace_propagation_meta()
+
+    ind = meta.find(">") + 1
+    sentry_trace, baggage = meta[:ind], meta[ind:]
+
+    assert 'meta name="sentry-trace"' in sentry_trace
+    sentry_trace_content = re.findall('content="([^"]*)"', sentry_trace)[0]
+    assert sentry_trace_content == span.to_traceparent()
+
+    assert 'meta name="baggage"' in baggage
+    baggage_content = re.findall('content="([^"]*)"', baggage)[0]
+    assert baggage_content == transaction.get_baggage().serialize()
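
test_trace_propagation_meta_head_sdk relies on trace_propagation_meta() emitting exactly two tags, the sentry-trace meta tag first and the baggage meta tag second, which is why splitting on the first ">" is enough to separate them. Illustrated with a hand-written stand-in string (the content values here are placeholders, not real IDs):

import re

meta = (
    '<meta name="sentry-trace" content="771a...-0415...-1">'
    '<meta name="baggage" content="sentry-trace_id=771a...,sentry-sampled=true">'
)
ind = meta.find(">") + 1
sentry_trace, baggage = meta[:ind], meta[ind:]
assert 'meta name="sentry-trace"' in sentry_trace
assert re.findall('content="([^"]*)"', baggage)[0].startswith("sentry-trace_id=")
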
diff --git a/tests/tracing/test_misc.py b/tests/tracing/test_misc.py
index 43d9597f1b..3668f1b3a8 100644
--- a/tests/tracing/test_misc.py
+++ b/tests/tracing/test_misc.py
@@ -4,14 +4,18 @@
 import os
 
 import sentry_sdk
-from sentry_sdk import Hub, start_span, start_transaction
+from sentry_sdk import Hub, start_span, start_transaction, set_measurement, push_scope
+from sentry_sdk.consts import MATCH_ALL
 from sentry_sdk.tracing import Span, Transaction
-from sentry_sdk.tracing_utils import has_tracestate_enabled
+from sentry_sdk.tracing_utils import should_propagate_trace
+from sentry_sdk.utils import Dsn
 
 try:
     from unittest import mock  # python 3.3 and above
+    from unittest.mock import MagicMock
 except ImportError:
     import mock  # python < 3.3
+    from mock import MagicMock
 
 
 def test_span_trimming(sentry_init, capture_events):
@@ -173,7 +177,7 @@ def test_circular_references(monkeypatch, sentry_init, request):
     #     request.addfinalizer(lambda: gc.set_debug(~gc.DEBUG_LEAK))
     #
     # immediately after the initial collection below, so we can see what new
-    # objects the garbage collecter has to clean up once `transaction.finish` is
+    # objects the garbage collector has to clean up once `transaction.finish` is
     # called and the serializer runs.)
     monkeypatch.setattr(
         sentry_sdk.client,
@@ -232,24 +236,8 @@ def test_circular_references(monkeypatch, sentry_init, request):
     assert gc.collect() == 0
 
 
-# TODO (kmclb) remove this test once tracestate is a real feature
-@pytest.mark.parametrize("tracestate_enabled", [True, False, None])
-def test_has_tracestate_enabled(sentry_init, tracestate_enabled):
-    experiments = (
-        {"propagate_tracestate": tracestate_enabled}
-        if tracestate_enabled is not None
-        else {}
-    )
-    sentry_init(_experiments=experiments)
-
-    if tracestate_enabled is True:
-        assert has_tracestate_enabled() is True
-    else:
-        assert has_tracestate_enabled() is False
-
-
 def test_set_meaurement(sentry_init, capture_events):
-    sentry_init(traces_sample_rate=1.0, _experiments={"custom_measurements": True})
+    sentry_init(traces_sample_rate=1.0)
 
     events = capture_events()
 
@@ -274,3 +262,107 @@ def test_set_meaurement(sentry_init, capture_events):
     assert event["measurements"]["metric.bar"] == {"value": 456, "unit": "second"}
     assert event["measurements"]["metric.baz"] == {"value": 420.69, "unit": "custom"}
     assert event["measurements"]["metric.foobar"] == {"value": 17.99, "unit": "percent"}
+
+
+def test_set_meaurement_public_api(sentry_init, capture_events):
+    sentry_init(traces_sample_rate=1.0)
+
+    events = capture_events()
+
+    with start_transaction(name="measuring stuff"):
+        set_measurement("metric.foo", 123)
+        set_measurement("metric.bar", 456, unit="second")
+
+    (event,) = events
+    assert event["measurements"]["metric.foo"] == {"value": 123, "unit": ""}
+    assert event["measurements"]["metric.bar"] == {"value": 456, "unit": "second"}
+
+
+@pytest.mark.parametrize(
+    "trace_propagation_targets,url,expected_propagation_decision",
+    [
+        (None, "http://example.com", False),
+        ([], "http://example.com", False),
+        ([MATCH_ALL], "http://example.com", True),
+        (["localhost"], "localhost:8443/api/users", True),
+        (["localhost"], "http://localhost:8443/api/users", True),
+        (["localhost"], "mylocalhost:8080/api/users", True),
+        ([r"^/api"], "/api/envelopes", True),
+        ([r"^/api"], "/backend/api/envelopes", False),
+        ([r"myApi.com/v[2-4]"], "myApi.com/v2/projects", True),
+        ([r"myApi.com/v[2-4]"], "myApi.com/v1/projects", False),
+        ([r"https:\/\/.*"], "https://example.com", True),
+        (
+            [r"https://.*"],
+            "https://example.com",
+            True,
+        ),  # to show escaping is not needed
+        ([r"https://.*"], "http://example.com/insecure/", False),
+    ],
+)
+def test_should_propagate_trace(
+    trace_propagation_targets, url, expected_propagation_decision
+):
+    hub = MagicMock()
+    hub.client = MagicMock()
+
+    # This test assumes the URLs are not Sentry URLs. Use test_should_propagate_trace_to_sentry for Sentry URLs.
+    hub.is_sentry_url = lambda _: False
+
+    hub.client.options = {"trace_propagation_targets": trace_propagation_targets}
+    hub.client.transport = MagicMock()
+    hub.client.transport.parsed_dsn = Dsn("https://bla@xxx.sentry.io/12312012")
+
+    assert should_propagate_trace(hub, url) == expected_propagation_decision
+
+
+@pytest.mark.parametrize(
+    "dsn,url,expected_propagation_decision",
+    [
+        (
+            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
+            "http://example.com",
+            True,
+        ),
+        (
+            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
+            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
+            False,
+        ),
+        (
+            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
+            "http://squirrelchasers.ingest.sentry.io/12312012",
+            False,
+        ),
+        (
+            "https://dogsarebadatkeepingsecrets@squirrelchasers.ingest.sentry.io/12312012",
+            "http://ingest.sentry.io/12312012",
+            True,
+        ),
+        (
+            "https://abc@localsentry.example.com/12312012",
+            "http://localsentry.example.com",
+            False,
+        ),
+    ],
+)
+def test_should_propagate_trace_to_sentry(
+    sentry_init, dsn, url, expected_propagation_decision
+):
+    sentry_init(
+        dsn=dsn,
+        traces_sample_rate=1.0,
+    )
+
+    Hub.current.client.transport.parsed_dsn = Dsn(dsn)
+
+    assert should_propagate_trace(Hub.current, url) == expected_propagation_decision
+
+
+def test_start_transaction_updates_scope_name_source(sentry_init):
+    sentry_init(traces_sample_rate=1.0)
+
+    with push_scope() as scope:
+        with start_transaction(name="foobar", source="route"):
+            assert scope._transaction == "foobar"
+            assert scope._transaction_info == {"source": "route"}
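
The trace_propagation_targets cases read as regex searches against the outgoing URL: plain strings match anywhere (so "localhost" also matches "mylocalhost:8080/..."), anchors and character classes work as usual, and MATCH_ALL is simply a match-everything pattern. A hedged sketch of that rule (matches_targets is illustrative only; the SDK's should_propagate_trace additionally never propagates to the DSN's own host, as the second test shows):

import re

MATCH_ALL = r".*"  # assumed to mirror sentry_sdk.consts.MATCH_ALL

def matches_targets(url, targets):
    # None or an empty list disables propagation entirely.
    if not targets:
        return False
    return any(re.search(target, url) for target in targets)

assert matches_targets("mylocalhost:8080/api/users", ["localhost"])
assert not matches_targets("/backend/api/envelopes", [r"^/api"])
assert matches_targets("http://example.com", [MATCH_ALL])
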
diff --git a/tests/tracing/test_noop_span.py b/tests/tracing/test_noop_span.py
new file mode 100644
index 0000000000..9896afb007
--- /dev/null
+++ b/tests/tracing/test_noop_span.py
@@ -0,0 +1,52 @@
+import sentry_sdk
+from sentry_sdk.tracing import NoOpSpan
+
+# These tests make sure that the examples from the documentation [1]
+# keep working when OTel (OpenTelemetry) instrumentation is turned on,
+# and that Sentry tracing therefore does not do anything.
+#
+# 1: https://docs.sentry.io/platforms/python/performance/instrumentation/custom-instrumentation/
+
+
+def test_noop_start_transaction(sentry_init):
+    sentry_init(instrumenter="otel", debug=True)
+
+    with sentry_sdk.start_transaction(
+        op="task", name="test_transaction_name"
+    ) as transaction:
+        assert isinstance(transaction, NoOpSpan)
+        assert sentry_sdk.Hub.current.scope.span is transaction
+
+        transaction.name = "new name"
+
+
+def test_noop_start_span(sentry_init):
+    sentry_init(instrumenter="otel", debug=True)
+
+    with sentry_sdk.start_span(op="http", description="GET /") as span:
+        assert isinstance(span, NoOpSpan)
+        assert sentry_sdk.Hub.current.scope.span is span
+
+        span.set_tag("http.response.status_code", 418)
+        span.set_data("http.entity_type", "teapot")
+
+
+def test_noop_transaction_start_child(sentry_init):
+    sentry_init(instrumenter="otel", debug=True)
+
+    transaction = sentry_sdk.start_transaction(name="task")
+    assert isinstance(transaction, NoOpSpan)
+
+    with transaction.start_child(op="child_task") as child:
+        assert isinstance(child, NoOpSpan)
+        assert sentry_sdk.Hub.current.scope.span is child
+
+
+def test_noop_span_start_child(sentry_init):
+    sentry_init(instrumenter="otel", debug=True)
+    span = sentry_sdk.start_span(name="task")
+    assert isinstance(span, NoOpSpan)
+
+    with span.start_child(op="child_task") as child:
+        assert isinstance(child, NoOpSpan)
+        assert sentry_sdk.Hub.current.scope.span is child
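
The pattern these tests lock in: with instrumenter="otel", the SDK's tracing API must remain callable but inert, so documented custom-instrumentation snippets run unchanged without emitting Sentry spans. A minimal illustration of such an inert span (the real NoOpSpan overrides each method explicitly rather than using __getattr__):

class InertSpan(object):
    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        pass

    def __getattr__(self, name):
        # Absorb set_tag, set_data, start_child, ... as harmless no-ops
        # that keep returning inert spans.
        return lambda *args, **kwargs: InertSpan()

with InertSpan() as span:
    span.set_tag("http.response.status_code", 418)  # silently ignored
    with span.start_child(op="child_task") as child:
        child.set_data("http.entity_type", "teapot")
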
diff --git a/tests/tracing/test_sampling.py b/tests/tracing/test_sampling.py
index 9975abad5d..6101a948ef 100644
--- a/tests/tracing/test_sampling.py
+++ b/tests/tracing/test_sampling.py
@@ -2,9 +2,8 @@
 
 import pytest
 
-from sentry_sdk import Hub, start_span, start_transaction
+from sentry_sdk import Hub, start_span, start_transaction, capture_exception
 from sentry_sdk.tracing import Transaction
-from sentry_sdk.tracing_utils import is_valid_sample_rate
 from sentry_sdk.utils import logger
 
 try:
@@ -51,38 +50,6 @@ def test_no_double_sampling(sentry_init, capture_events):
     assert len(events) == 1
 
 
-@pytest.mark.parametrize(
-    "rate",
-    [0.0, 0.1231, 1.0, True, False],
-)
-def test_accepts_valid_sample_rate(rate):
-    with mock.patch.object(logger, "warning", mock.Mock()):
-        result = is_valid_sample_rate(rate)
-        assert logger.warning.called is False
-        assert result is True
-
-
-@pytest.mark.parametrize(
-    "rate",
-    [
-        "dogs are great",  # wrong type
-        (0, 1),  # wrong type
-        {"Maisey": "Charllie"},  # wrong type
-        [True, True],  # wrong type
-        {0.2012},  # wrong type
-        float("NaN"),  # wrong type
-        None,  # wrong type
-        -1.121,  # wrong value
-        1.231,  # wrong value
-    ],
-)
-def test_warns_on_invalid_sample_rate(rate, StringContaining):  # noqa: N803
-    with mock.patch.object(logger, "warning", mock.Mock()):
-        result = is_valid_sample_rate(rate)
-        logger.warning.assert_any_call(StringContaining("Given sample rate is invalid"))
-        assert result is False
-
-
 @pytest.mark.parametrize("sampling_decision", [True, False])
 def test_get_transaction_and_span_from_scope_regardless_of_sampling_decision(
     sentry_init, sampling_decision
@@ -109,7 +76,6 @@ def test_uses_traces_sample_rate_correctly(
     sentry_init(traces_sample_rate=traces_sample_rate)
 
     with mock.patch.object(random, "random", return_value=0.5):
-
         transaction = start_transaction(name="dogpark")
         assert transaction.sampled is expected_decision
 
@@ -126,7 +92,6 @@ def test_uses_traces_sampler_return_value_correctly(
     sentry_init(traces_sampler=mock.Mock(return_value=traces_sampler_return_value))
 
     with mock.patch.object(random, "random", return_value=0.5):
-
         transaction = start_transaction(name="dogpark")
         assert transaction.sampled is expected_decision
 
@@ -261,6 +226,18 @@ def test_passes_custom_samling_context_from_start_transaction_to_traces_sampler(
     )
 
 
+def test_sample_rate_affects_errors(sentry_init, capture_events):
+    sentry_init(sample_rate=0)
+    events = capture_events()
+
+    try:
+        1 / 0
+    except Exception:
+        capture_exception()
+
+    assert len(events) == 0
+
+
 @pytest.mark.parametrize(
     "traces_sampler_return_value",
     [
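
test_sample_rate_affects_errors distinguishes sample_rate from traces_sample_rate: the former gates error events, so with sample_rate=0 every captured exception is dropped client-side. Conceptually the gate is just a random draw against the configured rate (a simplification of the client's behaviour):

import random

def should_send_error(sample_rate):
    # random.random() is in [0.0, 1.0), so a rate of 0 never passes
    # and a rate of 1.0 always does.
    return random.random() < sample_rate

assert not should_send_error(0)
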
diff --git a/tests/utils/test_contextvars.py b/tests/utils/test_contextvars.py
index a6d296bb1f..faf33e8580 100644
--- a/tests/utils/test_contextvars.py
+++ b/tests/utils/test_contextvars.py
@@ -12,7 +12,7 @@ def test_leaks(maybe_monkeypatched_threading):
 
     from sentry_sdk import utils
 
-    _, ContextVar = utils._get_contextvars()  # noqa: N806
+    _, ContextVar, _ = utils._get_contextvars()  # noqa: N806
 
     ts = []
 
diff --git a/tests/utils/test_general.py b/tests/utils/test_general.py
index b85975b4bb..d4067bd5c6 100644
--- a/tests/utils/test_general.py
+++ b/tests/utils/test_general.py
@@ -11,10 +11,12 @@
     safe_repr,
     exceptions_from_error_tuple,
     filename_for_module,
-    handle_in_app_impl,
     iter_event_stacktraces,
     to_base64,
     from_base64,
+    set_in_app_in_frames,
+    strip_string,
+    AnnotatedValue,
 )
 from sentry_sdk._compat import text_type, string_types
 
@@ -131,25 +133,376 @@ def test_parse_invalid_dsn(dsn):
         dsn = Dsn(dsn)
 
 
-@pytest.mark.parametrize("empty", [None, []])
-def test_in_app(empty):
-    assert handle_in_app_impl(
-        [{"module": "foo"}, {"module": "bar"}],
-        in_app_include=["foo"],
-        in_app_exclude=empty,
-    ) == [{"module": "foo", "in_app": True}, {"module": "bar"}]
-
-    assert handle_in_app_impl(
-        [{"module": "foo"}, {"module": "bar"}],
-        in_app_include=["foo"],
-        in_app_exclude=["foo"],
-    ) == [{"module": "foo", "in_app": True}, {"module": "bar"}]
+@pytest.mark.parametrize(
+    "frame,in_app_include,in_app_exclude,project_root,resulting_frame",
+    [
+        [
+            {
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+            },
+            None,
+            None,
+            None,
+            {
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+            },
+            None,
+            None,
+            None,
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": True,
+            },
+            None,
+            None,
+            None,
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": True,
+            },
+        ],
+        [
+            {
+                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
+            },
+            None,
+            None,
+            None,
+            {
+                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
+            },
+            None,
+            None,
+            None,
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+            None,
+            None,
+            None,
+            {
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+        ],
+        [
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+            None,
+            None,
+            None,
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+        ],
+        # include
+        [
+            {
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+            },
+            ["fastapi"],
+            None,
+            None,
+            {
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": False,  # because there is no module set
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+            },
+            ["fastapi"],
+            None,
+            None,
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": True,
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": False,
+            },
+            ["fastapi"],
+            None,
+            None,
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
+            },
+            ["fastapi"],
+            None,
+            None,
+            {
+                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
+                "in_app": False,  # because there is no module set
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
+            },
+            ["fastapi"],
+            None,
+            None,
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
+                "in_app": True,
+            },
+        ],
+        [
+            {
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+            ["fastapi"],
+            None,
+            None,
+            {
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+        ],
+        [
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+            ["fastapi"],
+            None,
+            None,
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+        ],
+        # exclude
+        [
+            {
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+            },
+            None,
+            ["main"],
+            None,
+            {
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+            },
+            None,
+            ["main"],
+            None,
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": True,
+            },
+            None,
+            ["main"],
+            None,
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/home/ubuntu/fastapi/.venv/lib/python3.10/site-packages/fastapi/routing.py",
+                "in_app": True,
+            },
+        ],
+        [
+            {
+                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
+            },
+            None,
+            ["main"],
+            None,
+            {
+                "abs_path": "C:\\Users\\winuser\\AppData\\Roaming\\Python\\Python35\\site-packages\\fastapi\\routing.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
+            },
+            None,
+            ["main"],
+            None,
+            {
+                "module": "fastapi.routing",
+                "abs_path": "/usr/lib/python2.7/dist-packages/fastapi/routing.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+            None,
+            ["main"],
+            None,
+            {
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+        ],
+        [
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+            None,
+            ["main"],
+            None,
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+                "in_app": False,
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+            },
+            None,
+            None,
+            None,
+            {
+                "module": "fastapi.routing",
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+            },
+            ["fastapi"],
+            None,
+            None,
+            {
+                "module": "fastapi.routing",
+                "in_app": True,
+            },
+        ],
+        [
+            {
+                "module": "fastapi.routing",
+            },
+            None,
+            ["fastapi"],
+            None,
+            {
+                "module": "fastapi.routing",
+                "in_app": False,
+            },
+        ],
+        # with project_root set
+        [
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+            None,
+            None,
+            "/home/ubuntu/fastapi",
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+                "in_app": True,
+            },
+        ],
+        [
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+            ["main"],
+            None,
+            "/home/ubuntu/fastapi",
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+                "in_app": True,
+            },
+        ],
+        [
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+            },
+            None,
+            ["main"],
+            "/home/ubuntu/fastapi",
+            {
+                "module": "main",
+                "abs_path": "/home/ubuntu/fastapi/main.py",
+                "in_app": False,
+            },
+        ],
+    ],
+)
+def test_set_in_app_in_frames(
+    frame, in_app_include, in_app_exclude, project_root, resulting_frame
+):
+    new_frames = set_in_app_in_frames(
+        [frame],
+        in_app_include=in_app_include,
+        in_app_exclude=in_app_exclude,
+        project_root=project_root,
+    )
 
-    assert handle_in_app_impl(
-        [{"module": "foo"}, {"module": "bar"}],
-        in_app_include=empty,
-        in_app_exclude=["foo"],
-    ) == [{"module": "foo", "in_app": False}, {"module": "bar", "in_app": True}]
+    assert new_frames[0] == resulting_frame
 
 
 def test_iter_stacktraces():
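
The parameter table above encodes a precedence order for frame classification, sketched here as a hypothetical stand-in for set_in_app_in_frames: an explicit in_app on the frame always wins, module prefixes drive in_app_include and in_app_exclude, a site-packages/dist-packages path forces in_app=False, and project_root marks frames under it as in-app.

def classify_frame(frame, in_app_include=None, in_app_exclude=None, project_root=None):
    frame = dict(frame)
    if "in_app" in frame:
        # An explicit decision on the frame always wins.
        return frame
    module = frame.get("module")
    abs_path = frame.get("abs_path", "")
    if module and in_app_include and any(module.startswith(m) for m in in_app_include):
        frame["in_app"] = True
    elif module and in_app_exclude and any(module.startswith(m) for m in in_app_exclude):
        frame["in_app"] = False
    elif "site-packages" in abs_path or "dist-packages" in abs_path:
        # Installed third-party code is never in-app.
        frame["in_app"] = False
    elif project_root and abs_path.startswith(project_root):
        frame["in_app"] = True
    return frame

assert classify_frame({"module": "fastapi.routing"}, in_app_include=["fastapi"])["in_app"] is True
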
@@ -217,3 +570,34 @@ def test_failed_base64_conversion(input):
     # failures
     if type(input) not in string_types:
         assert to_base64(input) is None
+
+
+@pytest.mark.parametrize(
+    "input,max_length,result",
+    [
+        [None, None, None],
+        ["a" * 256, None, "a" * 256],
+        [
+            "a" * 257,
+            256,
+            AnnotatedValue(
+                value="a" * 253 + "...",
+                metadata={"len": 257, "rem": [["!limit", "x", 253, 256]]},
+            ),
+        ],
+        # fmt: off
+        [u"éééé", None, u"éééé"],
+        [u"éééé", 5, AnnotatedValue(value=u"é...", metadata={"len": 8, "rem": [["!limit", "x", 2, 5]]})],
+        # fmt: on
+        ["éééé", None, "éééé"],
+        [
+            "éééé",
+            5,
+            AnnotatedValue(
+                value="é...", metadata={"len": 8, "rem": [["!limit", "x", 2, 5]]}
+            ),
+        ],
+    ],
+)
+def test_strip_string(input, max_length, result):
+    assert strip_string(input, max_length) == result
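
A way to read the strip_string cases above: lengths are measured on the UTF-8 encoding (which is why u"éééé" counts as len 8), values within max_length pass through unchanged, and longer values keep max_length - 3 bytes plus "...", with the AnnotatedValue metadata recording the original length and the kept range. A rough byte-based model consistent with these cases (dicts stand in for AnnotatedValue):

def strip_string_sketch(value, max_length=None):
    if value is None or max_length is None:
        return value
    encoded = value.encode("utf-8")
    if len(encoded) <= max_length:
        return value
    kept = max_length - 3  # leave room for the "..." marker
    return {
        "value": encoded[:kept].decode("utf-8", "ignore") + "...",
        "metadata": {"len": len(encoded), "rem": [["!limit", "x", kept, max_length]]},
    }

result = strip_string_sketch(u"éééé", 5)
assert result == {
    "value": u"é...",
    "metadata": {"len": 8, "rem": [["!limit", "x", 2, 5]]},
}
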
diff --git a/tests/utils/test_transaction.py b/tests/utils/test_transaction.py
index e1aa12308f..bfb87f4c29 100644
--- a/tests/utils/test_transaction.py
+++ b/tests/utils/test_transaction.py
@@ -1,5 +1,15 @@
+import sys
+from functools import partial
+
+import pytest
+
 from sentry_sdk.utils import transaction_from_function
 
+try:
+    from functools import partialmethod
+except ImportError:
+    pass
+
 
 class MyClass:
     def myfunc(self):
@@ -10,6 +20,16 @@ def myfunc():
     pass
 
 
+@partial
+def my_partial():
+    pass
+
+
+my_lambda = lambda: None
+
+my_partial_lambda = partial(lambda: None)
+
+
 def test_transaction_from_function():
     x = transaction_from_function
     assert x(MyClass) == "tests.utils.test_transaction.MyClass"
@@ -18,3 +38,26 @@ def test_transaction_from_function():
     assert x(None) is None
     assert x(42) is None
     assert x(lambda: None).endswith("")
+    assert x(my_lambda) == "tests.utils.test_transaction."
+    assert (
+        x(my_partial) == "partial()"
+    )
+    assert (
+        x(my_partial_lambda)
+        == "partial(>)"
+    )
+
+
+@pytest.mark.skipif(sys.version_info < (3, 4), reason="Require python 3.4 or higher")
+def test_transaction_from_function_partialmethod():
+    x = transaction_from_function
+
+    class MyPartialClass:
+        @partialmethod
+        def my_partial_method(self):
+            pass
+
+    assert (
+        x(MyPartialClass.my_partial_method)
+        == "partialmethod(.MyPartialClass.my_partial_method>)"
+    )
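
The long expected strings above follow from how partials are named: unwrap to the underlying function, take its module plus __qualname__, and wrap the result in partial(<function ...>) (or partialmethod(<function ...>)). A sketch of the partial case only (qualname_sketch is illustrative, not sentry_sdk.utils.qualname_from_function):

from functools import partial

def qualname_sketch(func):
    prefix = suffix = ""
    if isinstance(func, partial) and hasattr(func.func, "__name__"):
        prefix, suffix = "partial(<function ", ">)"
        func = func.func  # name the wrapped function, not the partial object
    return "%s%s.%s%s" % (prefix, func.__module__, func.__qualname__, suffix)

assert qualname_sketch(partial(lambda: None)) == (
    "partial(<function " + __name__ + ".<lambda>>)"
)
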
diff --git a/tox.ini b/tox.ini
index 570d13591f..ac886e8cff 100644
--- a/tox.ini
+++ b/tox.ini
@@ -5,86 +5,209 @@
 
 [tox]
 envlist =
-    # === Core ===
-    py{2.7,3.4,3.5,3.6,3.7,3.8,3.9,3.10}
-    pypy
+    # === Common ===
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-common
 
+    # === Gevent ===
+    {py2.7,py3.6,py3.8,py3.10,py3.11}-gevent
 
     # === Integrations ===
-    # General format is {pythonversion}-{integrationname}-{frameworkversion}
+    # General format is {pythonversion}-{integrationname}-v{frameworkversion}
     # 1 blank line between different integrations
     # Each framework version should only be mentioned once. I.e:
-    #   {py3.7,py3.10}-django-{3.2}
-    #   {py3.10}-django-{4.0}
+    #   {py3.7,py3.10}-django-v{3.2}
+    #   {py3.10}-django-v{4.0}
     # instead of:
-    #   {py3.7}-django-{3.2}
-    #   {py3.7,py3.10}-django-{3.2,4.0}
-
-    {pypy,py2.7,py3.5}-django-{1.8,1.9,1.10}
-    {pypy,py2.7}-django-{1.8,1.9,1.10,1.11}
-    {py3.5,py3.6,py3.7}-django-{2.0,2.1}
-    {py3.7,py3.8,py3.9,py3.10}-django-{2.2,3.0,3.1,3.2}
-
-    {pypy,py2.7,py3.4,py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.11,0.12,1.0}
-    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-flask-1.1
-    {py3.6,py3.8,py3.9,py3.10}-flask-2.0
-
-    {py3.7,py3.8,py3.9,py3.10}-quart
-
-    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-bottle-0.12
-
-    {pypy,py2.7,py3.5,py3.6,py3.7}-falcon-1.4
-    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-falcon-2.0
-
-    {py3.5,py3.6,py3.7}-sanic-{0.8,18}
-    {py3.6,py3.7}-sanic-19
-    {py3.6,py3.7,py3.8}-sanic-20
-    {py3.7,py3.8,py3.9,py3.10}-sanic-21
-
-    {pypy,py2.7}-celery-3
-    {pypy,py2.7,py3.5,py3.6}-celery-{4.1,4.2}
-    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8}-celery-{4.3,4.4}
-    {py3.6,py3.7,py3.8,py3.9,py3.10}-celery-5.0
-
-    py3.7-beam-{2.12,2.13,2.32,2.33}
-
-    # The aws_lambda tests deploy to the real AWS and have their own matrix of Python versions.
-    py3.7-aws_lambda
-
-    py3.7-gcp
-
-    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pyramid-{1.6,1.7,1.8,1.9,1.10}
-
-    {pypy,py2.7,py3.5,py3.6}-rq-{0.6,0.7,0.8,0.9,0.10,0.11}
-    {pypy,py2.7,py3.5,py3.6,py3.7,py3.8,py3.9}-rq-{0.12,0.13,1.0,1.1,1.2,1.3}
-    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-rq-{1.4,1.5}
-
-    py3.7-aiohttp-3.5
-    {py3.7,py3.8,py3.9,py3.10}-aiohttp-3.6
-
-    {py3.7,py3.8,py3.9}-tornado-{5}
-    {py3.7,py3.8,py3.9,py3.10}-tornado-{6}
-
-    {py3.5,py3.6,py3.7,py3.8,py3.9}-trytond-{4.6,5.0,5.2}
-    {py3.6,py3.7,py3.8,py3.9,py3.10}-trytond-{5.4}
-
-    {py2.7,py3.8,py3.9}-requests
-
-    {py2.7,py3.7,py3.8,py3.9}-redis
-    {py2.7,py3.7,py3.8,py3.9}-rediscluster-{1,2}
-
-    py{3.7,3.8,3.9,3.10}-asgi
-
-    {py2.7,py3.7,py3.8,py3.9,py3.10}-sqlalchemy-{1.2,1.3}
-
-
-    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10}-pure_eval
-
-    {py3.6,py3.7,py3.8}-chalice-{1.16,1.17,1.18,1.19,1.20}
-
-    {py2.7,py3.6,py3.7,py3.8}-boto3-{1.9,1.10,1.11,1.12,1.13,1.14,1.15,1.16}
-
-    {py3.6,py3.7,py3.8,py3.9,py3.10}-httpx-{0.16,0.17}
+    #   {py3.7}-django-v{3.2}
+    #   {py3.7,py3.10}-django-v{3.2,4.0}
+    #
+    # At a minimum, we should test against at least the lowest
+    # and the latest supported version of a framework.
+
+    # AIOHTTP
+    {py3.7}-aiohttp-v{3.4}
+    {py3.7,py3.9,py3.11}-aiohttp-v{3.9}
+
+    # Ariadne
+    {py3.8,py3.11}-ariadne-v{0.20}
+    {py3.8,py3.11,py3.12}-ariadne-v{0.23}
+
+    # Arq
+    {py3.7,py3.11}-arq-v{0.23}
+    {py3.7,py3.11,py3.12}-arq-v{0.25}
+
+    # Asgi
+    {py3.7,py3.11,py3.12}-asgi
+
+    # asyncpg
+    {py3.7,py3.10}-asyncpg-v{0.23}
+    {py3.8,py3.11,py3.12}-asyncpg-v{0.29}
+
+    # AWS Lambda
+    # The aws_lambda tests deploy to the real AWS and have their own
+    # matrix of Python versions to run the test lambda function in.
+    # see `lambda_runtime` fixture in tests/integrations/aws_lambda.py
+    {py3.9}-aws_lambda
+
+    # Beam
+    {py3.7}-beam-v{2.12}
+    {py3.8,py3.11}-beam-v{2.50}
+
+    # Boto3
+    {py2.7,py3.6,py3.7}-boto3-v{1.12}
+    {py3.7,py3.11,py3.12}-boto3-v{1.21}
+    {py3.7,py3.11,py3.12}-boto3-v{1.29}
+
+    # Bottle
+    {py2.7,py3.5,py3.9}-bottle-v{0.12}
+
+    # Celery
+    {py2.7}-celery-v{3}
+    {py2.7,py3.5,py3.8}-celery-v{4}
+    {py3.6,py3.8}-celery-v{5.0}
+    {py3.7,py3.10}-celery-v{5.1,5.2}
+    {py3.8,py3.11}-celery-v{5.3}
+
+    # Chalice
+    {py3.6,py3.9}-chalice-v{1.16}
+
+    # Clickhouse Driver
+    {py3.8,py3.11}-clickhouse_driver-v{0.2.0}
+
+    # Cloud Resource Context
+    {py3.6,py3.11,py3.12}-cloud_resource_context
+
+    # Django
+    # - Django 1.x
+    {py2.7,py3.5}-django-v{1.8}
+    {py2.7,py3.5,py3.7}-django-v{1.11}
+    # - Django 2.x
+    {py3.5,py3.7}-django-v{2.0}
+    {py3.5,py3.9}-django-v{2.2}
+    # - Django 3.x
+    {py3.6,py3.9}-django-v{3.0}
+    {py3.6,py3.11}-django-v{3.2}
+    # - Django 4.x
+    {py3.8,py3.11,py3.12}-django-v{4.0,4.1,4.2}
+    # - Django 5.x
+    {py3.10,py3.11,py3.12}-django-v{5.0}
+
+    # Falcon
+    {py2.7,py3.5,py3.7}-falcon-v{1,1.4,2}
+    {py3.5,py3.6,py3.11,py3.12}-falcon-v{3}
+
+    # FastAPI
+    {py3.7,py3.10}-fastapi-v{0.79}
+    {py3.8,py3.11,py3.12}-fastapi-v{0.110}
+
+    # Flask
+    {py2.7,py3.5}-flask-v{0,0.11}
+    {py2.7,py3.5,py3.8}-flask-v{1}
+    {py3.8,py3.11,py3.12}-flask-v{2}
+    {py3.10,py3.11,py3.12}-flask-v{3}
+
+    # GCP
+    {py3.7}-gcp
+
+    # GQL
+    {py3.7,py3.11}-gql-v{3.4}
+    {py3.7,py3.11}-gql-v{3.5}
+
+    # Graphene
+    {py3.7,py3.11}-graphene-v{3.3}
+
+    # gRPC
+    {py3.7,py3.10}-grpc-v{1.21,1.30,1.40}
+    {py3.7,py3.11}-grpc-v{1.50}
+
+    # HTTPX
+    {py3.6,py3.9}-httpx-v{0.16,0.18}
+    {py3.6,py3.10}-httpx-v{0.20,0.22}
+    {py3.7,py3.11,py3.12}-httpx-v{0.23,0.24}
+    {py3.9,py3.11,py3.12}-httpx-v{0.25}
+
+    # Huey
+    {py2.7,py3.5,py3.11,py3.12}-huey-v{2.0}
+    {py3.5,py3.11,py3.12}-huey-v{2.5}
+
+    # Loguru
+    {py3.5,py3.11,py3.12}-loguru-v{0.5}
+    {py3.5,py3.11,py3.12}-loguru-v{0.7}
+
+    # OpenAI
+    {py3.9,py3.11,py3.12}-openai-v1
+    {py3.9,py3.11,py3.12}-openai-notiktoken
+
+    # OpenTelemetry (OTel)
+    {py3.7,py3.9,py3.11,py3.12}-opentelemetry
+
+    # pure_eval
+    {py3.5,py3.11,py3.12}-pure_eval
+
+    # PyMongo (Mongo DB)
+    {py2.7,py3.6}-pymongo-v{3.1}
+    {py2.7,py3.6,py3.9}-pymongo-v{3.12}
+    {py3.6,py3.11}-pymongo-v{4.0}
+    {py3.7,py3.11,py3.12}-pymongo-v{4.3,4.6}
+
+    # Pyramid
+    {py2.7,py3.5,py3.11}-pyramid-v{1.6}
+    {py2.7,py3.5,py3.11,py3.12}-pyramid-v{1.10}
+    {py3.6,py3.11,py3.12}-pyramid-v{2.0}
+
+    # Quart
+    {py3.7,py3.11}-quart-v{0.16}
+    {py3.8,py3.11,py3.12}-quart-v{0.19}
+
+    # Redis
+    {py2.7,py3.7,py3.8}-redis-v{3}
+    {py3.7,py3.8,py3.11}-redis-v{4}
+    {py3.7,py3.11,py3.12}-redis-v{5}
+
+    # Redis Cluster
+    {py2.7,py3.7,py3.8}-rediscluster-v{1,2}
+
+    # Requests
+    {py2.7,py3.8,py3.11,py3.12}-requests
+
+    # RQ (Redis Queue)
+    {py2.7,py3.5,py3.6}-rq-v{0.6}
+    {py2.7,py3.5,py3.9}-rq-v{0.13,1.0}
+    {py3.5,py3.11}-rq-v{1.5,1.10}
+    {py3.7,py3.11,py3.12}-rq-v{1.15}
+
+    # Sanic
+    {py3.5,py3.7}-sanic-v{0.8}
+    {py3.6,py3.8}-sanic-v{20}
+    {py3.7,py3.11}-sanic-v{22}
+    {py3.7,py3.11}-sanic-v{23}
+
+    # Starlette
+    {py3.7,py3.10}-starlette-v{0.19}
+    {py3.7,py3.11}-starlette-v{0.20,0.24,0.28}
+    {py3.8,py3.11,py3.12}-starlette-v{0.32}
+
+    # Starlite
+    {py3.8,py3.11}-starlite-v{1.48,1.51}
+    # 1.51.14 is the last starlite version; the project continues as litestar
+
+    # SQL Alchemy
+    {py2.7,py3.7,py3.9}-sqlalchemy-v{1.2,1.4}
+    {py3.7,py3.11}-sqlalchemy-v{2.0}
+
+    # Strawberry
+    {py3.8,py3.11}-strawberry-v{0.209}
+    {py3.8,py3.11,py3.12}-strawberry-v{0.224}
+
+    # Tornado
+    {py3.7,py3.9}-tornado-v{5}
+    {py3.8,py3.11,py3.12}-tornado-v{6}
+
+    # Trytond
+    {py3.5,py3.6}-trytond-v{4}
+    {py3.6,py3.8}-trytond-v{5}
+    {py3.6,py3.11}-trytond-v{6}
+    {py3.8,py3.11,py3.12}-trytond-v{7}
 
 [testenv]
 deps =
@@ -93,235 +216,419 @@ deps =
     # with the -r flag
     -r test-requirements.txt
 
-    py3.4: colorama==0.4.1
-    py3.4: watchdog==0.10.7
-
-    django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0
-
-    {py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: channels>2
-    {py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: pytest-asyncio
-    {py2.7,py3.7,py3.8,py3.9,py3.10}-django-{1.11,2.2,3.0,3.1,3.2}: psycopg2-binary
-
-    django-{1.8,1.9,1.10,1.11,2.0,2.1}: pytest-django<4.0
-    django-{2.2,3.0,3.1,3.2}: pytest-django>=4.0
-    django-{2.2,3.0,3.1,3.2}: Werkzeug<2.0
-
-    django-1.8: Django>=1.8,<1.9
-    django-1.9: Django>=1.9,<1.10
-    django-1.10: Django>=1.10,<1.11
-    django-1.11: Django>=1.11,<1.12
-    django-2.0: Django>=2.0,<2.1
-    django-2.1: Django>=2.1,<2.2
-    django-2.2: Django>=2.2,<2.3
-    django-3.0: Django>=3.0,<3.1
-    django-3.1: Django>=3.1,<3.2
-    django-3.2: Django>=3.2,<3.3
-
-    flask: flask-login
-    flask-0.11: Flask>=0.11,<0.12
-    flask-0.12: Flask>=0.12,<0.13
-    flask-1.0: Flask>=1.0,<1.1
-    flask-1.1: Flask>=1.1,<1.2
-    flask-2.0: Flask>=2.0,<2.1
-
-    quart: quart>=0.16.1
-    quart: quart-auth
-    quart: pytest-asyncio
-
-    bottle-0.12: bottle>=0.12,<0.13
-
-    falcon-1.4: falcon>=1.4,<1.5
-    falcon-2.0: falcon>=2.0.0rc3,<3.0
-
-    sanic-0.8: sanic>=0.8,<0.9
-    sanic-18: sanic>=18.0,<19.0
-    sanic-19: sanic>=19.0,<20.0
-    sanic-20: sanic>=20.0,<21.0
-    sanic-21: sanic>=21.0,<22.0
-    {py3.7,py3.8,py3.9,py3.10}-sanic-21: sanic_testing
-    {py3.5,py3.6}-sanic: aiocontextvars==0.2.1
-    sanic: aiohttp
-    py3.5-sanic: ujson<4
-
-    beam-2.12: apache-beam>=2.12.0, <2.13.0
-    beam-2.13: apache-beam>=2.13.0, <2.14.0
-    beam-2.32: apache-beam>=2.32.0, <2.33.0
-    beam-2.33: apache-beam>=2.33.0, <2.34.0
-    beam-master: git+https://github.com/apache/beam#egg=apache-beam&subdirectory=sdks/python
-
-    celery: redis
-    celery-3: Celery>=3.1,<4.0
-    celery-4.1: Celery>=4.1,<4.2
-    celery-4.2: Celery>=4.2,<4.3
-    celery-4.3: Celery>=4.3,<4.4
-    # https://github.com/celery/vine/pull/29#issuecomment-689498382
-    celery-4.3: vine<5.0.0
-    # https://github.com/celery/celery/issues/6153
-    celery-4.4: Celery>=4.4,<4.5,!=4.4.4
-    celery-5.0: Celery>=5.0,<5.1
-
-    py3.5-celery: newrelic<6.0.0
-    {pypy,py2.7,py3.6,py3.7,py3.8,py3.9,py3.10}-celery: newrelic
-
-    requests: requests>=2.0
-
-    aws_lambda: boto3
+    linters: -r linter-requirements.txt
+    linters: werkzeug<2.3.0
+
+    # === Common ===
+    py3.8-common: hypothesis
+    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-common: pytest-asyncio<=0.21.1
+    # See https://github.com/pytest-dev/pytest/issues/9621
+    # and https://github.com/pytest-dev/pytest-forked/issues/67
+    # for justification of the upper bound on pytest
+    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-common: pytest<7.0.0
+
+    # === Gevent ===
+    {py2.7,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-gevent: gevent>=22.10.0, <22.11.0
+    # See https://github.com/pytest-dev/pytest/issues/9621
+    # and https://github.com/pytest-dev/pytest-forked/issues/67
+    # for justification of the upper bound on pytest
+    {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-gevent: pytest<7.0.0
 
-    pyramid-1.6: pyramid>=1.6,<1.7
-    pyramid-1.7: pyramid>=1.7,<1.8
-    pyramid-1.8: pyramid>=1.8,<1.9
-    pyramid-1.9: pyramid>=1.9,<1.10
-    pyramid-1.10: pyramid>=1.10,<1.11
+    # === Integrations ===
 
-    # https://github.com/jamesls/fakeredis/issues/245
-    rq-{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: fakeredis<1.0
-    rq-{0.6,0.7,0.8,0.9,0.10,0.11,0.12}: redis<3.2.2
-    rq-{0.13,1.0,1.1,1.2,1.3,1.4,1.5}: fakeredis>=1.0,<1.7.4
-
-    rq-0.6: rq>=0.6,<0.7
-    rq-0.7: rq>=0.7,<0.8
-    rq-0.8: rq>=0.8,<0.9
-    rq-0.9: rq>=0.9,<0.10
-    rq-0.10: rq>=0.10,<0.11
-    rq-0.11: rq>=0.11,<0.12
-    rq-0.12: rq>=0.12,<0.13
-    rq-0.13: rq>=0.13,<0.14
-    rq-1.0: rq>=1.0,<1.1
-    rq-1.1: rq>=1.1,<1.2
-    rq-1.2: rq>=1.2,<1.3
-    rq-1.3: rq>=1.3,<1.4
-    rq-1.4: rq>=1.4,<1.5
-    rq-1.5: rq>=1.5,<1.6
-
-    aiohttp-3.4: aiohttp>=3.4.0,<3.5.0
-    aiohttp-3.5: aiohttp>=3.5.0,<3.6.0
+    # AIOHTTP
+    aiohttp-v3.4: aiohttp~=3.4.0
+    aiohttp-v3.8: aiohttp~=3.8.0
     aiohttp: pytest-aiohttp
+    aiohttp-v3.8: pytest-asyncio<=0.21.1
+    aiohttp-v3.9: pytest-asyncio<=0.21.1
+
+    # Ariadne
+    ariadne-v0.20: ariadne~=0.20.0
+    ariadne-v0.23: ariadne~=0.23.0
+    ariadne: fastapi
+    ariadne: flask
+    ariadne: httpx
+
+    # Arq
+    arq-v0.23: arq~=0.23.0
+    arq-v0.23: pydantic<2
+    arq-v0.25: arq~=0.25.0
+    arq-v0.25: pydantic<2
+    arq: fakeredis>=2.2.0,<2.8
+    arq: pytest-asyncio<=0.21.1
+    arq: async-timeout
+
+    # Asgi
+    asgi: pytest-asyncio<=0.21.1
+    asgi: async-asgi-testclient
+
+    # Asyncpg
+    asyncpg-v0.23: asyncpg~=0.23.0
+    asyncpg-v0.29: asyncpg~=0.29.0
+    asyncpg: pytest-asyncio<=0.21.1
+
+    # AWS Lambda
+    aws_lambda: boto3
 
-    tornado-5: tornado>=5,<6
-    tornado-6: tornado>=6.0a1
-
-    trytond-5.4: trytond>=5.4,<5.5
-    trytond-5.2: trytond>=5.2,<5.3
-    trytond-5.0: trytond>=5.0,<5.1
-    trytond-4.6: trytond>=4.6,<4.7
-
-    trytond-{4.6,4.8,5.0,5.2,5.4}: werkzeug<2.0
-
-    redis: fakeredis<1.7.4
-
-    rediscluster-1: redis-py-cluster>=1.0.0,<2.0.0
-    rediscluster-2: redis-py-cluster>=2.0.0,<3.0.0
-
-    asgi: starlette
-    asgi: requests
-    asgi: fastapi
+    # Beam
+    beam-v2.12: apache-beam~=2.12.0
+    beam-v2.50: apache-beam~=2.50.0
 
-    sqlalchemy-1.2: sqlalchemy>=1.2,<1.3
-    sqlalchemy-1.3: sqlalchemy>=1.3,<1.4
+    # Boto3
+    boto3-v1.12: boto3~=1.12.0
+    boto3-v1.21: boto3~=1.21.0
+    boto3-v1.29: boto3~=1.29.0
 
-    linters: -r linter-requirements.txt
+    # Bottle
+    bottle: Werkzeug<2.1.0
+    bottle-v0.12: bottle~=0.12.0
 
-    py3.8: hypothesis
+    # Celery
+    celery: redis
+    celery-v3: Celery~=3.0
+    celery-v4: Celery~=4.0
+    celery-v5.0: Celery~=5.0.0
+    celery-v5.1: Celery~=5.1.0
+    celery-v5.2: Celery~=5.2.0
+    celery-v5.3: Celery~=5.3.0
+
+    {py3.7}-celery: importlib-metadata<5.0
+    {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-celery: newrelic
+
+    # Chalice
+    chalice-v1.16: chalice~=1.16.0
+    chalice: pytest-chalice==0.0.5
 
+    {py3.7}-chalice: botocore~=1.31
+    {py3.8}-chalice: botocore~=1.31
+
+    # Clickhouse Driver
+    clickhouse_driver-v0.2.0: clickhouse_driver~=0.2.0
+
+    # Django
+    django: psycopg2-binary
+    django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0
+    django-v{2.0,2.2,3.0,3.2,4.0,4.1,4.2,5.0}: channels[daphne]
+    django-v{1.8,1.11,2.0,2.2,3.0,3.2}: Werkzeug<2.1.0
+    django-v{1.8,1.11,2.0}: pytest-django<4.0
+    django-v{2.2,3.0,3.2,4.0,4.1,4.2,5.0}: pytest-django
+    django-v{4.0,4.1,4.2,5.0}: djangorestframework
+    django-v{4.0,4.1,4.2,5.0}: pytest-asyncio<=0.21.1
+    django-v{4.0,4.1,4.2,5.0}: Werkzeug
+
+    django-v1.8: Django~=1.8.0
+    django-v1.11: Django~=1.11.0
+    django-v2.0: Django~=2.0.0
+    django-v2.2: Django~=2.2.0
+    django-v3.0: Django~=3.0.0
+    django-v3.2: Django~=3.2.0
+    django-v4.0: Django~=4.0.0
+    django-v4.1: Django~=4.1.0
+    django-v4.2: Django~=4.2.0
+    django-v5.0: Django~=5.0.0
+
+    # Falcon
+    falcon-v1.4: falcon~=1.4.0
+    falcon-v1: falcon~=1.0
+    falcon-v2: falcon~=2.0
+    falcon-v3: falcon~=3.0
+
+    # FastAPI
+    fastapi: httpx
+    # (this is a dependency of httpx)
+    fastapi: anyio<4.0.0
+    fastapi: pytest-asyncio<=0.21.1
+    fastapi: python-multipart
+    fastapi: requests
+    fastapi-v{0.79}: fastapi~=0.79.0
+    fastapi-v{0.110}: fastapi~=0.110.0
+
+    # Flask
+    flask: flask-login
+    flask-v{0.11,0,1,2.0}: Werkzeug<2.1.0
+    flask-v{0.11,0,1,2.0}: markupsafe<2.1.0
+    flask-v{3}: Werkzeug
+    flask-v0.11: Flask~=0.11.0
+    flask-v0: Flask~=0.11
+    flask-v1: Flask~=1.0
+    flask-v2: Flask~=2.0
+    flask-v3: Flask~=3.0
+
+    # GQL
+    gql-v{3.4}: gql[all]~=3.4.0
+    gql-v{3.5}: gql[all]~=3.5.0
+
+    # Graphene
+    graphene: blinker
+    graphene: fastapi
+    graphene: flask
+    graphene: httpx
+    graphene-v{3.3}: graphene~=3.3.0
+
+    # gRPC
+    grpc: protobuf
+    grpc: mypy-protobuf
+    grpc: types-protobuf
+    grpc: pytest-asyncio<=0.21.1
+    grpc-v1.21: grpcio-tools~=1.21.0
+    grpc-v1.30: grpcio-tools~=1.30.0
+    grpc-v1.40: grpcio-tools~=1.40.0
+    grpc-v1.50: grpcio-tools~=1.50.0
+
+    # HTTPX
+    httpx-v0.16: pytest-httpx==0.10.0
+    httpx-v0.18: pytest-httpx==0.12.0
+    httpx-v0.20: pytest-httpx==0.14.0
+    httpx-v0.22: pytest-httpx==0.19.0
+    httpx-v0.23: pytest-httpx~=0.21.0
+    httpx-v0.24: pytest-httpx==0.22.0
+    httpx-v0.25: pytest-httpx==0.25.0
+    httpx: pytest-httpx
+    # (this is a dependency of httpx)
+    httpx: anyio<4.0.0
+    httpx-v0.16: httpx~=0.16.0
+    httpx-v0.18: httpx~=0.18.0
+    httpx-v0.20: httpx~=0.20.0
+    httpx-v0.22: httpx~=0.22.0
+    httpx-v0.23: httpx~=0.23.0
+    httpx-v0.24: httpx~=0.24.0
+    httpx-v0.25: httpx~=0.25.0
+
+    # Huey
+    huey-v2.0: huey~=2.0.0
+    huey-v2.5: huey~=2.5.0
+
+    # Loguru
+    loguru-v0.5: loguru~=0.5.0
+    loguru-v0.7: loguru~=0.7.0
+
+    # OpenAI
+    openai-v1: openai==1.16.2
+    openai-v1: tiktoken~=0.6.0
+    openai-notiktoken: openai
+
+    # OpenTelemetry (OTel)
+    opentelemetry: opentelemetry-distro
+
+    # pure_eval
     pure_eval: pure_eval
-    chalice-1.16: chalice>=1.16.0,<1.17.0
-    chalice-1.17: chalice>=1.17.0,<1.18.0
-    chalice-1.18: chalice>=1.18.0,<1.19.0
-    chalice-1.19: chalice>=1.19.0,<1.20.0
-    chalice-1.20: chalice>=1.20.0,<1.21.0
-    chalice: pytest-chalice==0.0.5
 
-    boto3-1.9: boto3>=1.9,<1.10
-    boto3-1.10: boto3>=1.10,<1.11
-    boto3-1.11: boto3>=1.11,<1.12
-    boto3-1.12: boto3>=1.12,<1.13
-    boto3-1.13: boto3>=1.13,<1.14
-    boto3-1.14: boto3>=1.14,<1.15
-    boto3-1.15: boto3>=1.15,<1.16
-    boto3-1.16: boto3>=1.16,<1.17
+    # PyMongo (MongoDB)
+    pymongo: mockupdb
+    pymongo-v3.1: pymongo~=3.1.0
+    pymongo-v3.13: pymongo~=3.13.0
+    pymongo-v4.0: pymongo~=4.0.0
+    pymongo-v4.3: pymongo~=4.3.0
+    pymongo-v4.6: pymongo~=4.6.0
+
+    # Pyramid
+    pyramid: Werkzeug<2.1.0
+    pyramid-v1.6: pyramid~=1.6.0
+    pyramid-v1.10: pyramid~=1.10.0
+    pyramid-v2.0: pyramid~=2.0.0
+
+    # Quart
+    quart: quart-auth
+    quart: pytest-asyncio<=0.21.1
+    quart-v0.16: blinker<1.6
+    quart-v0.16: jinja2<3.1.0
+    quart-v0.16: Werkzeug<2.1.0
+    quart-v0.16: hypercorn<0.15.0
+    quart-v0.16: quart~=0.16.0
+    quart-v0.19: Werkzeug>=3.0.0
+    quart-v0.19: quart~=0.19.0
+
+    # Redis
+    redis: fakeredis!=1.7.4
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-redis: pytest-asyncio<=0.21.1
+    redis-v3: redis~=3.0
+    redis-v4: redis~=4.0
+    redis-v5: redis~=5.0
+
+    # Redis Cluster
+    rediscluster-v1: redis-py-cluster~=1.0
+    rediscluster-v2: redis-py-cluster~=2.0
+
+    # Requests
+    requests: requests>=2.0
 
-    httpx-0.16: httpx>=0.16,<0.17
-    httpx-0.17: httpx>=0.17,<0.18
+    # RQ (Redis Queue)
+    # https://github.com/jamesls/fakeredis/issues/245
+    rq-v{0.6}: fakeredis<1.0
+    rq-v{0.6}: redis<3.2.2
+    rq-v{0.13,1.0,1.5,1.10}: fakeredis>=1.0,<1.7.4
+    rq-v{1.15}: fakeredis
+    rq-v0.6: rq~=0.6.0
+    rq-v0.13: rq~=0.13.0
+    rq-v1.0: rq~=1.0.0
+    rq-v1.5: rq~=1.5.0
+    rq-v1.10: rq~=1.10.0
+    rq-v1.15: rq~=1.15.0
+
+    # Sanic
+    sanic: websockets<11.0
+    sanic: aiohttp
+    sanic-v{22,23}: sanic_testing
+    {py3.5,py3.6}-sanic: aiocontextvars==0.2.1
+    {py3.5}-sanic: ujson<4
+    sanic-v0.8: sanic~=0.8.0
+    sanic-v20: sanic~=20.0
+    sanic-v22: sanic~=22.0
+    sanic-v23: sanic~=23.0
+
+    # Starlette
+    starlette: pytest-asyncio<=0.21.1
+    starlette: python-multipart
+    starlette: requests
+    starlette: httpx
+    # (this is a dependency of httpx)
+    starlette: anyio<4.0.0
+    starlette: jinja2
+    starlette-v0.19: starlette~=0.19.0
+    starlette-v0.20: starlette~=0.20.0
+    starlette-v0.24: starlette~=0.24.0
+    starlette-v0.28: starlette~=0.28.0
+    starlette-v0.32: starlette~=0.32.0
+
+    # Starlite
+    starlite: pytest-asyncio<=0.21.1
+    starlite: python-multipart
+    starlite: requests
+    starlite: cryptography
+    starlite: pydantic<2.0.0
+    starlite-v{1.48}: starlite~=1.48.0
+    starlite-v{1.51}: starlite~=1.51.0
+
+    # SQLAlchemy
+    sqlalchemy-v1.2: sqlalchemy~=1.2.0
+    sqlalchemy-v1.4: sqlalchemy~=1.4.0
+    sqlalchemy-v2.0: sqlalchemy~=2.0.0
+
+    # Strawberry
+    strawberry: fastapi
+    strawberry: flask
+    strawberry: httpx
+    strawberry-v0.209: strawberry-graphql[fastapi,flask]~=0.209.0
+    strawberry-v0.224: strawberry-graphql[fastapi,flask]~=0.224.0
+
+    # Tornado
+    tornado-v5: tornado~=5.0
+    tornado-v6: tornado~=6.0
+
+    # Trytond
+    trytond-v4: trytond~=4.0
+    trytond-v5: trytond~=5.0
+    trytond-v6: trytond~=6.0
+    trytond-v7: trytond~=7.0
+
+    trytond-v{4}: werkzeug<1.0
+    trytond-v{5,6,7}: werkzeug<3.0
 
 setenv =
     PYTHONDONTWRITEBYTECODE=1
-    TESTPATH=tests
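+    # macOS only: allow fork() in test subprocesses without tripping the
+    # Objective-C runtime's fork-safety check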
+    OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
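+    # TESTPATH narrows the pytest run in `commands` below to a single
+    # integration's test directory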
+    common: TESTPATH=tests
+    gevent: TESTPATH=tests
+    aiohttp: TESTPATH=tests/integrations/aiohttp
+    ariadne: TESTPATH=tests/integrations/ariadne
+    arq: TESTPATH=tests/integrations/arq
+    asgi: TESTPATH=tests/integrations/asgi
+    asyncpg: TESTPATH=tests/integrations/asyncpg
+    aws_lambda: TESTPATH=tests/integrations/aws_lambda
     beam: TESTPATH=tests/integrations/beam
-    django: TESTPATH=tests/integrations/django
-    flask: TESTPATH=tests/integrations/flask
-    quart: TESTPATH=tests/integrations/quart
+    boto3: TESTPATH=tests/integrations/boto3
     bottle: TESTPATH=tests/integrations/bottle
-    falcon: TESTPATH=tests/integrations/falcon
     celery: TESTPATH=tests/integrations/celery
-    requests: TESTPATH=tests/integrations/requests
-    aws_lambda: TESTPATH=tests/integrations/aws_lambda
+    py{3.8,3.10}-celery: VIRTUALENV_PIP=23.3.2
+    chalice: TESTPATH=tests/integrations/chalice
+    clickhouse_driver: TESTPATH=tests/integrations/clickhouse_driver
+    cloud_resource_context: TESTPATH=tests/integrations/cloud_resource_context
+    django: TESTPATH=tests/integrations/django
+    falcon: TESTPATH=tests/integrations/falcon
+    fastapi: TESTPATH=tests/integrations/fastapi
+    flask: TESTPATH=tests/integrations/flask
     gcp: TESTPATH=tests/integrations/gcp
-    sanic: TESTPATH=tests/integrations/sanic
+    gql: TESTPATH=tests/integrations/gql
+    graphene: TESTPATH=tests/integrations/graphene
+    httpx: TESTPATH=tests/integrations/httpx
+    huey: TESTPATH=tests/integrations/huey
+    loguru: TESTPATH=tests/integrations/loguru
+    openai: TESTPATH=tests/integrations/openai
+    opentelemetry: TESTPATH=tests/integrations/opentelemetry
+    pure_eval: TESTPATH=tests/integrations/pure_eval
+    pymongo: TESTPATH=tests/integrations/pymongo
     pyramid: TESTPATH=tests/integrations/pyramid
-    rq: TESTPATH=tests/integrations/rq
-    aiohttp: TESTPATH=tests/integrations/aiohttp
-    tornado: TESTPATH=tests/integrations/tornado
-    trytond: TESTPATH=tests/integrations/trytond
+    quart: TESTPATH=tests/integrations/quart
     redis: TESTPATH=tests/integrations/redis
     rediscluster: TESTPATH=tests/integrations/rediscluster
-    asgi: TESTPATH=tests/integrations/asgi
+    requests: TESTPATH=tests/integrations/requests
+    rq: TESTPATH=tests/integrations/rq
+    sanic: TESTPATH=tests/integrations/sanic
+    starlette: TESTPATH=tests/integrations/starlette
+    starlite: TESTPATH=tests/integrations/starlite
     sqlalchemy: TESTPATH=tests/integrations/sqlalchemy
-    pure_eval: TESTPATH=tests/integrations/pure_eval
-    chalice: TESTPATH=tests/integrations/chalice
-    boto3: TESTPATH=tests/integrations/boto3
-    httpx: TESTPATH=tests/integrations/httpx
+    strawberry: TESTPATH=tests/integrations/strawberry
+    tornado: TESTPATH=tests/integrations/tornado
+    trytond: TESTPATH=tests/integrations/trytond
+    socket: TESTPATH=tests/integrations/socket
+    grpc: TESTPATH=tests/integrations/grpc
 
     COVERAGE_FILE=.coverage-{envname}
 passenv =
     SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID
     SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY
-    SENTRY_PYTHON_TEST_AWS_IAM_ROLE
     SENTRY_PYTHON_TEST_POSTGRES_USER
     SENTRY_PYTHON_TEST_POSTGRES_PASSWORD
     SENTRY_PYTHON_TEST_POSTGRES_NAME
+    SENTRY_PYTHON_TEST_POSTGRES_HOST
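+# usedevelop installs sentry_sdk itself in editable mode (pip install -e .),
+# so the tests run against the checked-out sources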
 usedevelop = True
 extras =
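+    # these are the sentry-sdk package's own setup extras,
+    # e.g. flask environments install `sentry-sdk[flask]`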
-    flask: flask
     bottle: bottle
     falcon: falcon
-    quart: quart
+    flask: flask
+    pymongo: pymongo
 
 basepython =
     py2.7: python2.7
-    py3.4: python3.4
     py3.5: python3.5
     py3.6: python3.6
     py3.7: python3.7
     py3.8: python3.8
     py3.9: python3.9
     py3.10: python3.10
+    py3.11: python3.11
+    py3.12: python3.12
 
     # Python version is pinned here because flake8 actually behaves differently
     # depending on which version is used. You can patch this out to point to
     # some random Python 3 binary, but then you get guaranteed mismatches with
     # CI. Other tools such as mypy and black have options that pin the Python
     # version.
-    linters: python3.9
-    pypy: pypy
+    linters: python3.12
 
 commands =
-    ; https://github.com/pytest-dev/pytest/issues/5532
-    {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-{0.11,0.12}: pip install pytest<5
-    {py3.6,py3.7,py3.8,py3.9}-flask-{0.11}: pip install Werkzeug<2
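+    ; urllib3 2.0 raised its minimum OpenSSL requirement, which older
+    ; py3.7/py3.8 environments may not meet; cap it there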
+    {py3.7,py3.8}-boto3: pip install urllib3<2.0.0
 
+    ; https://github.com/pytest-dev/pytest/issues/5532
+    {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-v{0.11,0.12}: pip install pytest<5
+    {py3.6,py3.7,py3.8,py3.9}-flask-v{0.11}: pip install Werkzeug<2
     ; https://github.com/pallets/flask/issues/4455
-    {py3.7,py3.8,py3.9,py3.10}-flask-{0.11,0.12,1.0,1.1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1"
-
+    {py3.7,py3.8,py3.9,py3.10,py3.11}-flask-v{0.11,0.12,1.0,1.1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1"
     ; https://github.com/more-itertools/more-itertools/issues/578
-    py3.5-flask-{0.11,0.12}: pip install more-itertools<8.11.0
+    py3.5-flask-v{0.11,0.12}: pip install more-itertools<8.11.0
 
     ; use old pytest for old Python versions:
-    {py2.7,py3.4,py3.5}: pip install pytest-forked==1.1.3
+    {py2.7,py3.5}: pip install pytest-forked==1.1.3
 
-    py.test {env:TESTPATH} {posargs}
+    ; Running `py.test` as an executable suffers from an import error
+    ; when loading tests in some scenarios; in particular, Django fails
+    ; to load the settings from the test module.
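+    ; Flags: -rsx adds a summary of (s)kipped and (x)failed tests, -s disables
+    ; output capture, --durations=5 lists the 5 slowest tests, -vvv is
+    ; maximally verbose.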
+    {py2.7}: python -m pytest --ignore-glob='*py3.py' -rsx -s --durations=5 -vvv {env:TESTPATH} {posargs}
+    {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}: python -m pytest -rsx -s --durations=5 -vvv {env:TESTPATH} {posargs}
 
 [testenv:linters]
 commands =
-    flake8 tests examples sentry_sdk
-    black --check tests examples sentry_sdk
+    flake8 tests sentry_sdk
+    black --check tests sentry_sdk
     mypy sentry_sdk