diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index d375588780..2b91d51cc0 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -9,27 +9,9 @@ updates:
- dependency-type: direct
- dependency-type: indirect
ignore:
- - dependency-name: pytest
- versions:
- - "> 3.7.3"
- - dependency-name: flake8 # Later versions dropped Python 2 support
- versions:
- - "> 5.0.4"
- - dependency-name: jsonschema # Later versions dropped Python 2 support
- versions:
- - "> 3.2.0"
- - dependency-name: pytest-cov
- versions:
- - "> 2.8.1"
- - dependency-name: pytest-forked
- versions:
- - "> 1.1.3"
- dependency-name: sphinx
versions:
- ">= 2.4.a, < 2.5"
- - dependency-name: tox
- versions:
- - "> 3.7.0"
- dependency-name: werkzeug
versions:
- "> 0.15.5, < 1"
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index c56f87ca03..13d8b885f1 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -24,7 +24,7 @@ jobs:
timeout-minutes: 10
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: 3.12
@@ -39,7 +39,7 @@ jobs:
timeout-minutes: 10
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: 3.12
@@ -54,7 +54,7 @@ jobs:
timeout-minutes: 10
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: 3.12
@@ -83,7 +83,7 @@ jobs:
timeout-minutes: 10
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: 3.12
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 1c8422c7ee..c3a36dc124 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -46,7 +46,7 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v4.1.1
+ uses: actions/checkout@v4.1.4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index f55ec12407..47bc4de03d 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-latest
name: "Release a new version"
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
with:
token: ${{ secrets.GH_RELEASE_PAT }}
fetch-depth: 0
diff --git a/.github/workflows/test-integrations-aws-lambda.yml b/.github/workflows/test-integrations-aws-lambda.yml
index 5f5664d8ad..773f41247b 100644
--- a/.github/workflows/test-integrations-aws-lambda.yml
+++ b/.github/workflows/test-integrations-aws-lambda.yml
@@ -30,7 +30,7 @@ jobs:
name: permissions check
runs-on: ubuntu-20.04
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
with:
persist-credentials: false
- name: Check permissions on PR
@@ -53,7 +53,7 @@ jobs:
   test-aws_lambda-pinned:
     name: AWS Lambda (pinned)
     timeout-minutes: 30
     needs: check-permissions
     runs-on: ${{ matrix.os }}
     strategy:
       fail-fast: false
@@ -64,8 +64,8 @@ jobs:
         # ubuntu-20.04 is the last version that supported python3.6
         # see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
         os: [ubuntu-20.04]
     steps:
-      - uses: actions/checkout@v4.1.1
+      - uses: actions/checkout@v4.1.4
         with:
           ref: ${{ github.event.pull_request.head.sha || github.ref }}
       - uses: actions/setup-python@v5
@@ -73,7 +73,7 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Setup Test Env
run: |
- pip install coverage "tox>=3,<4"
+ pip install coverage tox
- name: Erase coverage
run: |
coverage erase
diff --git a/.github/workflows/test-integrations-cloud-computing.yml b/.github/workflows/test-integrations-cloud-computing.yml
index 25e6d9ca24..049b37d211 100644
--- a/.github/workflows/test-integrations-cloud-computing.yml
+++ b/.github/workflows/test-integrations-cloud-computing.yml
@@ -32,13 +32,13 @@ jobs:
# see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
os: [ubuntu-20.04]
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Setup Test Env
run: |
- pip install coverage "tox>=3,<4"
+ pip install coverage tox
- name: Erase coverage
run: |
coverage erase
@@ -80,13 +80,13 @@ jobs:
# see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
os: [ubuntu-20.04]
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Setup Test Env
run: |
- pip install coverage "tox>=3,<4"
+ pip install coverage tox
- name: Erase coverage
run: |
coverage erase
@@ -114,46 +114,9 @@ jobs:
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: coverage.xml
- test-cloud_computing-py27:
- name: Cloud Computing (py27)
- timeout-minutes: 30
- runs-on: ubuntu-20.04
- container: python:2.7
- steps:
- - uses: actions/checkout@v4.1.1
- - name: Setup Test Env
- run: |
- pip install coverage "tox>=3,<4"
- - name: Erase coverage
- run: |
- coverage erase
- - name: Test boto3 py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-boto3" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test chalice py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-chalice" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test cloud_resource_context py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-cloud_resource_context" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test gcp py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-gcp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Generate coverage XML
- run: |
- coverage combine .coverage*
- coverage xml -i
- - uses: codecov/codecov-action@v4
- with:
- token: ${{ secrets.CODECOV_TOKEN }}
- files: coverage.xml
check_required_tests:
name: All Cloud Computing tests passed
- needs: [test-cloud_computing-pinned, test-cloud_computing-py27]
+ needs: test-cloud_computing-pinned
# Always run this, even if a dependent job failed
if: always()
runs-on: ubuntu-20.04
@@ -162,7 +125,3 @@ jobs:
if: contains(needs.test-cloud_computing-pinned.result, 'failure') || contains(needs.test-cloud_computing-pinned.result, 'skipped')
run: |
echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
- - name: Check for 2.7 failures
- if: contains(needs.test-cloud_computing-py27.result, 'failure') || contains(needs.test-cloud_computing-py27.result, 'skipped')
- run: |
- echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integrations-common.yml b/.github/workflows/test-integrations-common.yml
index 8d147fbe41..c046190e1e 100644
--- a/.github/workflows/test-integrations-common.yml
+++ b/.github/workflows/test-integrations-common.yml
@@ -25,20 +25,20 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
+ python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
# python3.6 reached EOL and is no longer being supported on
# new versions of hosted runners on Github Actions
# ubuntu-20.04 is the last version that supported python3.6
# see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
os: [ubuntu-20.04]
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Setup Test Env
run: |
- pip install coverage "tox>=3,<4"
+ pip install coverage tox
- name: Erase coverage
run: |
coverage erase
@@ -54,34 +54,9 @@ jobs:
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: coverage.xml
- test-common-py27:
- name: Common (py27)
- timeout-minutes: 30
- runs-on: ubuntu-20.04
- container: python:2.7
- steps:
- - uses: actions/checkout@v4.1.1
- - name: Setup Test Env
- run: |
- pip install coverage "tox>=3,<4"
- - name: Erase coverage
- run: |
- coverage erase
- - name: Test common py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-common" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Generate coverage XML
- run: |
- coverage combine .coverage*
- coverage xml -i
- - uses: codecov/codecov-action@v4
- with:
- token: ${{ secrets.CODECOV_TOKEN }}
- files: coverage.xml
check_required_tests:
name: All Common tests passed
- needs: [test-common-pinned, test-common-py27]
+ needs: test-common-pinned
# Always run this, even if a dependent job failed
if: always()
runs-on: ubuntu-20.04
@@ -90,7 +65,3 @@ jobs:
if: contains(needs.test-common-pinned.result, 'failure') || contains(needs.test-common-pinned.result, 'skipped')
run: |
echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
- - name: Check for 2.7 failures
- if: contains(needs.test-common-py27.result, 'failure') || contains(needs.test-common-py27.result, 'skipped')
- run: |
- echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integrations-data-processing.yml b/.github/workflows/test-integrations-data-processing.yml
index c40d45845d..ebcd89efea 100644
--- a/.github/workflows/test-integrations-data-processing.yml
+++ b/.github/workflows/test-integrations-data-processing.yml
@@ -25,20 +25,20 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ["3.5","3.7","3.8","3.9","3.11","3.12"]
+ python-version: ["3.6","3.7","3.8","3.9","3.11","3.12"]
# python3.6 reached EOL and is no longer being supported on
# new versions of hosted runners on Github Actions
# ubuntu-20.04 is the last version that supported python3.6
# see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
os: [ubuntu-20.04]
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Setup Test Env
run: |
- pip install coverage "tox>=3,<4"
+ pip install coverage tox
- name: Erase coverage
run: |
coverage erase
@@ -81,20 +81,20 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
+ python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
# python3.6 reached EOL and is no longer being supported on
# new versions of hosted runners on Github Actions
# ubuntu-20.04 is the last version that supported python3.6
# see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
os: [ubuntu-20.04]
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Setup Test Env
run: |
- pip install coverage "tox>=3,<4"
+ pip install coverage tox
- name: Erase coverage
run: |
coverage erase
@@ -130,54 +130,9 @@ jobs:
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: coverage.xml
- test-data_processing-py27:
- name: Data Processing (py27)
- timeout-minutes: 30
- runs-on: ubuntu-20.04
- container: python:2.7
- steps:
- - uses: actions/checkout@v4.1.1
- - name: Setup Test Env
- run: |
- pip install coverage "tox>=3,<4"
- - name: Erase coverage
- run: |
- coverage erase
- - name: Test arq py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-arq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test beam py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-beam" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test celery py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-celery" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test huey py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-huey" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test openai py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-openai" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test rq py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-rq" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Generate coverage XML
- run: |
- coverage combine .coverage*
- coverage xml -i
- - uses: codecov/codecov-action@v4
- with:
- token: ${{ secrets.CODECOV_TOKEN }}
- files: coverage.xml
check_required_tests:
name: All Data Processing tests passed
- needs: [test-data_processing-pinned, test-data_processing-py27]
+ needs: test-data_processing-pinned
# Always run this, even if a dependent job failed
if: always()
runs-on: ubuntu-20.04
@@ -186,7 +141,3 @@ jobs:
if: contains(needs.test-data_processing-pinned.result, 'failure') || contains(needs.test-data_processing-pinned.result, 'skipped')
run: |
echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
- - name: Check for 2.7 failures
- if: contains(needs.test-data_processing-py27.result, 'failure') || contains(needs.test-data_processing-py27.result, 'skipped')
- run: |
- echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integrations-databases.yml b/.github/workflows/test-integrations-databases.yml
index 1074939095..50d02b72f7 100644
--- a/.github/workflows/test-integrations-databases.yml
+++ b/.github/workflows/test-integrations-databases.yml
@@ -46,21 +46,18 @@ jobs:
ports:
- 5432:5432
env:
+ SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
- SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
- SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- uses: getsentry/action-clickhouse-in-ci@v1
- name: Setup Test Env
run: |
- pip install coverage "tox>=3,<4"
- psql postgresql://postgres:sentry@localhost:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
- psql postgresql://postgres:sentry@localhost:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
+ pip install coverage tox
- name: Erase coverage
run: |
coverage erase
@@ -124,21 +121,18 @@ jobs:
ports:
- 5432:5432
env:
+ SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
- SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
- SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- uses: getsentry/action-clickhouse-in-ci@v1
- name: Setup Test Env
run: |
- pip install coverage "tox>=3,<4"
- psql postgresql://postgres:sentry@localhost:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
- psql postgresql://postgres:sentry@localhost:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
+ pip install coverage tox
- name: Erase coverage
run: |
coverage erase
@@ -174,76 +168,9 @@ jobs:
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: coverage.xml
- test-databases-py27:
- name: Databases (py27)
- timeout-minutes: 30
- runs-on: ubuntu-20.04
- container: python:2.7
- services:
- postgres:
- image: postgres
- env:
- POSTGRES_PASSWORD: sentry
- # Set health checks to wait until postgres has started
- options: >-
- --health-cmd pg_isready
- --health-interval 10s
- --health-timeout 5s
- --health-retries 5
- # Maps tcp port 5432 on service container to the host
- ports:
- - 5432:5432
- env:
- SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
- SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
- SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
- SENTRY_PYTHON_TEST_POSTGRES_HOST: postgres
- steps:
- - uses: actions/checkout@v4.1.1
- - uses: getsentry/action-clickhouse-in-ci@v1
- - name: Setup Test Env
- run: |
- pip install coverage "tox>=3,<4"
- psql postgresql://postgres:sentry@postgres:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
- psql postgresql://postgres:sentry@postgres:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
- - name: Erase coverage
- run: |
- coverage erase
- - name: Test asyncpg py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-asyncpg" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test clickhouse_driver py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-clickhouse_driver" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test pymongo py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-pymongo" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test redis py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-redis" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test rediscluster py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-rediscluster" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test sqlalchemy py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-sqlalchemy" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Generate coverage XML
- run: |
- coverage combine .coverage*
- coverage xml -i
- - uses: codecov/codecov-action@v4
- with:
- token: ${{ secrets.CODECOV_TOKEN }}
- files: coverage.xml
check_required_tests:
name: All Databases tests passed
- needs: [test-databases-pinned, test-databases-py27]
+ needs: test-databases-pinned
# Always run this, even if a dependent job failed
if: always()
runs-on: ubuntu-20.04
@@ -252,7 +179,3 @@ jobs:
if: contains(needs.test-databases-pinned.result, 'failure') || contains(needs.test-databases-pinned.result, 'skipped')
run: |
echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
- - name: Check for 2.7 failures
- if: contains(needs.test-databases-py27.result, 'failure') || contains(needs.test-databases-py27.result, 'skipped')
- run: |
- echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integrations-graphql.yml b/.github/workflows/test-integrations-graphql.yml
index 5595437fa7..2a00071382 100644
--- a/.github/workflows/test-integrations-graphql.yml
+++ b/.github/workflows/test-integrations-graphql.yml
@@ -32,13 +32,13 @@ jobs:
# see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
os: [ubuntu-20.04]
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Setup Test Env
run: |
- pip install coverage "tox>=3,<4"
+ pip install coverage tox
- name: Erase coverage
run: |
coverage erase
@@ -80,13 +80,13 @@ jobs:
# see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
os: [ubuntu-20.04]
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Setup Test Env
run: |
- pip install coverage "tox>=3,<4"
+ pip install coverage tox
- name: Erase coverage
run: |
coverage erase
diff --git a/.github/workflows/test-integrations-miscellaneous.yml b/.github/workflows/test-integrations-miscellaneous.yml
index 65b5a41f96..b8c8e0a3a0 100644
--- a/.github/workflows/test-integrations-miscellaneous.yml
+++ b/.github/workflows/test-integrations-miscellaneous.yml
@@ -25,20 +25,20 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ["3.5","3.8","3.11","3.12"]
+ python-version: ["3.6","3.8","3.11","3.12"]
# python3.6 reached EOL and is no longer being supported on
# new versions of hosted runners on Github Actions
# ubuntu-20.04 is the last version that supported python3.6
# see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
os: [ubuntu-20.04]
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Setup Test Env
run: |
- pip install coverage "tox>=3,<4"
+ pip install coverage tox
- name: Erase coverage
run: |
coverage erase
@@ -73,20 +73,20 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ["3.5","3.6","3.7","3.8","3.9","3.11","3.12"]
+ python-version: ["3.6","3.7","3.8","3.9","3.11","3.12"]
# python3.6 reached EOL and is no longer being supported on
# new versions of hosted runners on Github Actions
# ubuntu-20.04 is the last version that supported python3.6
# see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
os: [ubuntu-20.04]
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Setup Test Env
run: |
- pip install coverage "tox>=3,<4"
+ pip install coverage tox
- name: Erase coverage
run: |
coverage erase
diff --git a/.github/workflows/test-integrations-networking.yml b/.github/workflows/test-integrations-networking.yml
index c55537d049..18dfd72c34 100644
--- a/.github/workflows/test-integrations-networking.yml
+++ b/.github/workflows/test-integrations-networking.yml
@@ -32,13 +32,13 @@ jobs:
# see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
os: [ubuntu-20.04]
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Setup Test Env
run: |
- pip install coverage "tox>=3,<4"
+ pip install coverage tox
- name: Erase coverage
run: |
coverage erase
@@ -80,13 +80,13 @@ jobs:
# see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
os: [ubuntu-20.04]
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Setup Test Env
run: |
- pip install coverage "tox>=3,<4"
+ pip install coverage tox
- name: Erase coverage
run: |
coverage erase
@@ -114,46 +114,9 @@ jobs:
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: coverage.xml
- test-networking-py27:
- name: Networking (py27)
- timeout-minutes: 30
- runs-on: ubuntu-20.04
- container: python:2.7
- steps:
- - uses: actions/checkout@v4.1.1
- - name: Setup Test Env
- run: |
- pip install coverage "tox>=3,<4"
- - name: Erase coverage
- run: |
- coverage erase
- - name: Test gevent py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-gevent" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test grpc py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-grpc" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test httpx py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-httpx" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test requests py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-requests" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Generate coverage XML
- run: |
- coverage combine .coverage*
- coverage xml -i
- - uses: codecov/codecov-action@v4
- with:
- token: ${{ secrets.CODECOV_TOKEN }}
- files: coverage.xml
check_required_tests:
name: All Networking tests passed
- needs: [test-networking-pinned, test-networking-py27]
+ needs: test-networking-pinned
# Always run this, even if a dependent job failed
if: always()
runs-on: ubuntu-20.04
@@ -162,7 +125,3 @@ jobs:
if: contains(needs.test-networking-pinned.result, 'failure') || contains(needs.test-networking-pinned.result, 'skipped')
run: |
echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
- - name: Check for 2.7 failures
- if: contains(needs.test-networking-py27.result, 'failure') || contains(needs.test-networking-py27.result, 'skipped')
- run: |
- echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integrations-web-frameworks-1.yml b/.github/workflows/test-integrations-web-frameworks-1.yml
index f0f0fdef0c..861c36b485 100644
--- a/.github/workflows/test-integrations-web-frameworks-1.yml
+++ b/.github/workflows/test-integrations-web-frameworks-1.yml
@@ -46,20 +46,17 @@ jobs:
ports:
- 5432:5432
env:
+ SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
- SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
- SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Setup Test Env
run: |
- pip install coverage "tox>=3,<4"
- psql postgresql://postgres:sentry@localhost:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
- psql postgresql://postgres:sentry@localhost:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
+ pip install coverage tox
- name: Erase coverage
run: |
coverage erase
@@ -67,10 +64,6 @@ jobs:
run: |
set -x # print commands that are executed
./scripts/runtox.sh "py${{ matrix.python-version }}-django-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test fastapi latest
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh "py${{ matrix.python-version }}-fastapi-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- name: Test flask latest
run: |
set -x # print commands that are executed
@@ -79,6 +72,10 @@ jobs:
run: |
set -x # print commands that are executed
./scripts/runtox.sh "py${{ matrix.python-version }}-starlette-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+ - name: Test fastapi latest
+ run: |
+ set -x # print commands that are executed
+ ./scripts/runtox.sh "py${{ matrix.python-version }}-fastapi-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- name: Generate coverage XML
run: |
coverage combine .coverage*
@@ -94,7 +91,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ["3.5","3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
+ python-version: ["3.6","3.7","3.8","3.9","3.10","3.11","3.12"]
# python3.6 reached EOL and is no longer being supported on
# new versions of hosted runners on Github Actions
# ubuntu-20.04 is the last version that supported python3.6
@@ -115,20 +112,17 @@ jobs:
ports:
- 5432:5432
env:
+ SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
- SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
- SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Setup Test Env
run: |
- pip install coverage "tox>=3,<4"
- psql postgresql://postgres:sentry@localhost:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
- psql postgresql://postgres:sentry@localhost:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
+ pip install coverage tox
- name: Erase coverage
run: |
coverage erase
@@ -136,10 +130,6 @@ jobs:
run: |
set -x # print commands that are executed
./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-django" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test fastapi pinned
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-fastapi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- name: Test flask pinned
run: |
set -x # print commands that are executed
@@ -148,64 +138,10 @@ jobs:
run: |
set -x # print commands that are executed
./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-starlette" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Generate coverage XML
- run: |
- coverage combine .coverage*
- coverage xml -i
- - uses: codecov/codecov-action@v4
- with:
- token: ${{ secrets.CODECOV_TOKEN }}
- files: coverage.xml
- test-web_frameworks_1-py27:
- name: Web Frameworks 1 (py27)
- timeout-minutes: 30
- runs-on: ubuntu-20.04
- container: python:2.7
- services:
- postgres:
- image: postgres
- env:
- POSTGRES_PASSWORD: sentry
- # Set health checks to wait until postgres has started
- options: >-
- --health-cmd pg_isready
- --health-interval 10s
- --health-timeout 5s
- --health-retries 5
- # Maps tcp port 5432 on service container to the host
- ports:
- - 5432:5432
- env:
- SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
- SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
- SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
- SENTRY_PYTHON_TEST_POSTGRES_HOST: postgres
- steps:
- - uses: actions/checkout@v4.1.1
- - name: Setup Test Env
- run: |
- pip install coverage "tox>=3,<4"
- psql postgresql://postgres:sentry@postgres:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
- psql postgresql://postgres:sentry@postgres:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
- - name: Erase coverage
- run: |
- coverage erase
- - name: Test django py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-django" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test fastapi py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-fastapi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test flask py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-flask" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test starlette py27
+ - name: Test fastapi pinned
run: |
set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-starlette" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
+ ./scripts/runtox.sh --exclude-latest "py${{ matrix.python-version }}-fastapi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- name: Generate coverage XML
run: |
coverage combine .coverage*
@@ -216,7 +152,7 @@ jobs:
files: coverage.xml
check_required_tests:
name: All Web Frameworks 1 tests passed
- needs: [test-web_frameworks_1-pinned, test-web_frameworks_1-py27]
+ needs: test-web_frameworks_1-pinned
# Always run this, even if a dependent job failed
if: always()
runs-on: ubuntu-20.04
@@ -225,7 +161,3 @@ jobs:
if: contains(needs.test-web_frameworks_1-pinned.result, 'failure') || contains(needs.test-web_frameworks_1-pinned.result, 'skipped')
run: |
echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
- - name: Check for 2.7 failures
- if: contains(needs.test-web_frameworks_1-py27.result, 'failure') || contains(needs.test-web_frameworks_1-py27.result, 'skipped')
- run: |
- echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/.github/workflows/test-integrations-web-frameworks-2.yml b/.github/workflows/test-integrations-web-frameworks-2.yml
index aebac6d512..0d86487900 100644
--- a/.github/workflows/test-integrations-web-frameworks-2.yml
+++ b/.github/workflows/test-integrations-web-frameworks-2.yml
@@ -25,20 +25,20 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ["3.5","3.6","3.7","3.8","3.11","3.12"]
+ python-version: ["3.6","3.7","3.8","3.11","3.12"]
# python3.6 reached EOL and is no longer being supported on
# new versions of hosted runners on Github Actions
# ubuntu-20.04 is the last version that supported python3.6
# see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
os: [ubuntu-20.04]
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Setup Test Env
run: |
- pip install coverage "tox>=3,<4"
+ pip install coverage tox
- name: Erase coverage
run: |
coverage erase
@@ -93,20 +93,20 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ["3.5","3.6","3.7","3.8","3.9","3.11","3.12"]
+ python-version: ["3.6","3.7","3.8","3.9","3.11","3.12"]
# python3.6 reached EOL and is no longer being supported on
# new versions of hosted runners on Github Actions
# ubuntu-20.04 is the last version that supported python3.6
# see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
os: [ubuntu-20.04]
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Setup Test Env
run: |
- pip install coverage "tox>=3,<4"
+ pip install coverage tox
- name: Erase coverage
run: |
coverage erase
@@ -154,66 +154,9 @@ jobs:
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: coverage.xml
- test-web_frameworks_2-py27:
- name: Web Frameworks 2 (py27)
- timeout-minutes: 30
- runs-on: ubuntu-20.04
- container: python:2.7
- steps:
- - uses: actions/checkout@v4.1.1
- - name: Setup Test Env
- run: |
- pip install coverage "tox>=3,<4"
- - name: Erase coverage
- run: |
- coverage erase
- - name: Test aiohttp py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-aiohttp" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test asgi py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-asgi" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test bottle py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-bottle" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test falcon py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-falcon" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test pyramid py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-pyramid" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test quart py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-quart" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test sanic py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-sanic" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test starlite py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-starlite" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Test tornado py27
- run: |
- set -x # print commands that are executed
- ./scripts/runtox.sh --exclude-latest "py2.7-tornado" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- - name: Generate coverage XML
- run: |
- coverage combine .coverage*
- coverage xml -i
- - uses: codecov/codecov-action@v4
- with:
- token: ${{ secrets.CODECOV_TOKEN }}
- files: coverage.xml
check_required_tests:
name: All Web Frameworks 2 tests passed
- needs: [test-web_frameworks_2-pinned, test-web_frameworks_2-py27]
+ needs: test-web_frameworks_2-pinned
# Always run this, even if a dependent job failed
if: always()
runs-on: ubuntu-20.04
@@ -222,7 +165,3 @@ jobs:
if: contains(needs.test-web_frameworks_2-pinned.result, 'failure') || contains(needs.test-web_frameworks_2-pinned.result, 'skipped')
run: |
echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
- - name: Check for 2.7 failures
- if: contains(needs.test-web_frameworks_2-py27.result, 'failure') || contains(needs.test-web_frameworks_2-py27.result, 'skipped')
- run: |
- echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
diff --git a/CHANGELOG.md b/CHANGELOG.md
index aaf317cc81..16a3072db5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,195 @@
# Changelog
+## 2.0.1
+
+### Various fixes & improvements
+
+- Fix: Do not use convenience decorator (#3022) by @sentrivana
+- Refactoring propagation context (#2970) by @antonpirker
+- Use `pid` for test database name in Django tests (#2998) by @antonpirker
+- Remove outdated RC mention in docs (#3018) by @sentrivana
+- Delete inaccurate comment from docs (#3002) by @szokeasaurusrex
+- Add Lambda function that deletes test Lambda functions (#2960) by @antonpirker
+- Correct discarded transaction debug message (#3002) by @szokeasaurusrex
+- Add tests for discarded transaction debug messages (#3002) by @szokeasaurusrex
+- Fix comment typo in metrics (#2992) by @szokeasaurusrex
+- build(deps): bump actions/checkout from 4.1.1 to 4.1.4 (#3011) by @dependabot
+- build(deps): bump checkouts/data-schemas from `1e17eb5` to `4aa14a7` (#2997) by @dependabot
+
+## 2.0.0
+
+This is the first major update in a *long* time!
+
+We dropped support for some ancient languages and frameworks (Yes, Python 2.7 is no longer supported). Additionally we refactored a big part of the foundation of the SDK (how data inside the SDK is handled).
+
+We hope you like it!
+
+For a shorter version of what you need to do, to upgrade to Sentry SDK 2.0 see: https://docs.sentry.io/platforms/python/migration/1.x-to-2.x
+
+### New Features
+
+- Additional integrations will now be activated automatically if the SDK detects the respective package is installed: Ariadne, ARQ, asyncpg, Chalice, clickhouse-driver, GQL, Graphene, huey, Loguru, PyMongo, Quart, Starlite, Strawberry.
+- Added new API for custom instrumentation: `new_scope`, `isolation_scope`. See the [Deprecated](#deprecated) section to see how they map to the existing APIs.
+
+### Changed
+(These changes are all backwards-incompatible. **Breaking Change** (if you are just skimming for that phrase))
+
+- The Pyramid integration will not capture errors that might happen in `authenticated_userid()` in a custom `AuthenticationPolicy` class.
+- The method `need_code_loation` of the `MetricsAggregator` was renamed to `need_code_location`.
+- The `BackgroundWorker` thread used to process events was renamed from `raven-sentry.BackgroundWorker` to `sentry-sdk.BackgroundWorker`.
+- The `reraise` function was moved from `sentry_sdk._compat` to `sentry_sdk.utils`.
+- The `_ScopeManager` was moved from `sentry_sdk.hub` to `sentry_sdk.scope`.
+- Moved the contents of `tracing_utils_py3.py` to `tracing_utils.py`. The `start_child_span_decorator` is now in `sentry_sdk.tracing_utils`.
+- The actual implementation of `get_current_span` was moved to `sentry_sdk.tracing_utils`. `sentry_sdk.get_current_span` is still accessible as part of the top-level API.
+- `sentry_sdk.tracing_utils.add_query_source()`: Removed the `hub` parameter. It is not necessary anymore.
+- `sentry_sdk.tracing_utils.record_sql_queries()`: Removed the `hub` parameter. It is not necessary anymore.
+- `sentry_sdk.tracing_utils.get_current_span()` does now take a `scope` instead of a `hub` as parameter.
+- `sentry_sdk.tracing_utils.should_propagate_trace()` now takes a `Client` instead of a `Hub` as first parameter.
+- `sentry_sdk.utils.is_sentry_url()` now takes a `Client` instead of a `Hub` as first parameter.
+- `sentry_sdk.utils._get_contextvars` does not return a tuple with three values, but a tuple with two values. The `copy_context` was removed.
+- If you create a transaction manually and later mutate the transaction in a `configure_scope` block this does not work anymore. Here is a recipe on how to change your code to make it work:
+ Your existing implementation:
+ ```python
+ transaction = sentry_sdk.transaction(...)
+
+ # later in the code execution:
+
+ with sentry_sdk.configure_scope() as scope:
+ scope.set_transaction_name("new-transaction-name")
+ ```
+
+ needs to be changed to this:
+ ```python
+ transaction = sentry_sdk.transaction(...)
+
+ # later in the code execution:
+
+ scope = sentry_sdk.Scope.get_current_scope()
+ scope.set_transaction_name("new-transaction-name")
+ ```
+- The classes listed in the table below are now abstract base classes. Therefore, they can no longer be instantiated. Subclasses can only be instantiated if they implement all of the abstract methods.
+
+ Show table
+
+ | Class | Abstract methods |
+ | ------------------------------------- | -------------------------------------- |
+ | `sentry_sdk.integrations.Integration` | `setup_once` |
+ | `sentry_sdk.metrics.Metric` | `add`, `serialize_value`, and `weight` |
+ | `sentry_sdk.profiler.Scheduler` | `setup` and `teardown` |
+ | `sentry_sdk.transport.Transport` | `capture_envelope` |
+
+
+
+### Removed
+(These changes are all backwards-incompatible. **Breaking Change** (if you are just skimming for that phrase))
+
+- Removed support for Python 2 and Python 3.5. The SDK now requires at least Python 3.6.
+- Removed support for Celery 3.\*.
+- Removed support for Django 1.8, 1.9, 1.10.
+- Removed support for Flask 0.\*.
+- Removed support for gRPC < 1.39.
+- Removed support for Tornado < 6.
+- Removed `last_event_id()` top level API. The last event ID is still returned by `capture_event()`, `capture_exception()` and `capture_message()` but the top level API `sentry_sdk.last_event_id()` has been removed.
+- Removed support for sending events to the `/store` endpoint. Everything is now sent to the `/envelope` endpoint. If you're on SaaS you don't have to worry about this, but if you're running Sentry yourself you'll need version `20.6.0` or higher of self-hosted Sentry.
+- The deprecated `with_locals` configuration option was removed. Use `include_local_variables` instead. See https://docs.sentry.io/platforms/python/configuration/options/#include-local-variables.
+- The deprecated `request_bodies` configuration option was removed. Use `max_request_body_size`. See https://docs.sentry.io/platforms/python/configuration/options/#max-request-body-size.
+- Removed support for `user.segment`. It was also removed from the trace header as well as from the dynamic sampling context.
+- Removed support for the `install` method for custom integrations. Please use `setup_once` instead.
+- Removed `sentry_sdk.tracing.Span.new_span`. Use `sentry_sdk.tracing.Span.start_child` instead.
+- Removed `sentry_sdk.tracing.Transaction.new_span`. Use `sentry_sdk.tracing.Transaction.start_child` instead.
+- Removed support for creating transactions via `sentry_sdk.tracing.Span(transaction=...)`. To create a transaction, please use `sentry_sdk.tracing.Transaction(name=...)`.
+- Removed `sentry_sdk.utils.Auth.store_api_url`.
+- `sentry_sdk.utils.Auth.get_api_url` now accepts a `sentry_sdk.consts.EndpointType` enum instead of a string as its only parameter. We recommend omitting this argument when calling the function, since the parameter's default value is the only possible `sentry_sdk.consts.EndpointType` value. The parameter exists for future compatibility.
+- Removed `tracing_utils_py2.py`. The `start_child_span_decorator` is now in `sentry_sdk.tracing_utils`.
+- Removed the `sentry_sdk.profiler.Scheduler.stop_profiling` method. Any calls to this method can simply be removed, since this was a no-op method.
+
+### Deprecated
+
+- Using the `Hub` directly as well as using hub-based APIs has been deprecated. Where available, use [the top-level API instead](sentry_sdk/api.py); otherwise use the [scope API](sentry_sdk/scope.py) or the [client API](sentry_sdk/client.py).
+
+ Before:
+
+ ```python
+ with hub.start_span(...):
+ # do something
+ ```
+
+ After:
+
+ ```python
+ import sentry_sdk
+
+ with sentry_sdk.start_span(...):
+ # do something
+ ```
+
+- Hub cloning is deprecated.
+
+ Before:
+
+ ```python
+ with Hub(Hub.current) as hub:
+ # do something with the cloned hub
+ ```
+
+ After:
+
+ ```python
+ import sentry_sdk
+
+ with sentry_sdk.isolation_scope() as scope:
+ # do something with the forked scope
+ ```
+
+- `configure_scope` is deprecated. Use the new isolation scope directly via `Scope.get_isolation_scope()` instead.
+
+ Before:
+
+ ```python
+ with configure_scope() as scope:
+ # do something with `scope`
+ ```
+
+ After:
+
+ ```python
+ from sentry_sdk.scope import Scope
+
+ scope = Scope.get_isolation_scope()
+ # do something with `scope`
+ ```
+
+- `push_scope` is deprecated. Use the new `new_scope` context manager to fork the necessary scopes.
+
+ Before:
+
+ ```python
+ with push_scope() as scope:
+ # do something with `scope`
+ ```
+
+ After:
+
+ ```python
+ import sentry_sdk
+
+ with sentry_sdk.new_scope() as scope:
+ # do something with `scope`
+ ```
+
+- Accessing the client via the hub has been deprecated. Use the top-level `sentry_sdk.get_client()` to get the current client.
+- `profiler_mode` and `profiles_sample_rate` have been deprecated as `_experiments` options. Use them as top level options instead:
+ ```python
+ sentry_sdk.init(
+ ...,
+ profiler_mode="thread",
+ profiles_sample_rate=1.0,
+ )
+ ```
+- Deprecated `sentry_sdk.transport.Transport.capture_event`. Please use `sentry_sdk.transport.Transport.capture_envelope`, instead.
+- Passing a function to `sentry_sdk.init`'s `transport` keyword argument has been deprecated. If you wish to provide a custom transport, please pass a `sentry_sdk.transport.Transport` instance or a subclass.
+- The parameter `propagate_hub` in `ThreadingIntegration()` was deprecated and renamed to `propagate_scope`.
+
## 1.45.0
This is the final 1.x release for the forseeable future. Development will continue on the 2.x release line. The first 2.x version will be available in the next few weeks.
@@ -106,6 +296,7 @@ This is the final 1.x release for the forseeable future. Development will contin
- Fixed `Event | None` runtime `TypeError` (#2928) by @szokeasaurusrex
+
## 1.44.0
### Various fixes & improvements
@@ -145,6 +336,7 @@ This is the final 1.x release for the forseeable future. Development will contin
- Fixed OpenAI tests (#2834) by @antonpirker
- Bump `checkouts/data-schemas` from `ed078ed` to `8232f17` (#2832) by @dependabot
+
## 1.42.0
### Various fixes & improvements
diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md
new file mode 100644
index 0000000000..fd6e83e787
--- /dev/null
+++ b/MIGRATION_GUIDE.md
@@ -0,0 +1,190 @@
+# Sentry SDK 2.0 Migration Guide
+
+Looking to upgrade from Sentry SDK 1.x to 2.x? Here's a comprehensive list of what's changed. Looking for a more digestible summary? See the [guide in the docs](https://docs.sentry.io/platforms/python/migration/1.x-to-2.x) with the most common migration patterns.
+
+## New Features
+
+- Additional integrations will now be activated automatically if the SDK detects the respective package is installed: Ariadne, ARQ, asyncpg, Chalice, clickhouse-driver, GQL, Graphene, huey, Loguru, PyMongo, Quart, Starlite, Strawberry.
+- While refactoring the [inner workings](https://docs.sentry.io/platforms/python/enriching-events/scopes/) of the SDK we added new top-level APIs for custom instrumentation called `new_scope` and `isolation_scope`. See the [Deprecated](#deprecated) section to see how they map to the existing APIs.
+
+## Changed
+
+- The Pyramid integration will not capture errors that might happen in `authenticated_userid()` in a custom `AuthenticationPolicy` class.
+- The method `need_code_loation` of the `MetricsAggregator` was renamed to `need_code_location`.
+- The `BackgroundWorker` thread used to process events was renamed from `raven-sentry.BackgroundWorker` to `sentry-sdk.BackgroundWorker`.
+- The `reraise` function was moved from `sentry_sdk._compat` to `sentry_sdk.utils`.
+- The `_ScopeManager` was moved from `sentry_sdk.hub` to `sentry_sdk.scope`.
+- The signature for the metrics callback function set with `before_emit_metric` has changed from `before_emit_metric(key, tags)` to `before_emit_metric(key, value, unit, tags)`
+- Moved the contents of `tracing_utils_py3.py` to `tracing_utils.py`. The `start_child_span_decorator` is now in `sentry_sdk.tracing_utils`.
+- The actual implementation of `get_current_span` was moved to `sentry_sdk.tracing_utils`. `sentry_sdk.get_current_span` is still accessible as part of the top-level API.
+- `sentry_sdk.tracing_utils.add_query_source()`: Removed the `hub` parameter. It is not necessary anymore.
+- `sentry_sdk.tracing_utils.record_sql_queries()`: Removed the `hub` parameter. It is not necessary anymore.
+- `sentry_sdk.tracing_utils.get_current_span()` does now take a `scope` instead of a `hub` as parameter.
+- `sentry_sdk.tracing_utils.should_propagate_trace()` now takes a `Client` instead of a `Hub` as first parameter.
+- `sentry_sdk.utils.is_sentry_url()` now takes a `Client` instead of a `Hub` as first parameter.
+- `sentry_sdk.utils._get_contextvars` does not return a tuple with three values, but a tuple with two values. The `copy_context` was removed.
+- You no longer have to use `configure_scope` to mutate a transaction. Instead, you simply get the current scope to mutate the transaction. Here is a recipe on how to change your code to make it work:
+ Your existing implementation:
+ ```python
+ transaction = sentry_sdk.transaction(...)
+
+ # later in the code execution:
+
+ with sentry_sdk.configure_scope() as scope:
+ scope.set_transaction_name("new-transaction-name")
+ ```
+
+ needs to be changed to this:
+ ```python
+ transaction = sentry_sdk.transaction(...)
+
+ # later in the code execution:
+
+ scope = sentry_sdk.Scope.get_current_scope()
+ scope.set_transaction_name("new-transaction-name")
+ ```
+- The classes listed in the table below are now abstract base classes. Therefore, they can no longer be instantiated. Subclasses can only be instantiated if they implement all of the abstract methods.
+
+ Show table
+
+ | Class | Abstract methods |
+ | ------------------------------------- | -------------------------------------- |
+ | `sentry_sdk.integrations.Integration` | `setup_once` |
+ | `sentry_sdk.metrics.Metric` | `add`, `serialize_value`, and `weight` |
+ | `sentry_sdk.profiler.Scheduler` | `setup` and `teardown` |
+ | `sentry_sdk.transport.Transport` | `capture_envelope` |
+
+
+
+## Removed
+
+- Removed support for Python 2 and Python 3.5. The SDK now requires at least Python 3.6.
+- Removed support for Celery 3.\*.
+- Removed support for Django 1.8, 1.9, 1.10.
+- Removed support for Flask 0.\*.
+- Removed support for gRPC < 1.39.
+- Removed support for Tornado < 6.
+- Removed `last_event_id()` top level API. The last event ID is still returned by `capture_event()`, `capture_exception()` and `capture_message()` but the top level API `sentry_sdk.last_event_id()` has been removed.
+- Removed support for sending events to the `/store` endpoint. Everything is now sent to the `/envelope` endpoint. If you're on SaaS you don't have to worry about this, but if you're running Sentry yourself you'll need version `20.6.0` or higher of self-hosted Sentry.
+- The deprecated `with_locals` configuration option was removed. Use `include_local_variables` instead. See https://docs.sentry.io/platforms/python/configuration/options/#include-local-variables.
+- The deprecated `request_bodies` configuration option was removed. Use `max_request_body_size`. See https://docs.sentry.io/platforms/python/configuration/options/#max-request-body-size.
+- Removed support for `user.segment`. It was also removed from the trace header as well as from the dynamic sampling context.
+- Removed support for the `install` method for custom integrations. Please use `setup_once` instead.
+- Removed `sentry_sdk.tracing.Span.new_span`. Use `sentry_sdk.tracing.Span.start_child` instead.
+- Removed `sentry_sdk.tracing.Transaction.new_span`. Use `sentry_sdk.tracing.Transaction.start_child` instead.
+- Removed support for creating transactions via `sentry_sdk.tracing.Span(transaction=...)`. To create a transaction, please use `sentry_sdk.tracing.Transaction(name=...)`.
+- Removed `sentry_sdk.utils.Auth.store_api_url`.
+- `sentry_sdk.utils.Auth.get_api_url` now accepts a `sentry_sdk.consts.EndpointType` enum instead of a string as its only parameter. We recommend omitting this argument when calling the function, since the parameter's default value is the only possible `sentry_sdk.consts.EndpointType` value. The parameter exists for future compatibility.
+- Removed `tracing_utils_py2.py`. The `start_child_span_decorator` is now in `sentry_sdk.tracing_utils`.
+- Removed the `sentry_sdk.profiler.Scheduler.stop_profiling` method. Any calls to this method can simply be removed, since this was a no-op method.
+- Removed the experimental `metrics_summary_sample_rate` config option.
+- Removed the experimental `should_summarize_metric` config option.
+
+## Deprecated
+
+- Using the `Hub` directly as well as using hub-based APIs has been deprecated. Where available, use [the top-level API instead](sentry_sdk/api.py); otherwise use the [scope API](sentry_sdk/scope.py) or the [client API](sentry_sdk/client.py).
+
+ Before:
+
+ ```python
+ with hub.start_span(...):
+ # do something
+ ```
+
+ After:
+
+ ```python
+ import sentry_sdk
+
+ with sentry_sdk.start_span(...):
+ # do something
+ ```
+
+- Hub cloning is deprecated.
+
+ Before:
+
+ ```python
+ with Hub(Hub.current) as hub:
+ # do something with the cloned hub
+ ```
+
+ After:
+
+ ```python
+ import sentry_sdk
+
+ with sentry_sdk.isolation_scope() as scope:
+ # do something with the forked scope
+ ```
+
+- `configure_scope` is deprecated. Modify the current or isolation scope directly instead.
+
+ Before:
+
+ ```python
+ with configure_scope() as scope:
+ # do something with `scope`
+ ```
+
+ After:
+
+ ```python
+ from sentry_sdk.scope import Scope
+
+ scope = Scope.get_current_scope()
+ # do something with `scope`
+ ```
+
+ Or:
+
+ ```python
+ from sentry_sdk.scope import Scope
+
+ scope = Scope.get_isolation_scope()
+ # do something with `scope`
+ ```
+
+  When to use `get_current_scope()` and `get_isolation_scope()` depends on how long the change to the scope should be in effect. If you want the changed scope to affect the whole request-response cycle or the whole execution of a task, use the isolation scope. If it's more localized, use the current scope.
+
+- `push_scope` is deprecated. Fork the current or the isolation scope instead.
+
+ Before:
+
+ ```python
+ with push_scope() as scope:
+ # do something with `scope`
+ ```
+
+ After:
+
+ ```python
+ import sentry_sdk
+
+ with sentry_sdk.new_scope() as scope:
+ # do something with `scope`
+ ```
+
+ Or:
+
+ ```python
+ import sentry_sdk
+
+ with sentry_sdk.isolation_scope() as scope:
+ # do something with `scope`
+ ```
+
+  `new_scope()` will fork the current scope, while `isolation_scope()` will fork the isolation scope. The lifecycle of a single isolation scope roughly translates to the lifecycle of a transaction in most cases, so if you're looking to create a new separated scope for a whole request-response cycle or task execution, go for `isolation_scope()`. If you want to wrap a smaller unit of code, fork the current scope instead with `new_scope()`.
+
+- Accessing the client via the hub has been deprecated. Use the top-level `sentry_sdk.get_client()` to get the current client.
+- `profiler_mode` and `profiles_sample_rate` have been deprecated as `_experiments` options. Use them as top level options instead:
+ ```python
+ sentry_sdk.init(
+ ...,
+ profiler_mode="thread",
+ profiles_sample_rate=1.0,
+ )
+ ```
+- Deprecated `sentry_sdk.transport.Transport.capture_event`. Please use `sentry_sdk.transport.Transport.capture_envelope`, instead.
+- Passing a function to `sentry_sdk.init`'s `transport` keyword argument has been deprecated. If you wish to provide a custom transport, please pass a `sentry_sdk.transport.Transport` instance or a subclass.
+- The parameter `propagate_hub` in `ThreadingIntegration()` was deprecated and renamed to `propagate_scope`.
diff --git a/Makefile b/Makefile
index ac0ef51f5f..29f511e502 100644
--- a/Makefile
+++ b/Makefile
@@ -30,7 +30,7 @@ format: .venv
.PHONY: format
test: .venv
- @$(VENV_PATH)/bin/tox -e py3.9
+ @$(VENV_PATH)/bin/tox -e py3.12
.PHONY: test
test-all: .venv
diff --git a/README.md b/README.md
index e9d661eee8..130783c0e9 100644
--- a/README.md
+++ b/README.md
@@ -14,8 +14,6 @@ _Bad software is everywhere, and we're tired of it. Sentry is on a mission to he
This is the official Python SDK for [Sentry](http://sentry.io/)
----
-
## Getting Started
### Install
@@ -76,7 +74,13 @@ See [the documentation](https://docs.sentry.io/platforms/python/integrations/) f
- [Google Cloud Functions](https://docs.sentry.io/platforms/python/integrations/gcp-functions/)
-## Migrating From `raven-python`
+## Migrating
+
+### Migrating From `1.x` to `2.x`
+
+If you're on SDK version 1.x, we highly recommend updating to the 2.x major. To make the process easier we've prepared a [migration guide](https://docs.sentry.io/platforms/python/migration/) with the most common changes as well as a [detailed changelog](MIGRATION_GUIDE.md).
+
+### Migrating From `raven-python`
The old `raven-python` client has entered maintenance mode and was moved [here](https://github.com/getsentry/raven-python).
diff --git a/checkouts/data-schemas b/checkouts/data-schemas
index 1e17eb5472..4aa14a74b6 160000
--- a/checkouts/data-schemas
+++ b/checkouts/data-schemas
@@ -1 +1 @@
-Subproject commit 1e17eb54727a77681a1b9e845c9a5d55b52d35a1
+Subproject commit 4aa14a74b6a3c8e468af08acbe2cf3a7064151d4
diff --git a/docs/api.rst b/docs/api.rst
index f504bbb642..034652e05c 100644
--- a/docs/api.rst
+++ b/docs/api.rst
@@ -41,13 +41,18 @@ Distributed Tracing
.. autofunction:: sentry_sdk.api.get_traceparent
+Client Management
+=================
+
+.. autofunction:: sentry_sdk.api.is_initialized
+.. autofunction:: sentry_sdk.api.get_client
+
+
Managing Scope (advanced)
=========================
.. autofunction:: sentry_sdk.api.configure_scope
.. autofunction:: sentry_sdk.api.push_scope
+.. autofunction:: sentry_sdk.api.new_scope
-.. Not documented (On purpose. Not sure if anyone should use those)
-.. last_event_id()
-.. flush()
diff --git a/docs/apidocs.rst b/docs/apidocs.rst
index 855778484d..27c8ef2f73 100644
--- a/docs/apidocs.rst
+++ b/docs/apidocs.rst
@@ -11,6 +11,12 @@ API Docs
.. autoclass:: sentry_sdk.Client
:members:
+.. autoclass:: sentry_sdk.client.BaseClient
+ :members:
+
+.. autoclass:: sentry_sdk.client.NonRecordingClient
+ :members:
+
.. autoclass:: sentry_sdk.client._Client
:members:
diff --git a/docs/conf.py b/docs/conf.py
index 5383a64224..ae1ab934b3 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
import os
import sys
import typing
@@ -30,7 +28,7 @@
copyright = "2019-{}, Sentry Team and Contributors".format(datetime.now().year)
author = "Sentry Team and Contributors"
-release = "1.45.0"
+release = "2.0.1"
version = ".".join(release.split(".")[:2]) # The short X.Y version.
diff --git a/linter-requirements.txt b/linter-requirements.txt
index e86ffd506b..289df0cd7f 100644
--- a/linter-requirements.txt
+++ b/linter-requirements.txt
@@ -2,7 +2,7 @@ mypy
black
flake8==5.0.4 # flake8 depends on pyflakes>=3.0.0 and this dropped support for Python 2 "# type:" comments
types-certifi
-types-protobuf==4.24.0.20240408 # newer raises an error on mypy sentry_sdk
+types-protobuf
types-redis
types-setuptools
pymongo # There is no separate types module.
diff --git a/scripts/aws_lambda_functions/README.md b/scripts/aws_lambda_functions/README.md
new file mode 100644
index 0000000000..e07b445d5b
--- /dev/null
+++ b/scripts/aws_lambda_functions/README.md
@@ -0,0 +1,4 @@
+aws_lambda_functions
+====================
+
+In this directory you can place AWS Lambda functions that are used for administrative tasks (or whatever)
\ No newline at end of file
diff --git a/scripts/aws_lambda_functions/sentryPythonDeleteTestFunctions/README.md b/scripts/aws_lambda_functions/sentryPythonDeleteTestFunctions/README.md
new file mode 100644
index 0000000000..de1120a026
--- /dev/null
+++ b/scripts/aws_lambda_functions/sentryPythonDeleteTestFunctions/README.md
@@ -0,0 +1,13 @@
+sentryPythonDeleteTestFunctions
+===============================
+
+This AWS Lambda function deletes all AWS Lambda functions in the current AWS account that are prefixed with `test_`.
+The functions that are deleted are created by the GitHub Actions CI checks running on every PR of the `sentry-python` repository.
+
+The Lambda function has been deployed here:
+- AWS Account ID: `943013980633`
+- Region: `us-east-1`
+- Function ARN: `arn:aws:lambda:us-east-1:943013980633:function:sentryPythonDeleteTestFunctions`
+
+This function also emits Sentry Metrics and Sentry Crons checkins to the `sentry-python` project in the `Sentry SDKs` organisation on Sentry.io:
+https://sentry-sdks.sentry.io/projects/sentry-python/?project=5461230
\ No newline at end of file
diff --git a/scripts/aws_lambda_functions/sentryPythonDeleteTestFunctions/lambda_function.py b/scripts/aws_lambda_functions/sentryPythonDeleteTestFunctions/lambda_function.py
new file mode 100644
index 0000000000..1fc3994176
--- /dev/null
+++ b/scripts/aws_lambda_functions/sentryPythonDeleteTestFunctions/lambda_function.py
@@ -0,0 +1,55 @@
+import boto3
+import sentry_sdk
+
+
+monitor_slug = "python-sdk-aws-lambda-tests-cleanup"
+monitor_config = {
+ "schedule": {
+ "type": "crontab",
+ "value": "0 12 * * 0", # 12 o'clock on Sunday
+ },
+ "timezone": "UTC",
+ "checkin_margin": 2,
+ "max_runtime": 20,
+ "failure_issue_threshold": 1,
+ "recovery_threshold": 1,
+}
+
+
+@sentry_sdk.crons.monitor(monitor_slug=monitor_slug)
+def delete_lambda_functions(prefix="test_"):
+ """
+ Delete all AWS Lambda functions in the current account
+ where the function name matches the prefix
+ """
+ client = boto3.client("lambda", region_name="us-east-1")
+ functions_deleted = 0
+
+ functions_paginator = client.get_paginator("list_functions")
+ for functions_page in functions_paginator.paginate():
+ for func in functions_page["Functions"]:
+ function_name = func["FunctionName"]
+ if function_name.startswith(prefix):
+ try:
+ response = client.delete_function(
+ FunctionName=func["FunctionArn"],
+ )
+ functions_deleted += 1
+ except Exception as ex:
+ print(f"Got exception: {ex}")
+
+ return functions_deleted
+
+
+def lambda_handler(event, context):
+ functions_deleted = delete_lambda_functions()
+
+ sentry_sdk.metrics.gauge(
+ key="num_aws_functions_deleted",
+ value=functions_deleted,
+ )
+
+ return {
+ 'statusCode': 200,
+ 'body': f"{functions_deleted} AWS Lambda functions deleted successfully."
+ }
diff --git a/scripts/build_aws_lambda_layer.py b/scripts/build_aws_lambda_layer.py
index 8704e4de01..c2cb46f0bb 100644
--- a/scripts/build_aws_lambda_layer.py
+++ b/scripts/build_aws_lambda_layer.py
@@ -52,7 +52,7 @@ def install_python_packages(self):
sentry_python_sdk = os.path.join(
DIST_PATH,
- f"sentry_sdk-{SDK_VERSION}-py2.py3-none-any.whl", # this is generated by "make dist" that is called by "make aws-lamber-layer"
+ f"sentry_sdk-{SDK_VERSION}-py2.py3-none-any.whl", # this is generated by "make dist" that is called by "make aws-lambda-layer"
)
subprocess.run(
[
diff --git a/scripts/bump-version.sh b/scripts/bump-version.sh
index 74546f5d9f..7d4a817cf6 100755
--- a/scripts/bump-version.sh
+++ b/scripts/bump-version.sh
@@ -21,6 +21,6 @@ function replace() {
grep "$2" $3 # verify that replacement was successful
}
-replace "version=\"[0-9.]+\"" "version=\"$NEW_VERSION\"" ./setup.py
-replace "VERSION = \"[0-9.]+\"" "VERSION = \"$NEW_VERSION\"" ./sentry_sdk/consts.py
-replace "release = \"[0-9.]+\"" "release = \"$NEW_VERSION\"" ./docs/conf.py
+replace "version=\"$OLD_VERSION\"" "version=\"$NEW_VERSION\"" ./setup.py
+replace "VERSION = \"$OLD_VERSION\"" "VERSION = \"$NEW_VERSION\"" ./sentry_sdk/consts.py
+replace "release = \"$OLD_VERSION\"" "release = \"$NEW_VERSION\"" ./docs/conf.py
diff --git a/scripts/init_serverless_sdk.py b/scripts/init_serverless_sdk.py
index be545b680b..a4953ca9d7 100644
--- a/scripts/init_serverless_sdk.py
+++ b/scripts/init_serverless_sdk.py
@@ -49,8 +49,8 @@ def extract_and_load_lambda_function_module(self, module_path):
module_name = module_path.split(os.path.sep)[-1]
module_file_path = module_path + ".py"
- # Supported python versions are 2.7, 3.6, 3.7, 3.8
- if py_version >= (3, 5):
+ # Supported python versions are 3.6, 3.7, 3.8
+ if py_version >= (3, 6):
import importlib.util
spec = importlib.util.spec_from_file_location(
@@ -58,12 +58,6 @@ def extract_and_load_lambda_function_module(self, module_path):
)
self.lambda_function_module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(self.lambda_function_module)
- elif py_version[0] < 3:
- import imp
-
- self.lambda_function_module = imp.load_source(
- module_name, module_file_path
- )
else:
raise ValueError("Python version %s is not supported." % py_version)
else:
diff --git a/scripts/runtox.sh b/scripts/runtox.sh
index dbbb4f2e10..50da44dd53 100755
--- a/scripts/runtox.sh
+++ b/scripts/runtox.sh
@@ -40,4 +40,4 @@ if [ -z "${ENV}" ]; then
exit 0
fi
-exec $TOXPATH -vv -e "$ENV" -- "${@:2}"
+exec $TOXPATH -e "$ENV" -- "${@:2}"
diff --git a/scripts/split-tox-gh-actions/split-tox-gh-actions.py b/scripts/split-tox-gh-actions/split-tox-gh-actions.py
index 13b81283ca..6b456c5544 100755
--- a/scripts/split-tox-gh-actions/split-tox-gh-actions.py
+++ b/scripts/split-tox-gh-actions/split-tox-gh-actions.py
@@ -95,9 +95,9 @@
],
"Web Frameworks 1": [
"django",
- "fastapi",
"flask",
"starlette",
+ "fastapi",
],
"Web Frameworks 2": [
"aiohttp",
@@ -263,11 +263,6 @@ def render_template(group, frameworks, py_versions_pinned, py_versions_latest):
if py_versions_latest[framework]:
categories.add("latest")
py_versions["latest"] |= set(py_versions_latest[framework])
- if "2.7" in py_versions_pinned[framework]:
- categories.add("py27")
-
- py_versions["pinned"].discard("2.7")
- py_versions["latest"].discard("2.7")
context = {
"group": group,
diff --git a/scripts/split-tox-gh-actions/templates/check_permissions.jinja b/scripts/split-tox-gh-actions/templates/check_permissions.jinja
index 2b9eaa83f9..8100b60a7d 100644
--- a/scripts/split-tox-gh-actions/templates/check_permissions.jinja
+++ b/scripts/split-tox-gh-actions/templates/check_permissions.jinja
@@ -2,7 +2,7 @@
name: permissions check
runs-on: ubuntu-20.04
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
with:
persist-credentials: false
diff --git a/scripts/split-tox-gh-actions/templates/check_required.jinja b/scripts/split-tox-gh-actions/templates/check_required.jinja
index f5aa11212f..b9b0f54015 100644
--- a/scripts/split-tox-gh-actions/templates/check_required.jinja
+++ b/scripts/split-tox-gh-actions/templates/check_required.jinja
@@ -1,8 +1,6 @@
check_required_tests:
name: All {{ group }} tests passed
- {% if "pinned" in categories and "py27" in categories %}
- needs: [test-{{ group | replace(" ", "_") | lower }}-pinned, test-{{ group | replace(" ", "_") | lower }}-py27]
- {% elif "pinned" in categories %}
+ {% if "pinned" in categories %}
needs: test-{{ group | replace(" ", "_") | lower }}-pinned
{% endif %}
# Always run this, even if a dependent job failed
@@ -13,9 +11,3 @@
if: contains(needs.test-{{ lowercase_group }}-pinned.result, 'failure') || contains(needs.test-{{ lowercase_group }}-pinned.result, 'skipped')
run: |
echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
- {% if "py27" in categories %}
- - name: Check for 2.7 failures
- if: contains(needs.test-{{ lowercase_group }}-py27.result, 'failure') || contains(needs.test-{{ lowercase_group }}-py27.result, 'skipped')
- run: |
- echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1
- {% endif %}
diff --git a/scripts/split-tox-gh-actions/templates/test_group.jinja b/scripts/split-tox-gh-actions/templates/test_group.jinja
index 91a231cd98..be06276e9f 100644
--- a/scripts/split-tox-gh-actions/templates/test_group.jinja
+++ b/scripts/split-tox-gh-actions/templates/test_group.jinja
@@ -1,15 +1,6 @@
test-{{ lowercase_group }}-{{ category }}:
name: {{ group }} ({{ category }})
timeout-minutes: 30
-
- {% if needs_github_secrets %}
- needs: check-permissions
- {% endif %}
-
- {% if category == "py27" %}
- runs-on: ubuntu-20.04
- container: python:2.7
- {% else %}
runs-on: {% raw %}${{ matrix.os }}{% endraw %}
strategy:
fail-fast: false
@@ -20,6 +11,9 @@
# ubuntu-20.04 is the last version that supported python3.6
# see https://github.com/actions/setup-python/issues/544#issuecomment-1332535877
os: [ubuntu-20.04]
+
+ {% if needs_github_secrets %}
+ needs: check-permissions
{% endif %}
{% if needs_postgres %}
@@ -38,41 +32,30 @@
ports:
- 5432:5432
env:
+ SENTRY_PYTHON_TEST_POSTGRES_HOST: localhost
SENTRY_PYTHON_TEST_POSTGRES_USER: postgres
SENTRY_PYTHON_TEST_POSTGRES_PASSWORD: sentry
- SENTRY_PYTHON_TEST_POSTGRES_NAME: ci_test
- SENTRY_PYTHON_TEST_POSTGRES_HOST: {% if category == "py27" %}postgres{% else %}localhost{% endif %}
+
{% endif %}
steps:
- - uses: actions/checkout@v4.1.1
+ - uses: actions/checkout@v4.1.4
{% if needs_github_secrets %}
{% raw %}
with:
ref: ${{ github.event.pull_request.head.sha || github.ref }}
{% endraw %}
{% endif %}
- {% if category != "py27" %}
- uses: actions/setup-python@v5
with:
python-version: {% raw %}${{ matrix.python-version }}{% endraw %}
- {% endif %}
{% if needs_clickhouse %}
- uses: getsentry/action-clickhouse-in-ci@v1
{% endif %}
- name: Setup Test Env
run: |
- pip install coverage "tox>=3,<4"
- {% if needs_postgres %}
- {% if category == "py27" %}
- psql postgresql://postgres:sentry@postgres:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
- psql postgresql://postgres:sentry@postgres:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
- {% else %}
- psql postgresql://postgres:sentry@localhost:5432 -c "create database ${SENTRY_PYTHON_TEST_POSTGRES_NAME};" || true
- psql postgresql://postgres:sentry@localhost:5432 -c "grant all privileges on database ${SENTRY_PYTHON_TEST_POSTGRES_NAME} to ${SENTRY_PYTHON_TEST_POSTGRES_USER};" || true
- {% endif %}
- {% endif %}
+ pip install coverage tox
- name: Erase coverage
run: |
@@ -82,10 +65,7 @@
- name: Test {{ framework }} {{ category }}
run: |
set -x # print commands that are executed
-
- {% if category == "py27" %}
- ./scripts/runtox.sh --exclude-latest "py2.7-{{ framework }}" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
- {% elif category == "pinned" %}
+ {% if category == "pinned" %}
./scripts/runtox.sh --exclude-latest "{% raw %}py${{ matrix.python-version }}{% endraw %}-{{ framework }}" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
{% elif category == "latest" %}
./scripts/runtox.sh "{% raw %}py${{ matrix.python-version }}{% endraw %}-{{ framework }}-latest" --cov=tests --cov=sentry_sdk --cov-report= --cov-branch
diff --git a/sentry_sdk/__init__.py b/sentry_sdk/__init__.py
index 562da90739..6c44867476 100644
--- a/sentry_sdk/__init__.py
+++ b/sentry_sdk/__init__.py
@@ -18,28 +18,31 @@
"HttpTransport",
"init",
"integrations",
+ "trace",
# From sentry_sdk.api
+ "add_breadcrumb",
"capture_event",
- "capture_message",
"capture_exception",
- "add_breadcrumb",
+ "capture_message",
"configure_scope",
- "push_scope",
+ "continue_trace",
"flush",
- "last_event_id",
- "start_span",
- "start_transaction",
- "set_tag",
+ "get_baggage",
+ "get_client",
+ "get_current_span",
+ "get_traceparent",
+ "is_initialized",
+ "isolation_scope",
+ "new_scope",
+ "push_scope",
"set_context",
"set_extra",
- "set_user",
"set_level",
"set_measurement",
- "get_current_span",
- "get_traceparent",
- "get_baggage",
- "continue_trace",
- "trace",
+ "set_tag",
+ "set_user",
+ "start_span",
+ "start_transaction",
]
# Initialize the debug support after everything is loaded
diff --git a/sentry_sdk/_compat.py b/sentry_sdk/_compat.py
index 38872051ff..f7fd6903a4 100644
--- a/sentry_sdk/_compat.py
+++ b/sentry_sdk/_compat.py
@@ -1,134 +1,18 @@
import sys
-import contextlib
-from datetime import datetime, timedelta
-from functools import wraps
from sentry_sdk._types import TYPE_CHECKING
if TYPE_CHECKING:
- from typing import Optional
- from typing import Tuple
from typing import Any
- from typing import Type
from typing import TypeVar
- from typing import Callable
T = TypeVar("T")
-PY2 = sys.version_info[0] == 2
-PY33 = sys.version_info[0] == 3 and sys.version_info[1] >= 3
PY37 = sys.version_info[0] == 3 and sys.version_info[1] >= 7
PY310 = sys.version_info[0] == 3 and sys.version_info[1] >= 10
PY311 = sys.version_info[0] == 3 and sys.version_info[1] >= 11
-if PY2:
- import urlparse
-
- text_type = unicode # noqa
-
- string_types = (str, text_type)
- number_types = (int, long, float) # noqa
- int_types = (int, long) # noqa
- iteritems = lambda x: x.iteritems() # noqa: B301
- binary_sequence_types = (bytearray, memoryview)
-
- def datetime_utcnow():
- # type: () -> datetime
- return datetime.utcnow()
-
- def utc_from_timestamp(timestamp):
- # type: (float) -> datetime
- return datetime.utcfromtimestamp(timestamp)
-
- def duration_in_milliseconds(delta):
- # type: (timedelta) -> float
- seconds = delta.days * 24 * 60 * 60 + delta.seconds
- milliseconds = seconds * 1000 + float(delta.microseconds) / 1000
- return milliseconds
-
- def implements_str(cls):
- # type: (T) -> T
- cls.__unicode__ = cls.__str__
- cls.__str__ = lambda x: unicode(x).encode("utf-8") # noqa
- return cls
-
- # The line below is written as an "exec" because it triggers a syntax error in Python 3
- exec("def reraise(tp, value, tb=None):\n raise tp, value, tb")
-
- def contextmanager(func):
- # type: (Callable) -> Callable
- """
- Decorator which creates a contextmanager that can also be used as a
- decorator, similar to how the built-in contextlib.contextmanager
- function works in Python 3.2+.
- """
- contextmanager_func = contextlib.contextmanager(func)
-
- @wraps(func)
- class DecoratorContextManager:
- def __init__(self, *args, **kwargs):
- # type: (...) -> None
- self.the_contextmanager = contextmanager_func(*args, **kwargs)
-
- def __enter__(self):
- # type: () -> None
- self.the_contextmanager.__enter__()
-
- def __exit__(self, *args, **kwargs):
- # type: (...) -> None
- self.the_contextmanager.__exit__(*args, **kwargs)
-
- def __call__(self, decorated_func):
- # type: (Callable) -> Callable[...]
- @wraps(decorated_func)
- def when_called(*args, **kwargs):
- # type: (...) -> Any
- with self.the_contextmanager:
- return_val = decorated_func(*args, **kwargs)
- return return_val
-
- return when_called
-
- return DecoratorContextManager
-
-else:
- from datetime import timezone
- import urllib.parse as urlparse # noqa
-
- text_type = str
- string_types = (text_type,) # type: Tuple[type]
- number_types = (int, float) # type: Tuple[type, type]
- int_types = (int,)
- iteritems = lambda x: x.items()
- binary_sequence_types = (bytes, bytearray, memoryview)
-
- def datetime_utcnow():
- # type: () -> datetime
- return datetime.now(timezone.utc)
-
- def utc_from_timestamp(timestamp):
- # type: (float) -> datetime
- return datetime.fromtimestamp(timestamp, timezone.utc)
-
- def duration_in_milliseconds(delta):
- # type: (timedelta) -> float
- return delta / timedelta(milliseconds=1)
-
- def implements_str(x):
- # type: (T) -> T
- return x
-
- def reraise(tp, value, tb=None):
- # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[Any]) -> None
- assert value is not None
- if value.__traceback__ is not tb:
- raise value.with_traceback(tb)
- raise value
-
- # contextlib.contextmanager already can be used as decorator in Python 3.2+
- contextmanager = contextlib.contextmanager
-
def with_metaclass(meta, *bases):
# type: (Any, *Any) -> Any
diff --git a/sentry_sdk/_functools.py b/sentry_sdk/_functools.py
deleted file mode 100644
index 6bcc85f3b4..0000000000
--- a/sentry_sdk/_functools.py
+++ /dev/null
@@ -1,121 +0,0 @@
-"""
-A backport of Python 3 functools to Python 2/3. The only important change
-we rely upon is that `update_wrapper` handles AttributeError gracefully.
-
-Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation;
-
-All Rights Reserved
-
-
-PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
---------------------------------------------
-
-1. This LICENSE AGREEMENT is between the Python Software Foundation
-("PSF"), and the Individual or Organization ("Licensee") accessing and
-otherwise using this software ("Python") in source or binary form and
-its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, PSF hereby
-grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
-analyze, test, perform and/or display publicly, prepare derivative works,
-distribute, and otherwise use Python alone or in any derivative version,
-provided, however, that PSF's License Agreement and PSF's notice of copyright,
-i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020 Python Software Foundation;
-All Rights Reserved" are retained in Python alone or in any derivative version
-prepared by Licensee.
-
-3. In the event Licensee prepares a derivative work that is based on
-or incorporates Python or any part thereof, and wants to make
-the derivative work available to others as provided herein, then
-Licensee hereby agrees to include in any such work a brief summary of
-the changes made to Python.
-
-4. PSF is making Python available to Licensee on an "AS IS"
-basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
-IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
-DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
-FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
-INFRINGE ANY THIRD PARTY RIGHTS.
-
-5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
-FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
-A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
-OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
-
-6. This License Agreement will automatically terminate upon a material
-breach of its terms and conditions.
-
-7. Nothing in this License Agreement shall be deemed to create any
-relationship of agency, partnership, or joint venture between PSF and
-Licensee. This License Agreement does not grant permission to use PSF
-trademarks or trade name in a trademark sense to endorse or promote
-products or services of Licensee, or any third party.
-
-8. By copying, installing or otherwise using Python, Licensee
-agrees to be bound by the terms and conditions of this License
-Agreement.
-"""
-
-from functools import partial
-
-from sentry_sdk._types import TYPE_CHECKING
-
-if TYPE_CHECKING:
- from typing import Any
- from typing import Callable
-
-
-WRAPPER_ASSIGNMENTS = (
- "__module__",
- "__name__",
- "__qualname__",
- "__doc__",
- "__annotations__",
-)
-WRAPPER_UPDATES = ("__dict__",)
-
-
-def update_wrapper(
- wrapper, wrapped, assigned=WRAPPER_ASSIGNMENTS, updated=WRAPPER_UPDATES
-):
- # type: (Any, Any, Any, Any) -> Any
- """Update a wrapper function to look like the wrapped function
-
- wrapper is the function to be updated
- wrapped is the original function
- assigned is a tuple naming the attributes assigned directly
- from the wrapped function to the wrapper function (defaults to
- functools.WRAPPER_ASSIGNMENTS)
- updated is a tuple naming the attributes of the wrapper that
- are updated with the corresponding attribute from the wrapped
- function (defaults to functools.WRAPPER_UPDATES)
- """
- for attr in assigned:
- try:
- value = getattr(wrapped, attr)
- except AttributeError:
- pass
- else:
- setattr(wrapper, attr, value)
- for attr in updated:
- getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
- # Issue #17482: set __wrapped__ last so we don't inadvertently copy it
- # from the wrapped function when updating __dict__
- wrapper.__wrapped__ = wrapped
- # Return the wrapper so this can be used as a decorator via partial()
- return wrapper
-
-
-def wraps(wrapped, assigned=WRAPPER_ASSIGNMENTS, updated=WRAPPER_UPDATES):
- # type: (Callable[..., Any], Any, Any) -> Callable[[Callable[..., Any]], Callable[..., Any]]
- """Decorator factory to apply update_wrapper() to a wrapper function
-
- Returns a decorator that invokes update_wrapper() with the decorated
- function as the wrapper argument and the arguments to wraps() as the
- remaining arguments. Default arguments are as for update_wrapper().
- This is a convenience function to simplify applying partial() to
- update_wrapper().
- """
- return partial(update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated)
diff --git a/sentry_sdk/_lru_cache.py b/sentry_sdk/_lru_cache.py
index 91cf55d09a..37e86e5fe3 100644
--- a/sentry_sdk/_lru_cache.py
+++ b/sentry_sdk/_lru_cache.py
@@ -72,7 +72,7 @@
VALUE = 3
-class LRUCache(object):
+class LRUCache:
def __init__(self, max_size):
assert max_size > 0
diff --git a/sentry_sdk/_queue.py b/sentry_sdk/_queue.py
index 129b6e58a6..056d576fbe 100644
--- a/sentry_sdk/_queue.py
+++ b/sentry_sdk/_queue.py
@@ -94,7 +94,7 @@ class FullError(Exception):
pass
-class Queue(object):
+class Queue:
"""Create a queue object with a given maximum size.
If maxsize is <= 0, the queue size is infinite.
diff --git a/sentry_sdk/_types.py b/sentry_sdk/_types.py
index 368db17138..1577dbde4f 100644
--- a/sentry_sdk/_types.py
+++ b/sentry_sdk/_types.py
@@ -117,7 +117,6 @@
"monitor",
]
SessionStatus = Literal["ok", "exited", "crashed", "abnormal"]
- EndpointType = Literal["store", "envelope"]
DurationUnit = Literal[
"nanosecond",
diff --git a/sentry_sdk/_werkzeug.py b/sentry_sdk/_werkzeug.py
index 197c5c19b1..3f6b6b06a4 100644
--- a/sentry_sdk/_werkzeug.py
+++ b/sentry_sdk/_werkzeug.py
@@ -32,8 +32,6 @@
SUCH DAMAGE.
"""
-from sentry_sdk._compat import iteritems
-
from sentry_sdk._types import TYPE_CHECKING
if TYPE_CHECKING:
@@ -54,7 +52,7 @@ def _get_headers(environ):
"""
Returns only proper HTTP headers.
"""
- for key, value in iteritems(environ):
+ for key, value in environ.items():
key = str(key)
if key.startswith("HTTP_") and key not in (
"HTTP_CONTENT_TYPE",
diff --git a/sentry_sdk/api.py b/sentry_sdk/api.py
index 3148c43f1a..f00ed9f96a 100644
--- a/sentry_sdk/api.py
+++ b/sentry_sdk/api.py
@@ -1,13 +1,16 @@
import inspect
+from contextlib import contextmanager
+from sentry_sdk import tracing_utils, Client
from sentry_sdk._types import TYPE_CHECKING
-from sentry_sdk.hub import Hub
-from sentry_sdk.scope import Scope
+from sentry_sdk.consts import INSTRUMENTER
+from sentry_sdk.scope import Scope, _ScopeManager, new_scope, isolation_scope
from sentry_sdk.tracing import NoOpSpan, Transaction
if TYPE_CHECKING:
from typing import Any
from typing import Dict
+ from typing import Generator
from typing import Optional
from typing import overload
from typing import Callable
@@ -15,6 +18,9 @@
from typing import ContextManager
from typing import Union
+ from typing_extensions import Unpack
+
+ from sentry_sdk.client import BaseClient
from sentry_sdk._types import (
Event,
Hint,
@@ -23,8 +29,9 @@
ExcInfo,
MeasurementUnit,
LogLevelStr,
+ SamplingContext,
)
- from sentry_sdk.tracing import Span
+ from sentry_sdk.tracing import Span, TransactionKwargs
T = TypeVar("T")
F = TypeVar("F", bound=Callable[..., Any])
@@ -37,87 +44,116 @@ def overload(x):
# When changing this, update __all__ in __init__.py too
__all__ = [
+ "add_breadcrumb",
"capture_event",
- "capture_message",
"capture_exception",
- "add_breadcrumb",
+ "capture_message",
"configure_scope",
- "push_scope",
+ "continue_trace",
"flush",
- "last_event_id",
- "start_span",
- "start_transaction",
- "set_tag",
+ "get_baggage",
+ "get_client",
+ "get_current_span",
+ "get_traceparent",
+ "is_initialized",
+ "isolation_scope",
+ "new_scope",
+ "push_scope",
"set_context",
"set_extra",
- "set_user",
"set_level",
"set_measurement",
- "get_current_span",
- "get_traceparent",
- "get_baggage",
- "continue_trace",
+ "set_tag",
+ "set_user",
+ "start_span",
+ "start_transaction",
]
-def hubmethod(f):
+def scopemethod(f):
# type: (F) -> F
f.__doc__ = "%s\n\n%s" % (
- "Alias for :py:meth:`sentry_sdk.Hub.%s`" % f.__name__,
- inspect.getdoc(getattr(Hub, f.__name__)),
+ "Alias for :py:meth:`sentry_sdk.Scope.%s`" % f.__name__,
+ inspect.getdoc(getattr(Scope, f.__name__)),
)
return f
-def scopemethod(f):
+def clientmethod(f):
# type: (F) -> F
f.__doc__ = "%s\n\n%s" % (
- "Alias for :py:meth:`sentry_sdk.Scope.%s`" % f.__name__,
- inspect.getdoc(getattr(Scope, f.__name__)),
+ "Alias for :py:meth:`sentry_sdk.Client.%s`" % f.__name__,
+ inspect.getdoc(getattr(Client, f.__name__)),
)
return f
-@hubmethod
+def is_initialized():
+ # type: () -> bool
+ """
+ .. versionadded:: 2.0.0
+
+ Returns whether Sentry has been initialized or not.
+
+ If a client is available and the client is active
+ (meaning it is configured to send data) then
+ Sentry is initialized.
+ """
+ return Scope.get_client().is_active()
+
+
+@scopemethod
+def get_client():
+ # type: () -> BaseClient
+ return Scope.get_client()
+
+
+@scopemethod
def capture_event(
event, # type: Event
hint=None, # type: Optional[Hint]
scope=None, # type: Optional[Any]
- **scope_kwargs # type: Any
+ **scope_kwargs, # type: Any
):
# type: (...) -> Optional[str]
- return Hub.current.capture_event(event, hint, scope=scope, **scope_kwargs)
+ return Scope.get_current_scope().capture_event(
+ event, hint, scope=scope, **scope_kwargs
+ )
-@hubmethod
+@scopemethod
def capture_message(
message, # type: str
level=None, # type: Optional[LogLevelStr]
scope=None, # type: Optional[Any]
- **scope_kwargs # type: Any
+ **scope_kwargs, # type: Any
):
# type: (...) -> Optional[str]
- return Hub.current.capture_message(message, level, scope=scope, **scope_kwargs)
+ return Scope.get_current_scope().capture_message(
+ message, level, scope=scope, **scope_kwargs
+ )
-@hubmethod
+@scopemethod
def capture_exception(
error=None, # type: Optional[Union[BaseException, ExcInfo]]
scope=None, # type: Optional[Any]
- **scope_kwargs # type: Any
+ **scope_kwargs, # type: Any
):
# type: (...) -> Optional[str]
- return Hub.current.capture_exception(error, scope=scope, **scope_kwargs)
+ return Scope.get_current_scope().capture_exception(
+ error, scope=scope, **scope_kwargs
+ )
-@hubmethod
+@scopemethod
def add_breadcrumb(
crumb=None, # type: Optional[Breadcrumb]
hint=None, # type: Optional[BreadcrumbHint]
- **kwargs # type: Any
+ **kwargs, # type: Any
):
# type: (...) -> None
- return Hub.current.add_breadcrumb(crumb, hint, **kwargs)
+ return Scope.get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs)
@overload
@@ -134,12 +170,32 @@ def configure_scope( # noqa: F811
pass
-@hubmethod
def configure_scope( # noqa: F811
callback=None, # type: Optional[Callable[[Scope], None]]
):
# type: (...) -> Optional[ContextManager[Scope]]
- return Hub.current.configure_scope(callback)
+ """
+ Reconfigures the scope.
+
+ :param callback: If provided, call the callback with the current scope.
+
+ :returns: If no callback is provided, returns a context manager that returns the scope.
+ """
+ scope = Scope.get_isolation_scope()
+ scope.generate_propagation_context()
+
+ if callback is not None:
+ # TODO: used to return None when client is None. Check if this changes behavior.
+ callback(scope)
+
+ return None
+
+ @contextmanager
+ def inner():
+ # type: () -> Generator[Scope, None, None]
+ yield scope
+
+ return inner()
@overload
@@ -156,95 +212,130 @@ def push_scope( # noqa: F811
pass
-@hubmethod
def push_scope( # noqa: F811
callback=None, # type: Optional[Callable[[Scope], None]]
):
# type: (...) -> Optional[ContextManager[Scope]]
- return Hub.current.push_scope(callback)
+ """
+ Pushes a new layer on the scope stack.
+
+ :param callback: If provided, this method pushes a scope, calls
+ `callback`, and pops the scope again.
+
+ :returns: If no `callback` is provided, a context manager that should
+ be used to pop the scope again.
+ """
+ if callback is not None:
+ with push_scope() as scope:
+ callback(scope)
+ return None
+
+ return _ScopeManager()
@scopemethod
def set_tag(key, value):
# type: (str, Any) -> None
- return Hub.current.scope.set_tag(key, value)
+ return Scope.get_isolation_scope().set_tag(key, value)
@scopemethod
def set_context(key, value):
# type: (str, Dict[str, Any]) -> None
- return Hub.current.scope.set_context(key, value)
+ return Scope.get_isolation_scope().set_context(key, value)
@scopemethod
def set_extra(key, value):
# type: (str, Any) -> None
- return Hub.current.scope.set_extra(key, value)
+ return Scope.get_isolation_scope().set_extra(key, value)
@scopemethod
def set_user(value):
# type: (Optional[Dict[str, Any]]) -> None
- return Hub.current.scope.set_user(value)
+ return Scope.get_isolation_scope().set_user(value)
@scopemethod
def set_level(value):
# type: (LogLevelStr) -> None
- return Hub.current.scope.set_level(value)
+ return Scope.get_isolation_scope().set_level(value)
-@hubmethod
+@clientmethod
def flush(
timeout=None, # type: Optional[float]
callback=None, # type: Optional[Callable[[int, float], None]]
):
# type: (...) -> None
- return Hub.current.flush(timeout=timeout, callback=callback)
+ return Scope.get_client().flush(timeout=timeout, callback=callback)
-@hubmethod
-def last_event_id():
- # type: () -> Optional[str]
-
- return Hub.current.last_event_id()
-
-
-@hubmethod
+@scopemethod
def start_span(
- span=None, # type: Optional[Span]
- **kwargs # type: Any
+ **kwargs, # type: Any
):
# type: (...) -> Span
- return Hub.current.start_span(span=span, **kwargs)
+ return Scope.get_current_scope().start_span(**kwargs)
-@hubmethod
+@scopemethod
def start_transaction(
transaction=None, # type: Optional[Transaction]
- **kwargs # type: Any
+ instrumenter=INSTRUMENTER.SENTRY, # type: str
+ custom_sampling_context=None, # type: Optional[SamplingContext]
+ **kwargs, # type: Unpack[TransactionKwargs]
):
# type: (...) -> Union[Transaction, NoOpSpan]
- return Hub.current.start_transaction(transaction, **kwargs)
+ """
+ Start and return a transaction on the current scope.
+
+ Start an existing transaction if given, otherwise create and start a new
+ transaction with kwargs.
+
+ This is the entry point to manual tracing instrumentation.
+
+ A tree structure can be built by adding child spans to the transaction,
+ and child spans to other spans. To start a new child span within the
+ transaction or any span, call the respective `.start_child()` method.
+
+ Every child span must be finished before the transaction is finished,
+ otherwise the unfinished spans are discarded.
+
+ When used as context managers, spans and transactions are automatically
+ finished at the end of the `with` block. If not using context managers,
+ call the `.finish()` method.
+
+ When the transaction is finished, it will be sent to Sentry with all its
+ finished child spans.
+
+ :param transaction: The transaction to start. If omitted, we create and
+ start a new transaction.
+ :param instrumenter: This parameter is meant for internal use only.
+ :param custom_sampling_context: The transaction's custom sampling context.
+ :param kwargs: Optional keyword arguments to be passed to the Transaction
+ constructor. See :py:class:`sentry_sdk.tracing.Transaction` for
+ available arguments.
+ """
+ return Scope.get_current_scope().start_transaction(
+ transaction, instrumenter, custom_sampling_context, **kwargs
+ )
def set_measurement(name, value, unit=""):
# type: (str, float, MeasurementUnit) -> None
- transaction = Hub.current.scope.transaction
+ transaction = Scope.get_current_scope().transaction
if transaction is not None:
transaction.set_measurement(name, value, unit)
-def get_current_span(hub=None):
- # type: (Optional[Hub]) -> Optional[Span]
+def get_current_span(scope=None):
+ # type: (Optional[Scope]) -> Optional[Span]
"""
Returns the currently active span if there is one running, otherwise `None`
"""
- if hub is None:
- hub = Hub.current
-
- current_span = hub.scope.span
- return current_span
+ return tracing_utils.get_current_span(scope)
def get_traceparent():
@@ -252,7 +343,7 @@ def get_traceparent():
"""
Returns the traceparent either from the active span or from the scope.
"""
- return Hub.current.get_traceparent()
+ return Scope.get_current_scope().get_traceparent()
def get_baggage():
@@ -260,7 +351,11 @@ def get_baggage():
"""
Returns Baggage either from the active span or from the scope.
"""
- return Hub.current.get_baggage()
+ baggage = Scope.get_current_scope().get_baggage()
+ if baggage is not None:
+ return baggage.serialize()
+
+ return None
def continue_trace(environ_or_headers, op=None, name=None, source=None):
@@ -268,4 +363,6 @@ def continue_trace(environ_or_headers, op=None, name=None, source=None):
"""
Sets the propagation context from environment or headers and returns a transaction.
"""
- return Hub.current.continue_trace(environ_or_headers, op, name, source)
+ return Scope.get_isolation_scope().continue_trace(
+ environ_or_headers, op, name, source
+ )
diff --git a/sentry_sdk/attachments.py b/sentry_sdk/attachments.py
index c15afd447b..6bb8a61514 100644
--- a/sentry_sdk/attachments.py
+++ b/sentry_sdk/attachments.py
@@ -8,7 +8,7 @@
from typing import Optional, Union, Callable
-class Attachment(object):
+class Attachment:
def __init__(
self,
bytes=None, # type: Union[None, bytes, Callable[[], bytes]]
diff --git a/sentry_sdk/client.py b/sentry_sdk/client.py
index 296de71804..dc31e5ce1b 100644
--- a/sentry_sdk/client.py
+++ b/sentry_sdk/client.py
@@ -1,22 +1,12 @@
-try:
- from collections.abc import Mapping
-except ImportError:
- from collections import Mapping # type: ignore[attr-defined]
-
-from importlib import import_module
import os
import uuid
import random
import socket
+from collections.abc import Mapping
+from datetime import datetime, timezone
+from importlib import import_module
-from sentry_sdk._compat import (
- PY37,
- datetime_utcnow,
- string_types,
- text_type,
- iteritems,
- check_uwsgi_thread_support,
-)
+from sentry_sdk._compat import PY37, check_uwsgi_thread_support
from sentry_sdk.utils import (
capture_internal_exceptions,
current_stacktrace,
@@ -30,7 +20,7 @@
logger,
)
from sentry_sdk.serializer import serialize
-from sentry_sdk.tracing import trace, has_tracing_enabled
+from sentry_sdk.tracing import trace
from sentry_sdk.transport import HttpTransport, make_transport
from sentry_sdk.consts import (
DEFAULT_MAX_VALUE_LENGTH,
@@ -59,10 +49,12 @@
from typing import Type
from typing import Union
+ from sentry_sdk._types import Event, Hint
from sentry_sdk.integrations import Integration
+ from sentry_sdk.metrics import MetricsAggregator
from sentry_sdk.scope import Scope
- from sentry_sdk._types import Event, Hint
from sentry_sdk.session import Session
+ from sentry_sdk.transport import Transport
_client_init_debug = ContextVar("client_init_debug")
@@ -77,7 +69,7 @@
def _get_options(*args, **kwargs):
# type: (*Optional[str], **Any) -> Dict[str, Any]
- if args and (isinstance(args[0], (text_type, bytes, str)) or args[0] is None):
+ if args and (isinstance(args[0], (bytes, str)) or args[0] is None):
dsn = args[0] # type: Optional[str]
args = args[1:]
else:
@@ -91,28 +83,8 @@ def _get_options(*args, **kwargs):
if dsn is not None and options.get("dsn") is None:
options["dsn"] = dsn
- for key, value in iteritems(options):
+ for key, value in options.items():
if key not in rv:
- # Option "with_locals" was renamed to "include_local_variables"
- if key == "with_locals":
- msg = (
- "Deprecated: The option 'with_locals' was renamed to 'include_local_variables'. "
- "Please use 'include_local_variables'. The option 'with_locals' will be removed in the future."
- )
- logger.warning(msg)
- rv["include_local_variables"] = value
- continue
-
- # Option "request_bodies" was renamed to "max_request_body_size"
- if key == "request_bodies":
- msg = (
- "Deprecated: The option 'request_bodies' was renamed to 'max_request_body_size'. "
- "Please use 'max_request_body_size'. The option 'request_bodies' will be removed in the future."
- )
- logger.warning(msg)
- rv["max_request_body_size"] = value
- continue
-
raise TypeError("Unknown option %r" % (key,))
rv[key] = value
@@ -170,19 +142,101 @@ def _get_options(*args, **kwargs):
module_not_found_error = ImportError # type: ignore
-class _Client(object):
- """The client is internally responsible for capturing the events and
+class BaseClient:
+ """
+ .. versionadded:: 2.0.0
+
+ The basic definition of a client that is used for sending data to Sentry.
+ """
+
+ def __init__(self, options=None):
+ # type: (Optional[Dict[str, Any]]) -> None
+ self.options = (
+ options if options is not None else DEFAULT_OPTIONS
+ ) # type: Dict[str, Any]
+
+ self.transport = None # type: Optional[Transport]
+ self.monitor = None # type: Optional[Monitor]
+ self.metrics_aggregator = None # type: Optional[MetricsAggregator]
+
+ def __getstate__(self, *args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ return {"options": {}}
+
+ def __setstate__(self, *args, **kwargs):
+ # type: (*Any, **Any) -> None
+ pass
+
+ @property
+ def dsn(self):
+ # type: () -> Optional[str]
+ return None
+
+ def should_send_default_pii(self):
+ # type: () -> bool
+ return False
+
+ def is_active(self):
+ # type: () -> bool
+ """
+ .. versionadded:: 2.0.0
+
+ Returns whether the client is active (able to send data to Sentry)
+ """
+ return False
+
+ def capture_event(self, *args, **kwargs):
+ # type: (*Any, **Any) -> Optional[str]
+ return None
+
+ def capture_session(self, *args, **kwargs):
+ # type: (*Any, **Any) -> None
+ return None
+
+ def get_integration(self, *args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ return None
+
+ def close(self, *args, **kwargs):
+ # type: (*Any, **Any) -> None
+ return None
+
+ def flush(self, *args, **kwargs):
+ # type: (*Any, **Any) -> None
+ return None
+
+ def __enter__(self):
+ # type: () -> BaseClient
+ return self
+
+ def __exit__(self, exc_type, exc_value, tb):
+ # type: (Any, Any, Any) -> None
+ return None
+
+
+class NonRecordingClient(BaseClient):
+ """
+ .. versionadded:: 2.0.0
+
+ A client that does not send any events to Sentry. This is used as a fallback when the Sentry SDK is not yet initialized.
+ """
+
+ pass
+
+
+class _Client(BaseClient):
+ """
+ The client is internally responsible for capturing the events and
forwarding them to sentry through the configured transport. It takes
the client options as keyword arguments and optionally the DSN as first
argument.
- Alias of :py:class:`Client`. (Was created for better intelisense support)
+    Alias of :py:class:`sentry_sdk.Client`. (Was created for better IntelliSense support)
"""
def __init__(self, *args, **kwargs):
# type: (*Any, **Any) -> None
- self.options = get_options(*args, **kwargs) # type: Dict[str, Any]
-
+ super(_Client, self).__init__(options=get_options(*args, **kwargs))
self._init_impl()
def __getstate__(self):
@@ -340,6 +394,24 @@ def _capture_envelope(envelope):
# need to check if it's safe to use them.
check_uwsgi_thread_support()
+ def is_active(self):
+ # type: () -> bool
+ """
+ .. versionadded:: 2.0.0
+
+ Returns whether the client is active (able to send data to Sentry)
+ """
+ return True
+
+ def should_send_default_pii(self):
+ # type: () -> bool
+ """
+ .. versionadded:: 2.0.0
+
+ Returns whether the client should send default PII (Personally Identifiable Information) data to Sentry.
+ """
+ return self.options.get("send_default_pii", False)
+
@property
def dsn(self):
# type: () -> Optional[str]
@@ -355,7 +427,7 @@ def _prepare_event(
# type: (...) -> Optional[Event]
if event.get("timestamp") is None:
- event["timestamp"] = datetime_utcnow()
+ event["timestamp"] = datetime.now(timezone.utc)
if scope is not None:
is_transaction = event.get("type") == "transaction"
@@ -398,7 +470,7 @@ def _prepare_event(
for key in "release", "environment", "server_name", "dist":
if event.get(key) is None and self.options[key] is not None:
- event[key] = text_type(self.options[key]).strip() # type: ignore[literal-required]
+ event[key] = str(self.options[key]).strip() # type: ignore[literal-required]
if event.get("sdk") is None:
sdk_info = dict(SDK_INFO)
sdk_info["integrations"] = sorted(self.integrations.keys())
@@ -477,7 +549,7 @@ def _is_ignored_error(self, event, hint):
for ignored_error in self.options["ignore_errors"]:
# String types are matched against the type name in the
# exception only
- if isinstance(ignored_error, string_types):
+ if isinstance(ignored_error, str):
if ignored_error == error_full_name or ignored_error == error_type_name:
return True
else:
@@ -580,7 +652,8 @@ def _update_session_from_event(
if session.user_agent is None:
headers = (event.get("request") or {}).get("headers")
- for k, v in iteritems(headers or {}):
+ headers_dict = headers if isinstance(headers, dict) else {}
+ for k, v in headers_dict.items():
if k.lower() == "user-agent":
user_agent = v
break
@@ -606,7 +679,6 @@ def capture_event(
:param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or a HTTP request object.
:param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
- The `scope` and `scope_kwargs` parameters are mutually exclusive.
:returns: An event ID. May be `None` if there is no DSN set or of if the SDK decided to discard the event for other reasons. In such situations setting `debug=True` on `init()` may help.
"""
@@ -645,58 +717,40 @@ def capture_event(
):
return None
- tracing_enabled = has_tracing_enabled(self.options)
attachments = hint.get("attachments")
trace_context = event_opt.get("contexts", {}).get("trace") or {}
dynamic_sampling_context = trace_context.pop("dynamic_sampling_context", {})
- # If tracing is enabled all events should go to /envelope endpoint.
- # If no tracing is enabled only transactions, events with attachments, and checkins should go to the /envelope endpoint.
- should_use_envelope_endpoint = (
- tracing_enabled
- or is_transaction
- or is_checkin
- or bool(attachments)
- or bool(self.spotlight)
- )
- if should_use_envelope_endpoint:
- headers = {
- "event_id": event_opt["event_id"],
- "sent_at": format_timestamp(datetime_utcnow()),
- } # type: dict[str, object]
-
- if dynamic_sampling_context:
- headers["trace"] = dynamic_sampling_context
-
- envelope = Envelope(headers=headers)
-
- if is_transaction:
- if isinstance(profile, Profile):
- envelope.add_profile(profile.to_json(event_opt, self.options))
- envelope.add_transaction(event_opt)
- elif is_checkin:
- envelope.add_checkin(event_opt)
- else:
- envelope.add_event(event_opt)
+ headers = {
+ "event_id": event_opt["event_id"],
+ "sent_at": format_timestamp(datetime.now(timezone.utc)),
+ } # type: dict[str, object]
- for attachment in attachments or ():
- envelope.add_item(attachment.to_envelope_item())
+ if dynamic_sampling_context:
+ headers["trace"] = dynamic_sampling_context
- if self.spotlight:
- self.spotlight.capture_envelope(envelope)
+ envelope = Envelope(headers=headers)
- if self.transport is None:
- return None
+ if is_transaction:
+ if isinstance(profile, Profile):
+ envelope.add_profile(profile.to_json(event_opt, self.options))
+ envelope.add_transaction(event_opt)
+ elif is_checkin:
+ envelope.add_checkin(event_opt)
+ else:
+ envelope.add_event(event_opt)
- self.transport.capture_envelope(envelope)
+ for attachment in attachments or ():
+ envelope.add_item(attachment.to_envelope_item())
- else:
- if self.transport is None:
- return None
+ if self.spotlight:
+ self.spotlight.capture_envelope(envelope)
+
+ if self.transport is None:
+ return None
- # All other events go to the legacy /store/ endpoint (will be removed in the future).
- self.transport.capture_event(event_opt)
+ self.transport.capture_envelope(envelope)
return event_id
diff --git a/sentry_sdk/consts.py b/sentry_sdk/consts.py
index 1cf37211e1..b72701daed 100644
--- a/sentry_sdk/consts.py
+++ b/sentry_sdk/consts.py
@@ -1,8 +1,21 @@
+from enum import Enum
from sentry_sdk._types import TYPE_CHECKING
# up top to prevent circular import due to integration import
DEFAULT_MAX_VALUE_LENGTH = 1024
+
+# Also needs to be at the top to prevent circular import
+class EndpointType(Enum):
+ """
+ The type of an endpoint. This is an enum, rather than a constant, for historical reasons
+    (the old /store endpoint). The enum also preserves future compatibility, in case we ever
+ have a new endpoint.
+ """
+
+ ENVELOPE = "envelope"
+
+
if TYPE_CHECKING:
import sentry_sdk
@@ -42,9 +55,6 @@
"attach_explain_plans": dict[str, Any],
"max_spans": Optional[int],
"record_sql_params": Optional[bool],
- # TODO: Remove these 2 profiling related experiments
- "profiles_sample_rate": Optional[float],
- "profiler_mode": Optional[ProfilerMode],
"otel_powered_performance": Optional[bool],
"transport_zlib_compression_level": Optional[int],
"transport_num_pools": Optional[int],
@@ -255,7 +265,7 @@ class OP:
# This type exists to trick mypy and PyCharm into thinking `init` and `Client`
# take these arguments (even though they take opaque **kwargs)
-class ClientConstructor(object):
+class ClientConstructor:
def __init__(
self,
dsn=None, # type: Optional[str]
@@ -335,4 +345,4 @@ def _get_default_options():
del _get_default_options
-VERSION = "1.45.0"
+VERSION = "2.0.1"
diff --git a/sentry_sdk/crons/_decorator.py b/sentry_sdk/crons/_decorator.py
deleted file mode 100644
index 2d0612f681..0000000000
--- a/sentry_sdk/crons/_decorator.py
+++ /dev/null
@@ -1,61 +0,0 @@
-from functools import wraps
-from inspect import iscoroutinefunction
-
-from sentry_sdk._types import TYPE_CHECKING
-
-if TYPE_CHECKING:
- from collections.abc import Awaitable, Callable
- from typing import Any, cast, overload, ParamSpec, TypeVar, Union
-
- P = ParamSpec("P")
- R = TypeVar("R")
-
-
-class MonitorMixin:
- if TYPE_CHECKING:
-
- @overload
- def __call__(self, fn):
- # type: (Callable[P, Awaitable[Any]]) -> Callable[P, Awaitable[Any]]
- # Unfortunately, mypy does not give us any reliable way to type check the
- # return value of an Awaitable (i.e. async function) for this overload,
- # since calling iscouroutinefunction narrows the type to Callable[P, Awaitable[Any]].
- ...
-
- @overload
- def __call__(self, fn):
- # type: (Callable[P, R]) -> Callable[P, R]
- ...
-
- def __call__(
- self,
- fn, # type: Union[Callable[P, R], Callable[P, Awaitable[Any]]]
- ):
- # type: (...) -> Union[Callable[P, R], Callable[P, Awaitable[Any]]]
- if iscoroutinefunction(fn):
- return self._async_wrapper(fn)
-
- else:
- if TYPE_CHECKING:
- fn = cast("Callable[P, R]", fn)
- return self._sync_wrapper(fn)
-
- def _async_wrapper(self, fn):
- # type: (Callable[P, Awaitable[Any]]) -> Callable[P, Awaitable[Any]]
- @wraps(fn)
- async def inner(*args: "P.args", **kwargs: "P.kwargs"):
- # type: (...) -> R
- with self: # type: ignore[attr-defined]
- return await fn(*args, **kwargs)
-
- return inner
-
- def _sync_wrapper(self, fn):
- # type: (Callable[P, R]) -> Callable[P, R]
- @wraps(fn)
- def inner(*args: "P.args", **kwargs: "P.kwargs"):
- # type: (...) -> R
- with self: # type: ignore[attr-defined]
- return fn(*args, **kwargs)
-
- return inner
diff --git a/sentry_sdk/crons/_decorator_py2.py b/sentry_sdk/crons/_decorator_py2.py
deleted file mode 100644
index 9e1da797e2..0000000000
--- a/sentry_sdk/crons/_decorator_py2.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from functools import wraps
-
-from sentry_sdk._types import TYPE_CHECKING
-
-if TYPE_CHECKING:
- from typing import Any, Callable, ParamSpec, TypeVar
-
- P = ParamSpec("P")
- R = TypeVar("R")
-
-
-class MonitorMixin:
- def __call__(self, fn):
- # type: (Callable[P, R]) -> Callable[P, R]
- @wraps(fn)
- def inner(*args, **kwargs):
- # type: (Any, Any) -> Any
- with self: # type: ignore[attr-defined]
- return fn(*args, **kwargs)
-
- return inner
diff --git a/sentry_sdk/crons/api.py b/sentry_sdk/crons/api.py
index 1a95583301..7f27df9b3a 100644
--- a/sentry_sdk/crons/api.py
+++ b/sentry_sdk/crons/api.py
@@ -1,6 +1,6 @@
import uuid
-from sentry_sdk import Hub
+import sentry_sdk
from sentry_sdk._types import TYPE_CHECKING
@@ -17,7 +17,7 @@ def _create_check_in_event(
monitor_config=None, # type: Optional[MonitorConfig]
):
# type: (...) -> Event
- options = Hub.current.client.options if Hub.current.client else {}
+ options = sentry_sdk.get_client().options
check_in_id = check_in_id or uuid.uuid4().hex # type: str
check_in = {
@@ -52,7 +52,6 @@ def capture_checkin(
monitor_config=monitor_config,
)
- hub = Hub.current
- hub.capture_event(check_in_event)
+ sentry_sdk.capture_event(check_in_event)
return check_in_event["check_in_id"]
diff --git a/sentry_sdk/crons/decorator.py b/sentry_sdk/crons/decorator.py
index 6c5f747b97..885d42e0e1 100644
--- a/sentry_sdk/crons/decorator.py
+++ b/sentry_sdk/crons/decorator.py
@@ -1,25 +1,31 @@
-from sentry_sdk._compat import PY2
+from functools import wraps
+from inspect import iscoroutinefunction
+
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.crons import capture_checkin
from sentry_sdk.crons.consts import MonitorStatus
from sentry_sdk.utils import now
if TYPE_CHECKING:
- from typing import Optional, Type
+ from collections.abc import Awaitable, Callable
from types import TracebackType
+ from typing import (
+ Any,
+ Optional,
+ ParamSpec,
+ Type,
+ TypeVar,
+ Union,
+ cast,
+ overload,
+ )
from sentry_sdk._types import MonitorConfig
-if PY2:
- from sentry_sdk.crons._decorator_py2 import MonitorMixin
-else:
- # This is in its own module so that we don't make Python 2
- # angery over `async def`s.
- # Once we drop Python 2, remove the mixin and merge it
- # into the main monitor class.
- from sentry_sdk.crons._decorator import MonitorMixin
+ P = ParamSpec("P")
+ R = TypeVar("R")
-class monitor(MonitorMixin): # noqa: N801
+class monitor: # noqa: N801
"""
Decorator/context manager to capture checkin events for a monitor.
@@ -78,3 +84,51 @@ def __exit__(self, exc_type, exc_value, traceback):
duration=duration_s,
monitor_config=self.monitor_config,
)
+
+ if TYPE_CHECKING:
+
+ @overload
+ def __call__(self, fn):
+ # type: (Callable[P, Awaitable[Any]]) -> Callable[P, Awaitable[Any]]
+ # Unfortunately, mypy does not give us any reliable way to type check the
+ # return value of an Awaitable (i.e. async function) for this overload,
+            # since calling iscoroutinefunction narrows the type to Callable[P, Awaitable[Any]].
+ ...
+
+ @overload
+ def __call__(self, fn):
+ # type: (Callable[P, R]) -> Callable[P, R]
+ ...
+
+ def __call__(
+ self,
+ fn, # type: Union[Callable[P, R], Callable[P, Awaitable[Any]]]
+ ):
+ # type: (...) -> Union[Callable[P, R], Callable[P, Awaitable[Any]]]
+ if iscoroutinefunction(fn):
+ return self._async_wrapper(fn)
+
+ else:
+ if TYPE_CHECKING:
+ fn = cast("Callable[P, R]", fn)
+ return self._sync_wrapper(fn)
+
+ def _async_wrapper(self, fn):
+ # type: (Callable[P, Awaitable[Any]]) -> Callable[P, Awaitable[Any]]
+ @wraps(fn)
+ async def inner(*args: "P.args", **kwargs: "P.kwargs"):
+ # type: (...) -> R
+ with self:
+ return await fn(*args, **kwargs)
+
+ return inner
+
+ def _sync_wrapper(self, fn):
+ # type: (Callable[P, R]) -> Callable[P, R]
+ @wraps(fn)
+ def inner(*args: "P.args", **kwargs: "P.kwargs"):
+ # type: (...) -> R
+ with self:
+ return fn(*args, **kwargs)
+
+ return inner
diff --git a/sentry_sdk/db/explain_plan/__init__.py b/sentry_sdk/db/explain_plan/__init__.py
index 2699b6f49e..39b0e7ba8f 100644
--- a/sentry_sdk/db/explain_plan/__init__.py
+++ b/sentry_sdk/db/explain_plan/__init__.py
@@ -1,6 +1,5 @@
-import datetime
+from datetime import datetime, timedelta, timezone
-from sentry_sdk._compat import datetime_utcnow
from sentry_sdk.consts import TYPE_CHECKING
if TYPE_CHECKING:
@@ -16,11 +15,11 @@ def cache_statement(statement, options):
# type: (str, dict[str, Any]) -> None
global EXPLAIN_CACHE
- now = datetime_utcnow()
+ now = datetime.now(timezone.utc)
explain_cache_timeout_seconds = options.get(
"explain_cache_timeout_seconds", EXPLAIN_CACHE_TIMEOUT_SECONDS
)
- expiration_time = now + datetime.timedelta(seconds=explain_cache_timeout_seconds)
+ expiration_time = now + timedelta(seconds=explain_cache_timeout_seconds)
EXPLAIN_CACHE[hash(statement)] = expiration_time
@@ -32,7 +31,7 @@ def remove_expired_cache_items():
"""
global EXPLAIN_CACHE
- now = datetime_utcnow()
+ now = datetime.now(timezone.utc)
for key, expiration_time in EXPLAIN_CACHE.items():
expiration_in_the_past = expiration_time < now
diff --git a/sentry_sdk/db/explain_plan/sqlalchemy.py b/sentry_sdk/db/explain_plan/sqlalchemy.py
index fac0729f70..1ca451e808 100644
--- a/sentry_sdk/db/explain_plan/sqlalchemy.py
+++ b/sentry_sdk/db/explain_plan/sqlalchemy.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
from sentry_sdk.consts import TYPE_CHECKING
from sentry_sdk.db.explain_plan import cache_statement, should_run_explain_plan
from sentry_sdk.integrations import DidNotEnable
diff --git a/sentry_sdk/debug.py b/sentry_sdk/debug.py
index fe8ae50cea..c99f85558d 100644
--- a/sentry_sdk/debug.py
+++ b/sentry_sdk/debug.py
@@ -2,9 +2,10 @@
import logging
from sentry_sdk import utils
+from sentry_sdk.client import _client_init_debug
from sentry_sdk.hub import Hub
+from sentry_sdk.scope import Scope
from sentry_sdk.utils import logger
-from sentry_sdk.client import _client_init_debug
from logging import LogRecord
@@ -13,10 +14,8 @@ def filter(self, record):
# type: (LogRecord) -> bool
if _client_init_debug.get(False):
return True
- hub = Hub.current
- if hub is not None and hub.client is not None:
- return hub.client.options["debug"]
- return False
+
+ return Scope.get_client().options["debug"]
def init_debug_support():
diff --git a/sentry_sdk/envelope.py b/sentry_sdk/envelope.py
index fb214a45f4..33d050d156 100644
--- a/sentry_sdk/envelope.py
+++ b/sentry_sdk/envelope.py
@@ -2,7 +2,6 @@
import json
import mimetypes
-from sentry_sdk._compat import text_type, PY2
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.session import Session
from sentry_sdk.utils import json_dumps, capture_internal_exceptions
@@ -19,14 +18,20 @@
def parse_json(data):
- # type: (Union[bytes, text_type]) -> Any
+ # type: (Union[bytes, str]) -> Any
# on some python 3 versions this needs to be bytes
- if not PY2 and isinstance(data, bytes):
+ if isinstance(data, bytes):
data = data.decode("utf-8", "replace")
return json.loads(data)
-class Envelope(object):
+class Envelope:
+ """
+ Represents a Sentry Envelope. The calling code is responsible for adhering to the constraints
+ documented in the Sentry docs: https://develop.sentry.dev/sdk/envelopes/#data-model. In particular,
+ each envelope may have at most one Item with type "event" or "transaction" (but not both).
+ """
+
def __init__(
self,
headers=None, # type: Optional[Dict[str, Any]]
@@ -155,11 +160,11 @@ def __repr__(self):
return "" % (self.headers, self.items)
-class PayloadRef(object):
+class PayloadRef:
def __init__(
self,
bytes=None, # type: Optional[bytes]
- path=None, # type: Optional[Union[bytes, text_type]]
+ path=None, # type: Optional[Union[bytes, str]]
json=None, # type: Optional[Any]
):
# type: (...) -> None
@@ -199,10 +204,10 @@ def __repr__(self):
return "" % (self.inferred_content_type,)
-class Item(object):
+class Item:
def __init__(
self,
- payload, # type: Union[bytes, text_type, PayloadRef]
+ payload, # type: Union[bytes, str, PayloadRef]
headers=None, # type: Optional[Dict[str, Any]]
type=None, # type: Optional[str]
content_type=None, # type: Optional[str]
@@ -215,7 +220,7 @@ def __init__(
self.headers = headers
if isinstance(payload, bytes):
payload = PayloadRef(bytes=payload)
- elif isinstance(payload, text_type):
+ elif isinstance(payload, str):
payload = PayloadRef(bytes=payload.encode("utf-8"))
else:
payload = payload
diff --git a/sentry_sdk/hub.py b/sentry_sdk/hub.py
index a716d33433..f5a87113c2 100644
--- a/sentry_sdk/hub.py
+++ b/sentry_sdk/hub.py
@@ -1,10 +1,8 @@
-import copy
-import sys
from contextlib import contextmanager
from sentry_sdk._compat import with_metaclass
from sentry_sdk.consts import INSTRUMENTER
-from sentry_sdk.scope import Scope
+from sentry_sdk.scope import Scope, _ScopeManager
from sentry_sdk.client import Client
from sentry_sdk.tracing import (
NoOpSpan,
@@ -33,6 +31,9 @@
from typing import TypeVar
from typing import Union
+ from typing_extensions import Unpack
+
+ from sentry_sdk.client import BaseClient
from sentry_sdk.integrations import Integration
from sentry_sdk._types import (
Event,
@@ -41,8 +42,10 @@
BreadcrumbHint,
ExcInfo,
LogLevelStr,
+ SamplingContext,
)
from sentry_sdk.consts import ClientConstructor
+ from sentry_sdk.tracing import TransactionKwargs
T = TypeVar("T")
@@ -58,13 +61,15 @@ def overload(x):
def _should_send_default_pii():
# type: () -> bool
+ # TODO: Migrate existing code to `scope.should_send_default_pii()` and remove this function.
+ # New code should not use this function!
client = Hub.current.client
if not client:
return False
- return client.options["send_default_pii"]
+ return client.should_send_default_pii()
-class _InitGuard(object):
+class _InitGuard:
def __init__(self, client):
# type: (Client) -> None
self._client = client
@@ -82,16 +87,10 @@ def __exit__(self, exc_type, exc_value, tb):
def _check_python_deprecations():
# type: () -> None
- version = sys.version_info[:2]
-
- if version == (3, 4) or version == (3, 5):
- logger.warning(
- "sentry-sdk 2.0.0 will drop support for Python %s.",
- "{}.{}".format(*version),
- )
- logger.warning(
- "Please upgrade to the latest version to continue receiving upgrades and bugfixes."
- )
+ # Since we're likely to deprecate Python versions in the future, I'm keeping
+ # this handy function around. Use this to detect the Python version used and
+ # to output logger.warning()s if it's deprecated.
+ pass
def _init(*args, **kwargs):
@@ -145,56 +144,12 @@ def main(cls):
return GLOBAL_HUB
-class _ScopeManager(object):
- def __init__(self, hub):
- # type: (Hub) -> None
- self._hub = hub
- self._original_len = len(hub._stack)
- self._layer = hub._stack[-1]
-
- def __enter__(self):
- # type: () -> Scope
- scope = self._layer[1]
- assert scope is not None
- return scope
-
- def __exit__(self, exc_type, exc_value, tb):
- # type: (Any, Any, Any) -> None
- current_len = len(self._hub._stack)
- if current_len < self._original_len:
- logger.error(
- "Scope popped too soon. Popped %s scopes too many.",
- self._original_len - current_len,
- )
- return
- elif current_len > self._original_len:
- logger.warning(
- "Leaked %s scopes: %s",
- current_len - self._original_len,
- self._hub._stack[self._original_len :],
- )
-
- layer = self._hub._stack[self._original_len - 1]
- del self._hub._stack[self._original_len - 1 :]
-
- if layer[1] != self._layer[1]:
- logger.error(
- "Wrong scope found. Meant to pop %s, but popped %s.",
- layer[1],
- self._layer[1],
- )
- elif layer[0] != self._layer[0]:
- warning = (
- "init() called inside of pushed scope. This might be entirely "
- "legitimate but usually occurs when initializing the SDK inside "
- "a request handler or task/job function. Try to initialize the "
- "SDK as early as possible instead."
- )
- logger.warning(warning)
-
-
class Hub(with_metaclass(HubMeta)): # type: ignore
- """The hub wraps the concurrency management of the SDK. Each thread has
+ """
+ .. deprecated:: 2.0.0
+ The Hub is deprecated. Its functionality will be merged into :py:class:`sentry_sdk.scope.Scope`.
+
+ The hub wraps the concurrency management of the SDK. Each thread has
its own hub but the hub might transfer with the flow of execution if
context vars are available.
@@ -202,6 +157,7 @@ class Hub(with_metaclass(HubMeta)): # type: ignore
"""
_stack = None # type: List[Tuple[Optional[Client], Scope]]
+ _scope = None # type: Optional[Scope]
# Mypy doesn't pick up on the metaclass.
@@ -215,24 +171,50 @@ def __init__(
scope=None, # type: Optional[Any]
):
# type: (...) -> None
+
+ current_scope = None
+
if isinstance(client_or_hub, Hub):
- hub = client_or_hub
- client, other_scope = hub._stack[-1]
+ client = Scope.get_client()
if scope is None:
- scope = copy.copy(other_scope)
+ # hub cloning is going on, we use a fork of the current/isolation scope for context manager
+ scope = Scope.get_isolation_scope().fork()
+ current_scope = Scope.get_current_scope().fork()
else:
- client = client_or_hub
- if scope is None:
- scope = Scope()
+ client = client_or_hub # type: ignore
+ Scope.get_global_scope().set_client(client)
+
+ if scope is None: # so there is no Hub cloning going on
+ # just the current isolation scope is used for context manager
+ scope = Scope.get_isolation_scope()
+ current_scope = Scope.get_current_scope()
- self._stack = [(client, scope)]
+ if current_scope is None:
+ # just the current current scope is used for context manager
+ current_scope = Scope.get_current_scope()
+
+ self._stack = [(client, scope)] # type: ignore
self._last_event_id = None # type: Optional[str]
self._old_hubs = [] # type: List[Hub]
+ self._old_current_scopes = [] # type: List[Scope]
+ self._old_isolation_scopes = [] # type: List[Scope]
+ self._current_scope = current_scope # type: Scope
+ self._scope = scope # type: Scope
+
def __enter__(self):
# type: () -> Hub
self._old_hubs.append(Hub.current)
_local.set(self)
+
+ current_scope = Scope.get_current_scope()
+ self._old_current_scopes.append(current_scope)
+ scope._current_scope.set(self._current_scope)
+
+ isolation_scope = Scope.get_isolation_scope()
+ self._old_isolation_scopes.append(isolation_scope)
+ scope._isolation_scope.set(self._scope)
+
return self
def __exit__(
@@ -245,11 +227,21 @@ def __exit__(
old = self._old_hubs.pop()
_local.set(old)
+ old_current_scope = self._old_current_scopes.pop()
+ scope._current_scope.set(old_current_scope)
+
+ old_isolation_scope = self._old_isolation_scopes.pop()
+ scope._isolation_scope.set(old_isolation_scope)
+
def run(
self, callback # type: Callable[[], T]
):
# type: (...) -> T
- """Runs a callback in the context of the hub. Alternatively the
+ """
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+
+ Runs a callback in the context of the hub. Alternatively the
with statement can be used on the hub directly.
"""
with self:
@@ -259,28 +251,46 @@ def get_integration(
self, name_or_class # type: Union[str, Type[Integration]]
):
# type: (...) -> Any
- """Returns the integration for this hub by name or class. If there
+ """
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+ Please use :py:meth:`sentry_sdk.client._Client.get_integration` instead.
+
+ Returns the integration for this hub by name or class. If there
is no client bound or the client does not have that integration
then `None` is returned.
If the return value is not `None` the hub is guaranteed to have a
client attached.
"""
- client = self.client
- if client is not None:
- return client.get_integration(name_or_class)
+ return Scope.get_client().get_integration(name_or_class)
@property
def client(self):
- # type: () -> Optional[Client]
- """Returns the current client on the hub."""
- return self._stack[-1][0]
+ # type: () -> Optional[BaseClient]
+ """
+ .. deprecated:: 2.0.0
+ This property is deprecated and will be removed in a future release.
+ Please use :py:func:`sentry_sdk.api.get_client` instead.
+
+ Returns the current client on the hub.
+ """
+ client = Scope.get_client()
+
+ if not client.is_active():
+ return None
+
+ return client
@property
def scope(self):
# type: () -> Scope
- """Returns the current scope on the hub."""
- return self._stack[-1][1]
+ """
+ .. deprecated:: 2.0.0
+ This property is deprecated and will be removed in a future release.
+ Returns the current scope on the hub.
+ """
+ return Scope.get_isolation_scope()
def last_event_id(self):
# type: () -> Optional[str]
@@ -296,16 +306,25 @@ def last_event_id(self):
return self._last_event_id
def bind_client(
- self, new # type: Optional[Client]
+ self, new # type: Optional[BaseClient]
):
# type: (...) -> None
- """Binds a new client to the hub."""
- top = self._stack[-1]
- self._stack[-1] = (new, top[1])
+ """
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+ Please use :py:meth:`sentry_sdk.Scope.set_client` instead.
+
+ Binds a new client to the hub.
+ """
+ Scope.get_global_scope().set_client(new)
def capture_event(self, event, hint=None, scope=None, **scope_kwargs):
# type: (Event, Optional[Hint], Optional[Scope], Any) -> Optional[str]
"""
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+ Please use :py:meth:`sentry_sdk.Scope.capture_event` instead.
+
Captures an event.
Alias of :py:meth:`sentry_sdk.Scope.capture_event`.
@@ -321,12 +340,8 @@ def capture_event(self, event, hint=None, scope=None, **scope_kwargs):
For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
The `scope` and `scope_kwargs` parameters are mutually exclusive.
"""
- client, top_scope = self._stack[-1]
- if client is None:
- return None
-
- last_event_id = top_scope.capture_event(
- event, hint, client=client, scope=scope, **scope_kwargs
+ last_event_id = Scope.get_current_scope().capture_event(
+ event, hint, scope=scope, **scope_kwargs
)
is_transaction = event.get("type") == "transaction"
@@ -338,6 +353,10 @@ def capture_event(self, event, hint=None, scope=None, **scope_kwargs):
def capture_message(self, message, level=None, scope=None, **scope_kwargs):
# type: (str, Optional[LogLevelStr], Optional[Scope], Any) -> Optional[str]
"""
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+ Please use :py:meth:`sentry_sdk.Scope.capture_message` instead.
+
Captures a message.
Alias of :py:meth:`sentry_sdk.Scope.capture_message`.
@@ -353,14 +372,10 @@ def capture_message(self, message, level=None, scope=None, **scope_kwargs):
For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
The `scope` and `scope_kwargs` parameters are mutually exclusive.
- :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
+ :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`).
"""
- client, top_scope = self._stack[-1]
- if client is None:
- return None
-
- last_event_id = top_scope.capture_message(
- message, level=level, client=client, scope=scope, **scope_kwargs
+ last_event_id = Scope.get_current_scope().capture_message(
+ message, level=level, scope=scope, **scope_kwargs
)
if last_event_id is not None:
@@ -370,7 +385,12 @@ def capture_message(self, message, level=None, scope=None, **scope_kwargs):
def capture_exception(self, error=None, scope=None, **scope_kwargs):
# type: (Optional[Union[BaseException, ExcInfo]], Optional[Scope], Any) -> Optional[str]
- """Captures an exception.
+ """
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+ Please use :py:meth:`sentry_sdk.Scope.capture_exception` instead.
+
+ Captures an exception.
Alias of :py:meth:`sentry_sdk.Scope.capture_exception`.
@@ -383,14 +403,10 @@ def capture_exception(self, error=None, scope=None, **scope_kwargs):
For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
The `scope` and `scope_kwargs` parameters are mutually exclusive.
- :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
+ :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`).
"""
- client, top_scope = self._stack[-1]
- if client is None:
- return None
-
- last_event_id = top_scope.capture_exception(
- error, client=client, scope=scope, **scope_kwargs
+ last_event_id = Scope.get_current_scope().capture_exception(
+ error, scope=scope, **scope_kwargs
)
if last_event_id is not None:
@@ -403,10 +419,14 @@ def _capture_internal_exception(
):
# type: (...) -> Any
"""
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+ Please use :py:meth:`sentry_sdk.client._Client._capture_internal_exception` instead.
+
Capture an exception that is likely caused by a bug in the SDK
itself.
- Duplicated in :py:meth:`sentry_sdk.Client._capture_internal_exception`.
+ Duplicated in :py:meth:`sentry_sdk.client._Client._capture_internal_exception`.
These exceptions do not end up in Sentry and are just logged instead.
"""
@@ -415,6 +435,10 @@ def _capture_internal_exception(
def add_breadcrumb(self, crumb=None, hint=None, **kwargs):
# type: (Optional[Breadcrumb], Optional[BreadcrumbHint], Any) -> None
"""
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+ Please use :py:meth:`sentry_sdk.Scope.add_breadcrumb` instead.
+
Adds a breadcrumb.
:param crumb: Dictionary with the data as the sentry v7/v8 protocol expects.
@@ -422,18 +446,15 @@ def add_breadcrumb(self, crumb=None, hint=None, **kwargs):
:param hint: An optional value that can be used by `before_breadcrumb`
to customize the breadcrumbs that are emitted.
"""
- client, scope = self._stack[-1]
- if client is None:
- logger.info("Dropped breadcrumb because no client bound")
- return
-
- kwargs["client"] = client
-
- scope.add_breadcrumb(crumb, hint, **kwargs)
+ Scope.get_isolation_scope().add_breadcrumb(crumb, hint, **kwargs)
- def start_span(self, span=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
- # type: (Optional[Span], str, Any) -> Span
+ def start_span(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
+ # type: (str, Any) -> Span
"""
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+ Please use :py:meth:`sentry_sdk.Scope.start_span` instead.
+
Start a span whose parent is the currently active span or transaction, if any.
The return value is a :py:class:`sentry_sdk.tracing.Span` instance,
@@ -448,18 +469,22 @@ def start_span(self, span=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`.
"""
- client, scope = self._stack[-1]
-
- kwargs["hub"] = self
- kwargs["client"] = client
-
- return scope.start_span(span=span, instrumenter=instrumenter, **kwargs)
+ scope = Scope.get_current_scope()
+ return scope.start_span(instrumenter=instrumenter, **kwargs)
def start_transaction(
- self, transaction=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs
+ self,
+ transaction=None,
+ instrumenter=INSTRUMENTER.SENTRY,
+ custom_sampling_context=None,
+ **kwargs
):
- # type: (Optional[Transaction], str, Any) -> Union[Transaction, NoOpSpan]
+ # type: (Optional[Transaction], str, Optional[SamplingContext], Unpack[TransactionKwargs]) -> Union[Transaction, NoOpSpan]
"""
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+ Please use :py:meth:`sentry_sdk.Scope.start_transaction` instead.
+
Start and return a transaction.
Start an existing transaction if given, otherwise create and start a new
@@ -483,23 +508,27 @@ def start_transaction(
For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Transaction`.
"""
- client, scope = self._stack[-1]
+ scope = Scope.get_current_scope()
- kwargs["hub"] = self
- kwargs["client"] = client
+ # For backwards compatibility, we allow passing the scope as the hub.
+ # We need a major release to make this nice. (if someone searches the code: deprecated)
+ # Type checking disabled for this line because deprecated keys are not allowed in the type signature.
+ kwargs["hub"] = scope # type: ignore
return scope.start_transaction(
- transaction=transaction, instrumenter=instrumenter, **kwargs
+ transaction, instrumenter, custom_sampling_context, **kwargs
)
def continue_trace(self, environ_or_headers, op=None, name=None, source=None):
# type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction
"""
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+ Please use :py:meth:`sentry_sdk.Scope.continue_trace` instead.
+
Sets the propagation context from environment or headers and returns a transaction.
"""
- scope = self._stack[-1][1]
-
- return scope.continue_trace(
+ return Scope.get_isolation_scope().continue_trace(
environ_or_headers=environ_or_headers, op=op, name=name, source=source
)
@@ -524,6 +553,9 @@ def push_scope( # noqa
):
# type: (...) -> Optional[ContextManager[Scope]]
"""
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+
Pushes a new layer on the scope stack.
:param callback: If provided, this method pushes a scope, calls
@@ -537,21 +569,14 @@ def push_scope( # noqa
callback(scope)
return None
- client, scope = self._stack[-1]
-
- new_scope = copy.copy(scope)
-
- if continue_trace:
- new_scope.generate_propagation_context()
-
- new_layer = (client, new_scope)
- self._stack.append(new_layer)
-
return _ScopeManager(self)
def pop_scope_unsafe(self):
# type: () -> Tuple[Optional[Client], Scope]
"""
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+
Pops a scope layer from the stack.
Try to use the context manager :py:meth:`push_scope` instead.
@@ -580,33 +605,31 @@ def configure_scope( # noqa
continue_trace=True, # type: bool
):
# type: (...) -> Optional[ContextManager[Scope]]
-
"""
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+
Reconfigures the scope.
:param callback: If provided, call the callback with the current scope.
:returns: If no callback is provided, returns a context manager that returns the scope.
"""
-
- client, scope = self._stack[-1]
+ scope = Scope.get_isolation_scope()
if continue_trace:
scope.generate_propagation_context()
if callback is not None:
- if client is not None:
- callback(scope)
+ # TODO: used to return None when client is None. Check if this changes behavior.
+ callback(scope)
return None
@contextmanager
def inner():
# type: () -> Generator[Scope, None, None]
- if client is not None:
- yield scope
- else:
- yield Scope()
+ yield scope
return inner()
@@ -614,37 +637,54 @@ def start_session(
self, session_mode="application" # type: str
):
# type: (...) -> None
- """Starts a new session."""
- client, scope = self._stack[-1]
- scope.start_session(
- client=client,
+ """
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+ Please use :py:meth:`sentry_sdk.Scope.start_session` instead.
+
+ Starts a new session.
+ """
+ Scope.get_isolation_scope().start_session(
session_mode=session_mode,
)
def end_session(self):
# type: (...) -> None
- """Ends the current session if there is one."""
- client, scope = self._stack[-1]
- scope.end_session(client=client)
+ """
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+ Please use :py:meth:`sentry_sdk.Scope.end_session` instead.
+
+ Ends the current session if there is one.
+ """
+ Scope.get_isolation_scope().end_session()
def stop_auto_session_tracking(self):
# type: (...) -> None
- """Stops automatic session tracking.
+ """
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+ Please use :py:meth:`sentry_sdk.Scope.stop_auto_session_tracking` instead.
+
+ Stops automatic session tracking.
This temporarily session tracking for the current scope when called.
To resume session tracking call `resume_auto_session_tracking`.
"""
- client, scope = self._stack[-1]
- scope.stop_auto_session_tracking(client=client)
+ Scope.get_isolation_scope().stop_auto_session_tracking()
def resume_auto_session_tracking(self):
# type: (...) -> None
- """Resumes automatic session tracking for the current scope if
+ """
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+ Please use :py:meth:`sentry_sdk.Scope.resume_auto_session_tracking` instead.
+
+ Resumes automatic session tracking for the current scope if
disabled earlier. This requires that generally automatic session
tracking is enabled.
"""
- scope = self._stack[-1][1]
- scope.resume_auto_session_tracking()
+ Scope.get_isolation_scope().resume_auto_session_tracking()
def flush(
self,
@@ -653,27 +693,47 @@ def flush(
):
# type: (...) -> None
"""
- Alias for :py:meth:`sentry_sdk.Client.flush`
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+ Please use :py:meth:`sentry_sdk.client._Client.flush` instead.
+
+ Alias for :py:meth:`sentry_sdk.client._Client.flush`
"""
- client, scope = self._stack[-1]
- if client is not None:
- return client.flush(timeout=timeout, callback=callback)
+ return Scope.get_client().flush(timeout=timeout, callback=callback)
def get_traceparent(self):
# type: () -> Optional[str]
"""
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+ Please use :py:meth:`sentry_sdk.Scope.get_traceparent` instead.
+
Returns the traceparent either from the active span or from the scope.
"""
- client, scope = self._stack[-1]
- return scope.get_traceparent(client=client)
+ current_scope = Scope.get_current_scope()
+ traceparent = current_scope.get_traceparent()
+
+ if traceparent is None:
+ isolation_scope = Scope.get_isolation_scope()
+ traceparent = isolation_scope.get_traceparent()
+
+ return traceparent
def get_baggage(self):
# type: () -> Optional[str]
"""
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+ Please use :py:meth:`sentry_sdk.Scope.get_baggage` instead.
+
Returns Baggage either from the active span or from the scope.
"""
- client, scope = self._stack[-1]
- baggage = scope.get_baggage(client=client)
+ current_scope = Scope.get_current_scope()
+ baggage = current_scope.get_baggage()
+
+ if baggage is None:
+ isolation_scope = Scope.get_isolation_scope()
+ baggage = isolation_scope.get_baggage()
if baggage is not None:
return baggage.serialize()
@@ -683,17 +743,25 @@ def get_baggage(self):
def iter_trace_propagation_headers(self, span=None):
# type: (Optional[Span]) -> Generator[Tuple[str, str], None, None]
"""
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+ Please use :py:meth:`sentry_sdk.Scope.iter_trace_propagation_headers` instead.
+
Return HTTP headers which allow propagation of trace data. Data taken
from the span representing the request, if available, or the current
span on the scope if not.
"""
- client, scope = self._stack[-1]
-
- return scope.iter_trace_propagation_headers(span=span, client=client)
+ return Scope.get_current_scope().iter_trace_propagation_headers(
+ span=span,
+ )
def trace_propagation_meta(self, span=None):
# type: (Optional[Span]) -> str
"""
+ .. deprecated:: 2.0.0
+ This function is deprecated and will be removed in a future release.
+ Please use :py:meth:`sentry_sdk.Scope.trace_propagation_meta` instead.
+
Return meta tags which should be injected into HTML templates
to allow propagation of trace information.
"""
@@ -702,9 +770,14 @@ def trace_propagation_meta(self, span=None):
"The parameter `span` in trace_propagation_meta() is deprecated and will be removed in the future."
)
- client, scope = self._stack[-1]
- return scope.trace_propagation_meta(span=span, client=client)
+ return Scope.get_current_scope().trace_propagation_meta(
+ span=span,
+ )
GLOBAL_HUB = Hub()
_local.set(GLOBAL_HUB)
+
+
+# Circular imports
+from sentry_sdk import scope
diff --git a/sentry_sdk/integrations/__init__.py b/sentry_sdk/integrations/__init__.py
index c9737ae589..b0ec5e2d3e 100644
--- a/sentry_sdk/integrations/__init__.py
+++ b/sentry_sdk/integrations/__init__.py
@@ -1,7 +1,6 @@
-from __future__ import absolute_import
+from abc import ABC, abstractmethod
from threading import Lock
-from sentry_sdk._compat import iteritems
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.utils import logger
@@ -70,21 +69,34 @@ def iter_default_integrations(with_auto_enabling_integrations):
_AUTO_ENABLING_INTEGRATIONS = [
"sentry_sdk.integrations.aiohttp.AioHttpIntegration",
+ "sentry_sdk.integrations.ariadne.AriadneIntegration",
+ "sentry_sdk.integrations.arq.ArqIntegration",
+ "sentry_sdk.integrations.asyncpg.AsyncPGIntegration",
"sentry_sdk.integrations.boto3.Boto3Integration",
"sentry_sdk.integrations.bottle.BottleIntegration",
"sentry_sdk.integrations.celery.CeleryIntegration",
+ "sentry_sdk.integrations.chalice.ChaliceIntegration",
+ "sentry_sdk.integrations.clickhouse_driver.ClickhouseDriverIntegration",
"sentry_sdk.integrations.django.DjangoIntegration",
"sentry_sdk.integrations.falcon.FalconIntegration",
"sentry_sdk.integrations.fastapi.FastApiIntegration",
"sentry_sdk.integrations.flask.FlaskIntegration",
+ "sentry_sdk.integrations.gql.GQLIntegration",
+ "sentry_sdk.integrations.graphene.GrapheneIntegration",
"sentry_sdk.integrations.httpx.HttpxIntegration",
+ "sentry_sdk.integrations.huey.HueyIntegration",
+ "sentry_sdk.integrations.loguru.LoguruIntegration",
"sentry_sdk.integrations.openai.OpenAIIntegration",
+ "sentry_sdk.integrations.pymongo.PyMongoIntegration",
"sentry_sdk.integrations.pyramid.PyramidIntegration",
+ "sentry_sdk.integrations.quart.QuartIntegration",
"sentry_sdk.integrations.redis.RedisIntegration",
"sentry_sdk.integrations.rq.RqIntegration",
"sentry_sdk.integrations.sanic.SanicIntegration",
"sentry_sdk.integrations.sqlalchemy.SqlalchemyIntegration",
"sentry_sdk.integrations.starlette.StarletteIntegration",
+ "sentry_sdk.integrations.starlite.StarliteIntegration",
+ "sentry_sdk.integrations.strawberry.StrawberryIntegration",
"sentry_sdk.integrations.tornado.TornadoIntegration",
]
@@ -125,7 +137,7 @@ def setup_integrations(
integrations[instance.identifier] = instance
used_as_default_integration.add(instance.identifier)
- for identifier, integration in iteritems(integrations):
+ for identifier, integration in integrations.items():
with _installer_lock:
if identifier not in _processed_integrations:
logger.debug(
@@ -133,16 +145,6 @@ def setup_integrations(
)
try:
type(integration).setup_once()
- except NotImplementedError:
- if getattr(integration, "install", None) is not None:
- logger.warning(
- "Integration %s: The install method is "
- "deprecated. Use `setup_once`.",
- identifier,
- )
- integration.install()
- else:
- raise
except DidNotEnable as e:
if identifier not in used_as_default_integration:
raise
@@ -157,7 +159,7 @@ def setup_integrations(
integrations = {
identifier: integration
- for identifier, integration in iteritems(integrations)
+ for identifier, integration in integrations.items()
if identifier in _installed_integrations
}
@@ -177,7 +179,7 @@ class DidNotEnable(Exception): # noqa: N818
"""
-class Integration(object):
+class Integration(ABC):
"""Baseclass for all integrations.
To accept options for an integration, implement your own constructor that
@@ -191,6 +193,7 @@ class Integration(object):
"""String unique ID of integration type"""
@staticmethod
+ @abstractmethod
def setup_once():
# type: () -> None
"""
@@ -203,4 +206,4 @@ def setup_once():
Inside those hooks `Integration.current` can be used to access the
instance again.
"""
- raise NotImplementedError()
+ pass
diff --git a/sentry_sdk/integrations/_asgi_common.py b/sentry_sdk/integrations/_asgi_common.py
index 41946cc7c2..17a88523e5 100644
--- a/sentry_sdk/integrations/_asgi_common.py
+++ b/sentry_sdk/integrations/_asgi_common.py
@@ -8,8 +8,11 @@
from typing import Any
from typing import Dict
from typing import Optional
+ from typing import Union
from typing_extensions import Literal
+ from sentry_sdk.utils import AnnotatedValue
+
def _get_headers(asgi_scope):
# type: (Any) -> Dict[str, str]
@@ -29,7 +32,7 @@ def _get_headers(asgi_scope):
def _get_url(asgi_scope, default_scheme, host):
- # type: (Dict[str, Any], Literal["ws", "http"], Optional[str]) -> str
+ # type: (Dict[str, Any], Literal["ws", "http"], Optional[Union[AnnotatedValue, str]]) -> str
"""
Extract URL from the ASGI scope, without also including the querystring.
"""
diff --git a/sentry_sdk/integrations/_wsgi_common.py b/sentry_sdk/integrations/_wsgi_common.py
index b72ebde126..6e6705a7d3 100644
--- a/sentry_sdk/integrations/_wsgi_common.py
+++ b/sentry_sdk/integrations/_wsgi_common.py
@@ -1,12 +1,9 @@
-from __future__ import absolute_import
-
import json
from copy import deepcopy
-from sentry_sdk.hub import Hub, _should_send_default_pii
+import sentry_sdk
+from sentry_sdk.scope import should_send_default_pii
from sentry_sdk.utils import AnnotatedValue
-from sentry_sdk._compat import text_type, iteritems
-
from sentry_sdk._types import TYPE_CHECKING
try:
@@ -16,10 +13,9 @@
if TYPE_CHECKING:
- import sentry_sdk
-
from typing import Any
from typing import Dict
+ from typing import Mapping
from typing import Optional
from typing import Union
from sentry_sdk._types import Event
@@ -42,7 +38,7 @@
def request_body_within_bounds(client, content_length):
- # type: (Optional[sentry_sdk.Client], int) -> bool
+ # type: (Optional[sentry_sdk.client.BaseClient], int) -> bool
if client is None:
return False
@@ -54,15 +50,24 @@ def request_body_within_bounds(client, content_length):
)
-class RequestExtractor(object):
+class RequestExtractor:
+ """
+ Base class for request extraction.
+ """
+
+ # It does not make sense to make this class an ABC because it is not used
+ # for typing, only so that child classes can inherit common methods from
+ # it. Only some child classes implement all methods that raise
+ # NotImplementedError in this class.
+
def __init__(self, request):
# type: (Any) -> None
self.request = request
def extract_into_event(self, event):
# type: (Event) -> None
- client = Hub.current.client
- if client is None:
+ client = sentry_sdk.get_client()
+ if not client.is_active():
return
data = None # type: Optional[Union[AnnotatedValue, Dict[str, Any]]]
@@ -70,7 +75,7 @@ def extract_into_event(self, event):
content_length = self.content_length()
request_info = event.get("request", {})
- if _should_send_default_pii():
+ if should_send_default_pii():
request_info["cookies"] = dict(self.cookies())
if not request_body_within_bounds(client, content_length):
@@ -125,9 +130,12 @@ def parsed_body(self):
form = self.form()
files = self.files()
if form or files:
- data = dict(iteritems(form))
- for key, _ in iteritems(files):
- data[key] = AnnotatedValue.removed_because_raw_data()
+ data = {}
+ if form:
+ data = dict(form.items())
+ if files:
+ for key in files.keys():
+ data[key] = AnnotatedValue.removed_because_raw_data()
return data
@@ -147,7 +155,7 @@ def json(self):
if raw_data is None:
return None
- if isinstance(raw_data, text_type):
+ if isinstance(raw_data, str):
return json.loads(raw_data)
else:
return json.loads(raw_data.decode("utf-8"))
@@ -180,8 +188,8 @@ def _is_json_content_type(ct):
def _filter_headers(headers):
- # type: (Dict[str, str]) -> Dict[str, str]
- if _should_send_default_pii():
+ # type: (Mapping[str, str]) -> Mapping[str, Union[AnnotatedValue, str]]
+ if should_send_default_pii():
return headers
return {
@@ -190,5 +198,5 @@ def _filter_headers(headers):
if k.upper().replace("-", "_") not in SENSITIVE_HEADERS
else AnnotatedValue.removed_because_over_size_limit()
)
- for k, v in iteritems(headers)
+ for k, v in headers.items()
}
diff --git a/sentry_sdk/integrations/aiohttp.py b/sentry_sdk/integrations/aiohttp.py
index 19974030ed..9edaaf5cc9 100644
--- a/sentry_sdk/integrations/aiohttp.py
+++ b/sentry_sdk/integrations/aiohttp.py
@@ -1,13 +1,13 @@
import sys
import weakref
+import sentry_sdk
from sentry_sdk.api import continue_trace
-from sentry_sdk._compat import reraise
from sentry_sdk.consts import OP, SPANDATA
-from sentry_sdk.hub import Hub
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.integrations.logging import ignore_logger
-from sentry_sdk.sessions import auto_session_tracking
+from sentry_sdk.scope import Scope
+from sentry_sdk.sessions import auto_session_tracking_scope
from sentry_sdk.integrations._wsgi_common import (
_filter_headers,
request_body_within_bounds,
@@ -20,10 +20,12 @@
from sentry_sdk.tracing_utils import should_propagate_trace
from sentry_sdk.utils import (
capture_internal_exceptions,
+ ensure_integration_enabled,
event_from_exception,
logger,
parse_url,
parse_version,
+ reraise,
transaction_from_function,
HAS_REAL_CONTEXTVARS,
CONTEXTVARS_ERROR_MESSAGE,
@@ -97,19 +99,18 @@ def setup_once():
async def sentry_app_handle(self, request, *args, **kwargs):
# type: (Any, Request, *Any, **Any) -> Any
- hub = Hub.current
- if hub.get_integration(AioHttpIntegration) is None:
+ if sentry_sdk.get_client().get_integration(AioHttpIntegration) is None:
return await old_handle(self, request, *args, **kwargs)
weak_request = weakref.ref(request)
- with Hub(hub) as hub:
- with auto_session_tracking(hub, session_mode="request"):
+ with sentry_sdk.isolation_scope() as scope:
+ with auto_session_tracking_scope(scope, session_mode="request"):
# Scope data will not leak between requests because aiohttp
# create a task to wrap each request.
- with hub.configure_scope() as scope:
- scope.clear_breadcrumbs()
- scope.add_event_processor(_make_request_processor(weak_request))
+ scope.generate_propagation_context()
+ scope.clear_breadcrumbs()
+ scope.add_event_processor(_make_request_processor(weak_request))
headers = dict(request.headers)
transaction = continue_trace(
@@ -120,7 +121,7 @@ async def sentry_app_handle(self, request, *args, **kwargs):
name="generic AIOHTTP request",
source=TRANSACTION_SOURCE_ROUTE,
)
- with hub.start_transaction(
+ with sentry_sdk.start_transaction(
transaction,
custom_sampling_context={"aiohttp_request": request},
):
@@ -135,7 +136,7 @@ async def sentry_app_handle(self, request, *args, **kwargs):
except Exception:
# This will probably map to a 500 but seems like we
# have no way to tell. Do not set span status.
- reraise(*_capture_exception(hub))
+ reraise(*_capture_exception())
transaction.set_http_status(response.status)
return response
@@ -148,8 +149,7 @@ async def sentry_urldispatcher_resolve(self, request):
# type: (UrlDispatcher, Request) -> UrlMappingMatchInfo
rv = await old_urldispatcher_resolve(self, request)
- hub = Hub.current
- integration = hub.get_integration(AioHttpIntegration)
+ integration = sentry_sdk.get_client().get_integration(AioHttpIntegration)
name = None
@@ -164,11 +164,10 @@ async def sentry_urldispatcher_resolve(self, request):
pass
if name is not None:
- with Hub.current.configure_scope() as scope:
- scope.set_transaction_name(
- name,
- source=SOURCE_FOR_STYLE[integration.transaction_style],
- )
+ Scope.get_current_scope().set_transaction_name(
+ name,
+ source=SOURCE_FOR_STYLE[integration.transaction_style],
+ )
return rv
@@ -176,12 +175,9 @@ async def sentry_urldispatcher_resolve(self, request):
old_client_session_init = ClientSession.__init__
+ @ensure_integration_enabled(AioHttpIntegration, old_client_session_init)
def init(*args, **kwargs):
# type: (Any, Any) -> None
- hub = Hub.current
- if hub.get_integration(AioHttpIntegration) is None:
- return old_client_session_init(*args, **kwargs)
-
client_trace_configs = list(kwargs.get("trace_configs") or ())
trace_config = create_trace_config()
client_trace_configs.append(trace_config)
@@ -194,10 +190,10 @@ def init(*args, **kwargs):
def create_trace_config():
# type: () -> TraceConfig
+
async def on_request_start(session, trace_config_ctx, params):
# type: (ClientSession, SimpleNamespace, TraceRequestStartParams) -> None
- hub = Hub.current
- if hub.get_integration(AioHttpIntegration) is None:
+ if sentry_sdk.get_client().get_integration(AioHttpIntegration) is None:
return
method = params.method.upper()
@@ -206,7 +202,7 @@ async def on_request_start(session, trace_config_ctx, params):
with capture_internal_exceptions():
parsed_url = parse_url(str(params.url), sanitize=False)
- span = hub.start_span(
+ span = sentry_sdk.start_span(
op=OP.HTTP_CLIENT,
description="%s %s"
% (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE),
@@ -217,8 +213,12 @@ async def on_request_start(session, trace_config_ctx, params):
span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)
- if should_propagate_trace(hub, str(params.url)):
- for key, value in hub.iter_trace_propagation_headers(span):
+ client = sentry_sdk.get_client()
+
+ if should_propagate_trace(client, str(params.url)):
+ for key, value in Scope.get_current_scope().iter_trace_propagation_headers(
+ span=span
+ ):
logger.debug(
"[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
key=key, value=value, url=params.url
@@ -275,42 +275,40 @@ def aiohttp_processor(
request_info["query_string"] = request.query_string
request_info["method"] = request.method
request_info["env"] = {"REMOTE_ADDR": request.remote}
-
- hub = Hub.current
request_info["headers"] = _filter_headers(dict(request.headers))
# Just attach raw data here if it is within bounds, if available.
# Unfortunately there's no way to get structured data from aiohttp
# without awaiting on some coroutine.
- request_info["data"] = get_aiohttp_request_data(hub, request)
+ request_info["data"] = get_aiohttp_request_data(request)
return event
return aiohttp_processor
-def _capture_exception(hub):
- # type: (Hub) -> ExcInfo
+def _capture_exception():
+ # type: () -> ExcInfo
exc_info = sys.exc_info()
event, hint = event_from_exception(
exc_info,
- client_options=hub.client.options, # type: ignore
+ client_options=sentry_sdk.get_client().options,
mechanism={"type": "aiohttp", "handled": False},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
return exc_info
BODY_NOT_READ_MESSAGE = "[Can't show request body due to implementation details.]"
-def get_aiohttp_request_data(hub, request):
- # type: (Hub, Request) -> Union[Optional[str], AnnotatedValue]
+def get_aiohttp_request_data(request):
+ # type: (Request) -> Union[Optional[str], AnnotatedValue]
bytes_body = request._read_bytes
if bytes_body is not None:
# we have body to show
- if not request_body_within_bounds(hub.client, len(bytes_body)):
+ if not request_body_within_bounds(sentry_sdk.get_client(), len(bytes_body)):
return AnnotatedValue.removed_because_over_size_limit()
encoding = request.charset or "utf-8"
diff --git a/sentry_sdk/integrations/argv.py b/sentry_sdk/integrations/argv.py
index fea08619d5..3154f0c431 100644
--- a/sentry_sdk/integrations/argv.py
+++ b/sentry_sdk/integrations/argv.py
@@ -1,8 +1,6 @@
-from __future__ import absolute_import
-
import sys
-from sentry_sdk.hub import Hub
+import sentry_sdk
from sentry_sdk.integrations import Integration
from sentry_sdk.scope import add_global_event_processor
@@ -23,7 +21,7 @@ def setup_once():
@add_global_event_processor
def processor(event, hint):
# type: (Event, Optional[Hint]) -> Optional[Event]
- if Hub.current.get_integration(ArgvIntegration) is not None:
+ if sentry_sdk.get_client().get_integration(ArgvIntegration) is not None:
extra = event.setdefault("extra", {})
# If some event processor decided to set extra to e.g. an
# `int`, don't crash. Not here.
diff --git a/sentry_sdk/integrations/ariadne.py b/sentry_sdk/integrations/ariadne.py
index 5b98a88443..86407408a6 100644
--- a/sentry_sdk/integrations/ariadne.py
+++ b/sentry_sdk/integrations/ariadne.py
@@ -1,11 +1,13 @@
from importlib import import_module
-from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk import get_client, capture_event
from sentry_sdk.integrations import DidNotEnable, Integration
from sentry_sdk.integrations.logging import ignore_logger
from sentry_sdk.integrations._wsgi_common import request_body_within_bounds
+from sentry_sdk.scope import Scope, should_send_default_pii
from sentry_sdk.utils import (
capture_internal_exceptions,
+ ensure_integration_enabled,
event_from_exception,
package_version,
)
@@ -51,73 +53,60 @@ def _patch_graphql():
old_handle_errors = ariadne_graphql.handle_graphql_errors
old_handle_query_result = ariadne_graphql.handle_query_result
+ @ensure_integration_enabled(AriadneIntegration, old_parse_query)
def _sentry_patched_parse_query(context_value, query_parser, data):
# type: (Optional[Any], Optional[QueryParser], Any) -> DocumentNode
- hub = Hub.current
- integration = hub.get_integration(AriadneIntegration)
- if integration is None:
- return old_parse_query(context_value, query_parser, data)
-
- with hub.configure_scope() as scope:
- event_processor = _make_request_event_processor(data)
- scope.add_event_processor(event_processor)
+ event_processor = _make_request_event_processor(data)
+ Scope.get_isolation_scope().add_event_processor(event_processor)
result = old_parse_query(context_value, query_parser, data)
return result
+ @ensure_integration_enabled(AriadneIntegration, old_handle_errors)
def _sentry_patched_handle_graphql_errors(errors, *args, **kwargs):
# type: (List[GraphQLError], Any, Any) -> GraphQLResult
- hub = Hub.current
- integration = hub.get_integration(AriadneIntegration)
- if integration is None:
- return old_handle_errors(errors, *args, **kwargs)
-
result = old_handle_errors(errors, *args, **kwargs)
- with hub.configure_scope() as scope:
- event_processor = _make_response_event_processor(result[1])
- scope.add_event_processor(event_processor)
+ event_processor = _make_response_event_processor(result[1])
+ Scope.get_isolation_scope().add_event_processor(event_processor)
- if hub.client:
+ client = get_client()
+ if client.is_active():
with capture_internal_exceptions():
for error in errors:
event, hint = event_from_exception(
error,
- client_options=hub.client.options,
+ client_options=client.options,
mechanism={
- "type": integration.identifier,
+ "type": AriadneIntegration.identifier,
"handled": False,
},
)
- hub.capture_event(event, hint=hint)
+ capture_event(event, hint=hint)
return result
+ @ensure_integration_enabled(AriadneIntegration, old_handle_query_result)
def _sentry_patched_handle_query_result(result, *args, **kwargs):
# type: (Any, Any, Any) -> GraphQLResult
- hub = Hub.current
- integration = hub.get_integration(AriadneIntegration)
- if integration is None:
- return old_handle_query_result(result, *args, **kwargs)
-
query_result = old_handle_query_result(result, *args, **kwargs)
- with hub.configure_scope() as scope:
- event_processor = _make_response_event_processor(query_result[1])
- scope.add_event_processor(event_processor)
+ event_processor = _make_response_event_processor(query_result[1])
+ Scope.get_isolation_scope().add_event_processor(event_processor)
- if hub.client:
+ client = get_client()
+ if client.is_active():
with capture_internal_exceptions():
for error in result.errors or []:
event, hint = event_from_exception(
error,
- client_options=hub.client.options,
+ client_options=client.options,
mechanism={
- "type": integration.identifier,
+ "type": AriadneIntegration.identifier,
"handled": False,
},
)
- hub.capture_event(event, hint=hint)
+ capture_event(event, hint=hint)
return query_result
@@ -143,8 +132,8 @@ def inner(event, hint):
except (TypeError, ValueError):
return event
- if _should_send_default_pii() and request_body_within_bounds(
- Hub.current.client, content_length
+ if should_send_default_pii() and request_body_within_bounds(
+ get_client(), content_length
):
request_info = event.setdefault("request", {})
request_info["api_target"] = "graphql"
@@ -165,7 +154,7 @@ def _make_response_event_processor(response):
def inner(event, hint):
# type: (Event, dict[str, Any]) -> Event
with capture_internal_exceptions():
- if _should_send_default_pii() and response.get("errors"):
+ if should_send_default_pii() and response.get("errors"):
contexts = event.setdefault("contexts", {})
contexts["response"] = {
"data": response,
diff --git a/sentry_sdk/integrations/arq.py b/sentry_sdk/integrations/arq.py
index ed045b854a..12f73aa95f 100644
--- a/sentry_sdk/integrations/arq.py
+++ b/sentry_sdk/integrations/arq.py
@@ -1,20 +1,19 @@
-from __future__ import absolute_import
-
import sys
-from sentry_sdk._compat import reraise
+import sentry_sdk
from sentry_sdk._types import TYPE_CHECKING
-from sentry_sdk import Hub
from sentry_sdk.consts import OP
-from sentry_sdk.hub import _should_send_default_pii
from sentry_sdk.integrations import DidNotEnable, Integration
from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.scope import Scope, should_send_default_pii
from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_TASK
from sentry_sdk.utils import (
capture_internal_exceptions,
+ ensure_integration_enabled,
event_from_exception,
SENSITIVE_DATA_SUBSTITUTE,
parse_version,
+ reraise,
)
try:
@@ -73,12 +72,11 @@ def patch_enqueue_job():
async def _sentry_enqueue_job(self, function, *args, **kwargs):
# type: (ArqRedis, str, *Any, **Any) -> Optional[Job]
- hub = Hub.current
-
- if hub.get_integration(ArqIntegration) is None:
+ integration = sentry_sdk.get_client().get_integration(ArqIntegration)
+ if integration is None:
return await old_enqueue_job(self, function, *args, **kwargs)
- with hub.start_span(op=OP.QUEUE_SUBMIT_ARQ, description=function):
+ with sentry_sdk.start_span(op=OP.QUEUE_SUBMIT_ARQ, description=function):
return await old_enqueue_job(self, function, *args, **kwargs)
ArqRedis.enqueue_job = _sentry_enqueue_job
@@ -90,12 +88,11 @@ def patch_run_job():
async def _sentry_run_job(self, job_id, score):
# type: (Worker, str, int) -> None
- hub = Hub(Hub.current)
-
- if hub.get_integration(ArqIntegration) is None:
+ integration = sentry_sdk.get_client().get_integration(ArqIntegration)
+ if integration is None:
return await old_run_job(self, job_id, score)
- with hub.push_scope() as scope:
+ with sentry_sdk.isolation_scope() as scope:
scope._name = "arq"
scope.clear_breadcrumbs()
@@ -106,7 +103,7 @@ async def _sentry_run_job(self, job_id, score):
source=TRANSACTION_SOURCE_TASK,
)
- with hub.start_transaction(transaction):
+ with sentry_sdk.start_transaction(transaction):
return await old_run_job(self, job_id, score)
Worker.run_job = _sentry_run_job
@@ -114,21 +111,21 @@ async def _sentry_run_job(self, job_id, score):
def _capture_exception(exc_info):
# type: (ExcInfo) -> None
- hub = Hub.current
+ scope = Scope.get_current_scope()
- if hub.scope.transaction is not None:
+ if scope.transaction is not None:
if exc_info[0] in ARQ_CONTROL_FLOW_EXCEPTIONS:
- hub.scope.transaction.set_status("aborted")
+ scope.transaction.set_status("aborted")
return
- hub.scope.transaction.set_status("internal_error")
+ scope.transaction.set_status("internal_error")
event, hint = event_from_exception(
exc_info,
- client_options=hub.client.options if hub.client else None,
+ client_options=Scope.get_client().options,
mechanism={"type": ArqIntegration.identifier, "handled": False},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
def _make_event_processor(ctx, *args, **kwargs):
@@ -136,11 +133,10 @@ def _make_event_processor(ctx, *args, **kwargs):
def event_processor(event, hint):
# type: (Event, Hint) -> Optional[Event]
- hub = Hub.current
-
with capture_internal_exceptions():
- if hub.scope.transaction is not None:
- hub.scope.transaction.name = ctx["job_name"]
+ scope = Scope.get_current_scope()
+ if scope.transaction is not None:
+ scope.transaction.name = ctx["job_name"]
event["transaction"] = ctx["job_name"]
tags = event.setdefault("tags", {})
@@ -150,10 +146,10 @@ def event_processor(event, hint):
extra["arq-job"] = {
"task": ctx["job_name"],
"args": (
- args if _should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE
+ args if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE
),
"kwargs": (
- kwargs if _should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE
+ kwargs if should_send_default_pii() else SENSITIVE_DATA_SUBSTITUTE
),
"retry": ctx["job_try"],
}
@@ -165,13 +161,14 @@ def event_processor(event, hint):
def _wrap_coroutine(name, coroutine):
# type: (str, WorkerCoroutine) -> WorkerCoroutine
+
async def _sentry_coroutine(ctx, *args, **kwargs):
# type: (Dict[Any, Any], *Any, **Any) -> Any
- hub = Hub.current
- if hub.get_integration(ArqIntegration) is None:
+ integration = sentry_sdk.get_client().get_integration(ArqIntegration)
+ if integration is None:
return await coroutine(ctx, *args, **kwargs)
- hub.scope.add_event_processor(
+ Scope.get_isolation_scope().add_event_processor(
_make_event_processor({**ctx, "job_name": name}, *args, **kwargs)
)
@@ -191,13 +188,9 @@ def patch_create_worker():
# type: () -> None
old_create_worker = arq.worker.create_worker
+ @ensure_integration_enabled(ArqIntegration, old_create_worker)
def _sentry_create_worker(*args, **kwargs):
# type: (*Any, **Any) -> Worker
- hub = Hub.current
-
- if hub.get_integration(ArqIntegration) is None:
- return old_create_worker(*args, **kwargs)
-
settings_cls = args[0]
if hasattr(settings_cls, "functions"):
diff --git a/sentry_sdk/integrations/asgi.py b/sentry_sdk/integrations/asgi.py
index 901c6f5d23..8aca37ea40 100644
--- a/sentry_sdk/integrations/asgi.py
+++ b/sentry_sdk/integrations/asgi.py
@@ -7,19 +7,19 @@
import asyncio
import inspect
from copy import deepcopy
+from functools import partial
-from sentry_sdk._functools import partial
+import sentry_sdk
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.api import continue_trace
from sentry_sdk.consts import OP
-from sentry_sdk.hub import Hub
from sentry_sdk.integrations._asgi_common import (
_get_headers,
_get_request_data,
_get_url,
)
-from sentry_sdk.sessions import auto_session_tracking
+from sentry_sdk.sessions import auto_session_tracking_scope
from sentry_sdk.tracing import (
SOURCE_FOR_STYLE,
TRANSACTION_SOURCE_ROUTE,
@@ -54,17 +54,15 @@
TRANSACTION_STYLE_VALUES = ("endpoint", "url")
-def _capture_exception(hub, exc, mechanism_type="asgi"):
- # type: (Hub, Any, str) -> None
+def _capture_exception(exc, mechanism_type="asgi"):
+ # type: (Any, str) -> None
- # Check client here as it might have been unset while streaming response
- if hub.client is not None:
- event, hint = event_from_exception(
- exc,
- client_options=hub.client.options,
- mechanism={"type": mechanism_type, "handled": False},
- )
- hub.capture_event(event, hint=hint)
+ event, hint = event_from_exception(
+ exc,
+ client_options=sentry_sdk.get_client().options,
+ mechanism={"type": mechanism_type, "handled": False},
+ )
+ sentry_sdk.capture_event(event, hint=hint)
def _looks_like_asgi3(app):
@@ -157,19 +155,17 @@ async def _run_app(self, scope, receive, send, asgi_version):
return await self.app(scope, receive, send)
except Exception as exc:
- _capture_exception(Hub.current, exc, mechanism_type=self.mechanism_type)
+ _capture_exception(exc, mechanism_type=self.mechanism_type)
raise exc from None
_asgi_middleware_applied.set(True)
try:
- hub = Hub(Hub.current)
- with auto_session_tracking(hub, session_mode="request"):
- with hub:
- with hub.configure_scope() as sentry_scope:
- sentry_scope.clear_breadcrumbs()
- sentry_scope._name = "asgi"
- processor = partial(self.event_processor, asgi_scope=scope)
- sentry_scope.add_event_processor(processor)
+ with sentry_sdk.isolation_scope() as sentry_scope:
+ with auto_session_tracking_scope(sentry_scope, session_mode="request"):
+ sentry_scope.clear_breadcrumbs()
+ sentry_scope._name = "asgi"
+ processor = partial(self.event_processor, asgi_scope=scope)
+ sentry_scope.add_event_processor(processor)
ty = scope["type"]
(
@@ -208,7 +204,7 @@ async def _run_app(self, scope, receive, send, asgi_version):
transaction.source,
)
- with hub.start_transaction(
+ with sentry_sdk.start_transaction(
transaction, custom_sampling_context={"asgi_scope": scope}
):
logger.debug("[ASGI] Started transaction: %s", transaction)
@@ -235,9 +231,7 @@ async def _sentry_wrapped_send(event):
scope, receive, _sentry_wrapped_send
)
except Exception as exc:
- _capture_exception(
- hub, exc, mechanism_type=self.mechanism_type
- )
+ _capture_exception(exc, mechanism_type=self.mechanism_type)
raise exc from None
finally:
_asgi_middleware_applied.set(False)
diff --git a/sentry_sdk/integrations/asyncio.py b/sentry_sdk/integrations/asyncio.py
index 7f9b5b0c6d..18c092e0c0 100644
--- a/sentry_sdk/integrations/asyncio.py
+++ b/sentry_sdk/integrations/asyncio.py
@@ -1,12 +1,10 @@
-from __future__ import absolute_import
import sys
-from sentry_sdk._compat import reraise
+import sentry_sdk
from sentry_sdk.consts import OP
-from sentry_sdk.hub import Hub
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk._types import TYPE_CHECKING
-from sentry_sdk.utils import event_from_exception
+from sentry_sdk.utils import event_from_exception, reraise
try:
import asyncio
@@ -43,15 +41,16 @@ def _sentry_task_factory(loop, coro, **kwargs):
async def _coro_creating_hub_and_span():
# type: () -> Any
- hub = Hub(Hub.current)
result = None
- with hub:
- with hub.start_span(op=OP.FUNCTION, description=get_name(coro)):
+ with sentry_sdk.isolation_scope():
+ with sentry_sdk.start_span(
+ op=OP.FUNCTION, description=get_name(coro)
+ ):
try:
result = await coro
except Exception:
- reraise(*_capture_exception(hub))
+ reraise(*_capture_exception())
return result
@@ -78,21 +77,20 @@ async def _coro_creating_hub_and_span():
pass
-def _capture_exception(hub):
- # type: (Hub) -> ExcInfo
+def _capture_exception():
+ # type: () -> ExcInfo
exc_info = sys.exc_info()
- integration = hub.get_integration(AsyncioIntegration)
- if integration is not None:
- # If an integration is there, a client has to be there.
- client = hub.client # type: Any
+ client = sentry_sdk.get_client()
+ integration = client.get_integration(AsyncioIntegration)
+ if integration is not None:
event, hint = event_from_exception(
exc_info,
client_options=client.options,
mechanism={"type": "asyncio", "handled": False},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
return exc_info
diff --git a/sentry_sdk/integrations/asyncpg.py b/sentry_sdk/integrations/asyncpg.py
index 19aa9c3a69..cfcb8a0528 100644
--- a/sentry_sdk/integrations/asyncpg.py
+++ b/sentry_sdk/integrations/asyncpg.py
@@ -2,17 +2,20 @@
import contextlib
from typing import Any, TypeVar, Callable, Awaitable, Iterator
-from asyncpg.cursor import BaseCursor # type: ignore
-
-from sentry_sdk import Hub
+import sentry_sdk
from sentry_sdk.consts import OP, SPANDATA
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.tracing import Span
from sentry_sdk.tracing_utils import add_query_source, record_sql_queries
-from sentry_sdk.utils import parse_version, capture_internal_exceptions
+from sentry_sdk.utils import (
+ ensure_integration_enabled,
+ parse_version,
+ capture_internal_exceptions,
+)
try:
import asyncpg # type: ignore[import-not-found]
+ from asyncpg.cursor import BaseCursor # type: ignore
except ImportError:
raise DidNotEnable("asyncpg not installed.")
@@ -55,24 +58,22 @@ def setup_once() -> None:
def _wrap_execute(f: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]:
async def _inner(*args: Any, **kwargs: Any) -> T:
- hub = Hub.current
- integration = hub.get_integration(AsyncPGIntegration)
+ if sentry_sdk.get_client().get_integration(AsyncPGIntegration) is None:
+ return await f(*args, **kwargs)
# Avoid recording calls to _execute twice.
# Calls to Connection.execute with args also call
# Connection._execute, which is recorded separately
# args[0] = the connection object, args[1] is the query
- if integration is None or len(args) > 2:
+ if len(args) > 2:
return await f(*args, **kwargs)
query = args[1]
- with record_sql_queries(
- hub, None, query, None, None, executemany=False
- ) as span:
+ with record_sql_queries(None, query, None, None, executemany=False) as span:
res = await f(*args, **kwargs)
with capture_internal_exceptions():
- add_query_source(hub, span)
+ add_query_source(span)
return res
@@ -84,21 +85,19 @@ async def _inner(*args: Any, **kwargs: Any) -> T:
@contextlib.contextmanager
def _record(
- hub: Hub,
cursor: SubCursor | None,
query: str,
params_list: tuple[Any, ...] | None,
*,
executemany: bool = False,
) -> Iterator[Span]:
- integration = hub.get_integration(AsyncPGIntegration)
- if not integration._record_params:
+ integration = sentry_sdk.get_client().get_integration(AsyncPGIntegration)
+ if integration is not None and not integration._record_params:
params_list = None
param_style = "pyformat" if params_list else None
with record_sql_queries(
- hub,
cursor,
query,
params_list,
@@ -113,15 +112,11 @@ def _wrap_connection_method(
f: Callable[..., Awaitable[T]], *, executemany: bool = False
) -> Callable[..., Awaitable[T]]:
async def _inner(*args: Any, **kwargs: Any) -> T:
- hub = Hub.current
- integration = hub.get_integration(AsyncPGIntegration)
-
- if integration is None:
+ if sentry_sdk.get_client().get_integration(AsyncPGIntegration) is None:
return await f(*args, **kwargs)
-
query = args[1]
params_list = args[2] if len(args) > 2 else None
- with _record(hub, None, query, params_list, executemany=executemany) as span:
+ with _record(None, query, params_list, executemany=executemany) as span:
_set_db_data(span, args[0])
res = await f(*args, **kwargs)
@@ -131,18 +126,12 @@ async def _inner(*args: Any, **kwargs: Any) -> T:
def _wrap_cursor_creation(f: Callable[..., T]) -> Callable[..., T]:
+ @ensure_integration_enabled(AsyncPGIntegration, f)
def _inner(*args: Any, **kwargs: Any) -> T: # noqa: N807
- hub = Hub.current
- integration = hub.get_integration(AsyncPGIntegration)
-
- if integration is None:
- return f(*args, **kwargs)
-
query = args[1]
params_list = args[2] if len(args) > 2 else None
with _record(
- hub,
None,
query,
params_list,
@@ -159,16 +148,13 @@ def _inner(*args: Any, **kwargs: Any) -> T: # noqa: N807
def _wrap_connect_addr(f: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]:
async def _inner(*args: Any, **kwargs: Any) -> T:
- hub = Hub.current
- integration = hub.get_integration(AsyncPGIntegration)
-
- if integration is None:
+ if sentry_sdk.get_client().get_integration(AsyncPGIntegration) is None:
return await f(*args, **kwargs)
user = kwargs["params"].user
database = kwargs["params"].database
- with hub.start_span(op=OP.DB, description="connect") as span:
+ with sentry_sdk.start_span(op=OP.DB, description="connect") as span:
span.set_data(SPANDATA.DB_SYSTEM, "postgresql")
addr = kwargs.get("addr")
if addr:
@@ -181,7 +167,9 @@ async def _inner(*args: Any, **kwargs: Any) -> T:
span.set_data(SPANDATA.DB_USER, user)
with capture_internal_exceptions():
- hub.add_breadcrumb(message="connect", category="query", data=span._data)
+ sentry_sdk.add_breadcrumb(
+ message="connect", category="query", data=span._data
+ )
res = await f(*args, **kwargs)
return res
diff --git a/sentry_sdk/integrations/atexit.py b/sentry_sdk/integrations/atexit.py
index af70dd9fc9..d11e35fafa 100644
--- a/sentry_sdk/integrations/atexit.py
+++ b/sentry_sdk/integrations/atexit.py
@@ -1,13 +1,12 @@
-from __future__ import absolute_import
-
import os
import sys
import atexit
-from sentry_sdk.hub import Hub
+import sentry_sdk
+from sentry_sdk import Scope
from sentry_sdk.utils import logger
from sentry_sdk.integrations import Integration
-
+from sentry_sdk.utils import ensure_integration_enabled
from sentry_sdk._types import TYPE_CHECKING
if TYPE_CHECKING:
@@ -45,17 +44,13 @@ def __init__(self, callback=None):
def setup_once():
# type: () -> None
@atexit.register
+ @ensure_integration_enabled(AtexitIntegration)
def _shutdown():
# type: () -> None
logger.debug("atexit: got shutdown signal")
- hub = Hub.main
- integration = hub.get_integration(AtexitIntegration)
- if integration is not None:
- logger.debug("atexit: shutting down client")
-
- # If there is a session on the hub, close it now.
- hub.end_session()
+ client = sentry_sdk.get_client()
+ integration = client.get_integration(AtexitIntegration)
- # If an integration is there, a client has to be there.
- client = hub.client # type: Any
- client.close(callback=integration.callback)
+ logger.debug("atexit: shutting down client")
+ Scope.get_isolation_scope().end_session()
+ client.close(callback=integration.callback)
diff --git a/sentry_sdk/integrations/aws_lambda.py b/sentry_sdk/integrations/aws_lambda.py
index 3cefc90cfb..bd1e3619de 100644
--- a/sentry_sdk/integrations/aws_lambda.py
+++ b/sentry_sdk/integrations/aws_lambda.py
@@ -1,26 +1,27 @@
import sys
from copy import deepcopy
-from datetime import timedelta
+from datetime import datetime, timedelta, timezone
from os import environ
+import sentry_sdk
from sentry_sdk.api import continue_trace
from sentry_sdk.consts import OP
-from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.scope import Scope, should_send_default_pii
from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT
from sentry_sdk.utils import (
AnnotatedValue,
capture_internal_exceptions,
+ ensure_integration_enabled,
event_from_exception,
logger,
TimeoutThread,
+ reraise,
)
from sentry_sdk.integrations import Integration
from sentry_sdk.integrations._wsgi_common import _filter_headers
-from sentry_sdk._compat import datetime_utcnow, reraise
from sentry_sdk._types import TYPE_CHECKING
if TYPE_CHECKING:
- from datetime import datetime
from typing import Any
from typing import TypeVar
from typing import Callable
@@ -37,20 +38,13 @@
def _wrap_init_error(init_error):
# type: (F) -> F
+ @ensure_integration_enabled(AwsLambdaIntegration, init_error)
def sentry_init_error(*args, **kwargs):
# type: (*Any, **Any) -> Any
-
- hub = Hub.current
- integration = hub.get_integration(AwsLambdaIntegration)
- if integration is None:
- return init_error(*args, **kwargs)
-
- # If an integration is there, a client has to be there.
- client = hub.client # type: Any
+ client = sentry_sdk.get_client()
with capture_internal_exceptions():
- with hub.configure_scope() as scope:
- scope.clear_breadcrumbs()
+ Scope.get_isolation_scope().clear_breadcrumbs()
exc_info = sys.exc_info()
if exc_info and all(exc_info):
@@ -59,7 +53,7 @@ def sentry_init_error(*args, **kwargs):
client_options=client.options,
mechanism={"type": "aws_lambda", "handled": False},
)
- hub.capture_event(sentry_event, hint=hint)
+ sentry_sdk.capture_event(sentry_event, hint=hint)
return init_error(*args, **kwargs)
@@ -68,6 +62,7 @@ def sentry_init_error(*args, **kwargs):
def _wrap_handler(handler):
# type: (F) -> F
+ @ensure_integration_enabled(AwsLambdaIntegration, handler)
def sentry_handler(aws_event, aws_context, *args, **kwargs):
# type: (Any, Any, *Any, **Any) -> Any
@@ -94,16 +89,12 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs):
# this is empty
request_data = {}
- hub = Hub.current
- integration = hub.get_integration(AwsLambdaIntegration)
- if integration is None:
- return handler(aws_event, aws_context, *args, **kwargs)
+ client = sentry_sdk.get_client()
+ integration = client.get_integration(AwsLambdaIntegration)
- # If an integration is there, a client has to be there.
- client = hub.client # type: Any
configured_time = aws_context.get_remaining_time_in_millis()
- with hub.push_scope() as scope:
+ with sentry_sdk.isolation_scope() as scope:
timeout_thread = None
with capture_internal_exceptions():
scope.clear_breadcrumbs()
@@ -149,7 +140,7 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs):
name=aws_context.function_name,
source=TRANSACTION_SOURCE_COMPONENT,
)
- with hub.start_transaction(
+ with sentry_sdk.start_transaction(
transaction,
custom_sampling_context={
"aws_event": aws_event,
@@ -165,7 +156,7 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs):
client_options=client.options,
mechanism={"type": "aws_lambda", "handled": False},
)
- hub.capture_event(sentry_event, hint=hint)
+ sentry_sdk.capture_event(sentry_event, hint=hint)
reraise(*exc_info)
finally:
if timeout_thread:
@@ -177,12 +168,12 @@ def sentry_handler(aws_event, aws_context, *args, **kwargs):
def _drain_queue():
# type: () -> None
with capture_internal_exceptions():
- hub = Hub.current
- integration = hub.get_integration(AwsLambdaIntegration)
+ client = sentry_sdk.get_client()
+ integration = client.get_integration(AwsLambdaIntegration)
if integration is not None:
# Flush out the event queue before AWS kills the
# process.
- hub.flush()
+ client.flush()
class AwsLambdaIntegration(Integration):
@@ -211,7 +202,7 @@ def setup_once():
)
return
- pre_37 = hasattr(lambda_bootstrap, "handle_http_request") # Python 3.6 or 2.7
+ pre_37 = hasattr(lambda_bootstrap, "handle_http_request") # Python 3.6
if pre_37:
old_handle_event_request = lambda_bootstrap.handle_event_request
@@ -287,8 +278,6 @@ def inner(*args, **kwargs):
def get_lambda_bootstrap():
# type: () -> Optional[Any]
- # Python 2.7: Everything is in `__main__`.
- #
# Python 3.7: If the bootstrap module is *already imported*, it is the
# one we actually want to use (no idea what's in __main__)
#
@@ -325,7 +314,7 @@ def get_lambda_bootstrap():
def _make_request_event_processor(aws_event, aws_context, configured_timeout):
# type: (Any, Any, Any) -> EventProcessor
- start_time = datetime_utcnow()
+ start_time = datetime.now(timezone.utc)
def event_processor(sentry_event, hint, start_time=start_time):
# type: (Event, Hint, datetime) -> Optional[Event]
@@ -361,7 +350,7 @@ def event_processor(sentry_event, hint, start_time=start_time):
if "headers" in aws_event:
request["headers"] = _filter_headers(aws_event["headers"])
- if _should_send_default_pii():
+ if should_send_default_pii():
user_info = sentry_event.setdefault("user", {})
identity = aws_event.get("identity")
@@ -430,7 +419,9 @@ def _get_cloudwatch_logs_url(aws_context, start_time):
log_group=aws_context.log_group_name,
log_stream=aws_context.log_stream_name,
start_time=(start_time - timedelta(seconds=1)).strftime(formatstring),
- end_time=(datetime_utcnow() + timedelta(seconds=2)).strftime(formatstring),
+ end_time=(datetime.now(timezone.utc) + timedelta(seconds=2)).strftime(
+ formatstring
+ ),
)
return url
diff --git a/sentry_sdk/integrations/beam.py b/sentry_sdk/integrations/beam.py
index ea45087d05..a2323cb406 100644
--- a/sentry_sdk/integrations/beam.py
+++ b/sentry_sdk/integrations/beam.py
@@ -1,24 +1,24 @@
-from __future__ import absolute_import
-
import sys
import types
-from sentry_sdk._functools import wraps
+from functools import wraps
-from sentry_sdk.hub import Hub
-from sentry_sdk._compat import reraise
-from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+import sentry_sdk
from sentry_sdk.integrations import Integration
from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.utils import (
+ capture_internal_exceptions,
+ ensure_integration_enabled,
+ event_from_exception,
+ reraise,
+)
from sentry_sdk._types import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Any
from typing import Iterator
from typing import TypeVar
- from typing import Optional
from typing import Callable
- from sentry_sdk.client import Client
from sentry_sdk._types import ExcInfo
T = TypeVar("T")
@@ -116,9 +116,7 @@ def _wrap_task_call(func):
# type: (F) -> F
"""
Wrap task call with a try catch to get exceptions.
- Pass the client on to raise_exception so it can get rebinded.
"""
- client = Hub.current.client
@wraps(func)
def _inner(*args, **kwargs):
@@ -126,53 +124,45 @@ def _inner(*args, **kwargs):
try:
gen = func(*args, **kwargs)
except Exception:
- raise_exception(client)
+ raise_exception()
if not isinstance(gen, types.GeneratorType):
return gen
- return _wrap_generator_call(gen, client)
+ return _wrap_generator_call(gen)
setattr(_inner, USED_FUNC, True)
return _inner # type: ignore
-def _capture_exception(exc_info, hub):
- # type: (ExcInfo, Hub) -> None
+@ensure_integration_enabled(BeamIntegration)
+def _capture_exception(exc_info):
+ # type: (ExcInfo) -> None
"""
Send Beam exception to Sentry.
"""
- integration = hub.get_integration(BeamIntegration)
- if integration is None:
- return
-
- client = hub.client
- if client is None:
- return
+ client = sentry_sdk.get_client()
event, hint = event_from_exception(
exc_info,
client_options=client.options,
mechanism={"type": "beam", "handled": False},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
-def raise_exception(client):
- # type: (Optional[Client]) -> None
+def raise_exception():
+ # type: () -> None
"""
- Raise an exception. If the client is not in the hub, rebind it.
+ Raise an exception.
"""
- hub = Hub.current
- if hub.client is None:
- hub.bind_client(client)
exc_info = sys.exc_info()
with capture_internal_exceptions():
- _capture_exception(exc_info, hub)
+ _capture_exception(exc_info)
reraise(*exc_info)
-def _wrap_generator_call(gen, client):
- # type: (Iterator[T], Optional[Client]) -> Iterator[T]
+def _wrap_generator_call(gen):
+ # type: (Iterator[T]) -> Iterator[T]
"""
Wrap the generator to handle any failures.
"""
@@ -182,4 +172,4 @@ def _wrap_generator_call(gen, client):
except StopIteration:
break
except Exception:
- raise_exception(client)
+ raise_exception()
diff --git a/sentry_sdk/integrations/boto3.py b/sentry_sdk/integrations/boto3.py
index a21772fc1a..e1c9ae698f 100644
--- a/sentry_sdk/integrations/boto3.py
+++ b/sentry_sdk/integrations/boto3.py
@@ -1,13 +1,17 @@
-from __future__ import absolute_import
+from functools import partial
-from sentry_sdk import Hub
+import sentry_sdk
from sentry_sdk.consts import OP, SPANDATA
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.tracing import Span
-from sentry_sdk._functools import partial
from sentry_sdk._types import TYPE_CHECKING
-from sentry_sdk.utils import capture_internal_exceptions, parse_url, parse_version
+from sentry_sdk.utils import (
+ capture_internal_exceptions,
+ ensure_integration_enabled,
+ parse_url,
+ parse_version,
+)
if TYPE_CHECKING:
from typing import Any
@@ -58,15 +62,11 @@ def sentry_patched_init(self, *args, **kwargs):
BaseClient.__init__ = sentry_patched_init
+@ensure_integration_enabled(Boto3Integration)
def _sentry_request_created(service_id, request, operation_name, **kwargs):
# type: (str, AWSRequest, str, **Any) -> None
- hub = Hub.current
- if hub.get_integration(Boto3Integration) is None:
- return
-
description = "aws.%s.%s" % (service_id, operation_name)
- span = hub.start_span(
- hub=hub,
+ span = sentry_sdk.start_span(
op=OP.HTTP_CLIENT,
description=description,
)
diff --git a/sentry_sdk/integrations/bottle.py b/sentry_sdk/integrations/bottle.py
index 6f3678466e..472f0a352b 100644
--- a/sentry_sdk/integrations/bottle.py
+++ b/sentry_sdk/integrations/bottle.py
@@ -1,9 +1,8 @@
-from __future__ import absolute_import
-
-from sentry_sdk.hub import Hub
+import sentry_sdk
from sentry_sdk.tracing import SOURCE_FOR_STYLE
from sentry_sdk.utils import (
capture_internal_exceptions,
+ ensure_integration_enabled,
event_from_exception,
parse_version,
transaction_from_function,
@@ -11,7 +10,7 @@
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
from sentry_sdk.integrations._wsgi_common import RequestExtractor
-
+from sentry_sdk.scope import Scope
from sentry_sdk._types import TYPE_CHECKING
if TYPE_CHECKING:
@@ -57,7 +56,6 @@ def __init__(self, transaction_style="endpoint"):
@staticmethod
def setup_once():
# type: () -> None
-
version = parse_version(BOTTLE_VERSION)
if version is None:
@@ -66,63 +64,42 @@ def setup_once():
if version < (0, 12):
raise DidNotEnable("Bottle 0.12 or newer required.")
- # monkey patch method Bottle.__call__
old_app = Bottle.__call__
+ @ensure_integration_enabled(BottleIntegration, old_app)
def sentry_patched_wsgi_app(self, environ, start_response):
# type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
-
- hub = Hub.current
- integration = hub.get_integration(BottleIntegration)
- if integration is None:
- return old_app(self, environ, start_response)
-
return SentryWsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw))(
environ, start_response
)
Bottle.__call__ = sentry_patched_wsgi_app
- # monkey patch method Bottle._handle
old_handle = Bottle._handle
+ @ensure_integration_enabled(BottleIntegration, old_handle)
def _patched_handle(self, environ):
# type: (Bottle, Dict[str, Any]) -> Any
- hub = Hub.current
- integration = hub.get_integration(BottleIntegration)
- if integration is None:
- return old_handle(self, environ)
-
- # create new scope
- scope_manager = hub.push_scope()
-
- with scope_manager:
- app = self
- with hub.configure_scope() as scope:
- scope._name = "bottle"
- scope.add_event_processor(
- _make_request_event_processor(app, bottle_request, integration)
- )
- res = old_handle(self, environ)
+ integration = sentry_sdk.get_client().get_integration(BottleIntegration)
+
+ scope = Scope.get_isolation_scope()
+ scope._name = "bottle"
+ scope.add_event_processor(
+ _make_request_event_processor(self, bottle_request, integration)
+ )
+ res = old_handle(self, environ)
- # scope cleanup
return res
Bottle._handle = _patched_handle
- # monkey patch method Route._make_callback
old_make_callback = Route._make_callback
+ @ensure_integration_enabled(BottleIntegration, old_make_callback)
def patched_make_callback(self, *args, **kwargs):
# type: (Route, *object, **object) -> Any
- hub = Hub.current
- integration = hub.get_integration(BottleIntegration)
+ client = sentry_sdk.get_client()
prepared_callback = old_make_callback(self, *args, **kwargs)
- if integration is None:
- return prepared_callback
-
- # If an integration is there, a client has to be there.
- client = hub.client # type: Any
def wrapped_callback(*args, **kwargs):
# type: (*object, **object) -> Any
@@ -137,7 +114,7 @@ def wrapped_callback(*args, **kwargs):
client_options=client.options,
mechanism={"type": "bottle", "handled": False},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
raise exception
return res
diff --git a/sentry_sdk/integrations/celery.py b/sentry_sdk/integrations/celery.py
deleted file mode 100644
index 984197316f..0000000000
--- a/sentry_sdk/integrations/celery.py
+++ /dev/null
@@ -1,680 +0,0 @@
-from __future__ import absolute_import
-
-import sys
-import time
-
-try:
- from typing import cast
-except ImportError:
- cast = lambda _, o: o
-
-from sentry_sdk.api import continue_trace
-from sentry_sdk.consts import OP
-from sentry_sdk._compat import reraise
-from sentry_sdk._functools import wraps
-from sentry_sdk.crons import capture_checkin, MonitorStatus
-from sentry_sdk.hub import Hub
-from sentry_sdk.integrations import Integration, DidNotEnable
-from sentry_sdk.integrations.logging import ignore_logger
-from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, TRANSACTION_SOURCE_TASK
-from sentry_sdk._types import TYPE_CHECKING
-from sentry_sdk.utils import (
- capture_internal_exceptions,
- event_from_exception,
- logger,
- match_regex_list,
-)
-
-if TYPE_CHECKING:
- from typing import Any
- from typing import Callable
- from typing import Dict
- from typing import List
- from typing import Optional
- from typing import Tuple
- from typing import TypeVar
- from typing import Union
-
- from sentry_sdk.tracing import Span
- from sentry_sdk._types import (
- EventProcessor,
- Event,
- Hint,
- ExcInfo,
- MonitorConfig,
- MonitorConfigScheduleType,
- MonitorConfigScheduleUnit,
- )
-
- F = TypeVar("F", bound=Callable[..., Any])
-
-
-try:
- from celery import VERSION as CELERY_VERSION # type: ignore
- from celery import Task, Celery
- from celery.app.trace import task_has_custom
- from celery.beat import Scheduler # type: ignore
- from celery.exceptions import ( # type: ignore
- Ignore,
- Reject,
- Retry,
- SoftTimeLimitExceeded,
- )
- from celery.schedules import crontab, schedule # type: ignore
- from celery.signals import ( # type: ignore
- task_failure,
- task_success,
- task_retry,
- )
-except ImportError:
- raise DidNotEnable("Celery not installed")
-
-try:
- from redbeat.schedulers import RedBeatScheduler # type: ignore
-except ImportError:
- RedBeatScheduler = None
-
-
-CELERY_CONTROL_FLOW_EXCEPTIONS = (Retry, Ignore, Reject)
-
-
-class CeleryIntegration(Integration):
- identifier = "celery"
-
- def __init__(
- self,
- propagate_traces=True,
- monitor_beat_tasks=False,
- exclude_beat_tasks=None,
- ):
- # type: (bool, bool, Optional[List[str]]) -> None
- self.propagate_traces = propagate_traces
- self.monitor_beat_tasks = monitor_beat_tasks
- self.exclude_beat_tasks = exclude_beat_tasks
-
- if monitor_beat_tasks:
- _patch_beat_apply_entry()
- _patch_redbeat_maybe_due()
- _setup_celery_beat_signals()
-
- @staticmethod
- def setup_once():
- # type: () -> None
- if CELERY_VERSION < (3,):
- raise DidNotEnable("Celery 3 or newer required.")
-
- import celery.app.trace as trace # type: ignore
-
- old_build_tracer = trace.build_tracer
-
- def sentry_build_tracer(name, task, *args, **kwargs):
- # type: (Any, Any, *Any, **Any) -> Any
- if not getattr(task, "_sentry_is_patched", False):
- # determine whether Celery will use __call__ or run and patch
- # accordingly
- if task_has_custom(task, "__call__"):
- type(task).__call__ = _wrap_task_call(task, type(task).__call__)
- else:
- task.run = _wrap_task_call(task, task.run)
-
- # `build_tracer` is apparently called for every task
- # invocation. Can't wrap every celery task for every invocation
- # or we will get infinitely nested wrapper functions.
- task._sentry_is_patched = True
-
- return _wrap_tracer(task, old_build_tracer(name, task, *args, **kwargs))
-
- trace.build_tracer = sentry_build_tracer
-
- from celery.app.task import Task # type: ignore
-
- Task.apply_async = _wrap_apply_async(Task.apply_async)
-
- _patch_worker_exit()
-
- # This logger logs every status of every task that ran on the worker.
- # Meaning that every task's breadcrumbs are full of stuff like "Task
- # raised unexpected ".
- ignore_logger("celery.worker.job")
- ignore_logger("celery.app.trace")
-
- # This is stdout/err redirected to a logger, can't deal with this
- # (need event_level=logging.WARN to reproduce)
- ignore_logger("celery.redirected")
-
-
-def _now_seconds_since_epoch():
- # type: () -> float
- # We cannot use `time.perf_counter()` when dealing with the duration
- # of a Celery task, because the start of a Celery task and
- # the end are recorded in different processes.
- # Start happens in the Celery Beat process,
- # the end in a Celery Worker process.
- return time.time()
-
-
-class NoOpMgr:
- def __enter__(self):
- # type: () -> None
- return None
-
- def __exit__(self, exc_type, exc_value, traceback):
- # type: (Any, Any, Any) -> None
- return None
-
-
-def _wrap_apply_async(f):
- # type: (F) -> F
- @wraps(f)
- def apply_async(*args, **kwargs):
- # type: (*Any, **Any) -> Any
- hub = Hub.current
- integration = hub.get_integration(CeleryIntegration)
-
- if integration is None:
- return f(*args, **kwargs)
-
- # Note: kwargs can contain headers=None, so no setdefault!
- # Unsure which backend though.
- kwarg_headers = kwargs.get("headers") or {}
- propagate_traces = kwarg_headers.pop(
- "sentry-propagate-traces", integration.propagate_traces
- )
-
- if not propagate_traces:
- return f(*args, **kwargs)
-
- try:
- task_started_from_beat = args[1][0] == "BEAT"
- except (IndexError, TypeError):
- task_started_from_beat = False
-
- task = args[0]
-
- span_mgr = (
- hub.start_span(op=OP.QUEUE_SUBMIT_CELERY, description=task.name)
- if not task_started_from_beat
- else NoOpMgr()
- ) # type: Union[Span, NoOpMgr]
-
- with span_mgr as span:
- with capture_internal_exceptions():
- headers = (
- dict(hub.iter_trace_propagation_headers(span))
- if span is not None
- else {}
- )
- if integration.monitor_beat_tasks:
- headers.update(
- {
- "sentry-monitor-start-timestamp-s": "%.9f"
- % _now_seconds_since_epoch(),
- }
- )
-
- if headers:
- existing_baggage = kwarg_headers.get(BAGGAGE_HEADER_NAME)
- sentry_baggage = headers.get(BAGGAGE_HEADER_NAME)
-
- combined_baggage = sentry_baggage or existing_baggage
- if sentry_baggage and existing_baggage:
- combined_baggage = "{},{}".format(
- existing_baggage,
- sentry_baggage,
- )
-
- kwarg_headers.update(headers)
- if combined_baggage:
- kwarg_headers[BAGGAGE_HEADER_NAME] = combined_baggage
-
- # https://github.com/celery/celery/issues/4875
- #
- # Need to setdefault the inner headers too since other
- # tracing tools (dd-trace-py) also employ this exact
- # workaround and we don't want to break them.
- kwarg_headers.setdefault("headers", {}).update(headers)
- if combined_baggage:
- kwarg_headers["headers"][BAGGAGE_HEADER_NAME] = combined_baggage
-
- # Add the Sentry options potentially added in `sentry_apply_entry`
- # to the headers (done when auto-instrumenting Celery Beat tasks)
- for key, value in kwarg_headers.items():
- if key.startswith("sentry-"):
- kwarg_headers["headers"][key] = value
-
- kwargs["headers"] = kwarg_headers
-
- return f(*args, **kwargs)
-
- return apply_async # type: ignore
-
-
-def _wrap_tracer(task, f):
- # type: (Any, F) -> F
-
- # Need to wrap tracer for pushing the scope before prerun is sent, and
- # popping it after postrun is sent.
- #
- # This is the reason we don't use signals for hooking in the first place.
- # Also because in Celery 3, signal dispatch returns early if one handler
- # crashes.
- @wraps(f)
- def _inner(*args, **kwargs):
- # type: (*Any, **Any) -> Any
- hub = Hub.current
- if hub.get_integration(CeleryIntegration) is None:
- return f(*args, **kwargs)
-
- with hub.push_scope() as scope:
- scope._name = "celery"
- scope.clear_breadcrumbs()
- scope.add_event_processor(_make_event_processor(task, *args, **kwargs))
-
- transaction = None
-
- # Celery task objects are not a thing to be trusted. Even
- # something such as attribute access can fail.
- with capture_internal_exceptions():
- transaction = continue_trace(
- args[3].get("headers") or {},
- op=OP.QUEUE_TASK_CELERY,
- name="unknown celery task",
- source=TRANSACTION_SOURCE_TASK,
- )
- transaction.name = task.name
- transaction.set_status("ok")
-
- if transaction is None:
- return f(*args, **kwargs)
-
- with hub.start_transaction(
- transaction,
- custom_sampling_context={
- "celery_job": {
- "task": task.name,
- # for some reason, args[1] is a list if non-empty but a
- # tuple if empty
- "args": list(args[1]),
- "kwargs": args[2],
- }
- },
- ):
- return f(*args, **kwargs)
-
- return _inner # type: ignore
-
-
-def _wrap_task_call(task, f):
- # type: (Any, F) -> F
-
- # Need to wrap task call because the exception is caught before we get to
- # see it. Also celery's reported stacktrace is untrustworthy.
-
- # functools.wraps is important here because celery-once looks at this
- # method's name.
- # https://github.com/getsentry/sentry-python/issues/421
- @wraps(f)
- def _inner(*args, **kwargs):
- # type: (*Any, **Any) -> Any
- try:
- return f(*args, **kwargs)
- except Exception:
- exc_info = sys.exc_info()
- with capture_internal_exceptions():
- _capture_exception(task, exc_info)
- reraise(*exc_info)
-
- return _inner # type: ignore
-
-
-def _make_event_processor(task, uuid, args, kwargs, request=None):
- # type: (Any, Any, Any, Any, Optional[Any]) -> EventProcessor
- def event_processor(event, hint):
- # type: (Event, Hint) -> Optional[Event]
-
- with capture_internal_exceptions():
- tags = event.setdefault("tags", {})
- tags["celery_task_id"] = uuid
- extra = event.setdefault("extra", {})
- extra["celery-job"] = {
- "task_name": task.name,
- "args": args,
- "kwargs": kwargs,
- }
-
- if "exc_info" in hint:
- with capture_internal_exceptions():
- if issubclass(hint["exc_info"][0], SoftTimeLimitExceeded):
- event["fingerprint"] = [
- "celery",
- "SoftTimeLimitExceeded",
- getattr(task, "name", task),
- ]
-
- return event
-
- return event_processor
-
-
-def _capture_exception(task, exc_info):
- # type: (Any, ExcInfo) -> None
- hub = Hub.current
-
- if hub.get_integration(CeleryIntegration) is None:
- return
- if isinstance(exc_info[1], CELERY_CONTROL_FLOW_EXCEPTIONS):
- # ??? Doesn't map to anything
- _set_status(hub, "aborted")
- return
-
- _set_status(hub, "internal_error")
-
- if hasattr(task, "throws") and isinstance(exc_info[1], task.throws):
- return
-
- # If an integration is there, a client has to be there.
- client = hub.client # type: Any
-
- event, hint = event_from_exception(
- exc_info,
- client_options=client.options,
- mechanism={"type": "celery", "handled": False},
- )
-
- hub.capture_event(event, hint=hint)
-
-
-def _set_status(hub, status):
- # type: (Hub, str) -> None
- with capture_internal_exceptions():
- with hub.configure_scope() as scope:
- if scope.span is not None:
- scope.span.set_status(status)
-
-
-def _patch_worker_exit():
- # type: () -> None
-
- # Need to flush queue before worker shutdown because a crashing worker will
- # call os._exit
- from billiard.pool import Worker # type: ignore
-
- old_workloop = Worker.workloop
-
- def sentry_workloop(*args, **kwargs):
- # type: (*Any, **Any) -> Any
- try:
- return old_workloop(*args, **kwargs)
- finally:
- with capture_internal_exceptions():
- hub = Hub.current
- if hub.get_integration(CeleryIntegration) is not None:
- hub.flush()
-
- Worker.workloop = sentry_workloop
-
-
-def _get_headers(task):
- # type: (Task) -> Dict[str, Any]
- headers = task.request.get("headers") or {}
-
- # flatten nested headers
- if "headers" in headers:
- headers.update(headers["headers"])
- del headers["headers"]
-
- headers.update(task.request.get("properties") or {})
-
- return headers
-
-
-def _get_humanized_interval(seconds):
- # type: (float) -> Tuple[int, MonitorConfigScheduleUnit]
- TIME_UNITS = ( # noqa: N806
- ("day", 60 * 60 * 24.0),
- ("hour", 60 * 60.0),
- ("minute", 60.0),
- )
-
- seconds = float(seconds)
- for unit, divider in TIME_UNITS:
- if seconds >= divider:
- interval = int(seconds / divider)
- return (interval, cast("MonitorConfigScheduleUnit", unit))
-
- return (int(seconds), "second")
-
-
-def _get_monitor_config(celery_schedule, app, monitor_name):
- # type: (Any, Celery, str) -> MonitorConfig
- monitor_config = {} # type: MonitorConfig
- schedule_type = None # type: Optional[MonitorConfigScheduleType]
- schedule_value = None # type: Optional[Union[str, int]]
- schedule_unit = None # type: Optional[MonitorConfigScheduleUnit]
-
- if isinstance(celery_schedule, crontab):
- schedule_type = "crontab"
- schedule_value = (
- "{0._orig_minute} "
- "{0._orig_hour} "
- "{0._orig_day_of_month} "
- "{0._orig_month_of_year} "
- "{0._orig_day_of_week}".format(celery_schedule)
- )
- elif isinstance(celery_schedule, schedule):
- schedule_type = "interval"
- (schedule_value, schedule_unit) = _get_humanized_interval(
- celery_schedule.seconds
- )
-
- if schedule_unit == "second":
- logger.warning(
- "Intervals shorter than one minute are not supported by Sentry Crons. Monitor '%s' has an interval of %s seconds. Use the `exclude_beat_tasks` option in the celery integration to exclude it.",
- monitor_name,
- schedule_value,
- )
- return {}
-
- else:
- logger.warning(
- "Celery schedule type '%s' not supported by Sentry Crons.",
- type(celery_schedule),
- )
- return {}
-
- monitor_config["schedule"] = {}
- monitor_config["schedule"]["type"] = schedule_type
- monitor_config["schedule"]["value"] = schedule_value
-
- if schedule_unit is not None:
- monitor_config["schedule"]["unit"] = schedule_unit
-
- monitor_config["timezone"] = (
- (
- hasattr(celery_schedule, "tz")
- and celery_schedule.tz is not None
- and str(celery_schedule.tz)
- )
- or app.timezone
- or "UTC"
- )
-
- return monitor_config
-
-
-def _patch_beat_apply_entry():
- # type: () -> None
- original_apply_entry = Scheduler.apply_entry
-
- def sentry_apply_entry(*args, **kwargs):
- # type: (*Any, **Any) -> None
- scheduler, schedule_entry = args
- app = scheduler.app
-
- celery_schedule = schedule_entry.schedule
- monitor_name = schedule_entry.name
-
- hub = Hub.current
- integration = hub.get_integration(CeleryIntegration)
- if integration is None:
- return original_apply_entry(*args, **kwargs)
-
- if match_regex_list(monitor_name, integration.exclude_beat_tasks):
- return original_apply_entry(*args, **kwargs)
-
- with hub.configure_scope() as scope:
- # When tasks are started from Celery Beat, make sure each task has its own trace.
- scope.set_new_propagation_context()
-
- monitor_config = _get_monitor_config(celery_schedule, app, monitor_name)
-
- is_supported_schedule = bool(monitor_config)
- if is_supported_schedule:
- headers = schedule_entry.options.pop("headers", {})
- headers.update(
- {
- "sentry-monitor-slug": monitor_name,
- "sentry-monitor-config": monitor_config,
- }
- )
-
- check_in_id = capture_checkin(
- monitor_slug=monitor_name,
- monitor_config=monitor_config,
- status=MonitorStatus.IN_PROGRESS,
- )
- headers.update({"sentry-monitor-check-in-id": check_in_id})
-
- # Set the Sentry configuration in the options of the ScheduleEntry.
- # Those will be picked up in `apply_async` and added to the headers.
- schedule_entry.options["headers"] = headers
-
- return original_apply_entry(*args, **kwargs)
-
- Scheduler.apply_entry = sentry_apply_entry
-
-
-def _patch_redbeat_maybe_due():
- # type: () -> None
-
- if RedBeatScheduler is None:
- return
-
- original_maybe_due = RedBeatScheduler.maybe_due
-
- def sentry_maybe_due(*args, **kwargs):
- # type: (*Any, **Any) -> None
- scheduler, schedule_entry = args
- app = scheduler.app
-
- celery_schedule = schedule_entry.schedule
- monitor_name = schedule_entry.name
-
- hub = Hub.current
- integration = hub.get_integration(CeleryIntegration)
- if integration is None:
- return original_maybe_due(*args, **kwargs)
-
- if match_regex_list(monitor_name, integration.exclude_beat_tasks):
- return original_maybe_due(*args, **kwargs)
-
- with hub.configure_scope() as scope:
- # When tasks are started from Celery Beat, make sure each task has its own trace.
- scope.set_new_propagation_context()
-
- monitor_config = _get_monitor_config(celery_schedule, app, monitor_name)
-
- is_supported_schedule = bool(monitor_config)
- if is_supported_schedule:
- headers = schedule_entry.options.pop("headers", {})
- headers.update(
- {
- "sentry-monitor-slug": monitor_name,
- "sentry-monitor-config": monitor_config,
- }
- )
-
- check_in_id = capture_checkin(
- monitor_slug=monitor_name,
- monitor_config=monitor_config,
- status=MonitorStatus.IN_PROGRESS,
- )
- headers.update({"sentry-monitor-check-in-id": check_in_id})
-
- # Set the Sentry configuration in the options of the ScheduleEntry.
- # Those will be picked up in `apply_async` and added to the headers.
- schedule_entry.options["headers"] = headers
-
- return original_maybe_due(*args, **kwargs)
-
- RedBeatScheduler.maybe_due = sentry_maybe_due
-
-
-def _setup_celery_beat_signals():
- # type: () -> None
- task_success.connect(crons_task_success)
- task_failure.connect(crons_task_failure)
- task_retry.connect(crons_task_retry)
-
-
-def crons_task_success(sender, **kwargs):
- # type: (Task, Dict[Any, Any]) -> None
- logger.debug("celery_task_success %s", sender)
- headers = _get_headers(sender)
-
- if "sentry-monitor-slug" not in headers:
- return
-
- monitor_config = headers.get("sentry-monitor-config", {})
-
- start_timestamp_s = float(headers["sentry-monitor-start-timestamp-s"])
-
- capture_checkin(
- monitor_slug=headers["sentry-monitor-slug"],
- monitor_config=monitor_config,
- check_in_id=headers["sentry-monitor-check-in-id"],
- duration=_now_seconds_since_epoch() - start_timestamp_s,
- status=MonitorStatus.OK,
- )
-
-
-def crons_task_failure(sender, **kwargs):
- # type: (Task, Dict[Any, Any]) -> None
- logger.debug("celery_task_failure %s", sender)
- headers = _get_headers(sender)
-
- if "sentry-monitor-slug" not in headers:
- return
-
- monitor_config = headers.get("sentry-monitor-config", {})
-
- start_timestamp_s = float(headers["sentry-monitor-start-timestamp-s"])
-
- capture_checkin(
- monitor_slug=headers["sentry-monitor-slug"],
- monitor_config=monitor_config,
- check_in_id=headers["sentry-monitor-check-in-id"],
- duration=_now_seconds_since_epoch() - start_timestamp_s,
- status=MonitorStatus.ERROR,
- )
-
-
-def crons_task_retry(sender, **kwargs):
- # type: (Task, Dict[Any, Any]) -> None
- logger.debug("celery_task_retry %s", sender)
- headers = _get_headers(sender)
-
- if "sentry-monitor-slug" not in headers:
- return
-
- monitor_config = headers.get("sentry-monitor-config", {})
-
- start_timestamp_s = float(headers["sentry-monitor-start-timestamp-s"])
-
- capture_checkin(
- monitor_slug=headers["sentry-monitor-slug"],
- monitor_config=monitor_config,
- check_in_id=headers["sentry-monitor-check-in-id"],
- duration=_now_seconds_since_epoch() - start_timestamp_s,
- status=MonitorStatus.ERROR,
- )
diff --git a/sentry_sdk/integrations/celery/__init__.py b/sentry_sdk/integrations/celery/__init__.py
new file mode 100644
index 0000000000..74205a0184
--- /dev/null
+++ b/sentry_sdk/integrations/celery/__init__.py
@@ -0,0 +1,386 @@
+import sys
+from functools import wraps
+
+import sentry_sdk
+from sentry_sdk import isolation_scope
+from sentry_sdk.api import continue_trace
+from sentry_sdk.consts import OP
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations.celery.beat import (
+ _patch_beat_apply_entry,
+ _patch_redbeat_maybe_due,
+ _setup_celery_beat_signals,
+)
+from sentry_sdk.integrations.celery.utils import _now_seconds_since_epoch
+from sentry_sdk.integrations.logging import ignore_logger
+from sentry_sdk.tracing import BAGGAGE_HEADER_NAME, TRANSACTION_SOURCE_TASK
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.scope import Scope
+from sentry_sdk.tracing_utils import Baggage
+from sentry_sdk.utils import (
+ capture_internal_exceptions,
+ ensure_integration_enabled,
+ event_from_exception,
+ reraise,
+)
+
+if TYPE_CHECKING:
+ from typing import Any
+ from typing import Callable
+ from typing import List
+ from typing import Optional
+ from typing import TypeVar
+
+ from sentry_sdk._types import EventProcessor, Event, Hint, ExcInfo
+ from sentry_sdk.tracing import Span
+
+ F = TypeVar("F", bound=Callable[..., Any])
+
+
+try:
+ from celery import VERSION as CELERY_VERSION # type: ignore
+ from celery.app.trace import task_has_custom
+ from celery.exceptions import ( # type: ignore
+ Ignore,
+ Reject,
+ Retry,
+ SoftTimeLimitExceeded,
+ )
+except ImportError:
+ raise DidNotEnable("Celery not installed")
+
+
+CELERY_CONTROL_FLOW_EXCEPTIONS = (Retry, Ignore, Reject)
+
+
+class CeleryIntegration(Integration):
+ identifier = "celery"
+
+ def __init__(
+ self,
+ propagate_traces=True,
+ monitor_beat_tasks=False,
+ exclude_beat_tasks=None,
+ ):
+ # type: (bool, bool, Optional[List[str]]) -> None
+ self.propagate_traces = propagate_traces
+ self.monitor_beat_tasks = monitor_beat_tasks
+ self.exclude_beat_tasks = exclude_beat_tasks
+
+ if monitor_beat_tasks:
+ _patch_beat_apply_entry()
+ _patch_redbeat_maybe_due()
+ _setup_celery_beat_signals()
+
+ @staticmethod
+ def setup_once():
+ # type: () -> None
+ if CELERY_VERSION < (4, 4, 7):
+ raise DidNotEnable("Celery 4.4.7 or newer required.")
+
+ _patch_build_tracer()
+ _patch_task_apply_async()
+ _patch_worker_exit()
+
+ # This logger logs every status of every task that ran on the worker.
+ # Meaning that every task's breadcrumbs are full of stuff like "Task
+ # raised unexpected ".
+ ignore_logger("celery.worker.job")
+ ignore_logger("celery.app.trace")
+
+ # This is stdout/err redirected to a logger, can't deal with this
+ # (need event_level=logging.WARN to reproduce)
+ ignore_logger("celery.redirected")
+
+
+def _set_status(status):
+ # type: (str) -> None
+ with capture_internal_exceptions():
+ scope = Scope.get_current_scope()
+ if scope.span is not None:
+ scope.span.set_status(status)
+
+
+def _capture_exception(task, exc_info):
+ # type: (Any, ExcInfo) -> None
+ client = sentry_sdk.get_client()
+ if client.get_integration(CeleryIntegration) is None:
+ return
+
+ if isinstance(exc_info[1], CELERY_CONTROL_FLOW_EXCEPTIONS):
+ # ??? Doesn't map to anything
+ _set_status("aborted")
+ return
+
+ _set_status("internal_error")
+
+ if hasattr(task, "throws") and isinstance(exc_info[1], task.throws):
+ return
+
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=client.options,
+ mechanism={"type": "celery", "handled": False},
+ )
+
+ sentry_sdk.capture_event(event, hint=hint)
+
+
+def _make_event_processor(task, uuid, args, kwargs, request=None):
+ # type: (Any, Any, Any, Any, Optional[Any]) -> EventProcessor
+ def event_processor(event, hint):
+ # type: (Event, Hint) -> Optional[Event]
+
+ with capture_internal_exceptions():
+ tags = event.setdefault("tags", {})
+ tags["celery_task_id"] = uuid
+ extra = event.setdefault("extra", {})
+ extra["celery-job"] = {
+ "task_name": task.name,
+ "args": args,
+ "kwargs": kwargs,
+ }
+
+ if "exc_info" in hint:
+ with capture_internal_exceptions():
+ if issubclass(hint["exc_info"][0], SoftTimeLimitExceeded):
+ event["fingerprint"] = [
+ "celery",
+ "SoftTimeLimitExceeded",
+ getattr(task, "name", task),
+ ]
+
+ return event
+
+ return event_processor
+
+
+def _update_celery_task_headers(original_headers, span, monitor_beat_tasks):
+ # type: (dict[str, Any], Optional[Span], bool) -> dict[str, Any]
+ """
+ Updates the headers of the Celery task with the tracing information
+ and eventually Sentry Crons monitoring information for beat tasks.
+ """
+ updated_headers = original_headers.copy()
+ with capture_internal_exceptions():
+ headers = {}
+ if span is not None:
+ headers = dict(
+ Scope.get_current_scope().iter_trace_propagation_headers(span=span)
+ )
+
+ if monitor_beat_tasks:
+ headers.update(
+ {
+ "sentry-monitor-start-timestamp-s": "%.9f"
+ % _now_seconds_since_epoch(),
+ }
+ )
+
+ if headers:
+ existing_baggage = updated_headers.get(BAGGAGE_HEADER_NAME)
+ sentry_baggage = headers.get(BAGGAGE_HEADER_NAME)
+
+ combined_baggage = sentry_baggage or existing_baggage
+ if sentry_baggage and existing_baggage:
+ # Merge incoming and sentry baggage, where the sentry trace information
+ # in the incoming baggage takes precedence and the third-party items
+ # are concatenated.
+ incoming = Baggage.from_incoming_header(existing_baggage)
+ combined = Baggage.from_incoming_header(sentry_baggage)
+ combined.sentry_items.update(incoming.sentry_items)
+ combined.third_party_items = ",".join(
+ [
+ x
+ for x in [
+ combined.third_party_items,
+ incoming.third_party_items,
+ ]
+ if x is not None and x != ""
+ ]
+ )
+ combined_baggage = combined.serialize(include_third_party=True)
+
+ updated_headers.update(headers)
+ if combined_baggage:
+ updated_headers[BAGGAGE_HEADER_NAME] = combined_baggage
+
+ # https://github.com/celery/celery/issues/4875
+ #
+ # Need to setdefault the inner headers too since other
+ # tracing tools (dd-trace-py) also employ this exact
+ # workaround and we don't want to break them.
+ updated_headers.setdefault("headers", {}).update(headers)
+ if combined_baggage:
+ updated_headers["headers"][BAGGAGE_HEADER_NAME] = combined_baggage
+
+ # Add the Sentry options potentially added in `sentry_apply_entry`
+ # to the headers (done when auto-instrumenting Celery Beat tasks)
+ for key, value in updated_headers.items():
+ if key.startswith("sentry-"):
+ updated_headers["headers"][key] = value
+
+ return updated_headers
+
+
+def _wrap_apply_async(f):
+ # type: (F) -> F
+ @wraps(f)
+ @ensure_integration_enabled(CeleryIntegration, f)
+ def apply_async(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ # Note: kwargs can contain headers=None, so no setdefault!
+ # Unsure which backend though.
+ kwarg_headers = kwargs.get("headers") or {}
+ integration = sentry_sdk.get_client().get_integration(CeleryIntegration)
+ propagate_traces = kwarg_headers.pop(
+ "sentry-propagate-traces", integration.propagate_traces
+ )
+
+ if not propagate_traces:
+ return f(*args, **kwargs)
+
+ task = args[0]
+
+ with sentry_sdk.start_span(
+ op=OP.QUEUE_SUBMIT_CELERY, description=task.name
+ ) as span:
+ kwargs["headers"] = _update_celery_task_headers(
+ kwarg_headers, span, integration.monitor_beat_tasks
+ )
+ return f(*args, **kwargs)
+
+ return apply_async # type: ignore
+
+
+def _wrap_tracer(task, f):
+ # type: (Any, F) -> F
+
+ # Need to wrap tracer for pushing the scope before prerun is sent, and
+ # popping it after postrun is sent.
+ #
+ # This is the reason we don't use signals for hooking in the first place.
+ # Also because in Celery 3, signal dispatch returns early if one handler
+ # crashes.
+ @wraps(f)
+ @ensure_integration_enabled(CeleryIntegration, f)
+ def _inner(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ with isolation_scope() as scope:
+ scope._name = "celery"
+ scope.clear_breadcrumbs()
+ scope.add_event_processor(_make_event_processor(task, *args, **kwargs))
+
+ transaction = None
+
+ # Celery task objects are not a thing to be trusted. Even
+ # something such as attribute access can fail.
+ with capture_internal_exceptions():
+ headers = args[3].get("headers") or {}
+ transaction = continue_trace(
+ headers,
+ op=OP.QUEUE_TASK_CELERY,
+ name="unknown celery task",
+ source=TRANSACTION_SOURCE_TASK,
+ )
+ transaction.name = task.name
+ transaction.set_status("ok")
+
+ if transaction is None:
+ return f(*args, **kwargs)
+
+ with sentry_sdk.start_transaction(
+ transaction,
+ custom_sampling_context={
+ "celery_job": {
+ "task": task.name,
+ # for some reason, args[1] is a list if non-empty but a
+ # tuple if empty
+ "args": list(args[1]),
+ "kwargs": args[2],
+ }
+ },
+ ):
+ return f(*args, **kwargs)
+
+ return _inner # type: ignore
+
+
+def _wrap_task_call(task, f):
+ # type: (Any, F) -> F
+
+ # Need to wrap task call because the exception is caught before we get to
+ # see it. Also celery's reported stacktrace is untrustworthy.
+
+ # functools.wraps is important here because celery-once looks at this
+ # method's name.
+ # https://github.com/getsentry/sentry-python/issues/421
+ @wraps(f)
+ def _inner(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ try:
+ return f(*args, **kwargs)
+ except Exception:
+ exc_info = sys.exc_info()
+ with capture_internal_exceptions():
+ _capture_exception(task, exc_info)
+ reraise(*exc_info)
+
+ return _inner # type: ignore
+
+
+def _patch_build_tracer():
+ # type: () -> None
+ import celery.app.trace as trace # type: ignore
+
+ original_build_tracer = trace.build_tracer
+
+ def sentry_build_tracer(name, task, *args, **kwargs):
+ # type: (Any, Any, *Any, **Any) -> Any
+ if not getattr(task, "_sentry_is_patched", False):
+ # determine whether Celery will use __call__ or run and patch
+ # accordingly
+ if task_has_custom(task, "__call__"):
+ type(task).__call__ = _wrap_task_call(task, type(task).__call__)
+ else:
+ task.run = _wrap_task_call(task, task.run)
+
+ # `build_tracer` is apparently called for every task
+ # invocation. Can't wrap every celery task for every invocation
+ # or we will get infinitely nested wrapper functions.
+ task._sentry_is_patched = True
+
+ return _wrap_tracer(task, original_build_tracer(name, task, *args, **kwargs))
+
+ trace.build_tracer = sentry_build_tracer
+
+
+def _patch_task_apply_async():
+ # type: () -> None
+ from celery.app.task import Task # type: ignore
+
+ Task.apply_async = _wrap_apply_async(Task.apply_async)
+
+
+def _patch_worker_exit():
+ # type: () -> None
+
+ # Need to flush queue before worker shutdown because a crashing worker will
+ # call os._exit
+ from billiard.pool import Worker # type: ignore
+
+ original_workloop = Worker.workloop
+
+ def sentry_workloop(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ try:
+ return original_workloop(*args, **kwargs)
+ finally:
+ with capture_internal_exceptions():
+ if (
+ sentry_sdk.get_client().get_integration(CeleryIntegration)
+ is not None
+ ):
+ sentry_sdk.flush()
+
+ Worker.workloop = sentry_workloop
diff --git a/sentry_sdk/integrations/celery/beat.py b/sentry_sdk/integrations/celery/beat.py
new file mode 100644
index 0000000000..060045eb37
--- /dev/null
+++ b/sentry_sdk/integrations/celery/beat.py
@@ -0,0 +1,305 @@
+import sentry_sdk
+from sentry_sdk.crons import capture_checkin, MonitorStatus
+from sentry_sdk.integrations import DidNotEnable
+from sentry_sdk.integrations.celery.utils import (
+ _get_humanized_interval,
+ _now_seconds_since_epoch,
+)
+from sentry_sdk._types import TYPE_CHECKING
+from sentry_sdk.scope import Scope
+from sentry_sdk.utils import (
+ logger,
+ match_regex_list,
+)
+
+if TYPE_CHECKING:
+ from collections.abc import Callable
+ from typing import Any, Optional, TypeVar, Union
+ from sentry_sdk._types import (
+ MonitorConfig,
+ MonitorConfigScheduleType,
+ MonitorConfigScheduleUnit,
+ )
+
+ F = TypeVar("F", bound=Callable[..., Any])
+
+
+try:
+ from celery import Task, Celery # type: ignore
+ from celery.beat import Scheduler # type: ignore
+ from celery.schedules import crontab, schedule # type: ignore
+ from celery.signals import ( # type: ignore
+ task_failure,
+ task_success,
+ task_retry,
+ )
+except ImportError:
+ raise DidNotEnable("Celery not installed")
+
+try:
+ from redbeat.schedulers import RedBeatScheduler # type: ignore
+except ImportError:
+ RedBeatScheduler = None
+
+
+def _get_headers(task):
+ # type: (Task) -> dict[str, Any]
+ headers = task.request.get("headers") or {}
+
+ # flatten nested headers
+ if "headers" in headers:
+ headers.update(headers["headers"])
+ del headers["headers"]
+
+ headers.update(task.request.get("properties") or {})
+
+ return headers
+
+
+def _get_monitor_config(celery_schedule, app, monitor_name):
+ # type: (Any, Celery, str) -> MonitorConfig
+ monitor_config = {} # type: MonitorConfig
+ schedule_type = None # type: Optional[MonitorConfigScheduleType]
+ schedule_value = None # type: Optional[Union[str, int]]
+ schedule_unit = None # type: Optional[MonitorConfigScheduleUnit]
+
+ if isinstance(celery_schedule, crontab):
+ schedule_type = "crontab"
+ schedule_value = (
+ "{0._orig_minute} "
+ "{0._orig_hour} "
+ "{0._orig_day_of_month} "
+ "{0._orig_month_of_year} "
+ "{0._orig_day_of_week}".format(celery_schedule)
+ )
+ elif isinstance(celery_schedule, schedule):
+ schedule_type = "interval"
+ (schedule_value, schedule_unit) = _get_humanized_interval(
+ celery_schedule.seconds
+ )
+
+ if schedule_unit == "second":
+ logger.warning(
+ "Intervals shorter than one minute are not supported by Sentry Crons. Monitor '%s' has an interval of %s seconds. Use the `exclude_beat_tasks` option in the celery integration to exclude it.",
+ monitor_name,
+ schedule_value,
+ )
+ return {}
+
+ else:
+ logger.warning(
+ "Celery schedule type '%s' not supported by Sentry Crons.",
+ type(celery_schedule),
+ )
+ return {}
+
+ monitor_config["schedule"] = {}
+ monitor_config["schedule"]["type"] = schedule_type
+ monitor_config["schedule"]["value"] = schedule_value
+
+ if schedule_unit is not None:
+ monitor_config["schedule"]["unit"] = schedule_unit
+
+ monitor_config["timezone"] = (
+ (
+ hasattr(celery_schedule, "tz")
+ and celery_schedule.tz is not None
+ and str(celery_schedule.tz)
+ )
+ or app.timezone
+ or "UTC"
+ )
+
+ return monitor_config
+
+
+def _patch_beat_apply_entry():
+ # type: () -> None
+ """
+ Makes sure that the Sentry Crons information is set in the Celery Beat task's
+    headers so that it is monitored with Sentry Crons.
+
+ This is only called by Celery Beat. After apply_entry is called
+ Celery will call apply_async to put the task in the queue.
+ """
+ from sentry_sdk.integrations.celery import CeleryIntegration
+
+ original_apply_entry = Scheduler.apply_entry
+
+ def sentry_apply_entry(*args, **kwargs):
+ # type: (*Any, **Any) -> None
+ scheduler, schedule_entry = args
+ app = scheduler.app
+
+ celery_schedule = schedule_entry.schedule
+ monitor_name = schedule_entry.name
+
+ integration = sentry_sdk.get_client().get_integration(CeleryIntegration)
+ if integration is None:
+ return original_apply_entry(*args, **kwargs)
+
+ if match_regex_list(monitor_name, integration.exclude_beat_tasks):
+ return original_apply_entry(*args, **kwargs)
+
+ # Tasks started by Celery Beat start a new Trace
+ scope = Scope.get_isolation_scope()
+ scope.set_new_propagation_context()
+ scope._name = "celery-beat"
+
+ monitor_config = _get_monitor_config(celery_schedule, app, monitor_name)
+
+ is_supported_schedule = bool(monitor_config)
+ if is_supported_schedule:
+ headers = schedule_entry.options.pop("headers", {})
+ headers.update(
+ {
+ "sentry-monitor-slug": monitor_name,
+ "sentry-monitor-config": monitor_config,
+ }
+ )
+
+ check_in_id = capture_checkin(
+ monitor_slug=monitor_name,
+ monitor_config=monitor_config,
+ status=MonitorStatus.IN_PROGRESS,
+ )
+ headers.update({"sentry-monitor-check-in-id": check_in_id})
+
+ # Set the Sentry configuration in the options of the ScheduleEntry.
+ # Those will be picked up in `apply_async` and added to the headers.
+ schedule_entry.options["headers"] = headers
+
+ return original_apply_entry(*args, **kwargs)
+
+ Scheduler.apply_entry = sentry_apply_entry
+
+
+def _patch_redbeat_maybe_due():
+ # type: () -> None
+
+ if RedBeatScheduler is None:
+ return
+
+ from sentry_sdk.integrations.celery import CeleryIntegration
+
+ original_maybe_due = RedBeatScheduler.maybe_due
+
+ def sentry_maybe_due(*args, **kwargs):
+ # type: (*Any, **Any) -> None
+ scheduler, schedule_entry = args
+ app = scheduler.app
+
+ celery_schedule = schedule_entry.schedule
+ monitor_name = schedule_entry.name
+
+ integration = sentry_sdk.get_client().get_integration(CeleryIntegration)
+ if integration is None:
+ return original_maybe_due(*args, **kwargs)
+
+ task_should_be_excluded = match_regex_list(
+ monitor_name, integration.exclude_beat_tasks
+ )
+ if task_should_be_excluded:
+ return original_maybe_due(*args, **kwargs)
+
+ # Tasks started by Celery Beat start a new Trace
+ scope = Scope.get_isolation_scope()
+ scope.set_new_propagation_context()
+ scope._name = "celery-beat"
+
+ monitor_config = _get_monitor_config(celery_schedule, app, monitor_name)
+
+ is_supported_schedule = bool(monitor_config)
+ if is_supported_schedule:
+ headers = schedule_entry.options.pop("headers", {})
+ headers.update(
+ {
+ "sentry-monitor-slug": monitor_name,
+ "sentry-monitor-config": monitor_config,
+ }
+ )
+
+ check_in_id = capture_checkin(
+ monitor_slug=monitor_name,
+ monitor_config=monitor_config,
+ status=MonitorStatus.IN_PROGRESS,
+ )
+ headers.update({"sentry-monitor-check-in-id": check_in_id})
+
+ # Set the Sentry configuration in the options of the ScheduleEntry.
+ # Those will be picked up in `apply_async` and added to the headers.
+ schedule_entry.options["headers"] = headers
+
+ return original_maybe_due(*args, **kwargs)
+
+ RedBeatScheduler.maybe_due = sentry_maybe_due
+
+
+def _setup_celery_beat_signals():
+ # type: () -> None
+ task_success.connect(crons_task_success)
+ task_failure.connect(crons_task_failure)
+ task_retry.connect(crons_task_retry)
+
+
+def crons_task_success(sender, **kwargs):
+ # type: (Task, dict[Any, Any]) -> None
+ logger.debug("celery_task_success %s", sender)
+ headers = _get_headers(sender)
+
+ if "sentry-monitor-slug" not in headers:
+ return
+
+ monitor_config = headers.get("sentry-monitor-config", {})
+
+ start_timestamp_s = float(headers["sentry-monitor-start-timestamp-s"])
+
+ capture_checkin(
+ monitor_slug=headers["sentry-monitor-slug"],
+ monitor_config=monitor_config,
+ check_in_id=headers["sentry-monitor-check-in-id"],
+ duration=_now_seconds_since_epoch() - start_timestamp_s,
+ status=MonitorStatus.OK,
+ )
+
+
+def crons_task_failure(sender, **kwargs):
+ # type: (Task, dict[Any, Any]) -> None
+ logger.debug("celery_task_failure %s", sender)
+ headers = _get_headers(sender)
+
+ if "sentry-monitor-slug" not in headers:
+ return
+
+ monitor_config = headers.get("sentry-monitor-config", {})
+
+ start_timestamp_s = float(headers["sentry-monitor-start-timestamp-s"])
+
+ capture_checkin(
+ monitor_slug=headers["sentry-monitor-slug"],
+ monitor_config=monitor_config,
+ check_in_id=headers["sentry-monitor-check-in-id"],
+ duration=_now_seconds_since_epoch() - start_timestamp_s,
+ status=MonitorStatus.ERROR,
+ )
+
+
+def crons_task_retry(sender, **kwargs):
+ # type: (Task, dict[Any, Any]) -> None
+ logger.debug("celery_task_retry %s", sender)
+ headers = _get_headers(sender)
+
+ if "sentry-monitor-slug" not in headers:
+ return
+
+ monitor_config = headers.get("sentry-monitor-config", {})
+
+ start_timestamp_s = float(headers["sentry-monitor-start-timestamp-s"])
+
+ capture_checkin(
+ monitor_slug=headers["sentry-monitor-slug"],
+ monitor_config=monitor_config,
+ check_in_id=headers["sentry-monitor-check-in-id"],
+ duration=_now_seconds_since_epoch() - start_timestamp_s,
+ status=MonitorStatus.ERROR,
+ )
diff --git a/sentry_sdk/integrations/celery/utils.py b/sentry_sdk/integrations/celery/utils.py
new file mode 100644
index 0000000000..952911a9f6
--- /dev/null
+++ b/sentry_sdk/integrations/celery/utils.py
@@ -0,0 +1,45 @@
+import time
+from typing import cast
+
+from sentry_sdk._types import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from typing import Any, Tuple
+ from sentry_sdk._types import MonitorConfigScheduleUnit
+
+
+def _now_seconds_since_epoch():
+ # type: () -> float
+ # We cannot use `time.perf_counter()` when dealing with the duration
+ # of a Celery task, because the start of a Celery task and
+ # the end are recorded in different processes.
+ # Start happens in the Celery Beat process,
+ # the end in a Celery Worker process.
+ return time.time()
+
+
+def _get_humanized_interval(seconds):
+ # type: (float) -> Tuple[int, MonitorConfigScheduleUnit]
+ TIME_UNITS = ( # noqa: N806
+ ("day", 60 * 60 * 24.0),
+ ("hour", 60 * 60.0),
+ ("minute", 60.0),
+ )
+
+ seconds = float(seconds)
+ for unit, divider in TIME_UNITS:
+ if seconds >= divider:
+ interval = int(seconds / divider)
+ return (interval, cast("MonitorConfigScheduleUnit", unit))
+
+ return (int(seconds), "second")
+
+
+class NoOpMgr:
+ def __enter__(self):
+ # type: () -> None
+ return None
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ # type: (Any, Any, Any) -> None
+ return None
diff --git a/sentry_sdk/integrations/chalice.py b/sentry_sdk/integrations/chalice.py
index 25d8b4ac52..379e46883f 100644
--- a/sentry_sdk/integrations/chalice.py
+++ b/sentry_sdk/integrations/chalice.py
@@ -1,7 +1,7 @@
import sys
+from functools import wraps
-from sentry_sdk._compat import reraise
-from sentry_sdk.hub import Hub
+import sentry_sdk
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.integrations.aws_lambda import _make_request_event_processor
from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT
@@ -9,13 +9,17 @@
capture_internal_exceptions,
event_from_exception,
parse_version,
+ reraise,
)
from sentry_sdk._types import TYPE_CHECKING
-from sentry_sdk._functools import wraps
-import chalice # type: ignore
-from chalice import Chalice, ChaliceViewError
-from chalice.app import EventSourceHandler as ChaliceEventSourceHandler # type: ignore
+try:
+ import chalice # type: ignore
+ from chalice import __version__ as CHALICE_VERSION
+ from chalice import Chalice, ChaliceViewError
+ from chalice.app import EventSourceHandler as ChaliceEventSourceHandler # type: ignore
+except ImportError:
+ raise DidNotEnable("Chalice is not installed")
if TYPE_CHECKING:
from typing import Any
@@ -25,19 +29,13 @@
F = TypeVar("F", bound=Callable[..., Any])
-try:
- from chalice import __version__ as CHALICE_VERSION
-except ImportError:
- raise DidNotEnable("Chalice is not installed")
-
class EventSourceHandler(ChaliceEventSourceHandler): # type: ignore
def __call__(self, event, context):
# type: (Any, Any) -> Any
- hub = Hub.current
- client = hub.client # type: Any
+ client = sentry_sdk.get_client()
- with hub.push_scope() as scope:
+ with sentry_sdk.isolation_scope() as scope:
with capture_internal_exceptions():
configured_time = context.get_remaining_time_in_millis()
scope.add_event_processor(
@@ -52,8 +50,8 @@ def __call__(self, event, context):
client_options=client.options,
mechanism={"type": "chalice", "handled": False},
)
- hub.capture_event(event, hint=hint)
- hub.flush()
+ sentry_sdk.capture_event(event, hint=hint)
+ client.flush()
reraise(*exc_info)
@@ -62,9 +60,8 @@ def _get_view_function_response(app, view_function, function_args):
@wraps(view_function)
def wrapped_view_function(**function_args):
# type: (**Any) -> Any
- hub = Hub.current
- client = hub.client # type: Any
- with hub.push_scope() as scope:
+ client = sentry_sdk.get_client()
+ with sentry_sdk.isolation_scope() as scope:
with capture_internal_exceptions():
configured_time = app.lambda_context.get_remaining_time_in_millis()
scope.set_transaction_name(
@@ -90,8 +87,8 @@ def wrapped_view_function(**function_args):
client_options=client.options,
mechanism={"type": "chalice", "handled": False},
)
- hub.capture_event(event, hint=hint)
- hub.flush()
+ sentry_sdk.capture_event(event, hint=hint)
+ client.flush()
raise
return wrapped_view_function # type: ignore
diff --git a/sentry_sdk/integrations/clickhouse_driver.py b/sentry_sdk/integrations/clickhouse_driver.py
index a09e567118..31eb971e33 100644
--- a/sentry_sdk/integrations/clickhouse_driver.py
+++ b/sentry_sdk/integrations/clickhouse_driver.py
@@ -1,10 +1,10 @@
-from sentry_sdk import Hub
+import sentry_sdk
from sentry_sdk.consts import OP, SPANDATA
-from sentry_sdk.hub import _should_send_default_pii
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.tracing import Span
from sentry_sdk._types import TYPE_CHECKING
-from sentry_sdk.utils import capture_internal_exceptions
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.utils import capture_internal_exceptions, ensure_integration_enabled
from typing import TypeVar
@@ -74,16 +74,14 @@ def setup_once() -> None:
def _wrap_start(f: Callable[P, T]) -> Callable[P, T]:
+ @ensure_integration_enabled(ClickhouseDriverIntegration, f)
def _inner(*args: P.args, **kwargs: P.kwargs) -> T:
- hub = Hub.current
- if hub.get_integration(ClickhouseDriverIntegration) is None:
- return f(*args, **kwargs)
connection = args[0]
query = args[1]
query_id = args[2] if len(args) > 2 else kwargs.get("query_id")
params = args[3] if len(args) > 3 else kwargs.get("params")
- span = hub.start_span(op=OP.DB, description=query)
+ span = sentry_sdk.start_span(op=OP.DB, description=query)
connection._sentry_span = span # type: ignore[attr-defined]
@@ -94,7 +92,7 @@ def _inner(*args: P.args, **kwargs: P.kwargs) -> T:
if query_id:
span.set_data("db.query_id", query_id)
- if params and _should_send_default_pii():
+ if params and should_send_default_pii():
span.set_data("db.params", params)
# run the original code
@@ -112,11 +110,11 @@ def _inner_end(*args: P.args, **kwargs: P.kwargs) -> T:
span = instance.connection._sentry_span # type: ignore[attr-defined]
if span is not None:
- if res is not None and _should_send_default_pii():
+ if res is not None and should_send_default_pii():
span.set_data("db.result", res)
with capture_internal_exceptions():
- span.hub.add_breadcrumb(
+ span.scope.add_breadcrumb(
message=span._data.pop("query"), category="query", data=span._data
)
@@ -135,7 +133,7 @@ def _inner_send_data(*args: P.args, **kwargs: P.kwargs) -> T:
_set_db_data(span, instance.connection)
- if _should_send_default_pii():
+ if should_send_default_pii():
db_params = span._data.get("db.params", [])
db_params.extend(data)
span.set_data("db.params", db_params)
diff --git a/sentry_sdk/integrations/dedupe.py b/sentry_sdk/integrations/dedupe.py
index 04208f608a..02469b6911 100644
--- a/sentry_sdk/integrations/dedupe.py
+++ b/sentry_sdk/integrations/dedupe.py
@@ -1,4 +1,4 @@
-from sentry_sdk.hub import Hub
+import sentry_sdk
from sentry_sdk.utils import ContextVar
from sentry_sdk.integrations import Integration
from sentry_sdk.scope import add_global_event_processor
@@ -27,8 +27,7 @@ def processor(event, hint):
if hint is None:
return event
- integration = Hub.current.get_integration(DedupeIntegration)
-
+ integration = sentry_sdk.get_client().get_integration(DedupeIntegration)
if integration is None:
return event
diff --git a/sentry_sdk/integrations/django/__init__.py b/sentry_sdk/integrations/django/__init__.py
index a38674f09d..bf2648b6bd 100644
--- a/sentry_sdk/integrations/django/__init__.py
+++ b/sentry_sdk/integrations/django/__init__.py
@@ -1,18 +1,14 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import
-
import inspect
import sys
import threading
import weakref
from importlib import import_module
-from sentry_sdk._compat import string_types, text_type
+import sentry_sdk
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.consts import OP, SPANDATA
from sentry_sdk.db.explain_plan.django import attach_explain_plan_to_span
-from sentry_sdk.hub import Hub, _should_send_default_pii
-from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.scope import Scope, add_global_event_processor, should_send_default_pii
from sentry_sdk.serializer import add_global_repr_processor
from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_URL
from sentry_sdk.tracing_utils import add_query_source, record_sql_queries
@@ -23,6 +19,7 @@
SENSITIVE_DATA_SUBSTITUTE,
logger,
capture_internal_exceptions,
+ ensure_integration_enabled,
event_from_exception,
transaction_from_function,
walk_exception_chain,
@@ -86,7 +83,6 @@
from django.utils.datastructures import MultiValueDict
from sentry_sdk.tracing import Span
- from sentry_sdk.scope import Scope
from sentry_sdk.integrations.wsgi import _ScopedResponse
from sentry_sdk._types import Event, Hint, EventProcessor, NotImplementedType
@@ -154,11 +150,9 @@ def setup_once():
old_app = WSGIHandler.__call__
+ @ensure_integration_enabled(DjangoIntegration, old_app)
def sentry_patched_wsgi_handler(self, environ, start_response):
# type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
- if Hub.current.get_integration(DjangoIntegration) is None:
- return old_app(self, environ, start_response)
-
bound_old_app = old_app.__get__(self, WSGIHandler)
from django.conf import settings
@@ -237,11 +231,6 @@ def _django_queryset_repr(value, hint):
if not isinstance(value, QuerySet) or value._result_cache:
return NotImplemented
- # Do not call Hub.get_integration here. It is intentional that
- # running under a new hub does not suddenly start executing
- # querysets. This might be surprising to the user but it's likely
- # less annoying.
-
return "<%s from %s at 0x%x>" % (
value.__class__.__name__,
value.__module__,
@@ -396,7 +385,7 @@ def _set_transaction_name_and_source(scope, transaction_style, request):
# So we don't check here what style is configured
if hasattr(urlconf, "handler404"):
handler = urlconf.handler404
- if isinstance(handler, string_types):
+ if isinstance(handler, str):
scope.transaction = handler
else:
scope.transaction = transaction_from_function(
@@ -406,22 +395,20 @@ def _set_transaction_name_and_source(scope, transaction_style, request):
pass
+@ensure_integration_enabled(DjangoIntegration)
def _before_get_response(request):
# type: (WSGIRequest) -> None
- hub = Hub.current
- integration = hub.get_integration(DjangoIntegration)
- if integration is None:
- return
+ integration = sentry_sdk.get_client().get_integration(DjangoIntegration)
_patch_drf()
- with hub.configure_scope() as scope:
- # Rely on WSGI middleware to start a trace
- _set_transaction_name_and_source(scope, integration.transaction_style, request)
+ scope = Scope.get_current_scope()
+ # Rely on WSGI middleware to start a trace
+ _set_transaction_name_and_source(scope, integration.transaction_style, request)
- scope.add_event_processor(
- _make_wsgi_request_event_processor(weakref.ref(request), integration)
- )
+ scope.add_event_processor(
+ _make_wsgi_request_event_processor(weakref.ref(request), integration)
+ )
def _attempt_resolve_again(request, scope, transaction_style):
@@ -437,15 +424,15 @@ def _attempt_resolve_again(request, scope, transaction_style):
_set_transaction_name_and_source(scope, transaction_style, request)
+@ensure_integration_enabled(DjangoIntegration)
def _after_get_response(request):
# type: (WSGIRequest) -> None
- hub = Hub.current
- integration = hub.get_integration(DjangoIntegration)
- if integration is None or integration.transaction_style != "url":
+ integration = sentry_sdk.get_client().get_integration(DjangoIntegration)
+ if integration.transaction_style != "url":
return
- with hub.configure_scope() as scope:
- _attempt_resolve_again(request, scope, integration.transaction_style)
+ scope = Scope.get_current_scope()
+ _attempt_resolve_again(request, scope, integration.transaction_style)
def _patch_get_response():
@@ -498,7 +485,7 @@ def wsgi_request_event_processor(event, hint):
with capture_internal_exceptions():
DjangoRequestExtractor(request).extract_into_event(event)
- if _should_send_default_pii():
+ if should_send_default_pii():
with capture_internal_exceptions():
_set_user_info(request, event)
@@ -507,24 +494,22 @@ def wsgi_request_event_processor(event, hint):
return wsgi_request_event_processor
+@ensure_integration_enabled(DjangoIntegration)
def _got_request_exception(request=None, **kwargs):
# type: (WSGIRequest, **Any) -> None
- hub = Hub.current
- integration = hub.get_integration(DjangoIntegration)
- if integration is not None:
- if request is not None and integration.transaction_style == "url":
- with hub.configure_scope() as scope:
- _attempt_resolve_again(request, scope, integration.transaction_style)
-
- # If an integration is there, a client has to be there.
- client = hub.client # type: Any
-
- event, hint = event_from_exception(
- sys.exc_info(),
- client_options=client.options,
- mechanism={"type": "django", "handled": False},
- )
- hub.capture_event(event, hint=hint)
+ client = sentry_sdk.get_client()
+ integration = client.get_integration(DjangoIntegration)
+
+ if request is not None and integration.transaction_style == "url":
+ scope = Scope.get_current_scope()
+ _attempt_resolve_again(request, scope, integration.transaction_style)
+
+ event, hint = event_from_exception(
+ sys.exc_info(),
+ client_options=client.options,
+ mechanism={"type": "django", "handled": False},
+ )
+ sentry_sdk.capture_event(event, hint=hint)
class DjangoRequestExtractor(RequestExtractor):
@@ -620,62 +605,56 @@ def install_sql_hook():
# This won't work on Django versions < 1.6
return
+ @ensure_integration_enabled(DjangoIntegration, real_execute)
def execute(self, sql, params=None):
# type: (CursorWrapper, Any, Optional[Any]) -> Any
- hub = Hub.current
- if hub.get_integration(DjangoIntegration) is None:
- return real_execute(self, sql, params)
-
with record_sql_queries(
- hub, self.cursor, sql, params, paramstyle="format", executemany=False
+ self.cursor, sql, params, paramstyle="format", executemany=False
) as span:
_set_db_data(span, self)
- if hub.client:
- options = hub.client.options["_experiments"].get("attach_explain_plans")
- if options is not None:
- attach_explain_plan_to_span(
- span,
- self.cursor.connection,
- sql,
- params,
- self.mogrify,
- options,
- )
+ options = (
+ sentry_sdk.get_client()
+ .options["_experiments"]
+ .get("attach_explain_plans")
+ )
+ if options is not None:
+ attach_explain_plan_to_span(
+ span,
+ self.cursor.connection,
+ sql,
+ params,
+ self.mogrify,
+ options,
+ )
result = real_execute(self, sql, params)
with capture_internal_exceptions():
- add_query_source(hub, span)
+ add_query_source(span)
return result
+ @ensure_integration_enabled(DjangoIntegration, real_executemany)
def executemany(self, sql, param_list):
# type: (CursorWrapper, Any, List[Any]) -> Any
- hub = Hub.current
- if hub.get_integration(DjangoIntegration) is None:
- return real_executemany(self, sql, param_list)
-
with record_sql_queries(
- hub, self.cursor, sql, param_list, paramstyle="format", executemany=True
+ self.cursor, sql, param_list, paramstyle="format", executemany=True
) as span:
_set_db_data(span, self)
result = real_executemany(self, sql, param_list)
with capture_internal_exceptions():
- add_query_source(hub, span)
+ add_query_source(span)
return result
+ @ensure_integration_enabled(DjangoIntegration, real_connect)
def connect(self):
# type: (BaseDatabaseWrapper) -> None
- hub = Hub.current
- if hub.get_integration(DjangoIntegration) is None:
- return real_connect(self)
-
with capture_internal_exceptions():
- hub.add_breadcrumb(message="connect", category="query")
+ sentry_sdk.add_breadcrumb(message="connect", category="query")
- with hub.start_span(op=OP.DB, description="connect") as span:
+ with sentry_sdk.start_span(op=OP.DB, description="connect") as span:
_set_db_data(span, self)
return real_connect(self)
@@ -687,7 +666,6 @@ def connect(self):
def _set_db_data(span, cursor_or_db):
# type: (Span, Any) -> None
-
db = cursor_or_db.db if hasattr(cursor_or_db, "db") else cursor_or_db
vendor = db.vendor
span.set_data(SPANDATA.DB_SYSTEM, vendor)
@@ -726,7 +704,7 @@ def _set_db_data(span, cursor_or_db):
server_port = connection_params.get("port")
if server_port is not None:
- span.set_data(SPANDATA.SERVER_PORT, text_type(server_port))
+ span.set_data(SPANDATA.SERVER_PORT, str(server_port))
server_socket_address = connection_params.get("unix_socket")
if server_socket_address is not None:
diff --git a/sentry_sdk/integrations/django/asgi.py b/sentry_sdk/integrations/django/asgi.py
index e1ba678011..b52ca6dd33 100644
--- a/sentry_sdk/integrations/django/asgi.py
+++ b/sentry_sdk/integrations/django/asgi.py
@@ -7,16 +7,21 @@
"""
import asyncio
+import functools
from django.core.handlers.wsgi import WSGIRequest
-from sentry_sdk import Hub, _functools
+import sentry_sdk
+from sentry_sdk import Scope
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.consts import OP
-from sentry_sdk.hub import _should_send_default_pii
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
-from sentry_sdk.utils import capture_internal_exceptions
+from sentry_sdk.scope import should_send_default_pii
+from sentry_sdk.utils import (
+ capture_internal_exceptions,
+ ensure_integration_enabled,
+)
if TYPE_CHECKING:
@@ -50,7 +55,7 @@ def asgi_request_event_processor(event, hint):
with capture_internal_exceptions():
DjangoRequestExtractor(request).extract_into_event(event)
- if _should_send_default_pii():
+ if should_send_default_pii():
with capture_internal_exceptions():
_set_user_info(request, event)
@@ -68,9 +73,7 @@ def patch_django_asgi_handler_impl(cls):
async def sentry_patched_asgi_handler(self, scope, receive, send):
# type: (Any, Any, Any, Any) -> Any
- hub = Hub.current
- integration = hub.get_integration(DjangoIntegration)
- if integration is None:
+ if sentry_sdk.get_client().get_integration(DjangoIntegration) is None:
return await old_app(self, scope, receive, send)
middleware = SentryAsgiMiddleware(
@@ -85,18 +88,14 @@ async def sentry_patched_asgi_handler(self, scope, receive, send):
if modern_django_asgi_support:
old_create_request = cls.create_request
+ @ensure_integration_enabled(DjangoIntegration, old_create_request)
def sentry_patched_create_request(self, *args, **kwargs):
# type: (Any, *Any, **Any) -> Any
- hub = Hub.current
- integration = hub.get_integration(DjangoIntegration)
- if integration is None:
- return old_create_request(self, *args, **kwargs)
+ request, error_response = old_create_request(self, *args, **kwargs)
+ scope = Scope.get_isolation_scope()
+ scope.add_event_processor(_make_asgi_request_event_processor(request))
- with hub.configure_scope() as scope:
- request, error_response = old_create_request(self, *args, **kwargs)
- scope.add_event_processor(_make_asgi_request_event_processor(request))
-
- return request, error_response
+ return request, error_response
cls.create_request = sentry_patched_create_request
@@ -124,7 +123,7 @@ def patch_channels_asgi_handler_impl(cls):
async def sentry_patched_asgi_handler(self, receive, send):
# type: (Any, Any, Any) -> Any
- if Hub.current.get_integration(DjangoIntegration) is None:
+ if sentry_sdk.get_client().get_integration(DjangoIntegration) is None:
return await old_app(self, receive, send)
middleware = SentryAsgiMiddleware(
@@ -141,20 +140,19 @@ async def sentry_patched_asgi_handler(self, receive, send):
patch_django_asgi_handler_impl(cls)
-def wrap_async_view(hub, callback):
- # type: (Hub, Any) -> Any
- @_functools.wraps(callback)
+def wrap_async_view(callback):
+ # type: (Any) -> Any
+ @functools.wraps(callback)
async def sentry_wrapped_callback(request, *args, **kwargs):
# type: (Any, *Any, **Any) -> Any
-
- with hub.configure_scope() as sentry_scope:
- if sentry_scope.profile is not None:
- sentry_scope.profile.update_active_thread_id()
-
- with hub.start_span(
- op=OP.VIEW_RENDER, description=request.resolver_match.view_name
- ):
- return await callback(request, *args, **kwargs)
+ sentry_scope = Scope.get_isolation_scope()
+ if sentry_scope.profile is not None:
+ sentry_scope.profile.update_active_thread_id()
+
+ with sentry_sdk.start_span(
+ op=OP.VIEW_RENDER, description=request.resolver_match.view_name
+ ):
+ return await callback(request, *args, **kwargs)
return sentry_wrapped_callback
diff --git a/sentry_sdk/integrations/django/caching.py b/sentry_sdk/integrations/django/caching.py
index 921f8e485d..1b2bb477b1 100644
--- a/sentry_sdk/integrations/django/caching.py
+++ b/sentry_sdk/integrations/django/caching.py
@@ -4,9 +4,9 @@
from django import VERSION as DJANGO_VERSION
from django.core.cache import CacheHandler
-from sentry_sdk import Hub
+import sentry_sdk
from sentry_sdk.consts import OP, SPANDATA
-from sentry_sdk._compat import text_type
+from sentry_sdk.utils import ensure_integration_enabled
if TYPE_CHECKING:
@@ -25,9 +25,9 @@ def _get_span_description(method_name, args, kwargs):
description = "{} ".format(method_name)
if args is not None and len(args) >= 1:
- description += text_type(args[0])
+ description += str(args[0])
elif kwargs is not None and "key" in kwargs:
- description += text_type(kwargs["key"])
+ description += str(kwargs["key"])
return description
@@ -36,22 +36,22 @@ def _patch_cache_method(cache, method_name):
# type: (CacheHandler, str) -> None
from sentry_sdk.integrations.django import DjangoIntegration
+ original_method = getattr(cache, method_name)
+
+ @ensure_integration_enabled(DjangoIntegration, original_method)
def _instrument_call(cache, method_name, original_method, args, kwargs):
# type: (CacheHandler, str, Callable[..., Any], Any, Any) -> Any
- hub = Hub.current
- integration = hub.get_integration(DjangoIntegration)
- if integration is None or not integration.cache_spans:
- return original_method(*args, **kwargs)
-
description = _get_span_description(method_name, args, kwargs)
- with hub.start_span(op=OP.CACHE_GET_ITEM, description=description) as span:
+ with sentry_sdk.start_span(
+ op=OP.CACHE_GET_ITEM, description=description
+ ) as span:
value = original_method(*args, **kwargs)
if value:
span.set_data(SPANDATA.CACHE_HIT, True)
- size = len(text_type(value))
+ size = len(str(value))
span.set_data(SPANDATA.CACHE_ITEM_SIZE, size)
else:
@@ -59,8 +59,6 @@ def _instrument_call(cache, method_name, original_method, args, kwargs):
return value
- original_method = getattr(cache, method_name)
-
@functools.wraps(original_method)
def sentry_method(*args, **kwargs):
# type: (*Any, **Any) -> Any
@@ -90,8 +88,8 @@ def sentry_get_item(self, alias):
# type: (CacheHandler, str) -> Any
cache = original_get_item(self, alias)
- integration = Hub.current.get_integration(DjangoIntegration)
- if integration and integration.cache_spans:
+ integration = sentry_sdk.get_client().get_integration(DjangoIntegration)
+ if integration is not None and integration.cache_spans:
_patch_cache(cache)
return cache
@@ -107,8 +105,8 @@ def sentry_create_connection(self, alias):
# type: (CacheHandler, str) -> Any
cache = original_create_connection(self, alias)
- integration = Hub.current.get_integration(DjangoIntegration)
- if integration and integration.cache_spans:
+ integration = sentry_sdk.get_client().get_integration(DjangoIntegration)
+ if integration is not None and integration.cache_spans:
_patch_cache(cache)
return cache
diff --git a/sentry_sdk/integrations/django/middleware.py b/sentry_sdk/integrations/django/middleware.py
index aa8023dbd4..9d191ce076 100644
--- a/sentry_sdk/integrations/django/middleware.py
+++ b/sentry_sdk/integrations/django/middleware.py
@@ -2,10 +2,11 @@
Create spans from Django middleware invocations
"""
+from functools import wraps
+
from django import VERSION as DJANGO_VERSION
-from sentry_sdk import Hub
-from sentry_sdk._functools import wraps
+import sentry_sdk
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.consts import OP
from sentry_sdk.utils import (
@@ -28,12 +29,6 @@
"import_string_should_wrap_middleware"
)
-if DJANGO_VERSION < (1, 7):
- import_string_name = "import_by_path"
-else:
- import_string_name = "import_string"
-
-
if DJANGO_VERSION < (3, 1):
_asgi_middleware_mixin_factory = lambda _: object
else:
@@ -44,7 +39,7 @@ def patch_django_middlewares():
# type: () -> None
from django.core.handlers import base
- old_import_string = getattr(base, import_string_name)
+ old_import_string = base.import_string
def sentry_patched_import_string(dotted_path):
# type: (str) -> Any
@@ -55,7 +50,7 @@ def sentry_patched_import_string(dotted_path):
return rv
- setattr(base, import_string_name, sentry_patched_import_string)
+ base.import_string = sentry_patched_import_string
old_load_middleware = base.BaseHandler.load_middleware
@@ -76,8 +71,7 @@ def _wrap_middleware(middleware, middleware_name):
def _check_middleware_span(old_method):
# type: (Callable[..., Any]) -> Optional[Span]
- hub = Hub.current
- integration = hub.get_integration(DjangoIntegration)
+ integration = sentry_sdk.get_client().get_integration(DjangoIntegration)
if integration is None or not integration.middleware_spans:
return None
@@ -88,7 +82,7 @@ def _check_middleware_span(old_method):
if function_basename:
description = "{}.{}".format(description, function_basename)
- middleware_span = hub.start_span(
+ middleware_span = sentry_sdk.start_span(
op=OP.MIDDLEWARE_DJANGO, description=description
)
middleware_span.set_tag("django.function_name", function_name)
@@ -137,7 +131,7 @@ def __init__(self, get_response=None, *args, **kwargs):
self.get_response = get_response
self._call_method = None
if self.async_capable:
- super(SentryWrappingMiddleware, self).__init__(get_response)
+ super().__init__(get_response)
# We need correct behavior for `hasattr()`, which we can only determine
# when we have an instance of the middleware we're wrapping.
diff --git a/sentry_sdk/integrations/django/signals_handlers.py b/sentry_sdk/integrations/django/signals_handlers.py
index 3d1aadab1f..969316d2da 100644
--- a/sentry_sdk/integrations/django/signals_handlers.py
+++ b/sentry_sdk/integrations/django/signals_handlers.py
@@ -1,10 +1,8 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import
+from functools import wraps
from django.dispatch import Signal
-from sentry_sdk import Hub
-from sentry_sdk._functools import wraps
+import sentry_sdk
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.consts import OP
from sentry_sdk.integrations.django import DJANGO_VERSION
@@ -54,8 +52,6 @@ def patch_signals():
def _sentry_live_receivers(self, sender):
# type: (Signal, Any) -> Union[tuple[list[Callable[..., Any]], list[Callable[..., Any]]], list[Callable[..., Any]]]
- hub = Hub.current
-
if DJANGO_VERSION >= (5, 0):
sync_receivers, async_receivers = old_live_receivers(self, sender)
else:
@@ -68,7 +64,7 @@ def sentry_sync_receiver_wrapper(receiver):
def wrapper(*args, **kwargs):
# type: (Any, Any) -> Any
signal_name = _get_receiver_name(receiver)
- with hub.start_span(
+ with sentry_sdk.start_span(
op=OP.EVENT_DJANGO,
description=signal_name,
) as span:
@@ -77,7 +73,7 @@ def wrapper(*args, **kwargs):
return wrapper
- integration = hub.get_integration(DjangoIntegration)
+ integration = sentry_sdk.get_client().get_integration(DjangoIntegration)
if (
integration
and integration.signals_spans
diff --git a/sentry_sdk/integrations/django/templates.py b/sentry_sdk/integrations/django/templates.py
index e6c83b5bf2..0c75ad7955 100644
--- a/sentry_sdk/integrations/django/templates.py
+++ b/sentry_sdk/integrations/django/templates.py
@@ -1,10 +1,14 @@
+import functools
+
from django.template import TemplateSyntaxError
from django.utils.safestring import mark_safe
from django import VERSION as DJANGO_VERSION
-from sentry_sdk import _functools, Hub
+import sentry_sdk
+from sentry_sdk import Scope
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.consts import OP
+from sentry_sdk.utils import ensure_integration_enabled
if TYPE_CHECKING:
from typing import Any
@@ -61,13 +65,10 @@ def patch_templates():
real_rendered_content = SimpleTemplateResponse.rendered_content
@property # type: ignore
+ @ensure_integration_enabled(DjangoIntegration, real_rendered_content.fget)
def rendered_content(self):
# type: (SimpleTemplateResponse) -> str
- hub = Hub.current
- if hub.get_integration(DjangoIntegration) is None:
- return real_rendered_content.fget(self)
-
- with hub.start_span(
+ with sentry_sdk.start_span(
op=OP.TEMPLATE_RENDER,
description=_get_template_name_description(self.template_name),
) as span:
@@ -82,19 +83,19 @@ def rendered_content(self):
real_render = django.shortcuts.render
- @_functools.wraps(real_render)
+ @functools.wraps(real_render)
+ @ensure_integration_enabled(DjangoIntegration, real_render)
def render(request, template_name, context=None, *args, **kwargs):
# type: (django.http.HttpRequest, str, Optional[Dict[str, Any]], *Any, **Any) -> django.http.HttpResponse
- hub = Hub.current
- if hub.get_integration(DjangoIntegration) is None:
- return real_render(request, template_name, context, *args, **kwargs)
# Inject trace meta tags into template context
context = context or {}
if "sentry_trace_meta" not in context:
- context["sentry_trace_meta"] = mark_safe(hub.trace_propagation_meta())
+ context["sentry_trace_meta"] = mark_safe(
+ Scope.get_current_scope().trace_propagation_meta()
+ )
- with hub.start_span(
+ with sentry_sdk.start_span(
op=OP.TEMPLATE_RENDER,
description=_get_template_name_description(template_name),
) as span:
diff --git a/sentry_sdk/integrations/django/transactions.py b/sentry_sdk/integrations/django/transactions.py
index b2e200b832..a8e756ccaf 100644
--- a/sentry_sdk/integrations/django/transactions.py
+++ b/sentry_sdk/integrations/django/transactions.py
@@ -5,8 +5,6 @@
in use.
"""
-from __future__ import absolute_import
-
import re
from sentry_sdk._types import TYPE_CHECKING
@@ -44,7 +42,7 @@ def get_regex(resolver_or_pattern):
return regex
-class RavenResolver(object):
+class RavenResolver:
_new_style_group_matcher = re.compile(
r"<(?:([^>:]+):)?([^>]+)>"
) # https://github.com/django/django/blob/21382e2743d06efbf5623e7c9b6dccf2a325669b/django/urls/resolvers.py#L245-L247
diff --git a/sentry_sdk/integrations/django/views.py b/sentry_sdk/integrations/django/views.py
index d918afad66..1fd53462b3 100644
--- a/sentry_sdk/integrations/django/views.py
+++ b/sentry_sdk/integrations/django/views.py
@@ -1,7 +1,9 @@
+import functools
+
+import sentry_sdk
+from sentry_sdk import Scope
from sentry_sdk.consts import OP
-from sentry_sdk.hub import Hub
from sentry_sdk._types import TYPE_CHECKING
-from sentry_sdk import _functools
if TYPE_CHECKING:
from typing import Any
@@ -31,13 +33,12 @@ def patch_views():
def sentry_patched_render(self):
# type: (SimpleTemplateResponse) -> Any
- hub = Hub.current
- with hub.start_span(
+ with sentry_sdk.start_span(
op=OP.VIEW_RESPONSE_RENDER, description="serialize response"
):
return old_render(self)
- @_functools.wraps(old_make_view_atomic)
+ @functools.wraps(old_make_view_atomic)
def sentry_patched_make_view_atomic(self, *args, **kwargs):
# type: (Any, *Any, **Any) -> Any
callback = old_make_view_atomic(self, *args, **kwargs)
@@ -45,8 +46,7 @@ def sentry_patched_make_view_atomic(self, *args, **kwargs):
# XXX: The wrapper function is created for every request. Find more
# efficient way to wrap views (or build a cache?)
- hub = Hub.current
- integration = hub.get_integration(DjangoIntegration)
+ integration = sentry_sdk.get_client().get_integration(DjangoIntegration)
if integration is not None and integration.middleware_spans:
is_async_view = (
iscoroutinefunction is not None
@@ -54,9 +54,9 @@ def sentry_patched_make_view_atomic(self, *args, **kwargs):
and iscoroutinefunction(callback)
)
if is_async_view:
- sentry_wrapped_callback = wrap_async_view(hub, callback)
+ sentry_wrapped_callback = wrap_async_view(callback)
else:
- sentry_wrapped_callback = _wrap_sync_view(hub, callback)
+ sentry_wrapped_callback = _wrap_sync_view(callback)
else:
sentry_wrapped_callback = callback
@@ -67,20 +67,20 @@ def sentry_patched_make_view_atomic(self, *args, **kwargs):
BaseHandler.make_view_atomic = sentry_patched_make_view_atomic
-def _wrap_sync_view(hub, callback):
- # type: (Hub, Any) -> Any
- @_functools.wraps(callback)
+def _wrap_sync_view(callback):
+ # type: (Any) -> Any
+ @functools.wraps(callback)
def sentry_wrapped_callback(request, *args, **kwargs):
# type: (Any, *Any, **Any) -> Any
- with hub.configure_scope() as sentry_scope:
- # set the active thread id to the handler thread for sync views
- # this isn't necessary for async views since that runs on main
- if sentry_scope.profile is not None:
- sentry_scope.profile.update_active_thread_id()
-
- with hub.start_span(
- op=OP.VIEW_RENDER, description=request.resolver_match.view_name
- ):
- return callback(request, *args, **kwargs)
+ sentry_scope = Scope.get_isolation_scope()
+ # set the active thread id to the handler thread for sync views
+ # this isn't necessary for async views since that runs on main
+ if sentry_scope.profile is not None:
+ sentry_scope.profile.update_active_thread_id()
+
+ with sentry_sdk.start_span(
+ op=OP.VIEW_RENDER, description=request.resolver_match.view_name
+ ):
+ return callback(request, *args, **kwargs)
return sentry_wrapped_callback
diff --git a/sentry_sdk/integrations/excepthook.py b/sentry_sdk/integrations/excepthook.py
index 514e082b31..d638ef2f9f 100644
--- a/sentry_sdk/integrations/excepthook.py
+++ b/sentry_sdk/integrations/excepthook.py
@@ -1,7 +1,11 @@
import sys
-from sentry_sdk.hub import Hub
-from sentry_sdk.utils import capture_internal_exceptions, event_from_exception
+import sentry_sdk
+from sentry_sdk.utils import (
+ capture_internal_exceptions,
+ ensure_integration_enabled,
+ event_from_exception,
+)
from sentry_sdk.integrations import Integration
from sentry_sdk._types import TYPE_CHECKING
@@ -43,22 +47,19 @@ def setup_once():
def _make_excepthook(old_excepthook):
# type: (Excepthook) -> Excepthook
+ @ensure_integration_enabled(ExcepthookIntegration, old_excepthook)
def sentry_sdk_excepthook(type_, value, traceback):
# type: (Type[BaseException], BaseException, Optional[TracebackType]) -> None
- hub = Hub.current
- integration = hub.get_integration(ExcepthookIntegration)
-
- if integration is not None and _should_send(integration.always_run):
- # If an integration is there, a client has to be there.
- client = hub.client # type: Any
+ integration = sentry_sdk.get_client().get_integration(ExcepthookIntegration)
+ if _should_send(integration.always_run):
with capture_internal_exceptions():
event, hint = event_from_exception(
(type_, value, traceback),
- client_options=client.options,
+ client_options=sentry_sdk.get_client().options,
mechanism={"type": "excepthook", "handled": False},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
return old_excepthook(type_, value, traceback)
diff --git a/sentry_sdk/integrations/executing.py b/sentry_sdk/integrations/executing.py
index e8636b61f8..d6817c5041 100644
--- a/sentry_sdk/integrations/executing.py
+++ b/sentry_sdk/integrations/executing.py
@@ -1,6 +1,4 @@
-from __future__ import absolute_import
-
-from sentry_sdk import Hub
+import sentry_sdk
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.scope import add_global_event_processor
@@ -27,7 +25,7 @@ def setup_once():
@add_global_event_processor
def add_executing_info(event, hint):
# type: (Event, Optional[Hint]) -> Optional[Event]
- if Hub.current.get_integration(ExecutingIntegration) is None:
+ if sentry_sdk.get_client().get_integration(ExecutingIntegration) is None:
return event
if hint is None:
diff --git a/sentry_sdk/integrations/falcon.py b/sentry_sdk/integrations/falcon.py
index d5e2480485..61c11e11d5 100644
--- a/sentry_sdk/integrations/falcon.py
+++ b/sentry_sdk/integrations/falcon.py
@@ -1,12 +1,12 @@
-from __future__ import absolute_import
-
-from sentry_sdk.hub import Hub
+import sentry_sdk
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.integrations._wsgi_common import RequestExtractor
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.scope import Scope
from sentry_sdk.tracing import SOURCE_FOR_STYLE
from sentry_sdk.utils import (
capture_internal_exceptions,
+ ensure_integration_enabled,
event_from_exception,
parse_version,
)
@@ -97,19 +97,18 @@ def json(self):
return self.request._media
-class SentryFalconMiddleware(object):
+class SentryFalconMiddleware:
"""Captures exceptions in Falcon requests and send to Sentry"""
def process_request(self, req, resp, *args, **kwargs):
# type: (Any, Any, *Any, **Any) -> None
- hub = Hub.current
- integration = hub.get_integration(FalconIntegration)
+ integration = sentry_sdk.get_client().get_integration(FalconIntegration)
if integration is None:
return
- with hub.configure_scope() as scope:
- scope._name = "falcon"
- scope.add_event_processor(_make_request_event_processor(req, integration))
+ scope = Scope.get_isolation_scope()
+ scope._name = "falcon"
+ scope.add_event_processor(_make_request_event_processor(req, integration))
TRANSACTION_STYLE_VALUES = ("uri_template", "path")
@@ -152,8 +151,7 @@ def _patch_wsgi_app():
def sentry_patched_wsgi_app(self, env, start_response):
# type: (falcon.API, Any, Any) -> Any
- hub = Hub.current
- integration = hub.get_integration(FalconIntegration)
+ integration = sentry_sdk.get_client().get_integration(FalconIntegration)
if integration is None:
return original_wsgi_app(self, env, start_response)
@@ -170,6 +168,7 @@ def _patch_handle_exception():
# type: () -> None
original_handle_exception = falcon_app_class._handle_exception
+ @ensure_integration_enabled(FalconIntegration, original_handle_exception)
def sentry_patched_handle_exception(self, *args):
# type: (falcon.API, *Any) -> Any
# NOTE(jmagnusson): falcon 2.0 changed falcon.API._handle_exception
@@ -190,19 +189,13 @@ def sentry_patched_handle_exception(self, *args):
# capture_internal_exceptions block above.
return was_handled
- hub = Hub.current
- integration = hub.get_integration(FalconIntegration)
-
- if integration is not None and _exception_leads_to_http_5xx(ex, response):
- # If an integration is there, a client has to be there.
- client = hub.client # type: Any
-
+ if _exception_leads_to_http_5xx(ex, response):
event, hint = event_from_exception(
ex,
- client_options=client.options,
+ client_options=sentry_sdk.get_client().options,
mechanism={"type": "falcon", "handled": False},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
return was_handled
@@ -221,8 +214,7 @@ def sentry_patched_prepare_middleware(
# We don't support ASGI Falcon apps, so we don't patch anything here
return original_prepare_middleware(middleware, independent_middleware, asgi)
- hub = Hub.current
- integration = hub.get_integration(FalconIntegration)
+ integration = sentry_sdk.get_client().get_integration(FalconIntegration)
if integration is not None:
middleware = [SentryFalconMiddleware()] + (middleware or [])
diff --git a/sentry_sdk/integrations/fastapi.py b/sentry_sdk/integrations/fastapi.py
index 33a5591cc4..8fd18fef96 100644
--- a/sentry_sdk/integrations/fastapi.py
+++ b/sentry_sdk/integrations/fastapi.py
@@ -1,16 +1,19 @@
import asyncio
from copy import deepcopy
+from functools import wraps
-from sentry_sdk._functools import wraps
+import sentry_sdk
from sentry_sdk._types import TYPE_CHECKING
-from sentry_sdk.hub import Hub, _should_send_default_pii
from sentry_sdk.integrations import DidNotEnable
+from sentry_sdk.scope import Scope, should_send_default_pii
from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE
-from sentry_sdk.utils import transaction_from_function, logger
+from sentry_sdk.utils import (
+ transaction_from_function,
+ logger,
+)
if TYPE_CHECKING:
from typing import Any, Callable, Dict
- from sentry_sdk.scope import Scope
from sentry_sdk._types import Event
try:
@@ -84,11 +87,10 @@ def _sentry_get_request_handler(*args, **kwargs):
@wraps(old_call)
def _sentry_call(*args, **kwargs):
# type: (*Any, **Any) -> Any
- hub = Hub.current
- with hub.configure_scope() as sentry_scope:
- if sentry_scope.profile is not None:
- sentry_scope.profile.update_active_thread_id()
- return old_call(*args, **kwargs)
+ sentry_scope = Scope.get_isolation_scope()
+ if sentry_scope.profile is not None:
+ sentry_scope.profile.update_active_thread_id()
+ return old_call(*args, **kwargs)
dependant.call = _sentry_call
@@ -96,43 +98,41 @@ def _sentry_call(*args, **kwargs):
async def _sentry_app(*args, **kwargs):
# type: (*Any, **Any) -> Any
- hub = Hub.current
- integration = hub.get_integration(FastApiIntegration)
- if integration is None:
+ if sentry_sdk.get_client().get_integration(FastApiIntegration) is None:
return await old_app(*args, **kwargs)
- with hub.configure_scope() as sentry_scope:
- request = args[0]
-
- _set_transaction_name_and_source(
- sentry_scope, integration.transaction_style, request
- )
-
- extractor = StarletteRequestExtractor(request)
- info = await extractor.extract_request_info()
-
- def _make_request_event_processor(req, integration):
- # type: (Any, Any) -> Callable[[Event, Dict[str, Any]], Event]
- def event_processor(event, hint):
- # type: (Event, Dict[str, Any]) -> Event
-
- # Extract information from request
- request_info = event.get("request", {})
- if info:
- if "cookies" in info and _should_send_default_pii():
- request_info["cookies"] = info["cookies"]
- if "data" in info:
- request_info["data"] = info["data"]
- event["request"] = deepcopy(request_info)
-
- return event
-
- return event_processor
-
- sentry_scope._name = FastApiIntegration.identifier
- sentry_scope.add_event_processor(
- _make_request_event_processor(request, integration)
- )
+ integration = sentry_sdk.get_client().get_integration(FastApiIntegration)
+ request = args[0]
+
+ _set_transaction_name_and_source(
+ Scope.get_current_scope(), integration.transaction_style, request
+ )
+ sentry_scope = Scope.get_isolation_scope()
+ extractor = StarletteRequestExtractor(request)
+ info = await extractor.extract_request_info()
+
+ def _make_request_event_processor(req, integration):
+ # type: (Any, Any) -> Callable[[Event, Dict[str, Any]], Event]
+ def event_processor(event, hint):
+ # type: (Event, Dict[str, Any]) -> Event
+
+ # Extract information from request
+ request_info = event.get("request", {})
+ if info:
+ if "cookies" in info and should_send_default_pii():
+ request_info["cookies"] = info["cookies"]
+ if "data" in info:
+ request_info["data"] = info["data"]
+ event["request"] = deepcopy(request_info)
+
+ return event
+
+ return event_processor
+
+ sentry_scope._name = FastApiIntegration.identifier
+ sentry_scope.add_event_processor(
+ _make_request_event_processor(request, integration)
+ )
return await old_app(*args, **kwargs)
diff --git a/sentry_sdk/integrations/flask.py b/sentry_sdk/integrations/flask.py
index f0bc3d7750..52b843c911 100644
--- a/sentry_sdk/integrations/flask.py
+++ b/sentry_sdk/integrations/flask.py
@@ -1,14 +1,13 @@
-from __future__ import absolute_import
-
+import sentry_sdk
from sentry_sdk._types import TYPE_CHECKING
-from sentry_sdk.hub import Hub, _should_send_default_pii
from sentry_sdk.integrations import DidNotEnable, Integration
from sentry_sdk.integrations._wsgi_common import RequestExtractor
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
-from sentry_sdk.scope import Scope
+from sentry_sdk.scope import Scope, should_send_default_pii
from sentry_sdk.tracing import SOURCE_FOR_STYLE
from sentry_sdk.utils import (
capture_internal_exceptions,
+ ensure_integration_enabled,
event_from_exception,
package_version,
)
@@ -79,7 +78,7 @@ def setup_once():
def sentry_patched_wsgi_app(self, environ, start_response):
# type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
- if Hub.current.get_integration(FlaskIntegration) is None:
+ if sentry_sdk.get_client().get_integration(FlaskIntegration) is None:
return old_app(self, environ, start_response)
return SentryWsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw))(
@@ -94,8 +93,8 @@ def _add_sentry_trace(sender, template, context, **extra):
if "sentry_trace" in context:
return
- hub = Hub.current
- trace_meta = Markup(hub.trace_propagation_meta())
+ scope = Scope.get_current_scope()
+ trace_meta = Markup(scope.trace_propagation_meta())
context["sentry_trace"] = trace_meta # for backwards compatibility
context["sentry_trace_meta"] = trace_meta
@@ -115,20 +114,21 @@ def _set_transaction_name_and_source(scope, transaction_style, request):
pass
+@ensure_integration_enabled(FlaskIntegration)
def _request_started(app, **kwargs):
# type: (Flask, **Any) -> None
- hub = Hub.current
- integration = hub.get_integration(FlaskIntegration)
- if integration is None:
- return
+ integration = sentry_sdk.get_client().get_integration(FlaskIntegration)
+ request = flask_request._get_current_object()
+
+ # Set the transaction name and source here,
+ # but rely on WSGI middleware to actually start the transaction
+ _set_transaction_name_and_source(
+ Scope.get_current_scope(), integration.transaction_style, request
+ )
- with hub.configure_scope() as scope:
- # Set the transaction name and source here,
- # but rely on WSGI middleware to actually start the transaction
- request = flask_request._get_current_object()
- _set_transaction_name_and_source(scope, integration.transaction_style, request)
- evt_processor = _make_request_event_processor(app, request, integration)
- scope.add_event_processor(evt_processor)
+ scope = Scope.get_isolation_scope()
+ evt_processor = _make_request_event_processor(app, request, integration)
+ scope.add_event_processor(evt_processor)
class FlaskRequestExtractor(RequestExtractor):
@@ -183,7 +183,7 @@ def inner(event, hint):
with capture_internal_exceptions():
FlaskRequestExtractor(request).extract_into_event(event)
- if _should_send_default_pii():
+ if should_send_default_pii():
with capture_internal_exceptions():
_add_user_to_event(event)
@@ -192,22 +192,16 @@ def inner(event, hint):
return inner
+@ensure_integration_enabled(FlaskIntegration)
def _capture_exception(sender, exception, **kwargs):
# type: (Flask, Union[ValueError, BaseException], **Any) -> None
- hub = Hub.current
- if hub.get_integration(FlaskIntegration) is None:
- return
-
- # If an integration is there, a client has to be there.
- client = hub.client # type: Any
-
event, hint = event_from_exception(
exception,
- client_options=client.options,
+ client_options=sentry_sdk.get_client().options,
mechanism={"type": "flask", "handled": False},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
def _add_user_to_event(event):
diff --git a/sentry_sdk/integrations/gcp.py b/sentry_sdk/integrations/gcp.py
index 819c7ac93d..0cab8f9b26 100644
--- a/sentry_sdk/integrations/gcp.py
+++ b/sentry_sdk/integrations/gcp.py
@@ -1,22 +1,24 @@
import sys
from copy import deepcopy
-from datetime import timedelta
+from datetime import datetime, timedelta, timezone
from os import environ
+import sentry_sdk
from sentry_sdk.api import continue_trace
from sentry_sdk.consts import OP
-from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations._wsgi_common import _filter_headers
+from sentry_sdk.scope import should_send_default_pii
from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT
-from sentry_sdk._compat import datetime_utcnow, duration_in_milliseconds, reraise
from sentry_sdk.utils import (
AnnotatedValue,
capture_internal_exceptions,
+ ensure_integration_enabled,
event_from_exception,
logger,
TimeoutThread,
+ reraise,
)
-from sentry_sdk.integrations import Integration
-from sentry_sdk.integrations._wsgi_common import _filter_headers
from sentry_sdk._types import TYPE_CHECKING
@@ -25,7 +27,6 @@
MILLIS_TO_SECONDS = 1000.0
if TYPE_CHECKING:
- from datetime import datetime
from typing import Any
from typing import TypeVar
from typing import Callable
@@ -38,16 +39,12 @@
def _wrap_func(func):
# type: (F) -> F
+ @ensure_integration_enabled(GcpIntegration, func)
def sentry_func(functionhandler, gcp_event, *args, **kwargs):
# type: (Any, Any, *Any, **Any) -> Any
+ client = sentry_sdk.get_client()
- hub = Hub.current
- integration = hub.get_integration(GcpIntegration)
- if integration is None:
- return func(functionhandler, gcp_event, *args, **kwargs)
-
- # If an integration is there, a client has to be there.
- client = hub.client # type: Any
+ integration = client.get_integration(GcpIntegration)
configured_time = environ.get("FUNCTION_TIMEOUT_SEC")
if not configured_time:
@@ -58,9 +55,9 @@ def sentry_func(functionhandler, gcp_event, *args, **kwargs):
configured_time = int(configured_time)
- initial_time = datetime_utcnow()
+ initial_time = datetime.now(timezone.utc)
- with hub.push_scope() as scope:
+ with sentry_sdk.isolation_scope() as scope:
with capture_internal_exceptions():
scope.clear_breadcrumbs()
scope.add_event_processor(
@@ -101,7 +98,7 @@ def sentry_func(functionhandler, gcp_event, *args, **kwargs):
},
"gcp_event": gcp_event,
}
- with hub.start_transaction(
+ with sentry_sdk.start_transaction(
transaction, custom_sampling_context=sampling_context
):
try:
@@ -113,13 +110,13 @@ def sentry_func(functionhandler, gcp_event, *args, **kwargs):
client_options=client.options,
mechanism={"type": "gcp", "handled": False},
)
- hub.capture_event(sentry_event, hint=hint)
+ sentry_sdk.capture_event(sentry_event, hint=hint)
reraise(*exc_info)
finally:
if timeout_thread:
timeout_thread.stop()
# Flush out the event queue
- hub.flush()
+ client.flush()
return sentry_func # type: ignore
@@ -155,10 +152,10 @@ def _make_request_event_processor(gcp_event, configured_timeout, initial_time):
def event_processor(event, hint):
# type: (Event, Hint) -> Optional[Event]
- final_time = datetime_utcnow()
+ final_time = datetime.now(timezone.utc)
time_diff = final_time - initial_time
- execution_duration_in_millis = duration_in_milliseconds(time_diff)
+ execution_duration_in_millis = time_diff / timedelta(milliseconds=1)
extra = event.setdefault("extra", {})
extra["google cloud functions"] = {
@@ -188,7 +185,7 @@ def event_processor(event, hint):
if hasattr(gcp_event, "headers"):
request["headers"] = _filter_headers(gcp_event.headers)
- if _should_send_default_pii():
+ if should_send_default_pii():
if hasattr(gcp_event, "data"):
request["data"] = gcp_event.data
else:
diff --git a/sentry_sdk/integrations/gql.py b/sentry_sdk/integrations/gql.py
index 9db6632a4a..0552edde60 100644
--- a/sentry_sdk/integrations/gql.py
+++ b/sentry_sdk/integrations/gql.py
@@ -1,6 +1,12 @@
-from sentry_sdk.utils import event_from_exception, parse_version
-from sentry_sdk.hub import Hub, _should_send_default_pii
+import sentry_sdk
+from sentry_sdk.utils import (
+ event_from_exception,
+ ensure_integration_enabled,
+ parse_version,
+)
+
from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.scope import Scope, should_send_default_pii
try:
import gql # type: ignore[import-not-found]
@@ -85,25 +91,22 @@ def _patch_execute():
# type: () -> None
real_execute = gql.Client.execute
+ @ensure_integration_enabled(GQLIntegration, real_execute)
def sentry_patched_execute(self, document, *args, **kwargs):
# type: (gql.Client, DocumentNode, Any, Any) -> Any
- hub = Hub.current
- if hub.get_integration(GQLIntegration) is None:
- return real_execute(self, document, *args, **kwargs)
-
- with Hub.current.configure_scope() as scope:
- scope.add_event_processor(_make_gql_event_processor(self, document))
+ scope = Scope.get_isolation_scope()
+ scope.add_event_processor(_make_gql_event_processor(self, document))
try:
return real_execute(self, document, *args, **kwargs)
except TransportQueryError as e:
event, hint = event_from_exception(
e,
- client_options=hub.client.options if hub.client is not None else None,
+ client_options=sentry_sdk.get_client().options,
mechanism={"type": "gql", "handled": False},
)
- hub.capture_event(event, hint)
+ sentry_sdk.capture_event(event, hint)
raise e
gql.Client.execute = sentry_patched_execute
@@ -126,7 +129,7 @@ def processor(event, hint):
}
)
- if _should_send_default_pii():
+ if should_send_default_pii():
request["data"] = _data_from_document(document)
contexts = event.setdefault("contexts", {})
response = contexts.setdefault("response", {})
diff --git a/sentry_sdk/integrations/graphene.py b/sentry_sdk/integrations/graphene.py
index b9c3b26018..5b8c393743 100644
--- a/sentry_sdk/integrations/graphene.py
+++ b/sentry_sdk/integrations/graphene.py
@@ -1,7 +1,9 @@
-from sentry_sdk.hub import Hub, _should_send_default_pii
+import sentry_sdk
from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.scope import Scope, should_send_default_pii
from sentry_sdk.utils import (
capture_internal_exceptions,
+ ensure_integration_enabled,
event_from_exception,
package_version,
)
@@ -44,15 +46,11 @@ def _patch_graphql():
old_graphql_sync = graphene_schema.graphql_sync
old_graphql_async = graphene_schema.graphql
+ @ensure_integration_enabled(GrapheneIntegration, old_graphql_sync)
def _sentry_patched_graphql_sync(schema, source, *args, **kwargs):
# type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult
- hub = Hub.current
- integration = hub.get_integration(GrapheneIntegration)
- if integration is None:
- return old_graphql_sync(schema, source, *args, **kwargs)
-
- with hub.configure_scope() as scope:
- scope.add_event_processor(_event_processor)
+ scope = Scope.get_isolation_scope()
+ scope.add_event_processor(_event_processor)
result = old_graphql_sync(schema, source, *args, **kwargs)
@@ -60,25 +58,23 @@ def _sentry_patched_graphql_sync(schema, source, *args, **kwargs):
for error in result.errors or []:
event, hint = event_from_exception(
error,
- client_options=hub.client.options if hub.client else None,
+ client_options=sentry_sdk.get_client().options,
mechanism={
- "type": integration.identifier,
+ "type": GrapheneIntegration.identifier,
"handled": False,
},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
return result
async def _sentry_patched_graphql_async(schema, source, *args, **kwargs):
# type: (GraphQLSchema, Union[str, Source], Any, Any) -> ExecutionResult
- hub = Hub.current
- integration = hub.get_integration(GrapheneIntegration)
- if integration is None:
+ if sentry_sdk.get_client().get_integration(GrapheneIntegration) is None:
return await old_graphql_async(schema, source, *args, **kwargs)
- with hub.configure_scope() as scope:
- scope.add_event_processor(_event_processor)
+ scope = Scope.get_isolation_scope()
+ scope.add_event_processor(_event_processor)
result = await old_graphql_async(schema, source, *args, **kwargs)
@@ -86,13 +82,13 @@ async def _sentry_patched_graphql_async(schema, source, *args, **kwargs):
for error in result.errors or []:
event, hint = event_from_exception(
error,
- client_options=hub.client.options if hub.client else None,
+ client_options=sentry_sdk.get_client().options,
mechanism={
- "type": integration.identifier,
+ "type": GrapheneIntegration.identifier,
"handled": False,
},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
return result
@@ -102,7 +98,7 @@ async def _sentry_patched_graphql_async(schema, source, *args, **kwargs):
def _event_processor(event, hint):
# type: (Event, Dict[str, Any]) -> Event
- if _should_send_default_pii():
+ if should_send_default_pii():
request_info = event.setdefault("request", {})
request_info["api_target"] = "graphql"
diff --git a/sentry_sdk/integrations/grpc/__init__.py b/sentry_sdk/integrations/grpc/__init__.py
index 2cb7c8192a..d84cea573f 100644
--- a/sentry_sdk/integrations/grpc/__init__.py
+++ b/sentry_sdk/integrations/grpc/__init__.py
@@ -128,7 +128,7 @@ def patched_aio_server(
**kwargs: P.kwargs,
) -> Server:
server_interceptor = AsyncServerInterceptor()
- interceptors = [server_interceptor, *(interceptors or [])]
+ interceptors = (server_interceptor, *(interceptors or []))
return func(*args, interceptors=interceptors, **kwargs) # type: ignore
return patched_aio_server # type: ignore
diff --git a/sentry_sdk/integrations/grpc/aio/client.py b/sentry_sdk/integrations/grpc/aio/client.py
index e0b36541f3..91a06eaa7f 100644
--- a/sentry_sdk/integrations/grpc/aio/client.py
+++ b/sentry_sdk/integrations/grpc/aio/client.py
@@ -9,19 +9,20 @@
)
from google.protobuf.message import Message
-from sentry_sdk import Hub
+import sentry_sdk
from sentry_sdk.consts import OP
+from sentry_sdk.scope import Scope
class ClientInterceptor:
@staticmethod
- def _update_client_call_details_metadata_from_hub(
- client_call_details: ClientCallDetails, hub: Hub
+ def _update_client_call_details_metadata_from_scope(
+ client_call_details: ClientCallDetails,
) -> ClientCallDetails:
metadata = (
list(client_call_details.metadata) if client_call_details.metadata else []
)
- for key, value in hub.iter_trace_propagation_headers():
+ for key, value in Scope.get_current_scope().iter_trace_propagation_headers():
metadata.append((key, value))
client_call_details = ClientCallDetails(
@@ -42,17 +43,16 @@ async def intercept_unary_unary(
client_call_details: ClientCallDetails,
request: Message,
) -> Union[UnaryUnaryCall, Message]:
- hub = Hub.current
method = client_call_details.method
- with hub.start_span(
+ with sentry_sdk.start_span(
op=OP.GRPC_CLIENT, description="unary unary call to %s" % method.decode()
) as span:
span.set_data("type", "unary unary")
span.set_data("method", method)
- client_call_details = self._update_client_call_details_metadata_from_hub(
- client_call_details, hub
+ client_call_details = self._update_client_call_details_metadata_from_scope(
+ client_call_details
)
response = await continuation(client_call_details, request)
@@ -71,17 +71,16 @@ async def intercept_unary_stream(
client_call_details: ClientCallDetails,
request: Message,
) -> Union[AsyncIterable[Any], UnaryStreamCall]:
- hub = Hub.current
method = client_call_details.method
- with hub.start_span(
+ with sentry_sdk.start_span(
op=OP.GRPC_CLIENT, description="unary stream call to %s" % method.decode()
) as span:
span.set_data("type", "unary stream")
span.set_data("method", method)
- client_call_details = self._update_client_call_details_metadata_from_hub(
- client_call_details, hub
+ client_call_details = self._update_client_call_details_metadata_from_scope(
+ client_call_details
)
response = await continuation(client_call_details, request)
diff --git a/sentry_sdk/integrations/grpc/aio/server.py b/sentry_sdk/integrations/grpc/aio/server.py
index ba19eb947c..550f194c62 100644
--- a/sentry_sdk/integrations/grpc/aio/server.py
+++ b/sentry_sdk/integrations/grpc/aio/server.py
@@ -1,11 +1,11 @@
-from sentry_sdk import Hub
-from sentry_sdk._types import MYPY
+import sentry_sdk
+from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.consts import OP
from sentry_sdk.integrations import DidNotEnable
from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_CUSTOM
from sentry_sdk.utils import event_from_exception
-if MYPY:
+if TYPE_CHECKING:
from collections.abc import Awaitable, Callable
from typing import Any
@@ -23,7 +23,7 @@ def __init__(self, find_name=None):
# type: (ServerInterceptor, Callable[[ServicerContext], str] | None) -> None
self._find_method_name = find_name or self._find_name
- super(ServerInterceptor, self).__init__()
+ super().__init__()
async def intercept_service(self, continuation, handler_call_details):
# type: (ServerInterceptor, Callable[[HandlerCallDetails], Awaitable[RpcMethodHandler]], HandlerCallDetails) -> Awaitable[RpcMethodHandler]
@@ -39,8 +39,6 @@ async def wrapped(request, context):
if not name:
return await handler(request, context)
- hub = Hub.current
-
# What if the headers are empty?
transaction = Transaction.continue_from_headers(
dict(context.invocation_metadata()),
@@ -49,7 +47,7 @@ async def wrapped(request, context):
source=TRANSACTION_SOURCE_CUSTOM,
)
- with hub.start_transaction(transaction=transaction):
+ with sentry_sdk.start_transaction(transaction=transaction):
try:
return await handler.unary_unary(request, context)
except AbortError:
@@ -59,7 +57,7 @@ async def wrapped(request, context):
exc,
mechanism={"type": "grpc", "handled": False},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
raise
elif not handler.request_streaming and handler.response_streaming:
diff --git a/sentry_sdk/integrations/grpc/client.py b/sentry_sdk/integrations/grpc/client.py
index 955c3c4217..96f2591bde 100644
--- a/sentry_sdk/integrations/grpc/client.py
+++ b/sentry_sdk/integrations/grpc/client.py
@@ -1,9 +1,10 @@
-from sentry_sdk import Hub
-from sentry_sdk._types import MYPY
+import sentry_sdk
+from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.consts import OP
from sentry_sdk.integrations import DidNotEnable
+from sentry_sdk.scope import Scope
-if MYPY:
+if TYPE_CHECKING:
from typing import Any, Callable, Iterator, Iterable, Union
try:
@@ -23,17 +24,16 @@ class ClientInterceptor(
def intercept_unary_unary(self, continuation, client_call_details, request):
# type: (ClientInterceptor, Callable[[ClientCallDetails, Message], _UnaryOutcome], ClientCallDetails, Message) -> _UnaryOutcome
- hub = Hub.current
method = client_call_details.method
- with hub.start_span(
+ with sentry_sdk.start_span(
op=OP.GRPC_CLIENT, description="unary unary call to %s" % method
) as span:
span.set_data("type", "unary unary")
span.set_data("method", method)
- client_call_details = self._update_client_call_details_metadata_from_hub(
- client_call_details, hub
+ client_call_details = self._update_client_call_details_metadata_from_scope(
+ client_call_details
)
response = continuation(client_call_details, request)
@@ -43,17 +43,16 @@ def intercept_unary_unary(self, continuation, client_call_details, request):
def intercept_unary_stream(self, continuation, client_call_details, request):
# type: (ClientInterceptor, Callable[[ClientCallDetails, Message], Union[Iterable[Any], UnaryStreamCall]], ClientCallDetails, Message) -> Union[Iterator[Message], Call]
- hub = Hub.current
method = client_call_details.method
- with hub.start_span(
+ with sentry_sdk.start_span(
op=OP.GRPC_CLIENT, description="unary stream call to %s" % method
) as span:
span.set_data("type", "unary stream")
span.set_data("method", method)
- client_call_details = self._update_client_call_details_metadata_from_hub(
- client_call_details, hub
+ client_call_details = self._update_client_call_details_metadata_from_scope(
+ client_call_details
)
response = continuation(
@@ -65,12 +64,12 @@ def intercept_unary_stream(self, continuation, client_call_details, request):
return response
@staticmethod
- def _update_client_call_details_metadata_from_hub(client_call_details, hub):
- # type: (ClientCallDetails, Hub) -> ClientCallDetails
+ def _update_client_call_details_metadata_from_scope(client_call_details):
+ # type: (ClientCallDetails) -> ClientCallDetails
metadata = (
list(client_call_details.metadata) if client_call_details.metadata else []
)
- for key, value in hub.iter_trace_propagation_headers():
+ for key, value in Scope.get_current_scope().iter_trace_propagation_headers():
metadata.append((key, value))
client_call_details = grpc._interceptor._ClientCallDetails(
diff --git a/sentry_sdk/integrations/grpc/server.py b/sentry_sdk/integrations/grpc/server.py
index ce7c2f2a58..50a1dc4dbe 100644
--- a/sentry_sdk/integrations/grpc/server.py
+++ b/sentry_sdk/integrations/grpc/server.py
@@ -1,10 +1,10 @@
-from sentry_sdk import Hub
-from sentry_sdk._types import MYPY
+import sentry_sdk
+from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.consts import OP
from sentry_sdk.integrations import DidNotEnable
from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_CUSTOM
-if MYPY:
+if TYPE_CHECKING:
from typing import Callable, Optional
from google.protobuf.message import Message
@@ -20,7 +20,7 @@ def __init__(self, find_name=None):
# type: (ServerInterceptor, Optional[Callable[[ServicerContext], str]]) -> None
self._find_method_name = find_name or ServerInterceptor._find_name
- super(ServerInterceptor, self).__init__()
+ super().__init__()
def intercept_service(self, continuation, handler_call_details):
# type: (ServerInterceptor, Callable[[HandlerCallDetails], RpcMethodHandler], HandlerCallDetails) -> RpcMethodHandler
@@ -30,27 +30,26 @@ def intercept_service(self, continuation, handler_call_details):
def behavior(request, context):
# type: (Message, ServicerContext) -> Message
- hub = Hub(Hub.current)
-
- name = self._find_method_name(context)
-
- if name:
- metadata = dict(context.invocation_metadata())
-
- transaction = Transaction.continue_from_headers(
- metadata,
- op=OP.GRPC_SERVER,
- name=name,
- source=TRANSACTION_SOURCE_CUSTOM,
- )
-
- with hub.start_transaction(transaction=transaction):
- try:
- return handler.unary_unary(request, context)
- except BaseException as e:
- raise e
- else:
- return handler.unary_unary(request, context)
+ with sentry_sdk.isolation_scope():
+ name = self._find_method_name(context)
+
+ if name:
+ metadata = dict(context.invocation_metadata())
+
+ transaction = Transaction.continue_from_headers(
+ metadata,
+ op=OP.GRPC_SERVER,
+ name=name,
+ source=TRANSACTION_SOURCE_CUSTOM,
+ )
+
+ with sentry_sdk.start_transaction(transaction=transaction):
+ try:
+ return handler.unary_unary(request, context)
+ except BaseException as e:
+ raise e
+ else:
+ return handler.unary_unary(request, context)
return grpc.unary_unary_rpc_method_handler(
behavior,
diff --git a/sentry_sdk/integrations/httpx.py b/sentry_sdk/integrations/httpx.py
index 04db5047b4..fa75d1440b 100644
--- a/sentry_sdk/integrations/httpx.py
+++ b/sentry_sdk/integrations/httpx.py
@@ -1,11 +1,13 @@
-from sentry_sdk import Hub
+import sentry_sdk
from sentry_sdk.consts import OP, SPANDATA
from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.scope import Scope
from sentry_sdk.tracing import BAGGAGE_HEADER_NAME
from sentry_sdk.tracing_utils import should_propagate_trace
from sentry_sdk.utils import (
SENSITIVE_DATA_SUBSTITUTE,
capture_internal_exceptions,
+ ensure_integration_enabled,
logger,
parse_url,
)
@@ -42,17 +44,14 @@ def _install_httpx_client():
# type: () -> None
real_send = Client.send
+ @ensure_integration_enabled(HttpxIntegration, real_send)
def send(self, request, **kwargs):
# type: (Client, Request, **Any) -> Response
- hub = Hub.current
- if hub.get_integration(HttpxIntegration) is None:
- return real_send(self, request, **kwargs)
-
parsed_url = None
with capture_internal_exceptions():
parsed_url = parse_url(str(request.url), sanitize=False)
- with hub.start_span(
+ with sentry_sdk.start_span(
op=OP.HTTP_CLIENT,
description="%s %s"
% (
@@ -66,8 +65,11 @@ def send(self, request, **kwargs):
span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)
- if should_propagate_trace(hub, str(request.url)):
- for key, value in hub.iter_trace_propagation_headers():
+ if should_propagate_trace(sentry_sdk.get_client(), str(request.url)):
+ for (
+ key,
+ value,
+ ) in Scope.get_current_scope().iter_trace_propagation_headers():
logger.debug(
"[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
key=key, value=value, url=request.url
@@ -97,15 +99,14 @@ def _install_httpx_async_client():
async def send(self, request, **kwargs):
# type: (AsyncClient, Request, **Any) -> Response
- hub = Hub.current
- if hub.get_integration(HttpxIntegration) is None:
+ if sentry_sdk.get_client().get_integration(HttpxIntegration) is None:
return await real_send(self, request, **kwargs)
parsed_url = None
with capture_internal_exceptions():
parsed_url = parse_url(str(request.url), sanitize=False)
- with hub.start_span(
+ with sentry_sdk.start_span(
op=OP.HTTP_CLIENT,
description="%s %s"
% (
@@ -119,8 +120,11 @@ async def send(self, request, **kwargs):
span.set_data(SPANDATA.HTTP_QUERY, parsed_url.query)
span.set_data(SPANDATA.HTTP_FRAGMENT, parsed_url.fragment)
- if should_propagate_trace(hub, str(request.url)):
- for key, value in hub.iter_trace_propagation_headers():
+ if should_propagate_trace(sentry_sdk.get_client(), str(request.url)):
+ for (
+ key,
+ value,
+ ) in Scope.get_current_scope().iter_trace_propagation_headers():
logger.debug(
"[Tracing] Adding `{key}` header {value} to outgoing request to {url}.".format(
key=key, value=value, url=request.url
diff --git a/sentry_sdk/integrations/huey.py b/sentry_sdk/integrations/huey.py
index 43c03936b1..9b457c08d6 100644
--- a/sentry_sdk/integrations/huey.py
+++ b/sentry_sdk/integrations/huey.py
@@ -1,15 +1,12 @@
-from __future__ import absolute_import
-
import sys
from datetime import datetime
-from sentry_sdk._compat import reraise
+import sentry_sdk
from sentry_sdk._types import TYPE_CHECKING
-from sentry_sdk import Hub
from sentry_sdk.api import continue_trace, get_baggage, get_traceparent
from sentry_sdk.consts import OP
-from sentry_sdk.hub import _should_send_default_pii
from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.scope import Scope, should_send_default_pii
from sentry_sdk.tracing import (
BAGGAGE_HEADER_NAME,
SENTRY_TRACE_HEADER_NAME,
@@ -17,8 +14,10 @@
)
from sentry_sdk.utils import (
capture_internal_exceptions,
+ ensure_integration_enabled,
event_from_exception,
SENSITIVE_DATA_SUBSTITUTE,
+ reraise,
)
if TYPE_CHECKING:
@@ -53,14 +52,10 @@ def patch_enqueue():
# type: () -> None
old_enqueue = Huey.enqueue
+ @ensure_integration_enabled(HueyIntegration, old_enqueue)
def _sentry_enqueue(self, task):
# type: (Huey, Task) -> Optional[Union[Result, ResultGroup]]
- hub = Hub.current
-
- if hub.get_integration(HueyIntegration) is None:
- return old_enqueue(self, task)
-
- with hub.start_span(op=OP.QUEUE_SUBMIT_HUEY, description=task.name):
+ with sentry_sdk.start_span(op=OP.QUEUE_SUBMIT_HUEY, description=task.name):
if not isinstance(task, PeriodicTask):
# Attach trace propagation data to task kwargs. We do
# not do this for periodic tasks, as these don't
@@ -88,12 +83,12 @@ def event_processor(event, hint):
"task": task.name,
"args": (
task.args
- if _should_send_default_pii()
+ if should_send_default_pii()
else SENSITIVE_DATA_SUBSTITUTE
),
"kwargs": (
task.kwargs
- if _should_send_default_pii()
+ if should_send_default_pii()
else SENSITIVE_DATA_SUBSTITUTE
),
"retry": (task.default_retries or 0) - task.retries,
@@ -106,29 +101,27 @@ def event_processor(event, hint):
def _capture_exception(exc_info):
# type: (ExcInfo) -> None
- hub = Hub.current
+ scope = Scope.get_current_scope()
if exc_info[0] in HUEY_CONTROL_FLOW_EXCEPTIONS:
- hub.scope.transaction.set_status("aborted")
+ scope.transaction.set_status("aborted")
return
- hub.scope.transaction.set_status("internal_error")
+ scope.transaction.set_status("internal_error")
event, hint = event_from_exception(
exc_info,
- client_options=hub.client.options if hub.client else None,
+ client_options=Scope.get_client().options,
mechanism={"type": HueyIntegration.identifier, "handled": False},
)
- hub.capture_event(event, hint=hint)
+ scope.capture_event(event, hint=hint)
def _wrap_task_execute(func):
# type: (F) -> F
+
+ @ensure_integration_enabled(HueyIntegration, func)
def _sentry_execute(*args, **kwargs):
# type: (*Any, **Any) -> Any
- hub = Hub.current
- if hub.get_integration(HueyIntegration) is None:
- return func(*args, **kwargs)
-
try:
result = func(*args, **kwargs)
except Exception:
@@ -145,14 +138,10 @@ def patch_execute():
# type: () -> None
old_execute = Huey._execute
+ @ensure_integration_enabled(HueyIntegration, old_execute)
def _sentry_execute(self, task, timestamp=None):
# type: (Huey, Task, Optional[datetime]) -> Any
- hub = Hub.current
-
- if hub.get_integration(HueyIntegration) is None:
- return old_execute(self, task, timestamp)
-
- with hub.push_scope() as scope:
+ with sentry_sdk.isolation_scope() as scope:
with capture_internal_exceptions():
scope._name = "huey"
scope.clear_breadcrumbs()
@@ -172,7 +161,7 @@ def _sentry_execute(self, task, timestamp=None):
task.execute = _wrap_task_execute(task.execute)
task._sentry_is_patched = True
- with hub.start_transaction(transaction):
+ with sentry_sdk.start_transaction(transaction):
return old_execute(self, task, timestamp)
Huey._execute = _sentry_execute
diff --git a/sentry_sdk/integrations/logging.py b/sentry_sdk/integrations/logging.py
index d455983fc5..231ec5d80e 100644
--- a/sentry_sdk/integrations/logging.py
+++ b/sentry_sdk/integrations/logging.py
@@ -1,9 +1,8 @@
-from __future__ import absolute_import
-
import logging
+from datetime import datetime, timezone
from fnmatch import fnmatch
-from sentry_sdk.hub import Hub
+import sentry_sdk
from sentry_sdk.utils import (
to_string,
event_from_exception,
@@ -11,8 +10,6 @@
capture_internal_exceptions,
)
from sentry_sdk.integrations import Integration
-from sentry_sdk._compat import iteritems, utc_from_timestamp
-
from sentry_sdk._types import TYPE_CHECKING
if TYPE_CHECKING:
@@ -104,7 +101,9 @@ def sentry_patched_callhandlers(self, record):
# into a recursion error when the integration is resolved
# (this also is slower).
if ignored_loggers is not None and record.name not in ignored_loggers:
- integration = Hub.current.get_integration(LoggingIntegration)
+ integration = sentry_sdk.get_client().get_integration(
+ LoggingIntegration
+ )
if integration is not None:
integration._handle_record(record)
@@ -160,7 +159,7 @@ def _extra_from_record(self, record):
# type: (LogRecord) -> MutableMapping[str, object]
return {
k: v
- for k, v in iteritems(vars(record))
+ for k, v in vars(record).items()
if k not in self.COMMON_RECORD_ATTRS
and (not isinstance(k, str) or not k.startswith("_"))
}
@@ -184,11 +183,11 @@ def _emit(self, record):
if not self._can_record(record):
return
- hub = Hub.current
- if hub.client is None:
+ client = sentry_sdk.get_client()
+ if not client.is_active():
return
- client_options = hub.client.options
+ client_options = client.options
# exc_info might be None or (None, None, None)
#
@@ -253,7 +252,7 @@ def _emit(self, record):
event["extra"] = self._extra_from_record(record)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
# Legacy name
@@ -278,7 +277,7 @@ def _emit(self, record):
if not self._can_record(record):
return
- Hub.current.add_breadcrumb(
+ sentry_sdk.add_breadcrumb(
self._breadcrumb_from_record(record), hint={"log_record": record}
)
@@ -289,6 +288,6 @@ def _breadcrumb_from_record(self, record):
"level": self._logging_to_event_level(record),
"category": record.name,
"message": record.message,
- "timestamp": utc_from_timestamp(record.created),
+ "timestamp": datetime.fromtimestamp(record.created, timezone.utc),
"data": self._extra_from_record(record),
}
diff --git a/sentry_sdk/integrations/loguru.py b/sentry_sdk/integrations/loguru.py
index b1ee2a681f..99f2dfd5ac 100644
--- a/sentry_sdk/integrations/loguru.py
+++ b/sentry_sdk/integrations/loguru.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import enum
from sentry_sdk._types import TYPE_CHECKING
diff --git a/sentry_sdk/integrations/modules.py b/sentry_sdk/integrations/modules.py
index fa0fbf8936..6376d25a30 100644
--- a/sentry_sdk/integrations/modules.py
+++ b/sentry_sdk/integrations/modules.py
@@ -1,6 +1,4 @@
-from __future__ import absolute_import
-
-from sentry_sdk.hub import Hub
+import sentry_sdk
from sentry_sdk.integrations import Integration
from sentry_sdk.scope import add_global_event_processor
from sentry_sdk.utils import _get_installed_modules
@@ -24,7 +22,7 @@ def processor(event, hint):
if event.get("type") == "transaction":
return event
- if Hub.current.get_integration(ModulesIntegration) is None:
+ if sentry_sdk.get_client().get_integration(ModulesIntegration) is None:
return event
event["modules"] = _get_installed_modules()
diff --git a/sentry_sdk/integrations/openai.py b/sentry_sdk/integrations/openai.py
index 0e71029b60..0d77a27ec0 100644
--- a/sentry_sdk/integrations/openai.py
+++ b/sentry_sdk/integrations/openai.py
@@ -1,3 +1,5 @@
+from functools import wraps
+
from sentry_sdk import consts
from sentry_sdk._types import TYPE_CHECKING
@@ -6,10 +8,14 @@
from sentry_sdk.tracing import Span
import sentry_sdk
-from sentry_sdk._functools import wraps
-from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.scope import should_send_default_pii
from sentry_sdk.integrations import DidNotEnable, Integration
-from sentry_sdk.utils import logger, capture_internal_exceptions, event_from_exception
+from sentry_sdk.utils import (
+ logger,
+ capture_internal_exceptions,
+ event_from_exception,
+ ensure_integration_enabled,
+)
try:
from openai.resources.chat.completions import Completions
@@ -61,16 +67,14 @@ def setup_once():
Embeddings.create = _wrap_embeddings_create(Embeddings.create)
-def _capture_exception(hub, exc):
- # type: (Hub, Any) -> None
-
- if hub.client is not None:
- event, hint = event_from_exception(
- exc,
- client_options=hub.client.options,
- mechanism={"type": "openai", "handled": False},
- )
- hub.capture_event(event, hint=hint)
+def _capture_exception(exc):
+ # type: (Any) -> None
+ event, hint = event_from_exception(
+ exc,
+ client_options=sentry_sdk.get_client().options,
+ mechanism={"type": "openai", "handled": False},
+ )
+ sentry_sdk.capture_event(event, hint=hint)
def _normalize_data(data):
@@ -144,16 +148,9 @@ def _calculate_chat_completion_usage(
def _wrap_chat_completion_create(f):
# type: (Callable[..., Any]) -> Callable[..., Any]
@wraps(f)
+ @ensure_integration_enabled(OpenAIIntegration, f)
def new_chat_completion(*args, **kwargs):
# type: (*Any, **Any) -> Any
- hub = Hub.current
- if not hub:
- return f(*args, **kwargs)
-
- integration = hub.get_integration(OpenAIIntegration) # type: OpenAIIntegration
- if not integration:
- return f(*args, **kwargs)
-
if "messages" not in kwargs:
# invalid call (in all versions of openai), let it return error
return f(*args, **kwargs)
@@ -176,19 +173,21 @@ def new_chat_completion(*args, **kwargs):
try:
res = f(*args, **kwargs)
except Exception as e:
- _capture_exception(Hub.current, e)
+ _capture_exception(e)
span.__exit__(None, None, None)
raise e from None
+ integration = sentry_sdk.get_client().get_integration(OpenAIIntegration)
+
with capture_internal_exceptions():
- if _should_send_default_pii() and integration.include_prompts:
+ if should_send_default_pii() and integration.include_prompts:
set_data_normalized(span, "ai.input_messages", messages)
set_data_normalized(span, "ai.model_id", model)
set_data_normalized(span, "ai.streaming", streaming)
if hasattr(res, "choices"):
- if _should_send_default_pii() and integration.include_prompts:
+ if should_send_default_pii() and integration.include_prompts:
set_data_normalized(
span,
"ai.responses",
@@ -222,7 +221,7 @@ def new_iterator():
map(lambda chunk: "".join(chunk), data_buf)
)
if (
- _should_send_default_pii()
+ should_send_default_pii()
and integration.include_prompts
):
set_data_normalized(span, "ai.responses", all_responses)
@@ -244,23 +243,16 @@ def _wrap_embeddings_create(f):
# type: (Callable[..., Any]) -> Callable[..., Any]
@wraps(f)
+ @ensure_integration_enabled(OpenAIIntegration, f)
def new_embeddings_create(*args, **kwargs):
# type: (*Any, **Any) -> Any
-
- hub = Hub.current
- if not hub:
- return f(*args, **kwargs)
-
- integration = hub.get_integration(OpenAIIntegration) # type: OpenAIIntegration
- if not integration:
- return f(*args, **kwargs)
-
with sentry_sdk.start_span(
op=consts.OP.OPENAI_EMBEDDINGS_CREATE,
description="OpenAI Embedding Creation",
) as span:
+ integration = sentry_sdk.get_client().get_integration(OpenAIIntegration)
if "input" in kwargs and (
- _should_send_default_pii() and integration.include_prompts
+ should_send_default_pii() and integration.include_prompts
):
if isinstance(kwargs["input"], str):
set_data_normalized(span, "ai.input_messages", [kwargs["input"]])
@@ -275,7 +267,7 @@ def new_embeddings_create(*args, **kwargs):
try:
response = f(*args, **kwargs)
except Exception as e:
- _capture_exception(Hub.current, e)
+ _capture_exception(e)
raise e from None
prompt_tokens = 0
diff --git a/sentry_sdk/integrations/opentelemetry/span_processor.py b/sentry_sdk/integrations/opentelemetry/span_processor.py
index 0db698e239..a09a93d284 100644
--- a/sentry_sdk/integrations/opentelemetry/span_processor.py
+++ b/sentry_sdk/integrations/opentelemetry/span_processor.py
@@ -1,3 +1,4 @@
+from datetime import datetime, timezone
from time import time
from opentelemetry.context import get_value # type: ignore
@@ -15,9 +16,8 @@
INVALID_SPAN_ID,
INVALID_TRACE_ID,
)
-from sentry_sdk._compat import utc_from_timestamp
+from sentry_sdk import get_client, start_transaction
from sentry_sdk.consts import INSTRUMENTER
-from sentry_sdk.hub import Hub
from sentry_sdk.integrations.opentelemetry.consts import (
SENTRY_BAGGAGE_KEY,
SENTRY_TRACE_KEY,
@@ -40,11 +40,9 @@
def link_trace_context_to_error_event(event, otel_span_map):
# type: (Event, Dict[str, Union[Transaction, SentrySpan]]) -> Event
- hub = Hub.current
- if not hub:
- return event
+ client = get_client()
- if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL:
+ if client.options["instrumenter"] != INSTRUMENTER.OTEL:
return event
if hasattr(event, "type") and event["type"] == "transaction":
@@ -85,7 +83,7 @@ class SentrySpanProcessor(SpanProcessor): # type: ignore
def __new__(cls):
# type: () -> SentrySpanProcessor
if not hasattr(cls, "instance"):
- cls.instance = super(SentrySpanProcessor, cls).__new__(cls)
+ cls.instance = super().__new__(cls)
return cls.instance
@@ -116,25 +114,23 @@ def _prune_old_spans(self):
def on_start(self, otel_span, parent_context=None):
# type: (OTelSpan, Optional[SpanContext]) -> None
- hub = Hub.current
- if not hub:
- return
+ client = get_client()
- if not hub.client or (hub.client and not hub.client.dsn):
+ if not client.dsn:
return
try:
- _ = Dsn(hub.client.dsn or "")
+ _ = Dsn(client.dsn)
except Exception:
return
- if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL:
+ if client.options["instrumenter"] != INSTRUMENTER.OTEL:
return
if not otel_span.get_span_context().is_valid:
return
- if self._is_sentry_span(hub, otel_span):
+ if self._is_sentry_span(otel_span):
return
trace_data = self._get_trace_data(otel_span, parent_context)
@@ -149,20 +145,20 @@ def on_start(self, otel_span, parent_context=None):
sentry_span = sentry_parent_span.start_child(
span_id=trace_data["span_id"],
description=otel_span.name,
- start_timestamp=utc_from_timestamp(
- otel_span.start_time / 1e9
+ start_timestamp=datetime.fromtimestamp(
+ otel_span.start_time / 1e9, timezone.utc
), # OTel spans have nanosecond precision
instrumenter=INSTRUMENTER.OTEL,
)
else:
- sentry_span = hub.start_transaction(
+ sentry_span = start_transaction(
name=otel_span.name,
span_id=trace_data["span_id"],
parent_span_id=parent_span_id,
trace_id=trace_data["trace_id"],
baggage=trace_data["baggage"],
- start_timestamp=utc_from_timestamp(
- otel_span.start_time / 1e9
+ start_timestamp=datetime.fromtimestamp(
+ otel_span.start_time / 1e9, timezone.utc
), # OTel spans have nanosecond precision
instrumenter=INSTRUMENTER.OTEL,
)
@@ -179,11 +175,9 @@ def on_start(self, otel_span, parent_context=None):
def on_end(self, otel_span):
# type: (OTelSpan) -> None
- hub = Hub.current
- if not hub:
- return
+ client = get_client()
- if hub.client and hub.client.options["instrumenter"] != INSTRUMENTER.OTEL:
+ if client.options["instrumenter"] != INSTRUMENTER.OTEL:
return
span_context = otel_span.get_span_context()
@@ -210,7 +204,7 @@ def on_end(self, otel_span):
self._update_span_with_otel_data(sentry_span, otel_span)
sentry_span.finish(
- end_timestamp=utc_from_timestamp(otel_span.end_time / 1e9)
+ end_timestamp=datetime.fromtimestamp(otel_span.end_time / 1e9, timezone.utc)
) # OTel spans have nanosecond precision
span_start_in_minutes = int(
@@ -219,14 +213,18 @@ def on_end(self, otel_span):
self.open_spans.setdefault(span_start_in_minutes, set()).discard(span_id)
self._prune_old_spans()
- def _is_sentry_span(self, hub, otel_span):
- # type: (Hub, OTelSpan) -> bool
+ def _is_sentry_span(self, otel_span):
+ # type: (OTelSpan) -> bool
"""
Break infinite loop:
HTTP requests to Sentry are caught by OTel and send again to Sentry.
"""
otel_span_url = otel_span.attributes.get(SpanAttributes.HTTP_URL, None)
- dsn_url = hub.client and Dsn(hub.client.dsn or "").netloc
+
+ dsn_url = None
+ client = get_client()
+ if client.dsn:
+ dsn_url = Dsn(client.dsn).netloc
if otel_span_url and dsn_url in otel_span_url:
return True
diff --git a/sentry_sdk/integrations/pure_eval.py b/sentry_sdk/integrations/pure_eval.py
index 5a2419c267..9af4831b32 100644
--- a/sentry_sdk/integrations/pure_eval.py
+++ b/sentry_sdk/integrations/pure_eval.py
@@ -1,8 +1,7 @@
-from __future__ import absolute_import
-
import ast
-from sentry_sdk import Hub, serializer
+import sentry_sdk
+from sentry_sdk import serializer
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.scope import add_global_event_processor
@@ -41,7 +40,7 @@ def setup_once():
@add_global_event_processor
def add_executing_info(event, hint):
# type: (Event, Optional[Hint]) -> Optional[Event]
- if Hub.current.get_integration(PureEvalIntegration) is None:
+ if sentry_sdk.get_client().get_integration(PureEvalIntegration) is None:
return event
if hint is None:
diff --git a/sentry_sdk/integrations/pymongo.py b/sentry_sdk/integrations/pymongo.py
index 59001bb937..1269fc6538 100644
--- a/sentry_sdk/integrations/pymongo.py
+++ b/sentry_sdk/integrations/pymongo.py
@@ -1,10 +1,9 @@
-from __future__ import absolute_import
import copy
-from sentry_sdk import Hub
+import sentry_sdk
from sentry_sdk.consts import SPANDATA
-from sentry_sdk.hub import _should_send_default_pii
from sentry_sdk.integrations import DidNotEnable, Integration
+from sentry_sdk.scope import should_send_default_pii
from sentry_sdk.tracing import Span
from sentry_sdk.utils import capture_internal_exceptions
@@ -117,9 +116,9 @@ def _operation_key(self, event):
def started(self, event):
# type: (CommandStartedEvent) -> None
- hub = Hub.current
- if hub.get_integration(PyMongoIntegration) is None:
+ if sentry_sdk.get_client().get_integration(PyMongoIntegration) is None:
return
+
with capture_internal_exceptions():
command = dict(copy.deepcopy(event.command))
@@ -153,11 +152,11 @@ def started(self, event):
except KeyError:
pass
- if not _should_send_default_pii():
+ if not should_send_default_pii():
command = _strip_pii(command)
query = "{} {}".format(event.command_name, command)
- span = hub.start_span(op=op, description=query)
+ span = sentry_sdk.start_span(op=op, description=query)
for tag, value in tags.items():
span.set_tag(tag, value)
@@ -166,14 +165,15 @@ def started(self, event):
span.set_data(key, value)
with capture_internal_exceptions():
- hub.add_breadcrumb(message=query, category="query", type=op, data=tags)
+ sentry_sdk.add_breadcrumb(
+ message=query, category="query", type=op, data=tags
+ )
self._ongoing_operations[self._operation_key(event)] = span.__enter__()
def failed(self, event):
# type: (CommandFailedEvent) -> None
- hub = Hub.current
- if hub.get_integration(PyMongoIntegration) is None:
+ if sentry_sdk.get_client().get_integration(PyMongoIntegration) is None:
return
try:
@@ -185,8 +185,7 @@ def failed(self, event):
def succeeded(self, event):
# type: (CommandSucceededEvent) -> None
- hub = Hub.current
- if hub.get_integration(PyMongoIntegration) is None:
+ if sentry_sdk.get_client().get_integration(PyMongoIntegration) is None:
return
try:
diff --git a/sentry_sdk/integrations/pyramid.py b/sentry_sdk/integrations/pyramid.py
index 3b9b2fdb96..523ee4b5ec 100644
--- a/sentry_sdk/integrations/pyramid.py
+++ b/sentry_sdk/integrations/pyramid.py
@@ -1,21 +1,20 @@
-from __future__ import absolute_import
-
import os
import sys
import weakref
-from sentry_sdk.hub import Hub, _should_send_default_pii
-from sentry_sdk.scope import Scope
+import sentry_sdk
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations._wsgi_common import RequestExtractor
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.scope import Scope, should_send_default_pii
from sentry_sdk.tracing import SOURCE_FOR_STYLE
from sentry_sdk.utils import (
capture_internal_exceptions,
+ ensure_integration_enabled,
event_from_exception,
+ reraise,
)
-from sentry_sdk._compat import reraise, iteritems
-
-from sentry_sdk.integrations import Integration, DidNotEnable
-from sentry_sdk.integrations._wsgi_common import RequestExtractor
-from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk._types import TYPE_CHECKING
try:
from pyramid.httpexceptions import HTTPException
@@ -23,7 +22,6 @@
except ImportError:
raise DidNotEnable("Pyramid not installed")
-from sentry_sdk._types import TYPE_CHECKING
if TYPE_CHECKING:
from pyramid.response import Response
@@ -74,19 +72,18 @@ def setup_once():
old_call_view = router._call_view
+ @ensure_integration_enabled(PyramidIntegration, old_call_view)
def sentry_patched_call_view(registry, request, *args, **kwargs):
# type: (Any, Request, *Any, **Any) -> Response
- hub = Hub.current
- integration = hub.get_integration(PyramidIntegration)
-
- if integration is not None:
- with hub.configure_scope() as scope:
- _set_transaction_name_and_source(
- scope, integration.transaction_style, request
- )
- scope.add_event_processor(
- _make_event_processor(weakref.ref(request), integration)
- )
+ integration = sentry_sdk.get_client().get_integration(PyramidIntegration)
+
+ _set_transaction_name_and_source(
+ Scope.get_current_scope(), integration.transaction_style, request
+ )
+ scope = Scope.get_isolation_scope()
+ scope.add_event_processor(
+ _make_event_processor(weakref.ref(request), integration)
+ )
return old_call_view(registry, request, *args, **kwargs)
@@ -103,7 +100,8 @@ def sentry_patched_invoke_exception_view(self, *args, **kwargs):
self.exc_info
and all(self.exc_info)
and rv.status_int == 500
- and Hub.current.get_integration(PyramidIntegration) is not None
+ and sentry_sdk.get_client().get_integration(PyramidIntegration)
+ is not None
):
_capture_exception(self.exc_info)
@@ -113,13 +111,9 @@ def sentry_patched_invoke_exception_view(self, *args, **kwargs):
old_wsgi_call = router.Router.__call__
+ @ensure_integration_enabled(PyramidIntegration, old_wsgi_call)
def sentry_patched_wsgi_call(self, environ, start_response):
# type: (Any, Dict[str, str], Callable[..., Any]) -> _ScopedResponse
- hub = Hub.current
- integration = hub.get_integration(PyramidIntegration)
- if integration is None:
- return old_wsgi_call(self, environ, start_response)
-
def sentry_patched_inner_wsgi_call(environ, start_response):
# type: (Dict[str, Any], Callable[..., Any]) -> Any
try:
@@ -136,24 +130,19 @@ def sentry_patched_inner_wsgi_call(environ, start_response):
router.Router.__call__ = sentry_patched_wsgi_call
+@ensure_integration_enabled(PyramidIntegration)
def _capture_exception(exc_info):
# type: (ExcInfo) -> None
if exc_info[0] is None or issubclass(exc_info[0], HTTPException):
return
- hub = Hub.current
- if hub.get_integration(PyramidIntegration) is None:
- return
-
- # If an integration is there, a client has to be there.
- client = hub.client # type: Any
event, hint = event_from_exception(
exc_info,
- client_options=client.options,
+ client_options=sentry_sdk.get_client().options,
mechanism={"type": "pyramid", "handled": False},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
def _set_transaction_name_and_source(scope, transaction_style, request):
@@ -192,7 +181,7 @@ def form(self):
# type: () -> Dict[str, str]
return {
key: value
- for key, value in iteritems(self.request.POST)
+ for key, value in self.request.POST.items()
if not getattr(value, "filename", None)
}
@@ -200,7 +189,7 @@ def files(self):
# type: () -> Dict[str, cgi_FieldStorage]
return {
key: value
- for key, value in iteritems(self.request.POST)
+ for key, value in self.request.POST.items()
if getattr(value, "filename", None)
}
@@ -224,7 +213,7 @@ def pyramid_event_processor(event, hint):
with capture_internal_exceptions():
PyramidRequestExtractor(request).extract_into_event(event)
- if _should_send_default_pii():
+ if should_send_default_pii():
with capture_internal_exceptions():
user_info = event.setdefault("user", {})
user_info.setdefault("id", authenticated_userid(request))
diff --git a/sentry_sdk/integrations/quart.py b/sentry_sdk/integrations/quart.py
index 8803fa7cea..7c2f4ade70 100644
--- a/sentry_sdk/integrations/quart.py
+++ b/sentry_sdk/integrations/quart.py
@@ -1,21 +1,19 @@
-from __future__ import absolute_import
-
import asyncio
import inspect
import threading
+from functools import wraps
-from sentry_sdk.hub import _should_send_default_pii, Hub
+import sentry_sdk
from sentry_sdk.integrations import DidNotEnable, Integration
from sentry_sdk.integrations._wsgi_common import _filter_headers
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
-from sentry_sdk.scope import Scope
+from sentry_sdk.scope import Scope, should_send_default_pii
from sentry_sdk.tracing import SOURCE_FOR_STYLE
from sentry_sdk.utils import (
capture_internal_exceptions,
+ ensure_integration_enabled,
event_from_exception,
)
-
-from sentry_sdk._functools import wraps
from sentry_sdk._types import TYPE_CHECKING
if TYPE_CHECKING:
@@ -89,11 +87,9 @@ def patch_asgi_app():
# type: () -> None
old_app = Quart.__call__
+ @ensure_integration_enabled(QuartIntegration, old_app)
async def sentry_patched_asgi_app(self, scope, receive, send):
# type: (Any, Any, Any, Any) -> Any
- if Hub.current.get_integration(QuartIntegration) is None:
- return await old_app(self, scope, receive, send)
-
middleware = SentryAsgiMiddleware(lambda *a, **kw: old_app(self, *a, **kw))
middleware.__call__ = middleware._run_asgi3
return await middleware(scope, receive, send)
@@ -117,20 +113,16 @@ def decorator(old_func):
):
@wraps(old_func)
+ @ensure_integration_enabled(QuartIntegration, old_func)
def _sentry_func(*args, **kwargs):
# type: (*Any, **Any) -> Any
- hub = Hub.current
- integration = hub.get_integration(QuartIntegration)
- if integration is None:
- return old_func(*args, **kwargs)
+ scope = Scope.get_isolation_scope()
+ if scope.profile is not None:
+ scope.profile.active_thread_id = (
+ threading.current_thread().ident
+ )
- with hub.configure_scope() as sentry_scope:
- if sentry_scope.profile is not None:
- sentry_scope.profile.active_thread_id = (
- threading.current_thread().ident
- )
-
- return old_func(*args, **kwargs)
+ return old_func(*args, **kwargs)
return old_decorator(_sentry_func)
@@ -159,27 +151,24 @@ def _set_transaction_name_and_source(scope, transaction_style, request):
async def _request_websocket_started(app, **kwargs):
# type: (Quart, **Any) -> None
- hub = Hub.current
- integration = hub.get_integration(QuartIntegration)
+ integration = sentry_sdk.get_client().get_integration(QuartIntegration)
if integration is None:
return
- with hub.configure_scope() as scope:
- if has_request_context():
- request_websocket = request._get_current_object()
- if has_websocket_context():
- request_websocket = websocket._get_current_object()
+ if has_request_context():
+ request_websocket = request._get_current_object()
+ if has_websocket_context():
+ request_websocket = websocket._get_current_object()
- # Set the transaction name here, but rely on ASGI middleware
- # to actually start the transaction
- _set_transaction_name_and_source(
- scope, integration.transaction_style, request_websocket
- )
+ # Set the transaction name here, but rely on ASGI middleware
+ # to actually start the transaction
+ _set_transaction_name_and_source(
+ Scope.get_current_scope(), integration.transaction_style, request_websocket
+ )
- evt_processor = _make_request_event_processor(
- app, request_websocket, integration
- )
- scope.add_event_processor(evt_processor)
+ scope = Scope.get_isolation_scope()
+ evt_processor = _make_request_event_processor(app, request_websocket, integration)
+ scope.add_event_processor(evt_processor)
def _make_request_event_processor(app, request, integration):
@@ -202,7 +191,7 @@ def inner(event, hint):
request_info["method"] = request.method
request_info["headers"] = _filter_headers(dict(request.headers))
- if _should_send_default_pii():
+ if should_send_default_pii():
request_info["env"] = {"REMOTE_ADDR": request.access_route[0]}
_add_user_to_event(event)
@@ -213,20 +202,17 @@ def inner(event, hint):
async def _capture_exception(sender, exception, **kwargs):
# type: (Quart, Union[ValueError, BaseException], **Any) -> None
- hub = Hub.current
- if hub.get_integration(QuartIntegration) is None:
+ integration = sentry_sdk.get_client().get_integration(QuartIntegration)
+ if integration is None:
return
- # If an integration is there, a client has to be there.
- client = hub.client # type: Any
-
event, hint = event_from_exception(
exception,
- client_options=client.options,
+ client_options=sentry_sdk.get_client().options,
mechanism={"type": "quart", "handled": False},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
def _add_user_to_event(event):
diff --git a/sentry_sdk/integrations/redis/__init__.py b/sentry_sdk/integrations/redis/__init__.py
index e09f9ccea4..45f8653e29 100644
--- a/sentry_sdk/integrations/redis/__init__.py
+++ b/sentry_sdk/integrations/redis/__init__.py
@@ -1,14 +1,12 @@
-from __future__ import absolute_import
-
-from sentry_sdk import Hub
+import sentry_sdk
from sentry_sdk.consts import OP, SPANDATA
-from sentry_sdk._compat import text_type
from sentry_sdk.hub import _should_send_default_pii
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.utils import (
SENSITIVE_DATA_SUBSTITUTE,
capture_internal_exceptions,
+ ensure_integration_enabled,
logger,
)
@@ -129,7 +127,7 @@ def _set_db_data_on_span(span, connection_params):
db = connection_params.get("db")
if db is not None:
- span.set_data(SPANDATA.DB_NAME, text_type(db))
+ span.set_data(SPANDATA.DB_NAME, str(db))
host = connection_params.get("host")
if host is not None:
@@ -179,14 +177,10 @@ def patch_redis_pipeline(pipeline_cls, is_cluster, get_command_args_fn, set_db_d
# type: (Any, bool, Any, Callable[[Span, Any], None]) -> None
old_execute = pipeline_cls.execute
+ @ensure_integration_enabled(RedisIntegration, old_execute)
def sentry_patched_execute(self, *args, **kwargs):
# type: (Any, *Any, **Any) -> Any
- hub = Hub.current
-
- if hub.get_integration(RedisIntegration) is None:
- return old_execute(self, *args, **kwargs)
-
- with hub.start_span(
+ with sentry_sdk.start_span(
op=OP.DB_REDIS, description="redis.pipeline.execute"
) as span:
with capture_internal_exceptions():
@@ -212,14 +206,10 @@ def patch_redis_client(cls, is_cluster, set_db_data_fn):
"""
old_execute_command = cls.execute_command
+ @ensure_integration_enabled(RedisIntegration, old_execute_command)
def sentry_patched_execute_command(self, name, *args, **kwargs):
# type: (Any, str, *Any, **Any) -> Any
- hub = Hub.current
- integration = hub.get_integration(RedisIntegration)
-
- if integration is None:
- return old_execute_command(self, name, *args, **kwargs)
-
+ integration = sentry_sdk.get_client().get_integration(RedisIntegration)
description = _get_span_description(name, *args)
data_should_be_truncated = (
@@ -228,7 +218,7 @@ def sentry_patched_execute_command(self, name, *args, **kwargs):
if data_should_be_truncated:
description = description[: integration.max_data_size - len("...")] + "..."
- with hub.start_span(op=OP.DB_REDIS, description=description) as span:
+ with sentry_sdk.start_span(op=OP.DB_REDIS, description=description) as span:
set_db_data_fn(span, self)
_set_client_data(span, is_cluster, name, *args)
diff --git a/sentry_sdk/integrations/redis/asyncio.py b/sentry_sdk/integrations/redis/asyncio.py
index 09fad3426a..6cb12b0d51 100644
--- a/sentry_sdk/integrations/redis/asyncio.py
+++ b/sentry_sdk/integrations/redis/asyncio.py
@@ -1,6 +1,4 @@
-from __future__ import absolute_import
-
-from sentry_sdk import Hub
+import sentry_sdk
from sentry_sdk.consts import OP
from sentry_sdk.integrations.redis import (
RedisIntegration,
@@ -10,7 +8,9 @@
)
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.tracing import Span
-from sentry_sdk.utils import capture_internal_exceptions
+from sentry_sdk.utils import (
+ capture_internal_exceptions,
+)
if TYPE_CHECKING:
from collections.abc import Callable
@@ -27,12 +27,10 @@ def patch_redis_async_pipeline(
async def _sentry_execute(self, *args, **kwargs):
# type: (Any, *Any, **Any) -> Any
- hub = Hub.current
-
- if hub.get_integration(RedisIntegration) is None:
+ if sentry_sdk.get_client().get_integration(RedisIntegration) is None:
return await old_execute(self, *args, **kwargs)
- with hub.start_span(
+ with sentry_sdk.start_span(
op=OP.DB_REDIS, description="redis.pipeline.execute"
) as span:
with capture_internal_exceptions():
@@ -47,7 +45,7 @@ async def _sentry_execute(self, *args, **kwargs):
return await old_execute(self, *args, **kwargs)
- pipeline_cls.execute = _sentry_execute # type: ignore[method-assign]
+ pipeline_cls.execute = _sentry_execute # type: ignore
def patch_redis_async_client(cls, is_cluster, set_db_data_fn):
@@ -56,17 +54,15 @@ def patch_redis_async_client(cls, is_cluster, set_db_data_fn):
async def _sentry_execute_command(self, name, *args, **kwargs):
# type: (Any, str, *Any, **Any) -> Any
- hub = Hub.current
-
- if hub.get_integration(RedisIntegration) is None:
+ if sentry_sdk.get_client().get_integration(RedisIntegration) is None:
return await old_execute_command(self, name, *args, **kwargs)
description = _get_span_description(name, *args)
- with hub.start_span(op=OP.DB_REDIS, description=description) as span:
+ with sentry_sdk.start_span(op=OP.DB_REDIS, description=description) as span:
set_db_data_fn(span, self)
_set_client_data(span, is_cluster, name, *args)
return await old_execute_command(self, name, *args, **kwargs)
- cls.execute_command = _sentry_execute_command # type: ignore[method-assign]
+ cls.execute_command = _sentry_execute_command # type: ignore
diff --git a/sentry_sdk/integrations/rq.py b/sentry_sdk/integrations/rq.py
index 2b32e59880..23035d3dd3 100644
--- a/sentry_sdk/integrations/rq.py
+++ b/sentry_sdk/integrations/rq.py
@@ -1,15 +1,15 @@
-from __future__ import absolute_import
-
import weakref
-from sentry_sdk.consts import OP
+import sentry_sdk
+from sentry_sdk.consts import OP
from sentry_sdk.api import continue_trace
-from sentry_sdk.hub import Hub
from sentry_sdk.integrations import DidNotEnable, Integration
from sentry_sdk.integrations.logging import ignore_logger
from sentry_sdk.tracing import TRANSACTION_SOURCE_TASK
+from sentry_sdk.scope import Scope
from sentry_sdk.utils import (
capture_internal_exceptions,
+ ensure_integration_enabled,
event_from_exception,
format_timestamp,
parse_version,
@@ -52,18 +52,10 @@ def setup_once():
old_perform_job = Worker.perform_job
+ @ensure_integration_enabled(RqIntegration, old_perform_job)
def sentry_patched_perform_job(self, job, *args, **kwargs):
# type: (Any, Job, *Queue, **Any) -> bool
- hub = Hub.current
- integration = hub.get_integration(RqIntegration)
-
- if integration is None:
- return old_perform_job(self, job, *args, **kwargs)
-
- client = hub.client
- assert client is not None
-
- with hub.push_scope() as scope:
+ with sentry_sdk.new_scope() as scope:
scope.clear_breadcrumbs()
scope.add_event_processor(_make_event_processor(weakref.ref(job)))
@@ -77,7 +69,7 @@ def sentry_patched_perform_job(self, job, *args, **kwargs):
with capture_internal_exceptions():
transaction.name = job.func_name
- with hub.start_transaction(
+ with sentry_sdk.start_transaction(
transaction, custom_sampling_context={"rq_job": job}
):
rv = old_perform_job(self, job, *args, **kwargs)
@@ -86,7 +78,7 @@ def sentry_patched_perform_job(self, job, *args, **kwargs):
# We're inside of a forked process and RQ is
# about to call `os._exit`. Make sure that our
# events get sent out.
- client.flush()
+ sentry_sdk.get_client().flush()
return rv
@@ -107,14 +99,14 @@ def sentry_patched_handle_exception(self, job, *exc_info, **kwargs):
old_enqueue_job = Queue.enqueue_job
+ @ensure_integration_enabled(RqIntegration, old_enqueue_job)
def sentry_patched_enqueue_job(self, job, **kwargs):
# type: (Queue, Any, **Any) -> Any
- hub = Hub.current
- if hub.get_integration(RqIntegration) is not None:
- if hub.scope.span is not None:
- job.meta["_sentry_trace_headers"] = dict(
- hub.iter_trace_propagation_headers()
- )
+ scope = Scope.get_current_scope()
+ if scope.span is not None:
+ job.meta["_sentry_trace_headers"] = dict(
+ scope.iter_trace_propagation_headers()
+ )
return old_enqueue_job(self, job, **kwargs)
@@ -158,12 +150,7 @@ def event_processor(event, hint):
def _capture_exception(exc_info, **kwargs):
# type: (ExcInfo, **Any) -> None
- hub = Hub.current
- if hub.get_integration(RqIntegration) is None:
- return
-
- # If an integration is there, a client has to be there.
- client = hub.client # type: Any
+ client = sentry_sdk.get_client()
event, hint = event_from_exception(
exc_info,
@@ -171,4 +158,4 @@ def _capture_exception(exc_info, **kwargs):
mechanism={"type": "rq", "handled": False},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
diff --git a/sentry_sdk/integrations/sanic.py b/sentry_sdk/integrations/sanic.py
index 7e0c690da0..fac0991381 100644
--- a/sentry_sdk/integrations/sanic.py
+++ b/sentry_sdk/integrations/sanic.py
@@ -1,23 +1,25 @@
import sys
import weakref
from inspect import isawaitable
+from urllib.parse import urlsplit
+import sentry_sdk
from sentry_sdk import continue_trace
-from sentry_sdk._compat import urlparse, reraise
from sentry_sdk.consts import OP
-from sentry_sdk.hub import Hub
+from sentry_sdk.integrations import Integration, DidNotEnable
+from sentry_sdk.integrations._wsgi_common import RequestExtractor, _filter_headers
+from sentry_sdk.integrations.logging import ignore_logger
from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_URL
+from sentry_sdk.scope import Scope
from sentry_sdk.utils import (
capture_internal_exceptions,
+ ensure_integration_enabled,
event_from_exception,
HAS_REAL_CONTEXTVARS,
CONTEXTVARS_ERROR_MESSAGE,
parse_version,
+ reraise,
)
-from sentry_sdk.integrations import Integration, DidNotEnable
-from sentry_sdk.integrations._wsgi_common import RequestExtractor, _filter_headers
-from sentry_sdk.integrations.logging import ignore_logger
-
from sentry_sdk._types import TYPE_CHECKING
if TYPE_CHECKING:
@@ -160,13 +162,13 @@ async def _startup(self):
# type: (Sanic) -> None
# This happens about as early in the lifecycle as possible, just after the
# Request object is created. The body has not yet been consumed.
- self.signal("http.lifecycle.request")(_hub_enter)
+ self.signal("http.lifecycle.request")(_context_enter)
# This happens after the handler is complete. In v21.9 this signal is not
# dispatched when there is an exception. Therefore we need to close out
- # and call _hub_exit from the custom exception handler as well.
+ # and call _context_exit from the custom exception handler as well.
# See https://github.com/sanic-org/sanic/issues/2297
- self.signal("http.lifecycle.response")(_hub_exit)
+ self.signal("http.lifecycle.response")(_context_exit)
# This happens inside of request handling immediately after the route
# has been identified by the router.
@@ -176,23 +178,20 @@ async def _startup(self):
await old_startup(self)
-async def _hub_enter(request):
+async def _context_enter(request):
# type: (Request) -> None
- hub = Hub.current
request.ctx._sentry_do_integration = (
- hub.get_integration(SanicIntegration) is not None
+ sentry_sdk.get_client().get_integration(SanicIntegration) is not None
)
if not request.ctx._sentry_do_integration:
return
weak_request = weakref.ref(request)
- request.ctx._sentry_hub = Hub(hub)
- request.ctx._sentry_hub.__enter__()
-
- with request.ctx._sentry_hub.configure_scope() as scope:
- scope.clear_breadcrumbs()
- scope.add_event_processor(_make_request_processor(weak_request))
+ request.ctx._sentry_scope = sentry_sdk.isolation_scope()
+ scope = request.ctx._sentry_scope.__enter__()
+ scope.clear_breadcrumbs()
+ scope.add_event_processor(_make_request_processor(weak_request))
transaction = continue_trace(
dict(request.headers),
@@ -201,18 +200,20 @@ async def _hub_enter(request):
name=request.path,
source=TRANSACTION_SOURCE_URL,
)
- request.ctx._sentry_transaction = request.ctx._sentry_hub.start_transaction(
+ request.ctx._sentry_transaction = sentry_sdk.start_transaction(
transaction
).__enter__()
-async def _hub_exit(request, response=None):
+async def _context_exit(request, response=None):
# type: (Request, Optional[BaseHTTPResponse]) -> None
with capture_internal_exceptions():
if not request.ctx._sentry_do_integration:
return
- integration = Hub.current.get_integration(SanicIntegration) # type: Integration
+ integration = sentry_sdk.get_client().get_integration(
+ SanicIntegration
+ ) # type: Integration
response_status = None if response is None else response.status
@@ -226,19 +227,16 @@ async def _hub_exit(request, response=None):
)
request.ctx._sentry_transaction.__exit__(None, None, None)
- request.ctx._sentry_hub.__exit__(None, None, None)
+ request.ctx._sentry_scope.__exit__(None, None, None)
async def _set_transaction(request, route, **_):
# type: (Request, Route, **Any) -> None
- hub = Hub.current
if request.ctx._sentry_do_integration:
with capture_internal_exceptions():
- with hub.configure_scope() as scope:
- route_name = route.name.replace(request.app.name, "").strip(".")
- scope.set_transaction_name(
- route_name, source=TRANSACTION_SOURCE_COMPONENT
- )
+ scope = Scope.get_current_scope()
+ route_name = route.name.replace(request.app.name, "").strip(".")
+ scope.set_transaction_name(route_name, source=TRANSACTION_SOURCE_COMPONENT)
def _sentry_error_handler_lookup(self, exception, *args, **kwargs):
@@ -249,7 +247,7 @@ def _sentry_error_handler_lookup(self, exception, *args, **kwargs):
if old_error_handler is None:
return None
- if Hub.current.get_integration(SanicIntegration) is None:
+ if sentry_sdk.get_client().get_integration(SanicIntegration) is None:
return old_error_handler
async def sentry_wrapped_error_handler(request, exception):
@@ -270,23 +268,21 @@ async def sentry_wrapped_error_handler(request, exception):
# As mentioned in previous comment in _startup, this can be removed
# after https://github.com/sanic-org/sanic/issues/2297 is resolved
if SanicIntegration.version and SanicIntegration.version == (21, 9):
- await _hub_exit(request)
+ await _context_exit(request)
return sentry_wrapped_error_handler
async def _legacy_handle_request(self, request, *args, **kwargs):
# type: (Any, Request, *Any, **Any) -> Any
- hub = Hub.current
- if hub.get_integration(SanicIntegration) is None:
- return old_handle_request(self, request, *args, **kwargs)
+ if sentry_sdk.get_client().get_integration(SanicIntegration) is None:
+ return await old_handle_request(self, request, *args, **kwargs)
weak_request = weakref.ref(request)
- with Hub(hub) as hub:
- with hub.configure_scope() as scope:
- scope.clear_breadcrumbs()
- scope.add_event_processor(_make_request_processor(weak_request))
+ with sentry_sdk.isolation_scope() as scope:
+ scope.clear_breadcrumbs()
+ scope.add_event_processor(_make_request_processor(weak_request))
response = old_handle_request(self, request, *args, **kwargs)
if isawaitable(response):
@@ -298,53 +294,47 @@ async def _legacy_handle_request(self, request, *args, **kwargs):
def _legacy_router_get(self, *args):
# type: (Any, Union[Any, Request]) -> Any
rv = old_router_get(self, *args)
- hub = Hub.current
- if hub.get_integration(SanicIntegration) is not None:
+ if sentry_sdk.get_client().get_integration(SanicIntegration) is not None:
with capture_internal_exceptions():
- with hub.configure_scope() as scope:
- if SanicIntegration.version and SanicIntegration.version >= (21, 3):
- # Sanic versions above and including 21.3 append the app name to the
- # route name, and so we need to remove it from Route name so the
- # transaction name is consistent across all versions
- sanic_app_name = self.ctx.app.name
- sanic_route = rv[0].name
-
- if sanic_route.startswith("%s." % sanic_app_name):
- # We add a 1 to the len of the sanic_app_name because there is a dot
- # that joins app name and the route name
- # Format: app_name.route_name
- sanic_route = sanic_route[len(sanic_app_name) + 1 :]
-
- scope.set_transaction_name(
- sanic_route, source=TRANSACTION_SOURCE_COMPONENT
- )
- else:
- scope.set_transaction_name(
- rv[0].__name__, source=TRANSACTION_SOURCE_COMPONENT
- )
+ scope = Scope.get_isolation_scope()
+ if SanicIntegration.version and SanicIntegration.version >= (21, 3):
+ # Sanic versions above and including 21.3 append the app name to the
+ # route name, and so we need to remove it from Route name so the
+ # transaction name is consistent across all versions
+ sanic_app_name = self.ctx.app.name
+ sanic_route = rv[0].name
+
+ if sanic_route.startswith("%s." % sanic_app_name):
+ # We add a 1 to the len of the sanic_app_name because there is a dot
+ # that joins app name and the route name
+ # Format: app_name.route_name
+ sanic_route = sanic_route[len(sanic_app_name) + 1 :]
+
+ scope.set_transaction_name(
+ sanic_route, source=TRANSACTION_SOURCE_COMPONENT
+ )
+ else:
+ scope.set_transaction_name(
+ rv[0].__name__, source=TRANSACTION_SOURCE_COMPONENT
+ )
return rv
+@ensure_integration_enabled(SanicIntegration)
def _capture_exception(exception):
# type: (Union[Tuple[Optional[type], Optional[BaseException], Any], BaseException]) -> None
- hub = Hub.current
- integration = hub.get_integration(SanicIntegration)
- if integration is None:
- return
-
- # If an integration is there, a client has to be there.
- client = hub.client # type: Any
-
with capture_internal_exceptions():
event, hint = event_from_exception(
exception,
- client_options=client.options,
+ client_options=sentry_sdk.get_client().options,
mechanism={"type": "sanic", "handled": False},
)
+
if hint and hasattr(hint["exc_info"][0], "quiet") and hint["exc_info"][0].quiet:
return
- hub.capture_event(event, hint=hint)
+
+ sentry_sdk.capture_event(event, hint=hint)
def _make_request_processor(weak_request):
@@ -367,7 +357,7 @@ def sanic_processor(event, hint):
extractor.extract_into_event(event)
request_info = event["request"]
- urlparts = urlparse.urlsplit(request.url)
+ urlparts = urlsplit(request.url)
request_info["url"] = "%s://%s%s" % (
urlparts.scheme,
diff --git a/sentry_sdk/integrations/serverless.py b/sentry_sdk/integrations/serverless.py
index 534034547a..a8fbc826fd 100644
--- a/sentry_sdk/integrations/serverless.py
+++ b/sentry_sdk/integrations/serverless.py
@@ -1,11 +1,8 @@
import sys
+from functools import wraps
-from sentry_sdk.hub import Hub
-from sentry_sdk.utils import event_from_exception
-from sentry_sdk._compat import reraise
-from sentry_sdk._functools import wraps
-
-
+import sentry_sdk
+from sentry_sdk.utils import event_from_exception, reraise
from sentry_sdk._types import TYPE_CHECKING
if TYPE_CHECKING:
@@ -45,9 +42,8 @@ def wrapper(f):
@wraps(f)
def inner(*args, **kwargs):
# type: (*Any, **Any) -> Any
- with Hub(Hub.current) as hub:
- with hub.configure_scope() as scope:
- scope.clear_breadcrumbs()
+ with sentry_sdk.isolation_scope() as scope:
+ scope.clear_breadcrumbs()
try:
return f(*args, **kwargs)
@@ -55,7 +51,7 @@ def inner(*args, **kwargs):
_capture_and_reraise()
finally:
if flush:
- _flush_client()
+ sentry_sdk.flush()
return inner # type: ignore
@@ -68,18 +64,13 @@ def inner(*args, **kwargs):
def _capture_and_reraise():
# type: () -> None
exc_info = sys.exc_info()
- hub = Hub.current
- if hub.client is not None:
+ client = sentry_sdk.get_client()
+ if client.is_active():
event, hint = event_from_exception(
exc_info,
- client_options=hub.client.options,
+ client_options=client.options,
mechanism={"type": "serverless", "handled": False},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
reraise(*exc_info)
-
-
-def _flush_client():
- # type: () -> None
- return Hub.current.flush()
diff --git a/sentry_sdk/integrations/socket.py b/sentry_sdk/integrations/socket.py
index 7a4e358185..1422551bf4 100644
--- a/sentry_sdk/integrations/socket.py
+++ b/sentry_sdk/integrations/socket.py
@@ -1,7 +1,6 @@
-from __future__ import absolute_import
-
import socket
-from sentry_sdk import Hub
+
+import sentry_sdk
from sentry_sdk._types import MYPY
from sentry_sdk.consts import OP
from sentry_sdk.integrations import Integration
@@ -49,13 +48,11 @@ def create_connection(
source_address=None,
):
# type: (Tuple[Optional[str], int], Optional[float], Optional[Tuple[Union[bytearray, bytes, str], int]])-> socket.socket
- hub = Hub.current
- if hub.get_integration(SocketIntegration) is None:
- return real_create_connection(
- address=address, timeout=timeout, source_address=source_address
- )
+ integration = sentry_sdk.get_client().get_integration(SocketIntegration)
+ if integration is None:
+ return real_create_connection(address, timeout, source_address)
- with hub.start_span(
+ with sentry_sdk.start_span(
op=OP.SOCKET_CONNECTION,
description=_get_span_description(address[0], address[1]),
) as span:
@@ -76,11 +73,11 @@ def _patch_getaddrinfo():
def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0):
# type: (Union[bytes, str, None], Union[str, int, None], int, int, int, int) -> List[Tuple[AddressFamily, SocketKind, int, str, Union[Tuple[str, int], Tuple[str, int, int, int]]]]
- hub = Hub.current
- if hub.get_integration(SocketIntegration) is None:
+ integration = sentry_sdk.get_client().get_integration(SocketIntegration)
+ if integration is None:
return real_getaddrinfo(host, port, family, type, proto, flags)
- with hub.start_span(
+ with sentry_sdk.start_span(
op=OP.SOCKET_DNS, description=_get_span_description(host, port)
) as span:
span.set_data("host", host)
diff --git a/sentry_sdk/integrations/spark/spark_driver.py b/sentry_sdk/integrations/spark/spark_driver.py
index b3085fc4af..de08fc0f9f 100644
--- a/sentry_sdk/integrations/spark/spark_driver.py
+++ b/sentry_sdk/integrations/spark/spark_driver.py
@@ -1,7 +1,7 @@
-from sentry_sdk import configure_scope
-from sentry_sdk.hub import Hub
+import sentry_sdk
from sentry_sdk.integrations import Integration
-from sentry_sdk.utils import capture_internal_exceptions
+from sentry_sdk.scope import Scope
+from sentry_sdk.utils import capture_internal_exceptions, ensure_integration_enabled
from sentry_sdk._types import TYPE_CHECKING
@@ -56,56 +56,52 @@ def patch_spark_context_init():
spark_context_init = SparkContext._do_init
+ @ensure_integration_enabled(SparkIntegration, spark_context_init)
def _sentry_patched_spark_context_init(self, *args, **kwargs):
# type: (SparkContext, *Any, **Any) -> Optional[Any]
- init = spark_context_init(self, *args, **kwargs)
-
- if Hub.current.get_integration(SparkIntegration) is None:
- return init
-
_start_sentry_listener(self)
_set_app_properties()
- with configure_scope() as scope:
-
- @scope.add_event_processor
- def process_event(event, hint):
- # type: (Event, Hint) -> Optional[Event]
- with capture_internal_exceptions():
- if Hub.current.get_integration(SparkIntegration) is None:
- return event
-
- event.setdefault("user", {}).setdefault("id", self.sparkUser())
-
- event.setdefault("tags", {}).setdefault(
- "executor.id", self._conf.get("spark.executor.id")
- )
- event["tags"].setdefault(
- "spark-submit.deployMode",
- self._conf.get("spark.submit.deployMode"),
- )
- event["tags"].setdefault(
- "driver.host", self._conf.get("spark.driver.host")
- )
- event["tags"].setdefault(
- "driver.port", self._conf.get("spark.driver.port")
- )
- event["tags"].setdefault("spark_version", self.version)
- event["tags"].setdefault("app_name", self.appName)
- event["tags"].setdefault("application_id", self.applicationId)
- event["tags"].setdefault("master", self.master)
- event["tags"].setdefault("spark_home", self.sparkHome)
-
- event.setdefault("extra", {}).setdefault("web_url", self.uiWebUrl)
-
- return event
-
- return init
+ scope = Scope.get_isolation_scope()
+
+ @scope.add_event_processor
+ def process_event(event, hint):
+ # type: (Event, Hint) -> Optional[Event]
+ with capture_internal_exceptions():
+ if sentry_sdk.get_client().get_integration(SparkIntegration) is None:
+ return event
+
+ event.setdefault("user", {}).setdefault("id", self.sparkUser())
+
+ event.setdefault("tags", {}).setdefault(
+ "executor.id", self._conf.get("spark.executor.id")
+ )
+ event["tags"].setdefault(
+ "spark-submit.deployMode",
+ self._conf.get("spark.submit.deployMode"),
+ )
+ event["tags"].setdefault(
+ "driver.host", self._conf.get("spark.driver.host")
+ )
+ event["tags"].setdefault(
+ "driver.port", self._conf.get("spark.driver.port")
+ )
+ event["tags"].setdefault("spark_version", self.version)
+ event["tags"].setdefault("app_name", self.appName)
+ event["tags"].setdefault("application_id", self.applicationId)
+ event["tags"].setdefault("master", self.master)
+ event["tags"].setdefault("spark_home", self.sparkHome)
+
+ event.setdefault("extra", {}).setdefault("web_url", self.uiWebUrl)
+
+ return event
+
+ return spark_context_init(self, *args, **kwargs)
SparkContext._do_init = _sentry_patched_spark_context_init
-class SparkListener(object):
+class SparkListener:
def onApplicationEnd(self, applicationEnd): # noqa: N802,N803
# type: (Any) -> None
pass
@@ -209,14 +205,10 @@ class Java:
class SentryListener(SparkListener):
- def __init__(self):
- # type: () -> None
- self.hub = Hub.current
-
def onJobStart(self, jobStart): # noqa: N802,N803
# type: (Any) -> None
message = "Job {} Started".format(jobStart.jobId())
- self.hub.add_breadcrumb(level="info", message=message)
+ sentry_sdk.add_breadcrumb(level="info", message=message)
_set_app_properties()
def onJobEnd(self, jobEnd): # noqa: N802,N803
@@ -232,14 +224,14 @@ def onJobEnd(self, jobEnd): # noqa: N802,N803
level = "warning"
message = "Job {} Failed".format(jobEnd.jobId())
- self.hub.add_breadcrumb(level=level, message=message, data=data)
+ sentry_sdk.add_breadcrumb(level=level, message=message, data=data)
def onStageSubmitted(self, stageSubmitted): # noqa: N802,N803
# type: (Any) -> None
stage_info = stageSubmitted.stageInfo()
message = "Stage {} Submitted".format(stage_info.stageId())
data = {"attemptId": stage_info.attemptId(), "name": stage_info.name()}
- self.hub.add_breadcrumb(level="info", message=message, data=data)
+ sentry_sdk.add_breadcrumb(level="info", message=message, data=data)
_set_app_properties()
def onStageCompleted(self, stageCompleted): # noqa: N802,N803
@@ -260,4 +252,4 @@ def onStageCompleted(self, stageCompleted): # noqa: N802,N803
message = "Stage {} Completed".format(stage_info.stageId())
level = "info"
- self.hub.add_breadcrumb(level=level, message=message, data=data)
+ sentry_sdk.add_breadcrumb(level=level, message=message, data=data)
diff --git a/sentry_sdk/integrations/spark/spark_worker.py b/sentry_sdk/integrations/spark/spark_worker.py
index 632e870973..fa18896516 100644
--- a/sentry_sdk/integrations/spark/spark_worker.py
+++ b/sentry_sdk/integrations/spark/spark_worker.py
@@ -1,10 +1,8 @@
-from __future__ import absolute_import
-
import sys
-from sentry_sdk import configure_scope
-from sentry_sdk.hub import Hub
+import sentry_sdk
from sentry_sdk.integrations import Integration
+from sentry_sdk.scope import Scope
from sentry_sdk.utils import (
capture_internal_exceptions,
exc_info_from_error,
@@ -33,11 +31,9 @@ def setup_once():
original_daemon.worker_main = _sentry_worker_main
-def _capture_exception(exc_info, hub):
- # type: (ExcInfo, Hub) -> None
- client = hub.client
-
- client_options = client.options # type: ignore
+def _capture_exception(exc_info):
+ # type: (ExcInfo) -> None
+ client = sentry_sdk.get_client()
mechanism = {"type": "spark", "handled": False}
@@ -51,7 +47,7 @@ def _capture_exception(exc_info, hub):
if exc_type not in (SystemExit, EOFError, ConnectionResetError):
rv.append(
single_exception_from_error_tuple(
- exc_type, exc_value, tb, client_options, mechanism
+ exc_type, exc_value, tb, client.options, mechanism
)
)
@@ -62,52 +58,50 @@ def _capture_exception(exc_info, hub):
_tag_task_context()
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
def _tag_task_context():
# type: () -> None
from pyspark.taskcontext import TaskContext
- with configure_scope() as scope:
+ scope = Scope.get_isolation_scope()
- @scope.add_event_processor
- def process_event(event, hint):
- # type: (Event, Hint) -> Optional[Event]
- with capture_internal_exceptions():
- integration = Hub.current.get_integration(SparkWorkerIntegration)
- task_context = TaskContext.get()
+ @scope.add_event_processor
+ def process_event(event, hint):
+ # type: (Event, Hint) -> Optional[Event]
+ with capture_internal_exceptions():
+ integration = sentry_sdk.get_client().get_integration(
+ SparkWorkerIntegration
+ )
+ task_context = TaskContext.get()
- if integration is None or task_context is None:
- return event
+ if integration is None or task_context is None:
+ return event
- event.setdefault("tags", {}).setdefault(
- "stageId", str(task_context.stageId())
- )
- event["tags"].setdefault("partitionId", str(task_context.partitionId()))
- event["tags"].setdefault(
- "attemptNumber", str(task_context.attemptNumber())
- )
- event["tags"].setdefault(
- "taskAttemptId", str(task_context.taskAttemptId())
- )
+ event.setdefault("tags", {}).setdefault(
+ "stageId", str(task_context.stageId())
+ )
+ event["tags"].setdefault("partitionId", str(task_context.partitionId()))
+ event["tags"].setdefault("attemptNumber", str(task_context.attemptNumber()))
+ event["tags"].setdefault("taskAttemptId", str(task_context.taskAttemptId()))
- if task_context._localProperties:
- if "sentry_app_name" in task_context._localProperties:
- event["tags"].setdefault(
- "app_name", task_context._localProperties["sentry_app_name"]
- )
- event["tags"].setdefault(
- "application_id",
- task_context._localProperties["sentry_application_id"],
- )
+ if task_context._localProperties:
+ if "sentry_app_name" in task_context._localProperties:
+ event["tags"].setdefault(
+ "app_name", task_context._localProperties["sentry_app_name"]
+ )
+ event["tags"].setdefault(
+ "application_id",
+ task_context._localProperties["sentry_application_id"],
+ )
- if "callSite.short" in task_context._localProperties:
- event.setdefault("extra", {}).setdefault(
- "callSite", task_context._localProperties["callSite.short"]
- )
+ if "callSite.short" in task_context._localProperties:
+ event.setdefault("extra", {}).setdefault(
+ "callSite", task_context._localProperties["callSite.short"]
+ )
- return event
+ return event
def _sentry_worker_main(*args, **kwargs):
@@ -117,8 +111,7 @@ def _sentry_worker_main(*args, **kwargs):
try:
original_worker.main(*args, **kwargs)
except SystemExit:
- if Hub.current.get_integration(SparkWorkerIntegration) is not None:
- hub = Hub.current
+ if sentry_sdk.get_client().get_integration(SparkWorkerIntegration) is not None:
exc_info = sys.exc_info()
with capture_internal_exceptions():
- _capture_exception(exc_info, hub)
+ _capture_exception(exc_info)
diff --git a/sentry_sdk/integrations/sqlalchemy.py b/sentry_sdk/integrations/sqlalchemy.py
index 5850237e97..9c438ca3df 100644
--- a/sentry_sdk/integrations/sqlalchemy.py
+++ b/sentry_sdk/integrations/sqlalchemy.py
@@ -1,13 +1,14 @@
-from __future__ import absolute_import
-
-from sentry_sdk._compat import text_type
+import sentry_sdk
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.consts import SPANDATA
from sentry_sdk.db.explain_plan.sqlalchemy import attach_explain_plan_to_span
-from sentry_sdk.hub import Hub
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.tracing_utils import add_query_source, record_sql_queries
-from sentry_sdk.utils import capture_internal_exceptions, parse_version
+from sentry_sdk.utils import (
+ capture_internal_exceptions,
+ ensure_integration_enabled,
+ parse_version,
+)
try:
from sqlalchemy.engine import Engine # type: ignore
@@ -46,16 +47,12 @@ def setup_once():
listen(Engine, "handle_error", _handle_error)
+@ensure_integration_enabled(SqlalchemyIntegration)
def _before_cursor_execute(
conn, cursor, statement, parameters, context, executemany, *args
):
# type: (Any, Any, Any, Any, Any, bool, *Any) -> None
- hub = Hub.current
- if hub.get_integration(SqlalchemyIntegration) is None:
- return
-
ctx_mgr = record_sql_queries(
- hub,
cursor,
statement,
parameters,
@@ -68,25 +65,23 @@ def _before_cursor_execute(
if span is not None:
_set_db_data(span, conn)
- if hub.client:
- options = hub.client.options["_experiments"].get("attach_explain_plans")
- if options is not None:
- attach_explain_plan_to_span(
- span,
- conn,
- statement,
- parameters,
- options,
- )
+ options = (
+ sentry_sdk.get_client().options["_experiments"].get("attach_explain_plans")
+ )
+ if options is not None:
+ attach_explain_plan_to_span(
+ span,
+ conn,
+ statement,
+ parameters,
+ options,
+ )
context._sentry_sql_span = span
+@ensure_integration_enabled(SqlalchemyIntegration)
def _after_cursor_execute(conn, cursor, statement, parameters, context, *args):
# type: (Any, Any, Any, Any, Any, *Any) -> None
- hub = Hub.current
- if hub.get_integration(SqlalchemyIntegration) is None:
- return
-
ctx_mgr = getattr(
context, "_sentry_sql_span_manager", None
) # type: Optional[ContextManager[Any]]
@@ -98,7 +93,7 @@ def _after_cursor_execute(conn, cursor, statement, parameters, context, *args):
span = getattr(context, "_sentry_sql_span", None) # type: Optional[Span]
if span is not None:
with capture_internal_exceptions():
- add_query_source(hub, span)
+ add_query_source(span)
def _handle_error(context, *args):
@@ -127,7 +122,7 @@ def _handle_error(context, *args):
# See: https://docs.sqlalchemy.org/en/20/dialects/index.html
def _get_db_system(name):
# type: (str) -> Optional[str]
- name = text_type(name)
+ name = str(name)
if "sqlite" in name:
return "sqlite"
diff --git a/sentry_sdk/integrations/starlette.py b/sentry_sdk/integrations/starlette.py
index 79bb18aa78..cb0f977d99 100644
--- a/sentry_sdk/integrations/starlette.py
+++ b/sentry_sdk/integrations/starlette.py
@@ -1,19 +1,17 @@
-from __future__ import absolute_import
-
import asyncio
import functools
from copy import deepcopy
-from sentry_sdk._compat import iteritems
+import sentry_sdk
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.consts import OP
-from sentry_sdk.hub import Hub, _should_send_default_pii
from sentry_sdk.integrations import DidNotEnable, Integration
from sentry_sdk.integrations._wsgi_common import (
_is_json_content_type,
request_body_within_bounds,
)
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+from sentry_sdk.scope import Scope, should_send_default_pii
from sentry_sdk.tracing import (
SOURCE_FOR_STYLE,
TRANSACTION_SOURCE_COMPONENT,
@@ -22,6 +20,7 @@
from sentry_sdk.utils import (
AnnotatedValue,
capture_internal_exceptions,
+ ensure_integration_enabled,
event_from_exception,
logger,
parse_version,
@@ -31,7 +30,6 @@
if TYPE_CHECKING:
from typing import Any, Awaitable, Callable, Dict, Optional, Tuple
- from sentry_sdk.scope import Scope as SentryScope
from sentry_sdk._types import Event
try:
@@ -106,59 +104,54 @@ def _enable_span_for_middleware(middleware_class):
async def _create_span_call(app, scope, receive, send, **kwargs):
# type: (Any, Dict[str, Any], Callable[[], Awaitable[Dict[str, Any]]], Callable[[Dict[str, Any]], Awaitable[None]], Any) -> None
- hub = Hub.current
- integration = hub.get_integration(StarletteIntegration)
- if integration is not None:
- middleware_name = app.__class__.__name__
-
- # Update transaction name with middleware name
- with hub.configure_scope() as sentry_scope:
- name, source = _get_transaction_from_middleware(app, scope, integration)
- if name is not None:
- sentry_scope.set_transaction_name(
- name,
- source=source,
- )
+ integration = sentry_sdk.get_client().get_integration(StarletteIntegration)
+ if integration is None:
+ return await old_call(app, scope, receive, send, **kwargs)
- with hub.start_span(
- op=OP.MIDDLEWARE_STARLETTE, description=middleware_name
- ) as middleware_span:
- middleware_span.set_tag("starlette.middleware_name", middleware_name)
-
- # Creating spans for the "receive" callback
- async def _sentry_receive(*args, **kwargs):
- # type: (*Any, **Any) -> Any
- hub = Hub.current
- with hub.start_span(
- op=OP.MIDDLEWARE_STARLETTE_RECEIVE,
- description=getattr(receive, "__qualname__", str(receive)),
- ) as span:
- span.set_tag("starlette.middleware_name", middleware_name)
- return await receive(*args, **kwargs)
-
- receive_name = getattr(receive, "__name__", str(receive))
- receive_patched = receive_name == "_sentry_receive"
- new_receive = _sentry_receive if not receive_patched else receive
-
- # Creating spans for the "send" callback
- async def _sentry_send(*args, **kwargs):
- # type: (*Any, **Any) -> Any
- hub = Hub.current
- with hub.start_span(
- op=OP.MIDDLEWARE_STARLETTE_SEND,
- description=getattr(send, "__qualname__", str(send)),
- ) as span:
- span.set_tag("starlette.middleware_name", middleware_name)
- return await send(*args, **kwargs)
-
- send_name = getattr(send, "__name__", str(send))
- send_patched = send_name == "_sentry_send"
- new_send = _sentry_send if not send_patched else send
-
- return await old_call(app, scope, new_receive, new_send, **kwargs)
+ middleware_name = app.__class__.__name__
- else:
- return await old_call(app, scope, receive, send, **kwargs)
+ # Update transaction name with middleware name
+ name, source = _get_transaction_from_middleware(app, scope, integration)
+ if name is not None:
+ Scope.get_current_scope().set_transaction_name(
+ name,
+ source=source,
+ )
+
+ with sentry_sdk.start_span(
+ op=OP.MIDDLEWARE_STARLETTE, description=middleware_name
+ ) as middleware_span:
+ middleware_span.set_tag("starlette.middleware_name", middleware_name)
+
+ # Creating spans for the "receive" callback
+ async def _sentry_receive(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ with sentry_sdk.start_span(
+ op=OP.MIDDLEWARE_STARLETTE_RECEIVE,
+ description=getattr(receive, "__qualname__", str(receive)),
+ ) as span:
+ span.set_tag("starlette.middleware_name", middleware_name)
+ return await receive(*args, **kwargs)
+
+ receive_name = getattr(receive, "__name__", str(receive))
+ receive_patched = receive_name == "_sentry_receive"
+ new_receive = _sentry_receive if not receive_patched else receive
+
+ # Creating spans for the "send" callback
+ async def _sentry_send(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+ with sentry_sdk.start_span(
+ op=OP.MIDDLEWARE_STARLETTE_SEND,
+ description=getattr(send, "__qualname__", str(send)),
+ ) as span:
+ span.set_tag("starlette.middleware_name", middleware_name)
+ return await send(*args, **kwargs)
+
+ send_name = getattr(send, "__name__", str(send))
+ send_patched = send_name == "_sentry_send"
+ new_send = _sentry_send if not send_patched else send
+
+ return await old_call(app, scope, new_receive, new_send, **kwargs)
not_yet_patched = old_call.__name__ not in [
"_create_span_call",
@@ -172,19 +165,16 @@ async def _sentry_send(*args, **kwargs):
return middleware_class
+@ensure_integration_enabled(StarletteIntegration)
def _capture_exception(exception, handled=False):
# type: (BaseException, **Any) -> None
- hub = Hub.current
- if hub.get_integration(StarletteIntegration) is None:
- return
-
event, hint = event_from_exception(
exception,
- client_options=hub.client.options if hub.client else None,
+ client_options=sentry_sdk.get_client().options,
mechanism={"type": StarletteIntegration.identifier, "handled": handled},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
def patch_exception_middleware(middleware_class):
@@ -259,6 +249,7 @@ async def _sentry_exceptionmiddleware_call(self, scope, receive, send):
middleware_class.__call__ = _sentry_exceptionmiddleware_call
+@ensure_integration_enabled(StarletteIntegration)
def _add_user_to_sentry_scope(scope):
# type: (Dict[str, Any]) -> None
"""
@@ -268,30 +259,26 @@ def _add_user_to_sentry_scope(scope):
if "user" not in scope:
return
- if not _should_send_default_pii():
- return
-
- hub = Hub.current
- if hub.get_integration(StarletteIntegration) is None:
+ if not should_send_default_pii():
return
- with hub.configure_scope() as sentry_scope:
- user_info = {} # type: Dict[str, Any]
- starlette_user = scope["user"]
+ user_info = {} # type: Dict[str, Any]
+ starlette_user = scope["user"]
- username = getattr(starlette_user, "username", None)
- if username:
- user_info.setdefault("username", starlette_user.username)
+ username = getattr(starlette_user, "username", None)
+ if username:
+ user_info.setdefault("username", starlette_user.username)
- user_id = getattr(starlette_user, "id", None)
- if user_id:
- user_info.setdefault("id", starlette_user.id)
+ user_id = getattr(starlette_user, "id", None)
+ if user_id:
+ user_info.setdefault("id", starlette_user.id)
- email = getattr(starlette_user, "email", None)
- if email:
- user_info.setdefault("email", starlette_user.email)
+ email = getattr(starlette_user, "email", None)
+ if email:
+ user_info.setdefault("email", starlette_user.email)
- sentry_scope.user = user_info
+ sentry_scope = Scope.get_isolation_scope()
+ sentry_scope.user = user_info
def patch_authentication_middleware(middleware_class):
@@ -351,7 +338,7 @@ def patch_asgi_app():
async def _sentry_patched_asgi_app(self, scope, receive, send):
# type: (Starlette, StarletteScope, Receive, Send) -> None
- integration = Hub.current.get_integration(StarletteIntegration)
+ integration = sentry_sdk.get_client().get_integration(StarletteIntegration)
if integration is None:
return await old_app(self, scope, receive, send)
@@ -392,38 +379,39 @@ def _sentry_request_response(func):
async def _sentry_async_func(*args, **kwargs):
# type: (*Any, **Any) -> Any
- hub = Hub.current
- integration = hub.get_integration(StarletteIntegration)
+ integration = sentry_sdk.get_client().get_integration(
+ StarletteIntegration
+ )
if integration is None:
return await old_func(*args, **kwargs)
- with hub.configure_scope() as sentry_scope:
- request = args[0]
+ request = args[0]
- _set_transaction_name_and_source(
- sentry_scope, integration.transaction_style, request
- )
+ _set_transaction_name_and_source(
+ Scope.get_current_scope(), integration.transaction_style, request
+ )
- extractor = StarletteRequestExtractor(request)
- info = await extractor.extract_request_info()
+ sentry_scope = Scope.get_isolation_scope()
+ extractor = StarletteRequestExtractor(request)
+ info = await extractor.extract_request_info()
- def _make_request_event_processor(req, integration):
- # type: (Any, Any) -> Callable[[Event, dict[str, Any]], Event]
- def event_processor(event, hint):
- # type: (Event, Dict[str, Any]) -> Event
+ def _make_request_event_processor(req, integration):
+ # type: (Any, Any) -> Callable[[Event, dict[str, Any]], Event]
+ def event_processor(event, hint):
+ # type: (Event, Dict[str, Any]) -> Event
- # Add info from request to event
- request_info = event.get("request", {})
- if info:
- if "cookies" in info:
- request_info["cookies"] = info["cookies"]
- if "data" in info:
- request_info["data"] = info["data"]
- event["request"] = deepcopy(request_info)
+ # Add info from request to event
+ request_info = event.get("request", {})
+ if info:
+ if "cookies" in info:
+ request_info["cookies"] = info["cookies"]
+ if "data" in info:
+ request_info["data"] = info["data"]
+ event["request"] = deepcopy(request_info)
- return event
+ return event
- return event_processor
+ return event_processor
sentry_scope._name = StarletteIntegration.identifier
sentry_scope.add_event_processor(
@@ -433,43 +421,44 @@ def event_processor(event, hint):
return await old_func(*args, **kwargs)
func = _sentry_async_func
+
else:
+ @ensure_integration_enabled(StarletteIntegration, old_func)
def _sentry_sync_func(*args, **kwargs):
# type: (*Any, **Any) -> Any
- hub = Hub.current
- integration = hub.get_integration(StarletteIntegration)
- if integration is None:
- return old_func(*args, **kwargs)
+ integration = sentry_sdk.get_client().get_integration(
+ StarletteIntegration
+ )
+ sentry_scope = Scope.get_isolation_scope()
- with hub.configure_scope() as sentry_scope:
- if sentry_scope.profile is not None:
- sentry_scope.profile.update_active_thread_id()
+ if sentry_scope.profile is not None:
+ sentry_scope.profile.update_active_thread_id()
- request = args[0]
+ request = args[0]
- _set_transaction_name_and_source(
- sentry_scope, integration.transaction_style, request
- )
+ _set_transaction_name_and_source(
+ sentry_scope, integration.transaction_style, request
+ )
- extractor = StarletteRequestExtractor(request)
- cookies = extractor.extract_cookies_from_request()
+ extractor = StarletteRequestExtractor(request)
+ cookies = extractor.extract_cookies_from_request()
- def _make_request_event_processor(req, integration):
- # type: (Any, Any) -> Callable[[Event, dict[str, Any]], Event]
- def event_processor(event, hint):
- # type: (Event, dict[str, Any]) -> Event
+ def _make_request_event_processor(req, integration):
+ # type: (Any, Any) -> Callable[[Event, dict[str, Any]], Event]
+ def event_processor(event, hint):
+ # type: (Event, dict[str, Any]) -> Event
- # Extract information from request
- request_info = event.get("request", {})
- if cookies:
- request_info["cookies"] = cookies
+ # Extract information from request
+ request_info = event.get("request", {})
+ if cookies:
+ request_info["cookies"] = cookies
- event["request"] = deepcopy(request_info)
+ event["request"] = deepcopy(request_info)
- return event
+ return event
- return event_processor
+ return event_processor
sentry_scope._name = StarletteIntegration.identifier
sentry_scope.add_event_processor(
@@ -510,8 +499,7 @@ def _sentry_jinja2templates_init(self, *args, **kwargs):
# type: (Jinja2Templates, *Any, **Any) -> None
def add_sentry_trace_meta(request):
# type: (Request) -> Dict[str, Any]
- hub = Hub.current
- trace_meta = Markup(hub.trace_propagation_meta())
+ trace_meta = Markup(Scope.get_current_scope().trace_propagation_meta())
return {
"sentry_trace_meta": trace_meta,
}
@@ -540,27 +528,21 @@ def __init__(self, request):
def extract_cookies_from_request(self):
# type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
- client = Hub.current.client
- if client is None:
- return None
-
cookies = None # type: Optional[Dict[str, Any]]
- if _should_send_default_pii():
+ if should_send_default_pii():
cookies = self.cookies()
return cookies
async def extract_request_info(self):
# type: (StarletteRequestExtractor) -> Optional[Dict[str, Any]]
- client = Hub.current.client
- if client is None:
- return None
+ client = sentry_sdk.get_client()
request_info = {} # type: Dict[str, Any]
with capture_internal_exceptions():
# Add cookies
- if _should_send_default_pii():
+ if should_send_default_pii():
request_info["cookies"] = self.cookies()
# If there is no body, just return the cookies
@@ -585,7 +567,7 @@ async def extract_request_info(self):
form = await self.form()
if form:
form_data = {}
- for key, val in iteritems(form):
+ for key, val in form.items():
is_file = isinstance(val, UploadFile)
form_data[key] = (
val
@@ -651,7 +633,7 @@ def _transaction_name_from_router(scope):
def _set_transaction_name_and_source(scope, transaction_style, request):
- # type: (SentryScope, str, Any) -> None
+ # type: (Scope, str, Any) -> None
name = None
source = SOURCE_FOR_STYLE[transaction_style]
diff --git a/sentry_sdk/integrations/starlite.py b/sentry_sdk/integrations/starlite.py
index 070675c2e7..9ef7329fd9 100644
--- a/sentry_sdk/integrations/starlite.py
+++ b/sentry_sdk/integrations/starlite.py
@@ -1,12 +1,16 @@
from typing import TYPE_CHECKING
-from pydantic import BaseModel # type: ignore
+import sentry_sdk
from sentry_sdk.consts import OP
-from sentry_sdk.hub import Hub, _should_send_default_pii
from sentry_sdk.integrations import DidNotEnable, Integration
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
+from sentry_sdk.scope import Scope as SentryScope, should_send_default_pii
from sentry_sdk.tracing import SOURCE_FOR_STYLE, TRANSACTION_SOURCE_ROUTE
-from sentry_sdk.utils import event_from_exception, transaction_from_function
+from sentry_sdk.utils import (
+ ensure_integration_enabled,
+ event_from_exception,
+ transaction_from_function,
+)
try:
from starlite import Request, Starlite, State # type: ignore
@@ -15,17 +19,19 @@
from starlite.plugins.base import get_plugin_for_value # type: ignore
from starlite.routes.http import HTTPRoute # type: ignore
from starlite.utils import ConnectionDataExtractor, is_async_callable, Ref # type: ignore
+ from pydantic import BaseModel # type: ignore
if TYPE_CHECKING:
from typing import Any, Dict, List, Optional, Union
from starlite.types import ( # type: ignore
ASGIApp,
+ Hint,
HTTPReceiveMessage,
HTTPScope,
Message,
Middleware,
Receive,
- Scope,
+ Scope as StarliteScope,
Send,
WebSocketReceiveMessage,
)
@@ -114,51 +120,50 @@ def enable_span_for_middleware(middleware: "Middleware") -> "Middleware":
old_call = middleware.__call__
async def _create_span_call(
- self: "MiddlewareProtocol", scope: "Scope", receive: "Receive", send: "Send"
+ self: "MiddlewareProtocol",
+ scope: "StarliteScope",
+ receive: "Receive",
+ send: "Send",
) -> None:
- hub = Hub.current
- integration = hub.get_integration(StarliteIntegration)
- if integration is not None:
- middleware_name = self.__class__.__name__
- with hub.start_span(
- op=OP.MIDDLEWARE_STARLITE, description=middleware_name
- ) as middleware_span:
- middleware_span.set_tag("starlite.middleware_name", middleware_name)
-
- # Creating spans for the "receive" callback
- async def _sentry_receive(
- *args: "Any", **kwargs: "Any"
- ) -> "Union[HTTPReceiveMessage, WebSocketReceiveMessage]":
- hub = Hub.current
- with hub.start_span(
- op=OP.MIDDLEWARE_STARLITE_RECEIVE,
- description=getattr(receive, "__qualname__", str(receive)),
- ) as span:
- span.set_tag("starlite.middleware_name", middleware_name)
- return await receive(*args, **kwargs)
-
- receive_name = getattr(receive, "__name__", str(receive))
- receive_patched = receive_name == "_sentry_receive"
- new_receive = _sentry_receive if not receive_patched else receive
-
- # Creating spans for the "send" callback
- async def _sentry_send(message: "Message") -> None:
- hub = Hub.current
- with hub.start_span(
- op=OP.MIDDLEWARE_STARLITE_SEND,
- description=getattr(send, "__qualname__", str(send)),
- ) as span:
- span.set_tag("starlite.middleware_name", middleware_name)
- return await send(message)
-
- send_name = getattr(send, "__name__", str(send))
- send_patched = send_name == "_sentry_send"
- new_send = _sentry_send if not send_patched else send
-
- return await old_call(self, scope, new_receive, new_send)
- else:
+ if sentry_sdk.get_client().get_integration(StarliteIntegration) is None:
return await old_call(self, scope, receive, send)
+ middleware_name = self.__class__.__name__
+ with sentry_sdk.start_span(
+ op=OP.MIDDLEWARE_STARLITE, description=middleware_name
+ ) as middleware_span:
+ middleware_span.set_tag("starlite.middleware_name", middleware_name)
+
+ # Creating spans for the "receive" callback
+ async def _sentry_receive(
+ *args: "Any", **kwargs: "Any"
+ ) -> "Union[HTTPReceiveMessage, WebSocketReceiveMessage]":
+ with sentry_sdk.start_span(
+ op=OP.MIDDLEWARE_STARLITE_RECEIVE,
+ description=getattr(receive, "__qualname__", str(receive)),
+ ) as span:
+ span.set_tag("starlite.middleware_name", middleware_name)
+ return await receive(*args, **kwargs)
+
+ receive_name = getattr(receive, "__name__", str(receive))
+ receive_patched = receive_name == "_sentry_receive"
+ new_receive = _sentry_receive if not receive_patched else receive
+
+ # Creating spans for the "send" callback
+ async def _sentry_send(message: "Message") -> None:
+ with sentry_sdk.start_span(
+ op=OP.MIDDLEWARE_STARLITE_SEND,
+ description=getattr(send, "__qualname__", str(send)),
+ ) as span:
+ span.set_tag("starlite.middleware_name", middleware_name)
+ return await send(message)
+
+ send_name = getattr(send, "__name__", str(send))
+ send_patched = send_name == "_sentry_send"
+ new_send = _sentry_send if not send_patched else send
+
+ return await old_call(self, scope, new_receive, new_send)
+
not_yet_patched = old_call.__name__ not in ["_create_span_call"]
if not_yet_patched:
@@ -176,66 +181,64 @@ def patch_http_route_handle() -> None:
async def handle_wrapper(
self: "HTTPRoute", scope: "HTTPScope", receive: "Receive", send: "Send"
) -> None:
- hub = Hub.current
- integration: StarliteIntegration = hub.get_integration(StarliteIntegration)
- if integration is None:
+ if sentry_sdk.get_client().get_integration(StarliteIntegration) is None:
return await old_handle(self, scope, receive, send)
- with hub.configure_scope() as sentry_scope:
- request: "Request[Any, Any]" = scope["app"].request_class(
- scope=scope, receive=receive, send=send
+ sentry_scope = SentryScope.get_isolation_scope()
+ request: "Request[Any, Any]" = scope["app"].request_class(
+ scope=scope, receive=receive, send=send
+ )
+ extracted_request_data = ConnectionDataExtractor(
+ parse_body=True, parse_query=True
+ )(request)
+ body = extracted_request_data.pop("body")
+
+ request_data = await body
+
+ def event_processor(event: "Event", _: "Hint") -> "Event":
+ route_handler = scope.get("route_handler")
+
+ request_info = event.get("request", {})
+ request_info["content_length"] = len(scope.get("_body", b""))
+ if should_send_default_pii():
+ request_info["cookies"] = extracted_request_data["cookies"]
+ if request_data is not None:
+ request_info["data"] = request_data
+
+ func = None
+ if route_handler.name is not None:
+ tx_name = route_handler.name
+ elif isinstance(route_handler.fn, Ref):
+ func = route_handler.fn.value
+ else:
+ func = route_handler.fn
+ if func is not None:
+ tx_name = transaction_from_function(func)
+
+ tx_info = {"source": SOURCE_FOR_STYLE["endpoint"]}
+
+ if not tx_name:
+ tx_name = _DEFAULT_TRANSACTION_NAME
+ tx_info = {"source": TRANSACTION_SOURCE_ROUTE}
+
+ event.update(
+ {
+ "request": request_info,
+ "transaction": tx_name,
+ "transaction_info": tx_info,
+ }
)
- extracted_request_data = ConnectionDataExtractor(
- parse_body=True, parse_query=True
- )(request)
- body = extracted_request_data.pop("body")
-
- request_data = await body
-
- def event_processor(event: "Event", _: "Dict[str, Any]") -> "Event":
- route_handler = scope.get("route_handler")
-
- request_info = event.get("request", {})
- request_info["content_length"] = len(scope.get("_body", b""))
- if _should_send_default_pii():
- request_info["cookies"] = extracted_request_data["cookies"]
- if request_data is not None:
- request_info["data"] = request_data
-
- func = None
- if route_handler.name is not None:
- tx_name = route_handler.name
- elif isinstance(route_handler.fn, Ref):
- func = route_handler.fn.value
- else:
- func = route_handler.fn
- if func is not None:
- tx_name = transaction_from_function(func)
-
- tx_info = {"source": SOURCE_FOR_STYLE["endpoint"]}
-
- if not tx_name:
- tx_name = _DEFAULT_TRANSACTION_NAME
- tx_info = {"source": TRANSACTION_SOURCE_ROUTE}
-
- event.update(
- {
- "request": request_info,
- "transaction": tx_name,
- "transaction_info": tx_info,
- }
- )
- return event
-
- sentry_scope._name = StarliteIntegration.identifier
- sentry_scope.add_event_processor(event_processor)
+ return event
- return await old_handle(self, scope, receive, send)
+ sentry_scope._name = StarliteIntegration.identifier
+ sentry_scope.add_event_processor(event_processor)
+
+ return await old_handle(self, scope, receive, send)
HTTPRoute.handle = handle_wrapper
-def retrieve_user_from_scope(scope: "Scope") -> "Optional[Dict[str, Any]]":
+def retrieve_user_from_scope(scope: "StarliteScope") -> "Optional[Dict[str, Any]]":
scope_user = scope.get("user", {})
if not scope_user:
return None
@@ -253,22 +256,19 @@ def retrieve_user_from_scope(scope: "Scope") -> "Optional[Dict[str, Any]]":
return None
-def exception_handler(exc: Exception, scope: "Scope", _: "State") -> None:
- hub = Hub.current
- if hub.get_integration(StarliteIntegration) is None:
- return
-
+@ensure_integration_enabled(StarliteIntegration)
+def exception_handler(exc: Exception, scope: "StarliteScope", _: "State") -> None:
user_info: "Optional[Dict[str, Any]]" = None
- if _should_send_default_pii():
+ if should_send_default_pii():
user_info = retrieve_user_from_scope(scope)
if user_info and isinstance(user_info, dict):
- with hub.configure_scope() as sentry_scope:
- sentry_scope.set_user(user_info)
+ sentry_scope = SentryScope.get_isolation_scope()
+ sentry_scope.set_user(user_info)
event, hint = event_from_exception(
exc,
- client_options=hub.client.options if hub.client else None,
+ client_options=sentry_sdk.get_client().options,
mechanism={"type": StarliteIntegration.identifier, "handled": False},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
diff --git a/sentry_sdk/integrations/stdlib.py b/sentry_sdk/integrations/stdlib.py
index 0a17834a40..62899e9a1b 100644
--- a/sentry_sdk/integrations/stdlib.py
+++ b/sentry_sdk/integrations/stdlib.py
@@ -2,21 +2,22 @@
import subprocess
import sys
import platform
-from sentry_sdk.consts import OP, SPANDATA
+from http.client import HTTPConnection
-from sentry_sdk.hub import Hub
+import sentry_sdk
+from sentry_sdk.consts import OP, SPANDATA
from sentry_sdk.integrations import Integration
-from sentry_sdk.scope import add_global_event_processor
+from sentry_sdk.scope import Scope, add_global_event_processor
from sentry_sdk.tracing_utils import EnvironHeaders, should_propagate_trace
from sentry_sdk.utils import (
SENSITIVE_DATA_SUBSTITUTE,
capture_internal_exceptions,
+ ensure_integration_enabled,
is_sentry_url,
logger,
safe_repr,
parse_url,
)
-
from sentry_sdk._types import TYPE_CHECKING
if TYPE_CHECKING:
@@ -29,12 +30,6 @@
from sentry_sdk._types import Event, Hint
-try:
- from httplib import HTTPConnection # type: ignore
-except ImportError:
- from http.client import HTTPConnection
-
-
_RUNTIME_CONTEXT = {
"name": platform.python_implementation(),
"version": "%s.%s.%s" % (sys.version_info[:3]),
@@ -54,7 +49,7 @@ def setup_once():
@add_global_event_processor
def add_python_runtime_context(event, hint):
# type: (Event, Hint) -> Optional[Event]
- if Hub.current.get_integration(StdlibIntegration) is not None:
+ if sentry_sdk.get_client().get_integration(StdlibIntegration) is not None:
contexts = event.setdefault("contexts", {})
if isinstance(contexts, dict) and "runtime" not in contexts:
contexts["runtime"] = _RUNTIME_CONTEXT
@@ -69,13 +64,14 @@ def _install_httplib():
def putrequest(self, method, url, *args, **kwargs):
# type: (HTTPConnection, str, str, *Any, **Any) -> Any
- hub = Hub.current
-
host = self.host
port = self.port
default_port = self.default_port
- if hub.get_integration(StdlibIntegration) is None or is_sentry_url(hub, host):
+ client = sentry_sdk.get_client()
+ if client.get_integration(StdlibIntegration) is None or is_sentry_url(
+ client, host
+ ):
return real_putrequest(self, method, url, *args, **kwargs)
real_url = url
@@ -91,7 +87,7 @@ def putrequest(self, method, url, *args, **kwargs):
with capture_internal_exceptions():
parsed_url = parse_url(real_url, sanitize=False)
- span = hub.start_span(
+ span = sentry_sdk.start_span(
op=OP.HTTP_CLIENT,
description="%s %s"
% (method, parsed_url.url if parsed_url else SENSITIVE_DATA_SUBSTITUTE),
@@ -105,8 +101,10 @@ def putrequest(self, method, url, *args, **kwargs):
rv = real_putrequest(self, method, url, *args, **kwargs)
- if should_propagate_trace(hub, real_url):
- for key, value in hub.iter_trace_propagation_headers(span):
+ if should_propagate_trace(client, real_url):
+ for key, value in Scope.get_current_scope().iter_trace_propagation_headers(
+ span=span
+ ):
logger.debug(
"[Tracing] Adding `{key}` header {value} to outgoing request to {real_url}.".format(
key=key, value=value, real_url=real_url
@@ -114,7 +112,7 @@ def putrequest(self, method, url, *args, **kwargs):
)
self.putheader(key, value)
- self._sentrysdk_span = span
+ self._sentrysdk_span = span # type: ignore[attr-defined]
return rv
@@ -133,8 +131,8 @@ def getresponse(self, *args, **kwargs):
return rv
- HTTPConnection.putrequest = putrequest
- HTTPConnection.getresponse = getresponse
+ HTTPConnection.putrequest = putrequest # type: ignore[method-assign]
+ HTTPConnection.getresponse = getresponse # type: ignore[method-assign]
def _init_argument(args, kwargs, name, position, setdefault_callback=None):
@@ -172,13 +170,9 @@ def _install_subprocess():
# type: () -> None
old_popen_init = subprocess.Popen.__init__
+ @ensure_integration_enabled(StdlibIntegration, old_popen_init)
def sentry_patched_popen_init(self, *a, **kw):
# type: (subprocess.Popen[Any], *Any, **Any) -> None
-
- hub = Hub.current
- if hub.get_integration(StdlibIntegration) is None:
- return old_popen_init(self, *a, **kw)
-
# Convert from tuple to list to be able to set values.
a = list(a)
@@ -203,8 +197,10 @@ def sentry_patched_popen_init(self, *a, **kw):
env = None
- with hub.start_span(op=OP.SUBPROCESS, description=description) as span:
- for k, v in hub.iter_trace_propagation_headers(span):
+ with sentry_sdk.start_span(op=OP.SUBPROCESS, description=description) as span:
+ for k, v in Scope.get_current_scope().iter_trace_propagation_headers(
+ span=span
+ ):
if env is None:
env = _init_argument(
a, kw, "env", 10, lambda x: dict(x or os.environ)
@@ -223,14 +219,10 @@ def sentry_patched_popen_init(self, *a, **kw):
old_popen_wait = subprocess.Popen.wait
+ @ensure_integration_enabled(StdlibIntegration, old_popen_wait)
def sentry_patched_popen_wait(self, *a, **kw):
# type: (subprocess.Popen[Any], *Any, **Any) -> Any
- hub = Hub.current
-
- if hub.get_integration(StdlibIntegration) is None:
- return old_popen_wait(self, *a, **kw)
-
- with hub.start_span(op=OP.SUBPROCESS_WAIT) as span:
+ with sentry_sdk.start_span(op=OP.SUBPROCESS_WAIT) as span:
span.set_tag("subprocess.pid", self.pid)
return old_popen_wait(self, *a, **kw)
@@ -238,14 +230,10 @@ def sentry_patched_popen_wait(self, *a, **kw):
old_popen_communicate = subprocess.Popen.communicate
+ @ensure_integration_enabled(StdlibIntegration, old_popen_communicate)
def sentry_patched_popen_communicate(self, *a, **kw):
# type: (subprocess.Popen[Any], *Any, **Any) -> Any
- hub = Hub.current
-
- if hub.get_integration(StdlibIntegration) is None:
- return old_popen_communicate(self, *a, **kw)
-
- with hub.start_span(op=OP.SUBPROCESS_COMMUNICATE) as span:
+ with sentry_sdk.start_span(op=OP.SUBPROCESS_COMMUNICATE) as span:
span.set_tag("subprocess.pid", self.pid)
return old_popen_communicate(self, *a, **kw)
diff --git a/sentry_sdk/integrations/strawberry.py b/sentry_sdk/integrations/strawberry.py
index 5bc4184bee..024907ab7b 100644
--- a/sentry_sdk/integrations/strawberry.py
+++ b/sentry_sdk/integrations/strawberry.py
@@ -1,13 +1,14 @@
import hashlib
-from functools import cached_property
from inspect import isawaitable
-from sentry_sdk import configure_scope, start_span
+
+import sentry_sdk
from sentry_sdk.consts import OP
from sentry_sdk.integrations import Integration, DidNotEnable
from sentry_sdk.integrations.logging import ignore_logger
-from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.scope import Scope, should_send_default_pii
from sentry_sdk.utils import (
capture_internal_exceptions,
+ ensure_integration_enabled,
event_from_exception,
logger,
package_version,
@@ -15,6 +16,15 @@
)
from sentry_sdk._types import TYPE_CHECKING
+try:
+ from functools import cached_property
+except ImportError:
+ # The strawberry integration requires Python 3.8+. functools.cached_property
+ # was added in 3.8, so this check is technically not needed, but since this
+ # is an auto-enabling integration, we might get to executing this import in
+ # lower Python versions, so we need to deal with it.
+ raise DidNotEnable("strawberry-graphql integration requires Python 3.8 or newer")
+
try:
import strawberry.schema.schema as strawberry_schema # type: ignore
from strawberry import Schema
@@ -74,12 +84,10 @@ def _patch_schema_init():
# type: () -> None
old_schema_init = Schema.__init__
+ @ensure_integration_enabled(StrawberryIntegration, old_schema_init)
def _sentry_patched_schema_init(self, *args, **kwargs):
# type: (Schema, Any, Any) -> None
- integration = Hub.current.get_integration(StrawberryIntegration)
- if integration is None:
- return old_schema_init(self, *args, **kwargs)
-
+ integration = sentry_sdk.get_client().get_integration(StrawberryIntegration)
extensions = kwargs.get("extensions") or []
if integration.async_execution is not None:
@@ -159,7 +167,7 @@ def on_operation(self):
if self._operation_name:
description += " {}".format(self._operation_name)
- Hub.current.add_breadcrumb(
+ sentry_sdk.add_breadcrumb(
category="graphql.operation",
data={
"operation_name": self._operation_name,
@@ -167,13 +175,11 @@ def on_operation(self):
},
)
- with configure_scope() as scope:
- if scope.span:
- self.graphql_span = scope.span.start_child(
- op=op, description=description
- )
- else:
- self.graphql_span = start_span(op=op, description=description)
+ scope = Scope.get_isolation_scope()
+ if scope.span:
+ self.graphql_span = scope.span.start_child(op=op, description=description)
+ else:
+ self.graphql_span = sentry_sdk.start_span(op=op, description=description)
self.graphql_span.set_data("graphql.operation.type", operation_type)
self.graphql_span.set_data("graphql.operation.name", self._operation_name)
@@ -261,37 +267,27 @@ def _patch_execute():
async def _sentry_patched_execute_async(*args, **kwargs):
# type: (Any, Any) -> ExecutionResult
- hub = Hub.current
- integration = hub.get_integration(StrawberryIntegration)
- if integration is None:
- return await old_execute_async(*args, **kwargs)
-
result = await old_execute_async(*args, **kwargs)
+ if sentry_sdk.get_client().get_integration(StrawberryIntegration) is None:
+ return result
+
if "execution_context" in kwargs and result.errors:
- with hub.configure_scope() as scope:
- event_processor = _make_request_event_processor(
- kwargs["execution_context"]
- )
- scope.add_event_processor(event_processor)
+ scope = Scope.get_isolation_scope()
+ event_processor = _make_request_event_processor(kwargs["execution_context"])
+ scope.add_event_processor(event_processor)
return result
+ @ensure_integration_enabled(StrawberryIntegration, old_execute_sync)
def _sentry_patched_execute_sync(*args, **kwargs):
# type: (Any, Any) -> ExecutionResult
- hub = Hub.current
- integration = hub.get_integration(StrawberryIntegration)
- if integration is None:
- return old_execute_sync(*args, **kwargs)
-
result = old_execute_sync(*args, **kwargs)
if "execution_context" in kwargs and result.errors:
- with hub.configure_scope() as scope:
- event_processor = _make_request_event_processor(
- kwargs["execution_context"]
- )
- scope.add_event_processor(event_processor)
+ scope = Scope.get_isolation_scope()
+ event_processor = _make_request_event_processor(kwargs["execution_context"])
+ scope.add_event_processor(event_processor)
return result
@@ -314,31 +310,27 @@ def _sentry_patched_sync_view_handle_errors(self, errors, response_data):
old_sync_view_handle_errors(self, errors, response_data)
_sentry_patched_handle_errors(self, errors, response_data)
+ @ensure_integration_enabled(StrawberryIntegration)
def _sentry_patched_handle_errors(self, errors, response_data):
# type: (Any, List[GraphQLError], GraphQLHTTPResponse) -> None
- hub = Hub.current
- integration = hub.get_integration(StrawberryIntegration)
- if integration is None:
- return
-
if not errors:
return
- with hub.configure_scope() as scope:
- event_processor = _make_response_event_processor(response_data)
- scope.add_event_processor(event_processor)
+ scope = Scope.get_isolation_scope()
+ event_processor = _make_response_event_processor(response_data)
+ scope.add_event_processor(event_processor)
with capture_internal_exceptions():
for error in errors:
event, hint = event_from_exception(
error,
- client_options=hub.client.options if hub.client else None,
+ client_options=sentry_sdk.get_client().options,
mechanism={
- "type": integration.identifier,
+ "type": StrawberryIntegration.identifier,
"handled": False,
},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
async_base_view.AsyncBaseHTTPView._handle_errors = (
_sentry_patched_async_view_handle_errors
@@ -354,7 +346,7 @@ def _make_request_event_processor(execution_context):
def inner(event, hint):
# type: (Event, dict[str, Any]) -> Event
with capture_internal_exceptions():
- if _should_send_default_pii():
+ if should_send_default_pii():
request_data = event.setdefault("request", {})
request_data["api_target"] = "graphql"
@@ -385,7 +377,7 @@ def _make_response_event_processor(response_data):
def inner(event, hint):
# type: (Event, dict[str, Any]) -> Event
with capture_internal_exceptions():
- if _should_send_default_pii():
+ if should_send_default_pii():
contexts = event.setdefault("contexts", {})
contexts["response"] = {"data": response_data}
diff --git a/sentry_sdk/integrations/threading.py b/sentry_sdk/integrations/threading.py
index 499cf85e6d..63b6e13846 100644
--- a/sentry_sdk/integrations/threading.py
+++ b/sentry_sdk/integrations/threading.py
@@ -1,14 +1,18 @@
-from __future__ import absolute_import
-
import sys
from functools import wraps
from threading import Thread, current_thread
-from sentry_sdk import Hub
-from sentry_sdk._compat import reraise
+import sentry_sdk
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.integrations import Integration
-from sentry_sdk.utils import event_from_exception, capture_internal_exceptions
+from sentry_sdk.scope import Scope, use_isolation_scope, use_scope
+from sentry_sdk.utils import (
+ ensure_integration_enabled,
+ event_from_exception,
+ capture_internal_exceptions,
+ logger,
+ reraise,
+)
if TYPE_CHECKING:
from typing import Any
@@ -24,9 +28,21 @@
class ThreadingIntegration(Integration):
identifier = "threading"
- def __init__(self, propagate_hub=False):
- # type: (bool) -> None
- self.propagate_hub = propagate_hub
+ def __init__(self, propagate_hub=None, propagate_scope=True):
+ # type: (Optional[bool], bool) -> None
+ if propagate_hub is not None:
+ logger.warning(
+ "Deprecated: propagate_hub is deprecated. This will be removed in the future."
+ )
+
+ # Note: propagate_hub did not have any effect on propagation of scope data
+ # scope data was always propagated no matter what the value of propagate_hub was
+ # This is why the default for propagate_scope is True
+
+ self.propagate_scope = propagate_scope
+
+ if propagate_hub is not None:
+ self.propagate_scope = propagate_hub
@staticmethod
def setup_once():
@@ -34,60 +50,70 @@ def setup_once():
old_start = Thread.start
@wraps(old_start)
+ @ensure_integration_enabled(ThreadingIntegration, old_start)
def sentry_start(self, *a, **kw):
# type: (Thread, *Any, **Any) -> Any
- hub = Hub.current
- integration = hub.get_integration(ThreadingIntegration)
- if integration is not None:
- if not integration.propagate_hub:
- hub_ = None
- else:
- hub_ = Hub(hub)
- # Patching instance methods in `start()` creates a reference cycle if
- # done in a naive way. See
- # https://github.com/getsentry/sentry-python/pull/434
- #
- # In threading module, using current_thread API will access current thread instance
- # without holding it to avoid a reference cycle in an easier way.
- with capture_internal_exceptions():
- new_run = _wrap_run(hub_, getattr(self.run, "__func__", self.run))
- self.run = new_run # type: ignore
+ integration = sentry_sdk.get_client().get_integration(ThreadingIntegration)
+ if integration.propagate_scope:
+ isolation_scope = sentry_sdk.Scope.get_isolation_scope()
+ current_scope = sentry_sdk.Scope.get_current_scope()
+ else:
+ isolation_scope = None
+ current_scope = None
+
+ # Patching instance methods in `start()` creates a reference cycle if
+ # done in a naive way. See
+ # https://github.com/getsentry/sentry-python/pull/434
+ #
+ # In threading module, using current_thread API will access current thread instance
+ # without holding it to avoid a reference cycle in an easier way.
+ with capture_internal_exceptions():
+ new_run = _wrap_run(
+ isolation_scope,
+ current_scope,
+ getattr(self.run, "__func__", self.run),
+ )
+ self.run = new_run # type: ignore
return old_start(self, *a, **kw)
Thread.start = sentry_start # type: ignore
-def _wrap_run(parent_hub, old_run_func):
- # type: (Optional[Hub], F) -> F
+def _wrap_run(isolation_scope_to_use, current_scope_to_use, old_run_func):
+ # type: (Optional[Scope], Optional[Scope], F) -> F
@wraps(old_run_func)
def run(*a, **kw):
# type: (*Any, **Any) -> Any
- hub = parent_hub or Hub.current
- with hub:
+ def _run_old_run_func():
+ # type: () -> Any
try:
self = current_thread()
return old_run_func(self, *a, **kw)
except Exception:
reraise(*_capture_exception())
+ if isolation_scope_to_use is not None and current_scope_to_use is not None:
+ with use_isolation_scope(isolation_scope_to_use):
+ with use_scope(current_scope_to_use):
+ return _run_old_run_func()
+ else:
+ return _run_old_run_func()
+
return run # type: ignore
def _capture_exception():
# type: () -> ExcInfo
- hub = Hub.current
exc_info = sys.exc_info()
- if hub.get_integration(ThreadingIntegration) is not None:
- # If an integration is there, a client has to be there.
- client = hub.client # type: Any
-
+ client = sentry_sdk.get_client()
+ if client.get_integration(ThreadingIntegration) is not None:
event, hint = event_from_exception(
exc_info,
client_options=client.options,
mechanism={"type": "threading", "handled": False},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
return exc_info
diff --git a/sentry_sdk/integrations/tornado.py b/sentry_sdk/integrations/tornado.py
index c6f7700f12..6681037000 100644
--- a/sentry_sdk/integrations/tornado.py
+++ b/sentry_sdk/integrations/tornado.py
@@ -2,9 +2,10 @@
import contextlib
from inspect import iscoroutinefunction
+import sentry_sdk
from sentry_sdk.api import continue_trace
from sentry_sdk.consts import OP
-from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.scope import should_send_default_pii
from sentry_sdk.tracing import (
TRANSACTION_SOURCE_COMPONENT,
TRANSACTION_SOURCE_ROUTE,
@@ -12,6 +13,7 @@
from sentry_sdk.utils import (
HAS_REAL_CONTEXTVARS,
CONTEXTVARS_ERROR_MESSAGE,
+ ensure_integration_enabled,
event_from_exception,
capture_internal_exceptions,
transaction_from_function,
@@ -23,7 +25,6 @@
_is_json_content_type,
)
from sentry_sdk.integrations.logging import ignore_logger
-from sentry_sdk._compat import iteritems
try:
from tornado import version_info as TORNADO_VERSION
@@ -50,8 +51,8 @@ class TornadoIntegration(Integration):
@staticmethod
def setup_once():
# type: () -> None
- if TORNADO_VERSION < (5, 0):
- raise DidNotEnable("Tornado 5+ required")
+ if TORNADO_VERSION < (6, 0):
+ raise DidNotEnable("Tornado 6.0+ required")
if not HAS_REAL_CONTEXTVARS:
# Tornado is async. We better have contextvars or we're going to leak
@@ -99,21 +100,19 @@ def sentry_log_exception(self, ty, value, tb, *args, **kwargs):
@contextlib.contextmanager
def _handle_request_impl(self):
# type: (RequestHandler) -> Generator[None, None, None]
- hub = Hub.current
- integration = hub.get_integration(TornadoIntegration)
+ integration = sentry_sdk.get_client().get_integration(TornadoIntegration)
if integration is None:
yield
weak_handler = weakref.ref(self)
- with Hub(hub) as hub:
+ with sentry_sdk.isolation_scope() as scope:
headers = self.request.headers
- with hub.configure_scope() as scope:
- scope.clear_breadcrumbs()
- processor = _make_event_processor(weak_handler)
- scope.add_event_processor(processor)
+ scope.clear_breadcrumbs()
+ processor = _make_event_processor(weak_handler)
+ scope.add_event_processor(processor)
transaction = continue_trace(
headers,
@@ -126,30 +125,25 @@ def _handle_request_impl(self):
source=TRANSACTION_SOURCE_ROUTE,
)
- with hub.start_transaction(
+ with sentry_sdk.start_transaction(
transaction, custom_sampling_context={"tornado_request": self.request}
):
yield
+@ensure_integration_enabled(TornadoIntegration)
def _capture_exception(ty, value, tb):
# type: (type, BaseException, Any) -> None
- hub = Hub.current
- if hub.get_integration(TornadoIntegration) is None:
- return
if isinstance(value, HTTPError):
return
- # If an integration is there, a client has to be there.
- client = hub.client # type: Any
-
event, hint = event_from_exception(
(ty, value, tb),
- client_options=client.options,
+ client_options=sentry_sdk.get_client().options,
mechanism={"type": "tornado", "handled": False},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
def _make_event_processor(weak_handler):
@@ -185,7 +179,7 @@ def tornado_processor(event, hint):
request_info["headers"] = _filter_headers(dict(request.headers))
with capture_internal_exceptions():
- if handler.current_user and _should_send_default_pii():
+ if handler.current_user and should_send_default_pii():
event.setdefault("user", {}).setdefault("is_authenticated", True)
return event
@@ -202,7 +196,7 @@ def content_length(self):
def cookies(self):
# type: () -> Dict[str, str]
- return {k: v.value for k, v in iteritems(self.request.cookies)}
+ return {k: v.value for k, v in self.request.cookies.items()}
def raw_data(self):
# type: () -> bytes
@@ -212,7 +206,7 @@ def form(self):
# type: () -> Dict[str, Any]
return {
k: [v.decode("latin1", "replace") for v in vs]
- for k, vs in iteritems(self.request.body_arguments)
+ for k, vs in self.request.body_arguments.items()
}
def is_json(self):
@@ -221,7 +215,7 @@ def is_json(self):
def files(self):
# type: () -> Dict[str, Any]
- return {k: v[0] for k, v in iteritems(self.request.files) if v}
+ return {k: v[0] for k, v in self.request.files.items() if v}
def size_of_file(self, file):
# type: (Any) -> int
diff --git a/sentry_sdk/integrations/trytond.py b/sentry_sdk/integrations/trytond.py
index 6f1aff2f15..da8fc84df1 100644
--- a/sentry_sdk/integrations/trytond.py
+++ b/sentry_sdk/integrations/trytond.py
@@ -1,20 +1,16 @@
-import sentry_sdk.hub
-import sentry_sdk.utils
-import sentry_sdk.integrations
-import sentry_sdk.integrations.wsgi
-from sentry_sdk._types import TYPE_CHECKING
+import sentry_sdk
+from sentry_sdk.integrations import Integration
+from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
+from sentry_sdk.utils import ensure_integration_enabled, event_from_exception
from trytond.exceptions import TrytonException # type: ignore
from trytond.wsgi import app # type: ignore
-if TYPE_CHECKING:
- from typing import Any
-
# TODO: trytond-worker, trytond-cron and trytond-admin intergations
-class TrytondWSGIIntegration(sentry_sdk.integrations.Integration):
+class TrytondWSGIIntegration(Integration):
identifier = "trytond_wsgi"
def __init__(self): # type: () -> None
@@ -22,24 +18,20 @@ def __init__(self): # type: () -> None
@staticmethod
def setup_once(): # type: () -> None
- app.wsgi_app = sentry_sdk.integrations.wsgi.SentryWsgiMiddleware(app.wsgi_app)
+ app.wsgi_app = SentryWsgiMiddleware(app.wsgi_app)
+ @ensure_integration_enabled(TrytondWSGIIntegration)
def error_handler(e): # type: (Exception) -> None
- hub = sentry_sdk.hub.Hub.current
-
- if hub.get_integration(TrytondWSGIIntegration) is None:
- return
- elif isinstance(e, TrytonException):
+ if isinstance(e, TrytonException):
return
else:
- # If an integration is there, a client has to be there.
- client = hub.client # type: Any
- event, hint = sentry_sdk.utils.event_from_exception(
+ client = sentry_sdk.get_client()
+ event, hint = event_from_exception(
e,
client_options=client.options,
mechanism={"type": "trytond", "handled": False},
)
- hub.capture_event(event, hint=hint)
+ sentry_sdk.capture_event(event, hint=hint)
# Expected error handlers signature was changed
# when the error_handler decorator was introduced
diff --git a/sentry_sdk/integrations/wsgi.py b/sentry_sdk/integrations/wsgi.py
index e7fd0da66d..de6c3b8060 100644
--- a/sentry_sdk/integrations/wsgi.py
+++ b/sentry_sdk/integrations/wsgi.py
@@ -1,20 +1,24 @@
import sys
+from functools import partial
-from sentry_sdk._compat import PY2, reraise
-from sentry_sdk._functools import partial
+import sentry_sdk
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk._werkzeug import get_host, _get_headers
from sentry_sdk.api import continue_trace
from sentry_sdk.consts import OP
-from sentry_sdk.hub import Hub, _should_send_default_pii
+from sentry_sdk.hub import _should_send_default_pii
+from sentry_sdk.integrations._wsgi_common import _filter_headers
+from sentry_sdk.sessions import (
+ auto_session_tracking_scope as auto_session_tracking,
+) # When the Hub is removed, this should be renamed (see comment in sentry_sdk/sessions.py)
+from sentry_sdk.scope import use_isolation_scope
+from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_ROUTE
from sentry_sdk.utils import (
ContextVar,
capture_internal_exceptions,
event_from_exception,
+ reraise,
)
-from sentry_sdk.tracing import Transaction, TRANSACTION_SOURCE_ROUTE
-from sentry_sdk.sessions import auto_session_tracking
-from sentry_sdk.integrations._wsgi_common import _filter_headers
if TYPE_CHECKING:
from typing import Callable
@@ -42,17 +46,9 @@ def __call__(self, status, response_headers, exc_info=None): # type: ignore
_wsgi_middleware_applied = ContextVar("sentry_wsgi_middleware_applied")
-if PY2:
-
- def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
- # type: (str, str, str) -> str
- return s.decode(charset, errors)
-
-else:
-
- def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
- # type: (str, str, str) -> str
- return s.encode("latin1").decode(charset, errors)
+def wsgi_decoding_dance(s, charset="utf-8", errors="replace"):
+ # type: (str, str, str) -> str
+ return s.encode("latin1").decode(charset, errors)
def get_request_url(environ, use_x_forwarded_for=False):
@@ -66,7 +62,7 @@ def get_request_url(environ, use_x_forwarded_for=False):
)
-class SentryWsgiMiddleware(object):
+class SentryWsgiMiddleware:
__slots__ = ("app", "use_x_forwarded_for")
def __init__(self, app, use_x_forwarded_for=False):
@@ -81,18 +77,16 @@ def __call__(self, environ, start_response):
_wsgi_middleware_applied.set(True)
try:
- hub = Hub(Hub.current)
- with auto_session_tracking(hub, session_mode="request"):
- with hub:
+ with sentry_sdk.isolation_scope() as scope:
+ with auto_session_tracking(scope, session_mode="request"):
with capture_internal_exceptions():
- with hub.configure_scope() as scope:
- scope.clear_breadcrumbs()
- scope._name = "wsgi"
- scope.add_event_processor(
- _make_wsgi_event_processor(
- environ, self.use_x_forwarded_for
- )
+ scope.clear_breadcrumbs()
+ scope._name = "wsgi"
+ scope.add_event_processor(
+ _make_wsgi_event_processor(
+ environ, self.use_x_forwarded_for
)
+ )
transaction = continue_trace(
environ,
@@ -101,22 +95,22 @@ def __call__(self, environ, start_response):
source=TRANSACTION_SOURCE_ROUTE,
)
- with hub.start_transaction(
+ with sentry_sdk.start_transaction(
transaction, custom_sampling_context={"wsgi_environ": environ}
):
try:
- rv = self.app(
+ response = self.app(
environ,
partial(
_sentry_start_response, start_response, transaction
),
)
except BaseException:
- reraise(*_capture_exception(hub))
+ reraise(*_capture_exception())
finally:
_wsgi_middleware_applied.set(False)
- return _ScopedResponse(hub, rv)
+ return _ScopedResponse(scope, response)
def _sentry_start_response( # type: ignore
@@ -177,33 +171,44 @@ def get_client_ip(environ):
return environ.get("REMOTE_ADDR")
-def _capture_exception(hub):
- # type: (Hub) -> ExcInfo
+def _capture_exception():
+ # type: () -> ExcInfo
+ """
+ Captures the current exception and sends it to Sentry.
+    Returns the ExcInfo tuple so it can be reraised afterwards.
+ """
exc_info = sys.exc_info()
+ e = exc_info[1]
+
+ # SystemExit(0) is the only uncaught exception that is expected behavior
+ should_skip_capture = isinstance(e, SystemExit) and e.code in (0, None)
+ if not should_skip_capture:
+ event, hint = event_from_exception(
+ exc_info,
+ client_options=sentry_sdk.get_client().options,
+ mechanism={"type": "wsgi", "handled": False},
+ )
+ sentry_sdk.capture_event(event, hint=hint)
- # Check client here as it might have been unset while streaming response
- if hub.client is not None:
- e = exc_info[1]
+ return exc_info
- # SystemExit(0) is the only uncaught exception that is expected behavior
- should_skip_capture = isinstance(e, SystemExit) and e.code in (0, None)
- if not should_skip_capture:
- event, hint = event_from_exception(
- exc_info,
- client_options=hub.client.options,
- mechanism={"type": "wsgi", "handled": False},
- )
- hub.capture_event(event, hint=hint)
- return exc_info
+class _ScopedResponse:
+ """
+    Uses a separate scope for each response chunk.
+ This will make WSGI apps more tolerant against:
+ - WSGI servers streaming responses from a different thread/from
+ different threads than the one that called start_response
+ - close() not being called
+ - WSGI servers streaming responses interleaved from the same thread
+ """
-class _ScopedResponse(object):
- __slots__ = ("_response", "_hub")
+ __slots__ = ("_response", "_scope")
- def __init__(self, hub, response):
- # type: (Hub, Iterator[bytes]) -> None
- self._hub = hub
+ def __init__(self, scope, response):
+ # type: (sentry_sdk.scope.Scope, Iterator[bytes]) -> None
+ self._scope = scope
self._response = response
def __iter__(self):
@@ -211,25 +216,25 @@ def __iter__(self):
iterator = iter(self._response)
while True:
- with self._hub:
+ with use_isolation_scope(self._scope):
try:
chunk = next(iterator)
except StopIteration:
break
except BaseException:
- reraise(*_capture_exception(self._hub))
+ reraise(*_capture_exception())
yield chunk
def close(self):
# type: () -> None
- with self._hub:
+ with use_isolation_scope(self._scope):
try:
self._response.close() # type: ignore
except AttributeError:
pass
except BaseException:
- reraise(*_capture_exception(self._hub))
+ reraise(*_capture_exception())
def _make_wsgi_event_processor(environ, use_x_forwarded_for):
@@ -237,7 +242,7 @@ def _make_wsgi_event_processor(environ, use_x_forwarded_for):
# It's a bit unfortunate that we have to extract and parse the request data
# from the environ so eagerly, but there are a few good reasons for this.
#
- # We might be in a situation where the scope/hub never gets torn down
+ # We might be in a situation where the scope never gets torn down
# properly. In that case we will have an unnecessary strong reference to
# all objects in the environ (some of which may take a lot of memory) when
# we're really just interested in a few of them.
diff --git a/sentry_sdk/metrics.py b/sentry_sdk/metrics.py
index 1e4f5a532e..75ba24a6b6 100644
--- a/sentry_sdk/metrics.py
+++ b/sentry_sdk/metrics.py
@@ -6,12 +6,12 @@
import threading
import time
import zlib
+from abc import ABC, abstractmethod
from contextlib import contextmanager
-from datetime import datetime
+from datetime import datetime, timezone
from functools import wraps, partial
import sentry_sdk
-from sentry_sdk._compat import text_type, utc_from_timestamp, iteritems
from sentry_sdk.utils import (
ContextVar,
now,
@@ -68,28 +68,21 @@
_sanitize_unit = partial(re.compile(r"[^a-zA-Z0-9_]+").sub, "")
_sanitize_metric_key = partial(re.compile(r"[^a-zA-Z0-9_\-.]+").sub, "_")
_sanitize_tag_key = partial(re.compile(r"[^a-zA-Z0-9_\-.\/]+").sub, "")
-_TAG_VALUE_SANITIZATION_TABLE = {
- "\n": "\\n",
- "\r": "\\r",
- "\t": "\\t",
- "\\": "\\\\",
- "|": "\\u{7c}",
- ",": "\\u{2c}",
-}
def _sanitize_tag_value(value):
# type: (str) -> str
- return "".join(
- [
- (
- _TAG_VALUE_SANITIZATION_TABLE[char]
- if char in _TAG_VALUE_SANITIZATION_TABLE
- else char
- )
- for char in value
- ]
+ table = str.maketrans(
+ {
+ "\n": "\\n",
+ "\r": "\\r",
+ "\t": "\\t",
+ "\\": "\\\\",
+ "|": "\\u{7c}",
+ ",": "\\u{2c}",
+ }
)
+ return value.translate(table)
def get_code_location(stacklevel):
@@ -132,23 +125,29 @@ def new_func(*args, **kwargs):
return new_func
-class Metric(object):
+class Metric(ABC):
__slots__ = ()
+ @abstractmethod
+ def __init__(self, first):
+ # type: (MetricValue) -> None
+ pass
+
@property
+ @abstractmethod
def weight(self):
- # type: (...) -> int
- raise NotImplementedError()
+ # type: () -> int
+ pass
- def add(
- self, value # type: MetricValue
- ):
- # type: (...) -> None
- raise NotImplementedError()
+ @abstractmethod
+ def add(self, value):
+ # type: (MetricValue) -> None
+ pass
+ @abstractmethod
def serialize_value(self):
- # type: (...) -> Iterable[FlushedMetricValue]
- raise NotImplementedError()
+ # type: () -> Iterable[FlushedMetricValue]
+ pass
class CounterMetric(Metric):
@@ -285,13 +284,13 @@ def _encode_metrics(flushable_buckets):
out = io.BytesIO()
_write = out.write
- # Note on sanetization: we intentionally sanetize in emission (serialization)
+ # Note on sanitization: we intentionally sanitize in emission (serialization)
# and not during aggregation for performance reasons. This means that the
# envelope can in fact have duplicate buckets stored. This is acceptable for
# relay side emission and should not happen commonly.
for timestamp, buckets in flushable_buckets:
- for bucket_key, metric in iteritems(buckets):
+ for bucket_key, metric in buckets.items():
metric_type, metric_name, metric_unit, metric_tags = bucket_key
metric_name = _sanitize_metric_key(metric_name)
metric_unit = _sanitize_unit(metric_unit)
@@ -349,7 +348,7 @@ def _encode_locations(timestamp, code_locations):
"g": GaugeMetric,
"d": DistributionMetric,
"s": SetMetric,
-}
+} # type: dict[MetricType, type[Metric]]
# some of these are dumb
TIMING_FUNCTIONS = {
@@ -364,7 +363,7 @@ def _encode_locations(timestamp, code_locations):
}
-class LocalAggregator(object):
+class LocalAggregator:
__slots__ = ("_measurements",)
def __init__(self):
@@ -418,7 +417,7 @@ def to_json(self):
return rv
-class MetricsAggregator(object):
+class MetricsAggregator:
ROLLUP_IN_SECONDS = 10.0
MAX_WEIGHT = 100000
FLUSHER_SLEEP_TIME = 5.0
@@ -512,14 +511,14 @@ def _flushable_buckets(self):
self._force_flush = False
else:
flushable_buckets = []
- for buckets_timestamp, buckets in iteritems(self.buckets):
+ for buckets_timestamp, buckets in self.buckets.items():
# If the timestamp of the bucket is newer that the rollup we want to skip it.
if buckets_timestamp <= cutoff:
flushable_buckets.append((buckets_timestamp, buckets))
# We will clear the elements while holding the lock, in order to avoid requesting it downstream again.
for buckets_timestamp, buckets in flushable_buckets:
- for _, metric in iteritems(buckets):
+ for metric in buckets.values():
weight_to_remove += metric.weight
del self.buckets[buckets_timestamp]
@@ -604,7 +603,7 @@ def record_code_location(
if timestamp is None:
timestamp = time.time()
meta_key = (ty, key, unit)
- start_of_day = utc_from_timestamp(timestamp).replace(
+ start_of_day = datetime.fromtimestamp(timestamp, timezone.utc).replace(
hour=0, minute=0, second=0, microsecond=0, tzinfo=None
)
start_of_day = int(to_timestamp(start_of_day))
@@ -620,7 +619,7 @@ def record_code_location(
)
@metrics_noop
- def need_code_loation(
+ def need_code_location(
self,
ty, # type: MetricType
key, # type: str
@@ -631,7 +630,7 @@ def need_code_loation(
if self._enable_code_locations:
return False
meta_key = (ty, key, unit)
- start_of_day = utc_from_timestamp(timestamp).replace(
+ start_of_day = datetime.fromtimestamp(timestamp, timezone.utc).replace(
hour=0, minute=0, second=0, microsecond=0, tzinfo=None
)
start_of_day = int(to_timestamp(start_of_day))
@@ -672,7 +671,7 @@ def _emit(
encoded_metrics = _encode_metrics(flushable_buckets)
envelope.add_item(Item(payload=encoded_metrics, type="statsd"))
- for timestamp, locations in iteritems(code_locations):
+ for timestamp, locations in code_locations.items():
encoded_locations = _encode_locations(timestamp, locations)
envelope.add_item(Item(payload=encoded_locations, type="metric_meta"))
@@ -690,14 +689,14 @@ def _serialize_tags(
return ()
rv = []
- for key, value in iteritems(tags):
+ for key, value in tags.items():
# If the value is a collection, we want to flatten it.
if isinstance(value, (list, tuple)):
for inner_value in value:
if inner_value is not None:
- rv.append((key, text_type(inner_value)))
+ rv.append((key, str(inner_value)))
elif value is not None:
- rv.append((key, text_type(value)))
+ rv.append((key, str(value)))
# It's very important to sort the tags in order to obtain the
# same bucket key.
@@ -741,7 +740,7 @@ def _get_aggregator_and_update_tags(key, value, unit, tags):
updated_tags.setdefault("release", client.options["release"])
updated_tags.setdefault("environment", client.options["environment"])
- scope = hub.scope
+ scope = sentry_sdk.Scope.get_current_scope()
local_aggregator = None
# We go with the low-level API here to access transaction information as
@@ -788,7 +787,7 @@ def increment(
incr = increment
-class _Timing(object):
+class _Timing:
def __init__(
self,
key, # type: str
diff --git a/sentry_sdk/monitor.py b/sentry_sdk/monitor.py
index 71ca5e6c31..f94e0d4e0d 100644
--- a/sentry_sdk/monitor.py
+++ b/sentry_sdk/monitor.py
@@ -13,7 +13,7 @@
MAX_DOWNSAMPLE_FACTOR = 10
-class Monitor(object):
+class Monitor:
"""
Performs health checks in a separate thread once every interval seconds
and updates the internal state. Other parts of the SDK only read this state
diff --git a/sentry_sdk/profiler.py b/sentry_sdk/profiler.py
index da5a4a8228..1da4202d07 100644
--- a/sentry_sdk/profiler.py
+++ b/sentry_sdk/profiler.py
@@ -33,10 +33,11 @@
import threading
import time
import uuid
+from abc import ABC, abstractmethod
from collections import deque
import sentry_sdk
-from sentry_sdk._compat import PY33, PY311
+from sentry_sdk._compat import PY311
from sentry_sdk._lru_cache import LRUCache
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.utils import (
@@ -161,8 +162,14 @@ def has_profiling_enabled(options):
return True
profiles_sample_rate = options["_experiments"].get("profiles_sample_rate")
- if profiles_sample_rate is not None and profiles_sample_rate > 0:
- return True
+ if profiles_sample_rate is not None:
+ logger.warning(
+ "_experiments['profiles_sample_rate'] is deprecated. "
+ "Please use the non-experimental profiles_sample_rate option "
+ "directly."
+ )
+ if profiles_sample_rate > 0:
+ return True
return False
@@ -175,10 +182,6 @@ def setup_profiler(options):
logger.debug("[Profiling] Profiler is already setup")
return False
- if not PY33:
- logger.warn("[Profiling] Profiler requires Python >= 3.3")
- return False
-
frequency = DEFAULT_SAMPLING_FREQUENCY
if is_gevent():
@@ -193,10 +196,13 @@ def setup_profiler(options):
if options.get("profiler_mode") is not None:
profiler_mode = options["profiler_mode"]
else:
- profiler_mode = (
- options.get("_experiments", {}).get("profiler_mode")
- or default_profiler_mode
- )
+ profiler_mode = options.get("_experiments", {}).get("profiler_mode")
+ if profiler_mode is not None:
+ logger.warning(
+ "_experiments['profiler_mode'] is deprecated. Please use the "
+ "non-experimental profiler_mode option directly."
+ )
+ profiler_mode = profiler_mode or default_profiler_mode
if (
profiler_mode == ThreadScheduler.mode
@@ -375,7 +381,7 @@ def get_frame_name(frame):
MAX_PROFILE_DURATION_NS = int(3e10) # 30 seconds
-class Profile(object):
+class Profile:
def __init__(
self,
transaction, # type: sentry_sdk.tracing.Transaction
@@ -455,11 +461,8 @@ def _set_initial_sampling_decision(self, sampling_context):
self.sampled = False
return
- hub = self.hub or sentry_sdk.Hub.current
- client = hub.client
-
- # The client is None, so we can't get the sample rate.
- if client is None:
+ client = sentry_sdk.Scope.get_client()
+ if not client.is_active():
self.sampled = False
return
@@ -522,18 +525,15 @@ def stop(self):
assert self.scheduler, "No scheduler specified"
logger.debug("[Profiling] Stopping profile")
self.active = False
- self.scheduler.stop_profiling(self)
self.stop_ns = nanosecond_time()
def __enter__(self):
# type: () -> Profile
- hub = self.hub or sentry_sdk.Hub.current
-
- _, scope = hub._stack[-1]
+ scope = sentry_sdk.scope.Scope.get_isolation_scope()
old_profile = scope.profile
scope.profile = self
- self._context_manager_state = (hub, scope, old_profile)
+ self._context_manager_state = (scope, old_profile)
self.start()
@@ -543,7 +543,7 @@ def __exit__(self, ty, value, tb):
# type: (Optional[Any], Optional[Any], Optional[Any]) -> None
self.stop()
- _, scope, old_profile = self._context_manager_state
+ scope, old_profile = self._context_manager_state
del self._context_manager_state
scope.profile = old_profile
@@ -665,9 +665,8 @@ def to_json(self, event_opt, options):
def valid(self):
# type: () -> bool
- hub = self.hub or sentry_sdk.Hub.current
- client = hub.client
- if client is None:
+ client = sentry_sdk.Scope.get_client()
+ if not client.is_active():
return False
if not has_profiling_enabled(client.options):
@@ -691,7 +690,7 @@ def valid(self):
return True
-class Scheduler(object):
+class Scheduler(ABC):
mode = "unknown" # type: ProfilerMode
def __init__(self, frequency):
@@ -713,27 +712,30 @@ def __exit__(self, ty, value, tb):
# type: (Optional[Any], Optional[Any], Optional[Any]) -> None
self.teardown()
+ @abstractmethod
def setup(self):
# type: () -> None
- raise NotImplementedError
+ pass
+ @abstractmethod
def teardown(self):
# type: () -> None
- raise NotImplementedError
+ pass
def ensure_running(self):
# type: () -> None
- raise NotImplementedError
+ """
+ Ensure the scheduler is running. By default, this method is a no-op.
+ The method should be overridden by any implementation for which it is
+ relevant.
+ """
+ return None
def start_profiling(self, profile):
# type: (Profile) -> None
self.ensure_running()
self.new_profiles.append(profile)
- def stop_profiling(self, profile):
- # type: (Profile) -> None
- pass
-
def make_sampler(self):
# type: () -> Callable[..., None]
cwd = os.getcwd()
@@ -817,7 +819,7 @@ class ThreadScheduler(Scheduler):
def __init__(self, frequency):
# type: (int) -> None
- super(ThreadScheduler, self).__init__(frequency=frequency)
+ super().__init__(frequency=frequency)
# used to signal to the thread that it should stop
self.running = False
@@ -917,7 +919,7 @@ def __init__(self, frequency):
if ThreadPool is None:
raise ValueError("Profiler mode: {} is not available".format(self.mode))
- super(GeventScheduler, self).__init__(frequency=frequency)
+ super().__init__(frequency=frequency)
# used to signal to the thread that it should stop
self.running = False
diff --git a/sentry_sdk/scope.py b/sentry_sdk/scope.py
index cd974e4a52..58686d56ef 100644
--- a/sentry_sdk/scope.py
+++ b/sentry_sdk/scope.py
@@ -1,21 +1,22 @@
+import os
+import sys
from copy import copy
from collections import deque
+from contextlib import contextmanager
+from enum import Enum
+from datetime import datetime, timezone
+from functools import wraps
from itertools import chain
-import os
-import sys
-import uuid
from sentry_sdk.attachments import Attachment
-from sentry_sdk._compat import datetime_utcnow
-from sentry_sdk.consts import FALSE_VALUES, INSTRUMENTER
-from sentry_sdk._functools import wraps
+from sentry_sdk.consts import DEFAULT_MAX_BREADCRUMBS, FALSE_VALUES, INSTRUMENTER
from sentry_sdk.profiler import Profile
from sentry_sdk.session import Session
from sentry_sdk.tracing_utils import (
Baggage,
- extract_sentrytrace_data,
has_tracing_enabled,
normalize_incoming_data,
+ PropagationContext,
)
from sentry_sdk.tracing import (
BAGGAGE_HEADER_NAME,
@@ -26,10 +27,11 @@
)
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.utils import (
+ capture_internal_exceptions,
+ ContextVar,
event_from_exception,
exc_info_from_error,
logger,
- capture_internal_exceptions,
)
if TYPE_CHECKING:
@@ -43,10 +45,13 @@
from typing import Iterator
from typing import List
from typing import Optional
+ from typing import ParamSpec
from typing import Tuple
from typing import TypeVar
from typing import Union
+ from typing_extensions import Unpack
+
from sentry_sdk._types import (
Breadcrumb,
BreadcrumbHint,
@@ -56,18 +61,69 @@
ExcInfo,
Hint,
LogLevelStr,
+ SamplingContext,
Type,
)
+ from sentry_sdk.tracing import TransactionKwargs
+
import sentry_sdk
+ P = ParamSpec("P")
+ R = TypeVar("R")
+
F = TypeVar("F", bound=Callable[..., Any])
T = TypeVar("T")
+# Holds data that will be added to **all** events sent by this process.
+# In case this is an HTTP server (think web framework) with multiple users
+# the data will be added to events of all users.
+# Typically this is used for process-wide data such as the release.
+_global_scope = None # type: Optional[Scope]
+
+# Holds data for the active request.
+# This is used to isolate data for different requests or users.
+# The isolation scope is usually created by integrations, but may also
+# be created manually.
+_isolation_scope = ContextVar("isolation_scope", default=None)
+
+# Holds data for the active span.
+# This can be used to manually add additional data to a span.
+_current_scope = ContextVar("current_scope", default=None)
+
global_event_processors = [] # type: List[EventProcessor]
+class ScopeType(Enum):
+ CURRENT = "current"
+ ISOLATION = "isolation"
+ GLOBAL = "global"
+ MERGED = "merged"
+
+
+class _ScopeManager:
+ def __init__(self, hub=None):
+ # type: (Optional[Any]) -> None
+ self._old_scopes = [] # type: List[Scope]
+
+ def __enter__(self):
+ # type: () -> Scope
+ isolation_scope = Scope.get_isolation_scope()
+
+ self._old_scopes.append(isolation_scope)
+
+ forked_scope = isolation_scope.fork()
+ _isolation_scope.set(forked_scope)
+
+ return forked_scope
+
+ def __exit__(self, exc_type, exc_value, tb):
+ # type: (Any, Any, Any) -> None
+ old_scope = self._old_scopes.pop()
+ _isolation_scope.set(old_scope)
+
+
def add_global_event_processor(processor):
# type: (EventProcessor) -> None
global_event_processors.append(processor)
@@ -94,28 +150,6 @@ def wrapper(self, *args, **kwargs):
return wrapper # type: ignore
-def _merge_scopes(base, scope_change, scope_kwargs):
- # type: (Scope, Optional[Any], Dict[str, Any]) -> Scope
- if scope_change and scope_kwargs:
- raise TypeError("cannot provide scope and kwargs")
-
- if scope_change is not None:
- final_scope = copy(base)
- if callable(scope_change):
- scope_change(final_scope)
- else:
- final_scope.update_from_scope(scope_change)
-
- elif scope_kwargs:
- final_scope = copy(base)
- final_scope.update_from_kwargs(**scope_kwargs)
-
- else:
- final_scope = base
-
- return final_scope
-
-
class Scope(object):
"""The scope holds extra information that should be sent with all
events that belong to it.
@@ -149,21 +183,226 @@ class Scope(object):
"_force_auto_session_tracking",
"_profile",
"_propagation_context",
+ "client",
+ "_type",
)
- def __init__(self):
- # type: () -> None
+ def __init__(self, ty=None, client=None):
+ # type: (Optional[ScopeType], Optional[sentry_sdk.Client]) -> None
+ self._type = ty
+
self._event_processors = [] # type: List[EventProcessor]
self._error_processors = [] # type: List[ErrorProcessor]
self._name = None # type: Optional[str]
- self._propagation_context = None # type: Optional[Dict[str, Any]]
+ self._propagation_context = None # type: Optional[PropagationContext]
+
+ self.client = NonRecordingClient() # type: sentry_sdk.client.BaseClient
+
+ if client is not None:
+ self.set_client(client)
self.clear()
incoming_trace_information = self._load_trace_data_from_env()
self.generate_propagation_context(incoming_data=incoming_trace_information)
+ def __copy__(self):
+ # type: () -> Scope
+ """
+ Returns a copy of this scope.
+ This also creates a copy of all referenced data structures.
+ """
+ rv = object.__new__(self.__class__) # type: Scope
+
+ rv._type = self._type
+ rv._level = self._level
+ rv._name = self._name
+ rv._fingerprint = self._fingerprint
+ rv._transaction = self._transaction
+ rv._transaction_info = dict(self._transaction_info)
+ rv._user = self._user
+
+ rv._tags = dict(self._tags)
+ rv._contexts = dict(self._contexts)
+ rv._extras = dict(self._extras)
+
+ rv._breadcrumbs = copy(self._breadcrumbs)
+ rv._event_processors = list(self._event_processors)
+ rv._error_processors = list(self._error_processors)
+ rv._propagation_context = self._propagation_context
+
+ rv._should_capture = self._should_capture
+ rv._span = self._span
+ rv._session = self._session
+ rv._force_auto_session_tracking = self._force_auto_session_tracking
+ rv._attachments = list(self._attachments)
+
+ rv._profile = self._profile
+
+ return rv
+
+ @classmethod
+ def get_current_scope(cls):
+ # type: () -> Scope
+ """
+ .. versionadded:: 2.0.0
+
+ Returns the current scope.
+ """
+ current_scope = _current_scope.get()
+ if current_scope is None:
+ current_scope = Scope(ty=ScopeType.CURRENT)
+ _current_scope.set(current_scope)
+
+ return current_scope
+
+ @classmethod
+ def set_current_scope(cls, new_current_scope):
+ # type: (Scope) -> None
+ """
+ .. versionadded:: 2.0.0
+
+ Sets the given scope as the new current scope overwriting the existing current scope.
+ :param new_current_scope: The scope to set as the new current scope.
+ """
+ _current_scope.set(new_current_scope)
+
+ @classmethod
+ def get_isolation_scope(cls):
+ # type: () -> Scope
+ """
+ .. versionadded:: 2.0.0
+
+ Returns the isolation scope.
+ """
+ isolation_scope = _isolation_scope.get()
+ if isolation_scope is None:
+ isolation_scope = Scope(ty=ScopeType.ISOLATION)
+ _isolation_scope.set(isolation_scope)
+
+ return isolation_scope
+
+ @classmethod
+ def set_isolation_scope(cls, new_isolation_scope):
+ # type: (Scope) -> None
+ """
+ .. versionadded:: 2.0.0
+
+ Sets the given scope as the new isolation scope overwriting the existing isolation scope.
+ :param new_isolation_scope: The scope to set as the new isolation scope.
+ """
+ _isolation_scope.set(new_isolation_scope)
+
+ @classmethod
+ def get_global_scope(cls):
+ # type: () -> Scope
+ """
+ .. versionadded:: 2.0.0
+
+ Returns the global scope.
+ """
+ global _global_scope
+ if _global_scope is None:
+ _global_scope = Scope(ty=ScopeType.GLOBAL)
+
+ return _global_scope
+
+ def _merge_scopes(self, additional_scope=None, additional_scope_kwargs=None):
+ # type: (Optional[Scope], Optional[Dict[str, Any]]) -> Scope
+ """
+ Merges global, isolation and current scope into a new scope and
+ adds the given additional scope or additional scope kwargs to it.
+ """
+ if additional_scope and additional_scope_kwargs:
+ raise TypeError("cannot provide scope and kwargs")
+
+ final_scope = copy(_global_scope) if _global_scope is not None else Scope()
+ final_scope._type = ScopeType.MERGED
+
+ isolation_scope = _isolation_scope.get()
+ if isolation_scope is not None:
+ final_scope.update_from_scope(isolation_scope)
+
+ current_scope = _current_scope.get()
+ if current_scope is not None:
+ final_scope.update_from_scope(current_scope)
+
+ if self != current_scope and self != isolation_scope:
+ final_scope.update_from_scope(self)
+
+ if additional_scope is not None:
+ if callable(additional_scope):
+ additional_scope(final_scope)
+ else:
+ final_scope.update_from_scope(additional_scope)
+
+ elif additional_scope_kwargs:
+ final_scope.update_from_kwargs(**additional_scope_kwargs)
+
+ return final_scope
+
+ @classmethod
+ def get_client(cls):
+ # type: () -> sentry_sdk.client.BaseClient
+ """
+ .. versionadded:: 2.0.0
+
+ Returns the currently used :py:class:`sentry_sdk.Client`.
+ This checks the current scope, the isolation scope and the global scope for a client.
+ If no client is available a :py:class:`sentry_sdk.client.NonRecordingClient` is returned.
+ """
+ current_scope = _current_scope.get()
+ try:
+ client = current_scope.client
+ except AttributeError:
+ client = None
+
+ if client is not None and client.is_active():
+ return client
+
+ isolation_scope = _isolation_scope.get()
+ try:
+ client = isolation_scope.client
+ except AttributeError:
+ client = None
+
+ if client is not None and client.is_active():
+ return client
+
+ try:
+ client = _global_scope.client # type: ignore
+ except AttributeError:
+ client = None
+
+ if client is not None and client.is_active():
+ return client
+
+ return NonRecordingClient()
+
+ def set_client(self, client=None):
+ # type: (Optional[sentry_sdk.client.BaseClient]) -> None
+ """
+ .. versionadded:: 2.0.0
+
+ Sets the client for this scope.
+
+ :param client: The client to use in this scope.
+ If `None` the client of the scope will be replaced by a :py:class:`sentry_sdk.NonRecordingClient`.
+
+ """
+ self.client = client if client is not None else NonRecordingClient()
+
+ def fork(self):
+ # type: () -> Scope
+ """
+ .. versionadded:: 2.0.0
+
+ Returns a fork of this scope.
+ """
+ forked_scope = copy(self)
+ return forked_scope
+
def _load_trace_data_from_env(self):
# type: () -> Optional[Dict[str, str]]
"""
@@ -191,76 +430,28 @@ def _load_trace_data_from_env(self):
return incoming_trace_information or None
- def _extract_propagation_context(self, data):
- # type: (Dict[str, Any]) -> Optional[Dict[str, Any]]
- context = {} # type: Dict[str, Any]
- normalized_data = normalize_incoming_data(data)
-
- baggage_header = normalized_data.get(BAGGAGE_HEADER_NAME)
- if baggage_header:
- context["dynamic_sampling_context"] = Baggage.from_incoming_header(
- baggage_header
- ).dynamic_sampling_context()
-
- sentry_trace_header = normalized_data.get(SENTRY_TRACE_HEADER_NAME)
- if sentry_trace_header:
- sentrytrace_data = extract_sentrytrace_data(sentry_trace_header)
- if sentrytrace_data is not None:
- context.update(sentrytrace_data)
-
- only_baggage_no_sentry_trace = (
- "dynamic_sampling_context" in context and "trace_id" not in context
- )
- if only_baggage_no_sentry_trace:
- context.update(self._create_new_propagation_context())
-
- if context:
- if not context.get("span_id"):
- context["span_id"] = uuid.uuid4().hex[16:]
-
- return context
-
- return None
-
- def _create_new_propagation_context(self):
- # type: () -> Dict[str, Any]
- return {
- "trace_id": uuid.uuid4().hex,
- "span_id": uuid.uuid4().hex[16:],
- "parent_span_id": None,
- "dynamic_sampling_context": None,
- }
-
def set_new_propagation_context(self):
# type: () -> None
"""
Creates a new propagation context and sets it as `_propagation_context`. Overwriting existing one.
"""
- self._propagation_context = self._create_new_propagation_context()
- logger.debug(
- "[Tracing] Create new propagation context: %s",
- self._propagation_context,
- )
+ self._propagation_context = PropagationContext()
def generate_propagation_context(self, incoming_data=None):
# type: (Optional[Dict[str, str]]) -> None
"""
- Makes sure `_propagation_context` is set.
- If there is `incoming_data` overwrite existing `_propagation_context`.
- if there is no `incoming_data` create new `_propagation_context`, but do NOT overwrite if already existing.
+ Makes sure the propagation context is set on the scope.
+ If there is `incoming_data` overwrite existing propagation context.
+ If there is no `incoming_data` create new propagation context, but do NOT overwrite if already existing.
"""
if incoming_data:
- context = self._extract_propagation_context(incoming_data)
+ propagation_context = PropagationContext.from_incoming_data(incoming_data)
+ if propagation_context is not None:
+ self._propagation_context = propagation_context
- if context is not None:
- self._propagation_context = context
- logger.debug(
- "[Tracing] Extracted propagation context from incoming data: %s",
- self._propagation_context,
- )
-
- if self._propagation_context is None:
- self.set_new_propagation_context()
+ if self._type != ScopeType.CURRENT:
+ if self._propagation_context is None:
+ self.set_new_propagation_context()
def get_dynamic_sampling_context(self):
# type: () -> Optional[Dict[str, str]]
@@ -273,11 +464,11 @@ def get_dynamic_sampling_context(self):
baggage = self.get_baggage()
if baggage is not None:
- self._propagation_context["dynamic_sampling_context"] = (
+ self._propagation_context.dynamic_sampling_context = (
baggage.dynamic_sampling_context()
)
- return self._propagation_context["dynamic_sampling_context"]
+ return self._propagation_context.dynamic_sampling_context
def get_traceparent(self, *args, **kwargs):
# type: (Any, Any) -> Optional[str]
@@ -285,47 +476,47 @@ def get_traceparent(self, *args, **kwargs):
Returns the Sentry "sentry-trace" header (aka the traceparent) from the
currently active span or the scopes Propagation Context.
"""
- client = kwargs.pop("client", None)
+ client = Scope.get_client()
# If we have an active span, return traceparent from there
- if (
- client is not None
- and has_tracing_enabled(client.options)
- and self.span is not None
- ):
+ if has_tracing_enabled(client.options) and self.span is not None:
return self.span.to_traceparent()
- if self._propagation_context is None:
- return None
+ # If this scope has a propagation context, return traceparent from there
+ if self._propagation_context is not None:
+ traceparent = "%s-%s" % (
+ self._propagation_context.trace_id,
+ self._propagation_context.span_id,
+ )
+ return traceparent
- traceparent = "%s-%s" % (
- self._propagation_context["trace_id"],
- self._propagation_context["span_id"],
- )
- return traceparent
+ # Fall back to isolation scope's traceparent. It always has one.
+ return Scope.get_isolation_scope().get_traceparent()
def get_baggage(self, *args, **kwargs):
# type: (Any, Any) -> Optional[Baggage]
- client = kwargs.pop("client", None)
+ """
+ Returns the Sentry "baggage" header containing trace information from the
+ currently active span or the scopes Propagation Context.
+ """
+ client = Scope.get_client()
# If we have an active span, return baggage from there
- if (
- client is not None
- and has_tracing_enabled(client.options)
- and self.span is not None
- ):
+ if has_tracing_enabled(client.options) and self.span is not None:
return self.span.to_baggage()
- if self._propagation_context is None:
- return None
+ # If this scope has a propagation context, return baggage from there
+ if self._propagation_context is not None:
+ dynamic_sampling_context = (
+ self._propagation_context.dynamic_sampling_context
+ )
+ if dynamic_sampling_context is None:
+ return Baggage.from_options(self)
+ else:
+ return Baggage(dynamic_sampling_context)
- dynamic_sampling_context = self._propagation_context.get(
- "dynamic_sampling_context"
- )
- if dynamic_sampling_context is None:
- return Baggage.from_options(self)
- else:
- return Baggage(dynamic_sampling_context)
+ # Fall back to isolation scope's baggage. It always has one.
+ return Scope.get_isolation_scope().get_baggage()
def get_trace_context(self):
# type: () -> Any
@@ -336,9 +527,9 @@ def get_trace_context(self):
return None
trace_context = {
- "trace_id": self._propagation_context["trace_id"],
- "span_id": self._propagation_context["span_id"],
- "parent_span_id": self._propagation_context["parent_span_id"],
+ "trace_id": self._propagation_context.trace_id,
+ "span_id": self._propagation_context.span_id,
+ "parent_span_id": self._propagation_context.parent_span_id,
"dynamic_sampling_context": self.get_dynamic_sampling_context(),
} # type: Dict[str, Any]
@@ -356,18 +547,16 @@ def trace_propagation_meta(self, *args, **kwargs):
"The parameter `span` in trace_propagation_meta() is deprecated and will be removed in the future."
)
- client = kwargs.pop("client", None)
-
meta = ""
- sentry_trace = self.get_traceparent(client=client)
+ sentry_trace = self.get_traceparent()
if sentry_trace is not None:
meta += '' % (
SENTRY_TRACE_HEADER_NAME,
sentry_trace,
)
- baggage = self.get_baggage(client=client)
+ baggage = self.get_baggage()
if baggage is not None:
meta += '' % (
BAGGAGE_HEADER_NAME,
@@ -398,21 +587,49 @@ def iter_trace_propagation_headers(self, *args, **kwargs):
from the span representing the request, if available, or the current
span on the scope if not.
"""
- span = kwargs.pop("span", None)
- client = kwargs.pop("client", None)
-
- propagate_traces = client and client.options["propagate_traces"]
- if not propagate_traces:
+ client = Scope.get_client()
+ if not client.options.get("propagate_traces"):
return
+ span = kwargs.pop("span", None)
span = span or self.span
- if client and has_tracing_enabled(client.options) and span is not None:
+ if has_tracing_enabled(client.options) and span is not None:
for header in span.iter_headers():
yield header
else:
- for header in self.iter_headers():
- yield header
+ # If this scope has a propagation context, return headers from there
+ # (it could be that self is neither the current scope nor the isolation scope)
+ if self._propagation_context is not None:
+ for header in self.iter_headers():
+ yield header
+ else:
+ # otherwise try headers from current scope
+ current_scope = Scope.get_current_scope()
+ if current_scope._propagation_context is not None:
+ for header in current_scope.iter_headers():
+ yield header
+ else:
+ # otherwise fall back to headers from isolation scope
+ isolation_scope = Scope.get_isolation_scope()
+ if isolation_scope._propagation_context is not None:
+ for header in isolation_scope.iter_headers():
+ yield header
+
+ def get_active_propagation_context(self):
+ # type: () -> Optional[PropagationContext]
+ if self._propagation_context is not None:
+ return self._propagation_context
+
+ current_scope = Scope.get_current_scope()
+ if current_scope._propagation_context is not None:
+ return current_scope._propagation_context
+
+ isolation_scope = Scope.get_isolation_scope()
+ if isolation_scope._propagation_context is not None:
+ return isolation_scope._propagation_context
+
+ return None
def clear(self):
# type: () -> None
@@ -429,7 +646,7 @@ def clear(self):
self._attachments = [] # type: List[Attachment]
self.clear_breadcrumbs()
- self._should_capture = True
+ self._should_capture = True # type: bool
self._span = None # type: Optional[Span]
self._session = None # type: Optional[Session]
@@ -537,8 +754,9 @@ def set_user(self, value):
# type: (Optional[Dict[str, Any]]) -> None
"""Sets a user for the scope."""
self._user = value
- if self._session is not None:
- self._session.update(user=value)
+ session = Scope.get_isolation_scope()._session
+ if session is not None:
+ session.update(user=value)
@property
def span(self):
@@ -659,12 +877,14 @@ def add_breadcrumb(self, crumb=None, hint=None, **kwargs):
:param hint: An optional value that can be used by `before_breadcrumb`
to customize the breadcrumbs that are emitted.
"""
- client = kwargs.pop("client", None)
- if client is None:
+ client = Scope.get_client()
+
+ if not client.is_active():
+ logger.info("Dropped breadcrumb because no client bound")
return
before_breadcrumb = client.options.get("before_breadcrumb")
- max_breadcrumbs = client.options.get("max_breadcrumbs")
+ max_breadcrumbs = client.options.get("max_breadcrumbs", DEFAULT_MAX_BREADCRUMBS)
crumb = dict(crumb or ()) # type: Breadcrumb
crumb.update(kwargs)
@@ -674,7 +894,7 @@ def add_breadcrumb(self, crumb=None, hint=None, **kwargs):
hint = dict(hint or ()) # type: Hint
if crumb.get("timestamp") is None:
- crumb["timestamp"] = datetime_utcnow()
+ crumb["timestamp"] = datetime.now(timezone.utc)
if crumb.get("type") is None:
crumb["type"] = "default"
@@ -692,9 +912,13 @@ def add_breadcrumb(self, crumb=None, hint=None, **kwargs):
self._breadcrumbs.popleft()
def start_transaction(
- self, transaction=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs
+ self,
+ transaction=None,
+ instrumenter=INSTRUMENTER.SENTRY,
+ custom_sampling_context=None,
+ **kwargs
):
- # type: (Optional[Transaction], str, Any) -> Union[Transaction, NoOpSpan]
+ # type: (Optional[Transaction], str, Optional[SamplingContext], Unpack[TransactionKwargs]) -> Union[Transaction, NoOpSpan]
"""
Start and return a transaction.
@@ -717,22 +941,32 @@ def start_transaction(
When the transaction is finished, it will be sent to Sentry with all its
finished child spans.
- For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Transaction`.
+ :param transaction: The transaction to start. If omitted, we create and
+ start a new transaction.
+ :param instrumenter: This parameter is meant for internal use only.
+ :param custom_sampling_context: The transaction's custom sampling context.
+ :param kwargs: Optional keyword arguments to be passed to the Transaction
+ constructor. See :py:class:`sentry_sdk.tracing.Transaction` for
+ available arguments.
"""
- hub = kwargs.pop("hub", None)
- client = kwargs.pop("client", None)
+ kwargs.setdefault("scope", self)
+
+ client = Scope.get_client()
- configuration_instrumenter = client and client.options["instrumenter"]
+ configuration_instrumenter = client.options["instrumenter"]
if instrumenter != configuration_instrumenter:
return NoOpSpan()
- custom_sampling_context = kwargs.pop("custom_sampling_context", {})
+ custom_sampling_context = custom_sampling_context or {}
+
+ # kwargs at this point has type TransactionKwargs, since we have removed
+ # the client and custom_sampling_context from it.
+ transaction_kwargs = kwargs # type: TransactionKwargs
# if we haven't been given a transaction, make one
if transaction is None:
- kwargs.setdefault("hub", hub)
- transaction = Transaction(**kwargs)
+ transaction = Transaction(**transaction_kwargs)
# use traces_sample_rate, traces_sampler, and/or inheritance to make a
# sampling decision
@@ -743,21 +977,19 @@ def start_transaction(
sampling_context.update(custom_sampling_context)
transaction._set_initial_sampling_decision(sampling_context=sampling_context)
- profile = Profile(transaction, hub=hub)
+ profile = Profile(transaction)
profile._set_initial_sampling_decision(sampling_context=sampling_context)
# we don't bother to keep spans if we already know we're not going to
# send the transaction
if transaction.sampled:
- max_spans = (
- client and client.options["_experiments"].get("max_spans")
- ) or 1000
+ max_spans = (client.options["_experiments"].get("max_spans")) or 1000
transaction.init_span_recorder(maxlen=max_spans)
return transaction
- def start_span(self, span=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
- # type: (Optional[Span], str, Any) -> Span
+ def start_span(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
+ # type: (str, Any) -> Span
"""
Start a span whose parent is the currently active span or transaction, if any.
@@ -773,56 +1005,32 @@ def start_span(self, span=None, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
For supported `**kwargs` see :py:class:`sentry_sdk.tracing.Span`.
"""
- client = kwargs.get("client", None)
+ with new_scope():
+ kwargs.setdefault("scope", self)
- configuration_instrumenter = client and client.options["instrumenter"]
+ client = Scope.get_client()
- if instrumenter != configuration_instrumenter:
- return NoOpSpan()
+ configuration_instrumenter = client.options["instrumenter"]
- # THIS BLOCK IS DEPRECATED
- # TODO: consider removing this in a future release.
- # This is for backwards compatibility with releases before
- # start_transaction existed, to allow for a smoother transition.
- if isinstance(span, Transaction) or "transaction" in kwargs:
- deprecation_msg = (
- "Deprecated: use start_transaction to start transactions and "
- "Transaction.start_child to start spans."
- )
+ if instrumenter != configuration_instrumenter:
+ return NoOpSpan()
- if isinstance(span, Transaction):
- logger.warning(deprecation_msg)
- return self.start_transaction(span, **kwargs)
+ # get current span or transaction
+ span = self.span or Scope.get_isolation_scope().span
- if "transaction" in kwargs:
- logger.warning(deprecation_msg)
- name = kwargs.pop("transaction")
- return self.start_transaction(name=name, **kwargs)
+ if span is None:
+ # New spans get the `trace_id` from the scope
+ if "trace_id" not in kwargs:
+ propagation_context = self.get_active_propagation_context()
+ if propagation_context is not None:
+ kwargs["trace_id"] = propagation_context.trace_id
- # THIS BLOCK IS DEPRECATED
- # We do not pass a span into start_span in our code base, so I deprecate this.
- if span is not None:
- deprecation_msg = "Deprecated: passing a span into `start_span` is deprecated and will be removed in the future."
- logger.warning(deprecation_msg)
- return span
-
- kwargs.pop("client")
-
- active_span = self.span
- if active_span is not None:
- new_child_span = active_span.start_child(**kwargs)
- return new_child_span
-
- # If there is already a trace_id in the propagation context, use it.
- # This does not need to be done for `start_child` above because it takes
- # the trace_id from the parent span.
- if "trace_id" not in kwargs:
- traceparent = self.get_traceparent()
- trace_id = traceparent.split("-")[0] if traceparent else None
- if trace_id is not None:
- kwargs["trace_id"] = trace_id
+ span = Span(**kwargs)
+ else:
+ # Children take `trace_id`` from the parent span.
+ span = span.start_child(**kwargs)
- return Span(**kwargs)
+ return span
def continue_trace(self, environ_or_headers, op=None, name=None, source=None):
# type: (Dict[str, Any], Optional[str], Optional[str], Optional[str]) -> Transaction
@@ -840,19 +1048,17 @@ def continue_trace(self, environ_or_headers, op=None, name=None, source=None):
return transaction
- def capture_event(self, event, hint=None, client=None, scope=None, **scope_kwargs):
- # type: (Event, Optional[Hint], Optional[sentry_sdk.Client], Optional[Scope], Any) -> Optional[str]
+ def capture_event(self, event, hint=None, scope=None, **scope_kwargs):
+ # type: (Event, Optional[Hint], Optional[Scope], Any) -> Optional[str]
"""
Captures an event.
- Merges given scope data and calls :py:meth:`sentry_sdk.Client.capture_event`.
+ Merges given scope data and calls :py:meth:`sentry_sdk.client._Client.capture_event`.
:param event: A ready-made event that can be directly sent to Sentry.
:param hint: Contains metadata about the event that can be read from `before_send`, such as the original exception object or a HTTP request object.
- :param client: The client to use for sending the event to Sentry.
-
:param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
The `scope` and `scope_kwargs` parameters are mutually exclusive.
@@ -860,19 +1066,14 @@ def capture_event(self, event, hint=None, client=None, scope=None, **scope_kwarg
For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
The `scope` and `scope_kwargs` parameters are mutually exclusive.
- :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
+ :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`).
"""
- if client is None:
- return None
-
- scope = _merge_scopes(self, scope, scope_kwargs)
+ scope = self._merge_scopes(scope, scope_kwargs)
- return client.capture_event(event=event, hint=hint, scope=scope)
+ return Scope.get_client().capture_event(event=event, hint=hint, scope=scope)
- def capture_message(
- self, message, level=None, client=None, scope=None, **scope_kwargs
- ):
- # type: (str, Optional[LogLevelStr], Optional[sentry_sdk.Client], Optional[Scope], Any) -> Optional[str]
+ def capture_message(self, message, level=None, scope=None, **scope_kwargs):
+ # type: (str, Optional[LogLevelStr], Optional[Scope], Any) -> Optional[str]
"""
Captures a message.
@@ -880,8 +1081,6 @@ def capture_message(
:param level: If no level is provided, the default level is `info`.
- :param client: The client to use for sending the event to Sentry.
-
:param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
The `scope` and `scope_kwargs` parameters are mutually exclusive.
@@ -889,11 +1088,8 @@ def capture_message(
For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
The `scope` and `scope_kwargs` parameters are mutually exclusive.
- :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
+ :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`).
"""
- if client is None:
- return None
-
if level is None:
level = "info"
@@ -902,16 +1098,14 @@ def capture_message(
"level": level,
} # type: Event
- return self.capture_event(event, client=client, scope=scope, **scope_kwargs)
+ return self.capture_event(event, scope=scope, **scope_kwargs)
- def capture_exception(self, error=None, client=None, scope=None, **scope_kwargs):
- # type: (Optional[Union[BaseException, ExcInfo]], Optional[sentry_sdk.Client], Optional[Scope], Any) -> Optional[str]
+ def capture_exception(self, error=None, scope=None, **scope_kwargs):
+ # type: (Optional[Union[BaseException, ExcInfo]], Optional[Scope], Any) -> Optional[str]
"""Captures an exception.
:param error: An exception to capture. If `None`, `sys.exc_info()` will be used.
- :param client: The client to use for sending the event to Sentry.
-
:param scope: An optional :py:class:`sentry_sdk.Scope` to apply to events.
The `scope` and `scope_kwargs` parameters are mutually exclusive.
@@ -919,22 +1113,19 @@ def capture_exception(self, error=None, client=None, scope=None, **scope_kwargs)
For supported `**scope_kwargs` see :py:meth:`sentry_sdk.Scope.update_from_kwargs`.
The `scope` and `scope_kwargs` parameters are mutually exclusive.
- :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.Client.capture_event`).
+ :returns: An `event_id` if the SDK decided to send the event (see :py:meth:`sentry_sdk.client._Client.capture_event`).
"""
- if client is None:
- return None
-
if error is not None:
exc_info = exc_info_from_error(error)
else:
exc_info = sys.exc_info()
- event, hint = event_from_exception(exc_info, client_options=client.options)
+ event, hint = event_from_exception(
+ exc_info, client_options=Scope.get_client().options
+ )
try:
- return self.capture_event(
- event, hint=hint, client=client, scope=scope, **scope_kwargs
- )
+ return self.capture_event(event, hint=hint, scope=scope, **scope_kwargs)
except Exception:
self._capture_internal_exception(sys.exc_info())
@@ -955,14 +1146,14 @@ def _capture_internal_exception(
def start_session(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Starts a new session."""
- client = kwargs.pop("client", None)
session_mode = kwargs.pop("session_mode", "application")
- self.end_session(client=client)
+ self.end_session()
+ client = Scope.get_client()
self._session = Session(
- release=client.options["release"] if client else None,
- environment=client.options["environment"] if client else None,
+ release=client.options.get("release"),
+ environment=client.options.get("environment"),
user=self._user,
session_mode=session_mode,
)
@@ -970,15 +1161,12 @@ def start_session(self, *args, **kwargs):
def end_session(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Ends the current session if there is one."""
- client = kwargs.pop("client", None)
-
session = self._session
self._session = None
if session is not None:
session.close()
- if client is not None:
- client.capture_session(session)
+ Scope.get_client().capture_session(session)
def stop_auto_session_tracking(self, *args, **kwargs):
# type: (*Any, **Any) -> None
@@ -987,10 +1175,7 @@ def stop_auto_session_tracking(self, *args, **kwargs):
This temporarily session tracking for the current scope when called.
To resume session tracking call `resume_auto_session_tracking`.
"""
- client = kwargs.pop("client", None)
-
- self.end_session(client=client)
-
+ self.end_session()
self._force_auto_session_tracking = False
def resume_auto_session_tracking(self):
@@ -1101,16 +1286,62 @@ def _apply_contexts_to_event(self, event, hint, options):
else:
contexts["trace"] = self.get_trace_context()
- # Add "reply_id" context
- try:
- replay_id = contexts["trace"]["dynamic_sampling_context"]["replay_id"] # type: ignore
- except (KeyError, TypeError):
- replay_id = None
+ def _drop(self, cause, ty):
+ # type: (Any, str) -> Optional[Any]
+ logger.info("%s (%s) dropped event", ty, cause)
+ return None
- if replay_id is not None:
- contexts["replay"] = {
- "replay_id": replay_id,
- }
+ def run_error_processors(self, event, hint):
+ # type: (Event, Hint) -> Optional[Event]
+ """
+ Runs the error processors on the event and returns the modified event.
+ """
+ exc_info = hint.get("exc_info")
+ if exc_info is not None:
+ error_processors = chain(
+ Scope.get_global_scope()._error_processors,
+ Scope.get_isolation_scope()._error_processors,
+ Scope.get_current_scope()._error_processors,
+ )
+
+ for error_processor in error_processors:
+ new_event = error_processor(event, exc_info)
+ if new_event is None:
+ return self._drop(error_processor, "error processor")
+
+ event = new_event
+
+ return event
+
+ def run_event_processors(self, event, hint):
+ # type: (Event, Hint) -> Optional[Event]
+ """
+ Runs the event processors on the event and returns the modified event.
+ """
+ ty = event.get("type")
+ is_check_in = ty == "check_in"
+
+ if not is_check_in:
+ # Get scopes without creating them to prevent infinite recursion
+ isolation_scope = _isolation_scope.get()
+ current_scope = _current_scope.get()
+
+ event_processors = chain(
+ global_event_processors,
+ _global_scope and _global_scope._event_processors or [],
+ isolation_scope and isolation_scope._event_processors or [],
+ current_scope and current_scope._event_processors or [],
+ )
+
+ for event_processor in event_processors:
+ new_event = event
+ with capture_internal_exceptions():
+ new_event = event_processor(event, hint)
+ if new_event is None:
+ return self._drop(event_processor, "event processor")
+ event = new_event
+
+ return event
@_disable_capture
def apply_to_event(
@@ -1154,32 +1385,13 @@ def apply_to_event(
if not is_transaction and not is_check_in:
self._apply_breadcrumbs_to_event(event, hint, options)
- def _drop(cause, ty):
- # type: (Any, str) -> Optional[Any]
- logger.info("%s (%s) dropped event", ty, cause)
+ event = self.run_error_processors(event, hint)
+ if event is None:
return None
- # run error processors
- exc_info = hint.get("exc_info")
- if exc_info is not None:
- for error_processor in self._error_processors:
- new_event = error_processor(event, exc_info)
- if new_event is None:
- return _drop(error_processor, "error processor")
-
- event = new_event
-
- # run event processors
- if not is_check_in:
- for event_processor in chain(
- global_event_processors, self._event_processors
- ):
- new_event = event
- with capture_internal_exceptions():
- new_event = event_processor(event, hint)
- if new_event is None:
- return _drop(event_processor, "event processor")
- event = new_event
+ event = self.run_event_processors(event, hint)
+ if event is None:
+ return None
return event
@@ -1212,6 +1424,8 @@ def update_from_scope(self, scope):
self._profile = scope._profile
if scope._propagation_context:
self._propagation_context = scope._propagation_context
+ if scope._session:
+ self._session = scope._session
def update_from_kwargs(
self,
@@ -1237,40 +1451,173 @@ def update_from_kwargs(
if fingerprint is not None:
self._fingerprint = fingerprint
- def __copy__(self):
- # type: () -> Scope
- rv = object.__new__(self.__class__) # type: Scope
+ def __repr__(self):
+ # type: () -> str
+ return "<%s id=%s name=%s type=%s>" % (
+ self.__class__.__name__,
+ hex(id(self)),
+ self._name,
+ self._type,
+ )
- rv._level = self._level
- rv._name = self._name
- rv._fingerprint = self._fingerprint
- rv._transaction = self._transaction
- rv._transaction_info = dict(self._transaction_info)
- rv._user = self._user
- rv._tags = dict(self._tags)
- rv._contexts = dict(self._contexts)
- rv._extras = dict(self._extras)
+@contextmanager
+def new_scope():
+ # type: () -> Generator[Scope, None, None]
+ """
+ .. versionadded:: 2.0.0
- rv._breadcrumbs = copy(self._breadcrumbs)
- rv._event_processors = list(self._event_processors)
- rv._error_processors = list(self._error_processors)
- rv._propagation_context = self._propagation_context
+ Context manager that forks the current scope and runs the wrapped code in it.
+ After the wrapped code is executed, the original scope is restored.
- rv._should_capture = self._should_capture
- rv._span = self._span
- rv._session = self._session
- rv._force_auto_session_tracking = self._force_auto_session_tracking
- rv._attachments = list(self._attachments)
+ Example Usage:
- rv._profile = self._profile
+ .. code-block:: python
- return rv
+ import sentry_sdk
- def __repr__(self):
- # type: () -> str
- return "<%s id=%s name=%s>" % (
- self.__class__.__name__,
- hex(id(self)),
- self._name,
- )
+ with sentry_sdk.new_scope() as scope:
+ scope.set_tag("color", "green")
+ sentry_sdk.capture_message("hello") # will include `color` tag.
+
+ sentry_sdk.capture_message("hello, again") # will NOT include `color` tag.
+
+ """
+ # fork current scope
+ current_scope = Scope.get_current_scope()
+ new_scope = current_scope.fork()
+ token = _current_scope.set(new_scope)
+
+ try:
+ yield new_scope
+
+ finally:
+ # restore original scope
+ _current_scope.reset(token)
+
+
+@contextmanager
+def use_scope(scope):
+ # type: (Scope) -> Generator[Scope, None, None]
+ """
+ .. versionadded:: 2.0.0
+
+ Context manager that uses the given `scope` and runs the wrapped code in it.
+ After the wrapped code is executed, the original scope is restored.
+
+ Example Usage:
+ Suppose the variable `scope` contains a `Scope` object, which is not currently
+ the active scope.
+
+ .. code-block:: python
+
+ import sentry_sdk
+
+ with sentry_sdk.use_scope(scope):
+ scope.set_tag("color", "green")
+ sentry_sdk.capture_message("hello") # will include `color` tag.
+
+ sentry_sdk.capture_message("hello, again") # will NOT include `color` tag.
+
+ """
+ # set given scope as current scope
+ token = _current_scope.set(scope)
+
+ try:
+ yield scope
+
+ finally:
+ # restore original scope
+ _current_scope.reset(token)
+
+
+@contextmanager
+def isolation_scope():
+ # type: () -> Generator[Scope, None, None]
+ """
+ .. versionadded:: 2.0.0
+
+ Context manager that forks the current isolation scope and runs the wrapped code in it.
+ The current scope is also forked to not bleed data into the existing current scope.
+ After the wrapped code is executed, the original scopes are restored.
+
+ Example Usage:
+
+ .. code-block:: python
+
+ import sentry_sdk
+
+ with sentry_sdk.isolation_scope() as scope:
+ scope.set_tag("color", "green")
+ sentry_sdk.capture_message("hello") # will include `color` tag.
+
+ sentry_sdk.capture_message("hello, again") # will NOT include `color` tag.
+
+ """
+ # fork current scope
+ current_scope = Scope.get_current_scope()
+ forked_current_scope = current_scope.fork()
+ current_token = _current_scope.set(forked_current_scope)
+
+ # fork isolation scope
+ isolation_scope = Scope.get_isolation_scope()
+ new_isolation_scope = isolation_scope.fork()
+ isolation_token = _isolation_scope.set(new_isolation_scope)
+
+ try:
+ yield new_isolation_scope
+
+ finally:
+ # restore original scopes
+ _current_scope.reset(current_token)
+ _isolation_scope.reset(isolation_token)
+
+
+@contextmanager
+def use_isolation_scope(isolation_scope):
+ # type: (Scope) -> Generator[Scope, None, None]
+ """
+ .. versionadded:: 2.0.0
+
+ Context manager that uses the given `isolation_scope` and runs the wrapped code in it.
+ The current scope is also forked to not bleed data into the existing current scope.
+ After the wrapped code is executed, the original scopes are restored.
+
+ Example Usage:
+
+ .. code-block:: python
+
+ import sentry_sdk
+
+ with sentry_sdk.isolation_scope() as scope:
+ scope.set_tag("color", "green")
+ sentry_sdk.capture_message("hello") # will include `color` tag.
+
+ sentry_sdk.capture_message("hello, again") # will NOT include `color` tag.
+
+ """
+ # fork current scope
+ current_scope = Scope.get_current_scope()
+ forked_current_scope = current_scope.fork()
+ current_token = _current_scope.set(forked_current_scope)
+
+ # set given scope as isolation scope
+ isolation_token = _isolation_scope.set(isolation_scope)
+
+ try:
+ yield isolation_scope
+
+ finally:
+ # restore original scopes
+ _current_scope.reset(current_token)
+ _isolation_scope.reset(isolation_token)
+
+
+def should_send_default_pii():
+ # type: () -> bool
+ """Shortcut for `Scope.get_client().should_send_default_pii()`."""
+ return Scope.get_client().should_send_default_pii()
+
+
+# Circular imports
+from sentry_sdk.client import NonRecordingClient
diff --git a/sentry_sdk/scrubber.py b/sentry_sdk/scrubber.py
index 3f089ab8f6..f1f320786c 100644
--- a/sentry_sdk/scrubber.py
+++ b/sentry_sdk/scrubber.py
@@ -1,14 +1,8 @@
-try:
- from typing import cast
-except ImportError:
- cast = lambda _, obj: obj
-
from sentry_sdk.utils import (
capture_internal_exceptions,
AnnotatedValue,
iter_event_frames,
)
-from sentry_sdk._compat import string_types
from sentry_sdk._types import TYPE_CHECKING
if TYPE_CHECKING:
@@ -97,7 +91,7 @@ def scrub_dict(self, d):
for k, v in d.items():
# The cast is needed because mypy is not smart enough to figure out that k must be a
# string after the isinstance check.
- if isinstance(k, string_types) and cast(str, k).lower() in self.denylist:
+ if isinstance(k, str) and k.lower() in self.denylist:
d[k] = AnnotatedValue.substituted_because_contains_sensitive_data()
elif self.recursive:
self.scrub_dict(v) # no-op unless v is a dict
diff --git a/sentry_sdk/serializer.py b/sentry_sdk/serializer.py
index 51496f57ce..ff243eeadc 100644
--- a/sentry_sdk/serializer.py
+++ b/sentry_sdk/serializer.py
@@ -1,6 +1,6 @@
import sys
import math
-
+from collections.abc import Mapping, Sequence, Set
from datetime import datetime
from sentry_sdk.utils import (
@@ -11,14 +11,6 @@
safe_repr,
strip_string,
)
-from sentry_sdk._compat import (
- text_type,
- PY2,
- string_types,
- number_types,
- iteritems,
- binary_sequence_types,
-)
from sentry_sdk._types import TYPE_CHECKING
if TYPE_CHECKING:
@@ -41,20 +33,8 @@
Segment = Union[str, int]
-if PY2:
- # Importing ABCs from collections is deprecated, and will stop working in 3.8
- # https://github.com/python/cpython/blob/master/Lib/collections/__init__.py#L49
- from collections import Mapping, Sequence, Set
-
- serializable_str_types = string_types + binary_sequence_types
-
-else:
- # New in 3.3
- # https://docs.python.org/3/library/collections.abc.html
- from collections.abc import Mapping, Sequence, Set
-
- # Bytes are technically not strings in Python 3, but we can serialize them
- serializable_str_types = string_types + binary_sequence_types
+# Bytes are technically not strings in Python 3, but we can serialize them
+serializable_str_types = (str, bytes, bytearray, memoryview)
# Maximum length of JSON-serialized event payloads that can be safely sent
@@ -82,7 +62,7 @@ def add_global_repr_processor(processor):
global_repr_processors.append(processor)
-class Memo(object):
+class Memo:
__slots__ = ("_ids", "_objs")
def __init__(self):
@@ -130,7 +110,7 @@ def _annotate(**meta):
while len(meta_stack) <= len(path):
try:
segment = path[len(meta_stack) - 1]
- node = meta_stack[-1].setdefault(text_type(segment), {})
+ node = meta_stack[-1].setdefault(str(segment), {})
except IndexError:
node = {}
@@ -310,7 +290,7 @@ def _serialize_node_impl(
sentry_repr = getattr(type(obj), "__sentry_repr__", None)
- if obj is None or isinstance(obj, (bool, number_types)):
+ if obj is None or isinstance(obj, (bool, int, float)):
if should_repr_strings or (
isinstance(obj, float) and (math.isinf(obj) or math.isnan(obj))
):
@@ -323,7 +303,7 @@ def _serialize_node_impl(
elif isinstance(obj, datetime):
return (
- text_type(format_timestamp(obj))
+ str(format_timestamp(obj))
if not should_repr_strings
else safe_repr(obj)
)
@@ -331,17 +311,17 @@ def _serialize_node_impl(
elif isinstance(obj, Mapping):
# Create temporary copy here to avoid calling too much code that
# might mutate our dictionary while we're still iterating over it.
- obj = dict(iteritems(obj))
+ obj = dict(obj.items())
rv_dict = {} # type: Dict[str, Any]
i = 0
- for k, v in iteritems(obj):
+ for k, v in obj.items():
if remaining_breadth is not None and i >= remaining_breadth:
_annotate(len=len(obj))
break
- str_k = text_type(k)
+ str_k = str(k)
v = _serialize_node(
v,
segment=str_k,
@@ -390,7 +370,7 @@ def _serialize_node_impl(
if isinstance(obj, bytes) or isinstance(obj, bytearray):
obj = obj.decode("utf-8", "replace")
- if not isinstance(obj, string_types):
+ if not isinstance(obj, str):
obj = safe_repr(obj)
is_span_description = (
diff --git a/sentry_sdk/session.py b/sentry_sdk/session.py
index 45e2236ec9..5c11456430 100644
--- a/sentry_sdk/session.py
+++ b/sentry_sdk/session.py
@@ -1,11 +1,10 @@
import uuid
+from datetime import datetime, timezone
-from sentry_sdk._compat import datetime_utcnow
from sentry_sdk._types import TYPE_CHECKING
from sentry_sdk.utils import format_timestamp
if TYPE_CHECKING:
- from datetime import datetime
from typing import Optional
from typing import Union
from typing import Any
@@ -28,7 +27,7 @@ def _make_uuid(
return uuid.UUID(val)
-class Session(object):
+class Session:
def __init__(
self,
sid=None, # type: Optional[Union[str, uuid.UUID]]
@@ -49,7 +48,7 @@ def __init__(
if sid is None:
sid = uuid.uuid4()
if started is None:
- started = datetime_utcnow()
+ started = datetime.now(timezone.utc)
if status is None:
status = "ok"
self.status = status
@@ -109,7 +108,7 @@ def update(
if did is not None:
self.did = str(did)
if timestamp is None:
- timestamp = datetime_utcnow()
+ timestamp = datetime.now(timezone.utc)
self.timestamp = timestamp
if started is not None:
self.started = started
diff --git a/sentry_sdk/sessions.py b/sentry_sdk/sessions.py
index 68255184b7..b14bc43187 100644
--- a/sentry_sdk/sessions.py
+++ b/sentry_sdk/sessions.py
@@ -22,6 +22,8 @@
def is_auto_session_tracking_enabled(hub=None):
# type: (Optional[sentry_sdk.Hub]) -> Union[Any, bool, None]
"""Utility function to find out if session tracking is enabled."""
+ # TODO: add deprecation warning
+
if hub is None:
hub = sentry_sdk.Hub.current
@@ -38,6 +40,8 @@ def is_auto_session_tracking_enabled(hub=None):
def auto_session_tracking(hub=None, session_mode="application"):
# type: (Optional[sentry_sdk.Hub], str) -> Generator[None, None, None]
"""Starts and stops a session automatically around a block."""
+ # TODO: add deprecation warning
+
if hub is None:
hub = sentry_sdk.Hub.current
should_track = is_auto_session_tracking_enabled(hub)
@@ -50,6 +54,43 @@ def auto_session_tracking(hub=None, session_mode="application"):
hub.end_session()
+def is_auto_session_tracking_enabled_scope(scope):
+ # type: (sentry_sdk.Scope) -> bool
+ """
+ Utility function to find out if session tracking is enabled.
+
+ TODO: This uses the new scopes. When the Hub is removed, the function
+ is_auto_session_tracking_enabled should be removed and this function
+ should be renamed to is_auto_session_tracking_enabled.
+ """
+ should_track = scope._force_auto_session_tracking
+ if should_track is None:
+ client_options = sentry_sdk.get_client().options
+ should_track = client_options.get("auto_session_tracking", False)
+
+ return should_track
+
+
+@contextmanager
+def auto_session_tracking_scope(scope, session_mode="application"):
+ # type: (sentry_sdk.Scope, str) -> Generator[None, None, None]
+ """
+ Starts and stops a session automatically around a block.
+
+ TODO: This uses the new scopes. When the Hub is removed, the function
+ auto_session_tracking should be removed and this function
+ should be renamed to auto_session_tracking.
+ """
+ should_track = is_auto_session_tracking_enabled_scope(scope)
+ if should_track:
+ scope.start_session(session_mode=session_mode)
+ try:
+ yield
+ finally:
+ if should_track:
+ scope.end_session()
+
+
TERMINAL_SESSION_STATES = ("exited", "abnormal", "crashed")
MAX_ENVELOPE_ITEMS = 100
@@ -59,7 +100,7 @@ def make_aggregate_envelope(aggregate_states, attrs):
return {"attrs": dict(attrs), "aggregates": list(aggregate_states.values())}
-class SessionFlusher(object):
+class SessionFlusher:
def __init__(
self,
capture_func, # type: Callable[[Envelope], None]
diff --git a/sentry_sdk/spotlight.py b/sentry_sdk/spotlight.py
index 3d02ee74f0..76d0d61468 100644
--- a/sentry_sdk/spotlight.py
+++ b/sentry_sdk/spotlight.py
@@ -12,7 +12,7 @@
from sentry_sdk.envelope import Envelope
-class SpotlightClient(object):
+class SpotlightClient:
def __init__(self, url):
# type: (str) -> None
self.url = url
diff --git a/sentry_sdk/tracing.py b/sentry_sdk/tracing.py
index 7afe7e0944..6e82d839db 100644
--- a/sentry_sdk/tracing.py
+++ b/sentry_sdk/tracing.py
@@ -1,25 +1,20 @@
import uuid
import random
-
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
import sentry_sdk
-from sentry_sdk.consts import INSTRUMENTER
+from sentry_sdk.consts import INSTRUMENTER, SPANDATA
from sentry_sdk.utils import (
get_current_thread_meta,
is_valid_sample_rate,
logger,
nanosecond_time,
)
-from sentry_sdk._compat import datetime_utcnow, utc_from_timestamp, PY2
-from sentry_sdk.consts import SPANDATA
from sentry_sdk._types import TYPE_CHECKING
if TYPE_CHECKING:
- import typing
-
- from collections.abc import Callable, MutableMapping
+ from collections.abc import Callable, Mapping, MutableMapping
from typing import Any
from typing import Dict
from typing import Iterator
@@ -31,12 +26,80 @@
from typing import Union
from typing import TypeVar
+ from typing_extensions import TypedDict, Unpack
+
P = ParamSpec("P")
R = TypeVar("R")
import sentry_sdk.profiler
from sentry_sdk._types import Event, MeasurementUnit, SamplingContext
+ class SpanKwargs(TypedDict, total=False):
+ trace_id: str
+ """
+ The trace ID of the root span. If this new span is to be the root span,
+ omit this parameter, and a new trace ID will be generated.
+ """
+
+ span_id: str
+ """The span ID of this span. If omitted, a new span ID will be generated."""
+
+ parent_span_id: str
+ """The span ID of the parent span, if applicable."""
+
+ same_process_as_parent: bool
+ """Whether this span is in the same process as the parent span."""
+
+ sampled: bool
+ """
+ Whether the span should be sampled. Overrides the default sampling decision
+ for this span when provided.
+ """
+
+ op: str
+ """
+ The span's operation. A list of recommended values is available here:
+ https://develop.sentry.dev/sdk/performance/span-operations/
+ """
+
+ description: str
+ """A description of what operation is being performed within the span."""
+
+ hub: Optional["sentry_sdk.Hub"]
+ """The hub to use for this span. This argument is DEPRECATED. Please use the `scope` parameter, instead."""
+
+ status: str
+ """The span's status. Possible values are listed at https://develop.sentry.dev/sdk/event-payloads/span/"""
+
+ containing_transaction: Optional["Transaction"]
+ """The transaction that this span belongs to."""
+
+ start_timestamp: Optional[Union[datetime, float]]
+ """
+ The timestamp when the span started. If omitted, the current time
+ will be used.
+ """
+
+ scope: "sentry_sdk.Scope"
+ """The scope to use for this span. If not provided, we use the current scope."""
+
+ class TransactionKwargs(SpanKwargs, total=False):
+ name: str
+ """Identifier of the transaction. Will show up in the Sentry UI."""
+
+ source: str
+ """
+ A string describing the source of the transaction name. This will be used to determine the transaction's type.
+ See https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations for more information.
+ Default "custom".
+ """
+
+ parent_sampled: bool
+ """Whether the parent transaction was sampled. If True this transaction will be kept, if False it will be discarded."""
+
+ baggage: "Baggage"
+ """The W3C baggage header value. (see https://www.w3.org/TR/baggage/)"""
+
BAGGAGE_HEADER_NAME = "baggage"
SENTRY_TRACE_HEADER_NAME = "sentry-trace"
@@ -69,7 +132,7 @@
}
-class _SpanRecorder(object):
+class _SpanRecorder:
"""Limits the number of spans recorded in a transaction."""
__slots__ = ("maxlen", "spans")
@@ -92,9 +155,31 @@ def add(self, span):
self.spans.append(span)
-class Span(object):
+class Span:
"""A span holds timing information of a block of code.
- Spans can have multiple child spans thus forming a span tree."""
+ Spans can have multiple child spans thus forming a span tree.
+
+ :param trace_id: The trace ID of the root span. If this new span is to be the root span,
+ omit this parameter, and a new trace ID will be generated.
+ :param span_id: The span ID of this span. If omitted, a new span ID will be generated.
+ :param parent_span_id: The span ID of the parent span, if applicable.
+ :param same_process_as_parent: Whether this span is in the same process as the parent span.
+ :param sampled: Whether the span should be sampled. Overrides the default sampling decision
+ for this span when provided.
+ :param op: The span's operation. A list of recommended values is available here:
+ https://develop.sentry.dev/sdk/performance/span-operations/
+ :param description: A description of what operation is being performed within the span.
+ :param hub: The hub to use for this span.
+
+ .. deprecated:: 2.0.0
+ Please use the `scope` parameter, instead.
+ :param status: The span's status. Possible values are listed at
+ https://develop.sentry.dev/sdk/event-payloads/span/
+ :param containing_transaction: The transaction that this span belongs to.
+ :param start_timestamp: The timestamp when the span started. If omitted, the current time
+ will be used.
+ :param scope: The scope to use for this span. If not provided, we use the current scope.
+ """
__slots__ = (
"trace_id",
@@ -115,22 +200,9 @@ class Span(object):
"_context_manager_state",
"_containing_transaction",
"_local_aggregator",
+ "scope",
)
- def __new__(cls, **kwargs):
- # type: (**Any) -> Any
- """
- Backwards-compatible implementation of Span and Transaction
- creation.
- """
-
- # TODO: consider removing this in a future release.
- # This is for backwards compatibility with releases before Transaction
- # existed, to allow for a smoother transition.
- if "transaction" in kwargs:
- return object.__new__(Transaction)
- return object.__new__(cls)
-
def __init__(
self,
trace_id=None, # type: Optional[str]
@@ -140,11 +212,11 @@ def __init__(
sampled=None, # type: Optional[bool]
op=None, # type: Optional[str]
description=None, # type: Optional[str]
- hub=None, # type: Optional[sentry_sdk.Hub]
+ hub=None, # type: Optional[sentry_sdk.Hub] # deprecated
status=None, # type: Optional[str]
- transaction=None, # type: Optional[str] # deprecated
containing_transaction=None, # type: Optional[Transaction]
start_timestamp=None, # type: Optional[Union[datetime, float]]
+ scope=None, # type: Optional[sentry_sdk.Scope]
):
# type: (...) -> None
self.trace_id = trace_id or uuid.uuid4().hex
@@ -156,13 +228,14 @@ def __init__(
self.description = description
self.status = status
self.hub = hub
+ self.scope = scope
self._tags = {} # type: MutableMapping[str, str]
self._data = {} # type: Dict[str, Any]
self._containing_transaction = containing_transaction
if start_timestamp is None:
- start_timestamp = datetime_utcnow()
+ start_timestamp = datetime.now(timezone.utc)
elif isinstance(start_timestamp, float):
- start_timestamp = utc_from_timestamp(start_timestamp)
+ start_timestamp = datetime.fromtimestamp(start_timestamp, timezone.utc)
self.start_timestamp = start_timestamp
try:
# profiling depends on this value and requires that
@@ -211,12 +284,10 @@ def __repr__(self):
def __enter__(self):
# type: () -> Span
- hub = self.hub or sentry_sdk.Hub.current
-
- _, scope = hub._stack[-1]
+ scope = self.scope or sentry_sdk.Scope.get_current_scope()
old_span = scope.span
scope.span = self
- self._context_manager_state = (hub, scope, old_span)
+ self._context_manager_state = (scope, old_span)
return self
def __exit__(self, ty, value, tb):
@@ -224,10 +295,9 @@ def __exit__(self, ty, value, tb):
if value is not None:
self.set_status("internal_error")
- hub, scope, old_span = self._context_manager_state
+ scope, old_span = self._context_manager_state
del self._context_manager_state
-
- self.finish(hub)
+ self.finish(scope)
scope.span = old_span
@property
@@ -251,9 +321,9 @@ def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
trace id, sampling decision, transaction pointer, and span recorder are
inherited from the current span/transaction.
"""
- hub = self.hub or sentry_sdk.Hub.current
- client = hub.client
- configuration_instrumenter = client and client.options["instrumenter"]
+ configuration_instrumenter = sentry_sdk.Scope.get_client().options[
+ "instrumenter"
+ ]
if instrumenter != configuration_instrumenter:
return NoOpSpan()
@@ -264,7 +334,7 @@ def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
trace_id=self.trace_id,
parent_span_id=self.span_id,
containing_transaction=self.containing_transaction,
- **kwargs
+ **kwargs,
)
span_recorder = (
@@ -275,19 +345,11 @@ def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
return child
- def new_span(self, **kwargs):
- # type: (**Any) -> Span
- """DEPRECATED: use :py:meth:`sentry_sdk.tracing.Span.start_child` instead."""
- logger.warning(
- "Deprecated: use Span.start_child instead of Span.new_span. This will be removed in the future."
- )
- return self.start_child(**kwargs)
-
@classmethod
def continue_from_environ(
cls,
- environ, # type: typing.Mapping[str, str]
- **kwargs # type: Any
+ environ, # type: Mapping[str, str]
+ **kwargs, # type: Any
):
# type: (...) -> Transaction
"""
@@ -312,8 +374,8 @@ def continue_from_environ(
@classmethod
def continue_from_headers(
cls,
- headers, # type: typing.Mapping[str, str]
- **kwargs # type: Any
+ headers, # type: Mapping[str, str]
+ **kwargs, # type: Any
):
# type: (...) -> Transaction
"""
@@ -369,7 +431,7 @@ def iter_headers(self):
def from_traceparent(
cls,
traceparent, # type: Optional[str]
- **kwargs # type: Any
+ **kwargs, # type: Any
):
# type: (...) -> Optional[Transaction]
"""
@@ -475,33 +537,30 @@ def is_success(self):
# type: () -> bool
return self.status == "ok"
- def finish(self, hub=None, end_timestamp=None):
- # type: (Optional[sentry_sdk.Hub], Optional[Union[float, datetime]]) -> Optional[str]
- # Note: would be type: (Optional[sentry_sdk.Hub]) -> None, but that leads
- # to incompatible return types for Span.finish and Transaction.finish.
- """Sets the end timestamp of the span.
+ def finish(self, scope=None, end_timestamp=None):
+ # type: (Optional[sentry_sdk.Scope], Optional[Union[float, datetime]]) -> Optional[str]
+ """
+ Sets the end timestamp of the span.
+
Additionally it also creates a breadcrumb from the span,
if the span represents a database or HTTP request.
- :param hub: The hub to use for this transaction.
- If not provided, the current hub will be used.
+ :param scope: The scope to use for this transaction.
+ If not provided, the current scope will be used.
:param end_timestamp: Optional timestamp that should
be used as timestamp instead of the current time.
:return: Always ``None``. The type is ``Optional[str]`` to match
the return value of :py:meth:`sentry_sdk.tracing.Transaction.finish`.
"""
-
if self.timestamp is not None:
# This span is already finished, ignore.
return None
- hub = hub or self.hub or sentry_sdk.Hub.current
-
try:
if end_timestamp:
if isinstance(end_timestamp, float):
- end_timestamp = utc_from_timestamp(end_timestamp)
+ end_timestamp = datetime.fromtimestamp(end_timestamp, timezone.utc)
self.timestamp = end_timestamp
else:
elapsed = nanosecond_time() - self._start_timestamp_monotonic_ns
@@ -509,9 +568,10 @@ def finish(self, hub=None, end_timestamp=None):
microseconds=elapsed / 1000
)
except AttributeError:
- self.timestamp = datetime_utcnow()
+ self.timestamp = datetime.now(timezone.utc)
- maybe_create_breadcrumbs_from_span(hub, self)
+ scope = scope or sentry_sdk.Scope.get_current_scope()
+ maybe_create_breadcrumbs_from_span(scope, self)
return None
@@ -570,7 +630,21 @@ def get_trace_context(self):
class Transaction(Span):
"""The Transaction is the root element that holds all the spans
- for Sentry performance instrumentation."""
+ for Sentry performance instrumentation.
+
+ :param name: Identifier of the transaction.
+ Will show up in the Sentry UI.
+ :param parent_sampled: Whether the parent transaction was sampled.
+ If True this transaction will be kept, if False it will be discarded.
+ :param baggage: The W3C baggage header value.
+ (see https://www.w3.org/TR/baggage/)
+ :param source: A string describing the source of the transaction name.
+ This will be used to determine the transaction's type.
+ See https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations
+ for more information. Default "custom".
+ :param kwargs: Additional arguments to be passed to the Span constructor.
+ See :py:class:`sentry_sdk.tracing.Span` for available arguments.
+ """
__slots__ = (
"name",
@@ -590,33 +664,11 @@ def __init__(
parent_sampled=None, # type: Optional[bool]
baggage=None, # type: Optional[Baggage]
source=TRANSACTION_SOURCE_CUSTOM, # type: str
- **kwargs # type: Any
+ **kwargs, # type: Unpack[SpanKwargs]
):
# type: (...) -> None
- """Constructs a new Transaction.
-
- :param name: Identifier of the transaction.
- Will show up in the Sentry UI.
- :param parent_sampled: Whether the parent transaction was sampled.
- If True this transaction will be kept, if False it will be discarded.
- :param baggage: The W3C baggage header value.
- (see https://www.w3.org/TR/baggage/)
- :param source: A string describing the source of the transaction name.
- This will be used to determine the transaction's type.
- See https://develop.sentry.dev/sdk/event-payloads/transaction/#transaction-annotations
- for more information. Default "custom".
- """
- # TODO: consider removing this in a future release.
- # This is for backwards compatibility with releases before Transaction
- # existed, to allow for a smoother transition.
- if not name and "transaction" in kwargs:
- logger.warning(
- "Deprecated: use Transaction(name=...) to create transactions "
- "instead of Span(transaction=...)."
- )
- name = kwargs.pop("transaction")
- super(Transaction, self).__init__(**kwargs)
+ super().__init__(**kwargs)
self.name = name
self.source = source
@@ -645,7 +697,7 @@ def __repr__(self):
def __enter__(self):
# type: () -> Transaction
- super(Transaction, self).__enter__()
+ super().__enter__()
if self._profile is not None:
self._profile.__enter__()
@@ -657,7 +709,7 @@ def __exit__(self, ty, value, tb):
if self._profile is not None:
self._profile.__exit__(ty, value, tb)
- super(Transaction, self).__exit__(ty, value, tb)
+ super().__exit__(ty, value, tb)
@property
def containing_transaction(self):
@@ -672,7 +724,7 @@ def containing_transaction(self):
return self
def finish(self, hub=None, end_timestamp=None):
- # type: (Optional[sentry_sdk.Hub], Optional[Union[float, datetime]]) -> Optional[str]
+ # type: (Optional[Union[sentry_sdk.Hub, sentry_sdk.Scope]], Optional[Union[float, datetime]]) -> Optional[str]
"""Finishes the transaction and sends it to Sentry.
All finished spans in the transaction will also be sent to Sentry.
@@ -689,15 +741,20 @@ def finish(self, hub=None, end_timestamp=None):
return None
hub = hub or self.hub or sentry_sdk.Hub.current
- client = hub.client
+ client = sentry_sdk.Scope.get_client()
- if client is None:
- # We have no client and therefore nowhere to send this transaction.
+ if not client.is_active():
+ # We have no active client and therefore nowhere to send this transaction.
return None
- # This is a de facto proxy for checking if sampled = False
if self._span_recorder is None:
- logger.debug("Discarding transaction because sampled = False")
+ # Explicit check against False needed because self.sampled might be None
+ if self.sampled is False:
+ logger.debug("Discarding transaction because sampled = False")
+ else:
+ logger.debug(
+ "Discarding transaction because it was not started with sentry_sdk.start_transaction"
+ )
# This is not entirely accurate because discards here are not
# exclusively based on sample rate but also traces sampler, but
@@ -718,7 +775,7 @@ def finish(self, hub=None, end_timestamp=None):
)
self.name = ""
- super(Transaction, self).finish(hub, end_timestamp)
+ super().finish(hub, end_timestamp)
if not self.sampled:
# At this point a `sampled = None` should have already been resolved
@@ -790,13 +847,13 @@ def set_http_status(self, http_status):
"""Sets the status of the Transaction according to the given HTTP status.
:param http_status: The HTTP status code."""
- super(Transaction, self).set_http_status(http_status)
+ super().set_http_status(http_status)
self.set_context("response", {"status_code": http_status})
def to_json(self):
# type: () -> Dict[str, Any]
"""Returns a JSON-compatible representation of the transaction."""
- rv = super(Transaction, self).to_json()
+ rv = super().to_json()
rv["name"] = self.name
rv["source"] = self.source
@@ -838,16 +895,14 @@ def _set_initial_sampling_decision(self, sampling_context):
4. If `traces_sampler` is not defined and there's no parent sampling
decision, `traces_sample_rate` will be used.
"""
+ client = sentry_sdk.Scope.get_client()
- hub = self.hub or sentry_sdk.Hub.current
- client = hub.client
- options = (client and client.options) or {}
transaction_description = "{op}transaction <{name}>".format(
op=("<" + self.op + "> " if self.op else ""), name=self.name
)
- # nothing to do if there's no client or if tracing is disabled
- if not client or not has_tracing_enabled(options):
+ # nothing to do if tracing is disabled
+ if not has_tracing_enabled(client.options):
self.sampled = False
return
@@ -861,13 +916,13 @@ def _set_initial_sampling_decision(self, sampling_context):
# `traces_sample_rate` were defined, so one of these should work; prefer
# the hook if so
sample_rate = (
- options["traces_sampler"](sampling_context)
- if callable(options.get("traces_sampler"))
+ client.options["traces_sampler"](sampling_context)
+ if callable(client.options.get("traces_sampler"))
else (
# default inheritance behavior
sampling_context["parent_sampled"]
if sampling_context["parent_sampled"] is not None
- else options["traces_sample_rate"]
+ else client.options["traces_sample_rate"]
)
)
@@ -896,7 +951,7 @@ def _set_initial_sampling_decision(self, sampling_context):
transaction_description=transaction_description,
reason=(
"traces_sampler returned 0 or False"
- if callable(options.get("traces_sampler"))
+ if callable(client.options.get("traces_sampler"))
else "traces_sample_rate is set to 0"
),
)
@@ -927,7 +982,7 @@ def _set_initial_sampling_decision(self, sampling_context):
class NoOpSpan(Span):
def __repr__(self):
# type: () -> str
- return self.__class__.__name__
+ return "<%s>" % self.__class__.__name__
@property
def containing_transaction(self):
@@ -938,10 +993,6 @@ def start_child(self, instrumenter=INSTRUMENTER.SENTRY, **kwargs):
# type: (str, **Any) -> NoOpSpan
return NoOpSpan()
- def new_span(self, **kwargs):
- # type: (**Any) -> NoOpSpan
- return self.start_child(**kwargs)
-
def to_traceparent(self):
# type: () -> str
return ""
@@ -987,7 +1038,7 @@ def get_trace_context(self):
return {}
def finish(self, hub=None, end_timestamp=None):
- # type: (Optional[sentry_sdk.Hub], Optional[Union[float, datetime]]) -> Optional[str]
+ # type: (Optional[Union[sentry_sdk.Hub, sentry_sdk.Scope]], Optional[Union[float, datetime]]) -> Optional[str]
pass
def set_measurement(self, name, value, unit=""):
@@ -1039,10 +1090,7 @@ def my_function():
async def my_async_function():
...
"""
- if PY2:
- from sentry_sdk.tracing_utils_py2 import start_child_span_decorator
- else:
- from sentry_sdk.tracing_utils_py3 import start_child_span_decorator
+ from sentry_sdk.tracing_utils import start_child_span_decorator
# This patterns allows usage of both @sentry_traced and @sentry_traced(...)
# See https://stackoverflow.com/questions/52126071/decorator-with-arguments-avoid-parenthesis-when-no-arguments/52126278
diff --git a/sentry_sdk/tracing_utils.py b/sentry_sdk/tracing_utils.py
index 98cdec5e38..556a466c0b 100644
--- a/sentry_sdk/tracing_utils.py
+++ b/sentry_sdk/tracing_utils.py
@@ -1,7 +1,13 @@
import contextlib
+import inspect
import os
import re
import sys
+from collections.abc import Mapping
+from datetime import timedelta
+from functools import wraps
+from urllib.parse import quote, unquote
+import uuid
import sentry_sdk
from sentry_sdk.consts import OP, SPANDATA
@@ -9,25 +15,17 @@
capture_internal_exceptions,
filename_for_module,
Dsn,
+ logger,
match_regex_list,
+ qualname_from_function,
to_string,
is_sentry_url,
_is_external_source,
_module_in_list,
)
-from sentry_sdk._compat import PY2, duration_in_milliseconds, iteritems
from sentry_sdk._types import TYPE_CHECKING
-if PY2:
- from collections import Mapping
- from urllib import quote, unquote
-else:
- from collections.abc import Mapping
- from urllib.parse import quote, unquote
-
if TYPE_CHECKING:
- import typing
-
from typing import Any
from typing import Dict
from typing import Generator
@@ -60,7 +58,7 @@
class EnvironHeaders(Mapping): # type: ignore
def __init__(
self,
- environ, # type: typing.Mapping[str, str]
+ environ, # type: Mapping[str, str]
prefix="HTTP_", # type: str
):
# type: (...) -> None
@@ -108,7 +106,6 @@ def has_tracing_enabled(options):
@contextlib.contextmanager
def record_sql_queries(
- hub, # type: sentry_sdk.Hub
cursor, # type: Any
query, # type: Any
params_list, # type: Any
@@ -119,9 +116,7 @@ def record_sql_queries(
# type: (...) -> Generator[sentry_sdk.tracing.Span, None, None]
# TODO: Bring back capturing of params by default
- if hub.client and hub.client.options["_experiments"].get(
- "record_sql_params", False
- ):
+ if sentry_sdk.get_client().options["_experiments"].get("record_sql_params", False):
if not params_list or params_list == [None]:
params_list = None
@@ -144,24 +139,25 @@ def record_sql_queries(
data["db.cursor"] = cursor
with capture_internal_exceptions():
- hub.add_breadcrumb(message=query, category="query", data=data)
+ sentry_sdk.add_breadcrumb(message=query, category="query", data=data)
- with hub.start_span(op=OP.DB, description=query) as span:
+ with sentry_sdk.start_span(op=OP.DB, description=query) as span:
for k, v in data.items():
span.set_data(k, v)
yield span
-def maybe_create_breadcrumbs_from_span(hub, span):
- # type: (sentry_sdk.Hub, sentry_sdk.tracing.Span) -> None
+def maybe_create_breadcrumbs_from_span(scope, span):
+ # type: (sentry_sdk.Scope, sentry_sdk.tracing.Span) -> None
+
if span.op == OP.DB_REDIS:
- hub.add_breadcrumb(
+ scope.add_breadcrumb(
message=span.description, type="redis", category="redis", data=span._tags
)
elif span.op == OP.HTTP_CLIENT:
- hub.add_breadcrumb(type="http", category="httplib", data=span._data)
+ scope.add_breadcrumb(type="http", category="httplib", data=span._data)
elif span.op == "subprocess":
- hub.add_breadcrumb(
+ scope.add_breadcrumb(
type="subprocess",
category="subprocess",
message=span.description,
@@ -169,13 +165,13 @@ def maybe_create_breadcrumbs_from_span(hub, span):
)
-def add_query_source(hub, span):
- # type: (sentry_sdk.Hub, sentry_sdk.tracing.Span) -> None
+def add_query_source(span):
+ # type: (sentry_sdk.tracing.Span) -> None
"""
Adds OTel compatible source code information to the span
"""
- client = hub.client
- if client is None:
+ client = sentry_sdk.get_client()
+ if not client.is_active():
return
if span.timestamp is None or span.start_timestamp is None:
@@ -187,7 +183,7 @@ def add_query_source(hub, span):
duration = span.timestamp - span.start_timestamp
threshold = client.options.get("db_query_source_threshold_ms", 0)
- slow_query = duration_in_milliseconds(duration) > threshold
+ slow_query = duration / timedelta(milliseconds=1) > threshold
if not slow_query:
return
@@ -201,8 +197,6 @@ def add_query_source(hub, span):
while frame is not None:
try:
abs_path = frame.f_code.co_filename
- if abs_path and PY2:
- abs_path = os.path.abspath(abs_path)
except Exception:
abs_path = ""
@@ -256,7 +250,7 @@ def add_query_source(hub, span):
except Exception:
filepath = None
if filepath is not None:
- if namespace is not None and not PY2:
+ if namespace is not None:
in_app_path = filename_for_module(namespace, filepath)
elif project_root is not None and filepath.startswith(project_root):
in_app_path = filepath.replace(project_root, "").lstrip(os.sep)
@@ -325,7 +319,110 @@ def _format_sql(cursor, sql):
return real_sql or to_string(sql)
-class Baggage(object):
+class PropagationContext:
+ """
+ The PropagationContext represents the data of a trace in Sentry.
+ """
+
+ __slots__ = (
+ "_trace_id",
+ "_span_id",
+ "parent_span_id",
+ "parent_sampled",
+ "dynamic_sampling_context",
+ )
+
+ def __init__(
+ self,
+ trace_id=None, # type: Optional[str]
+ span_id=None, # type: Optional[str]
+ parent_span_id=None, # type: Optional[str]
+ parent_sampled=None, # type: Optional[bool]
+ dynamic_sampling_context=None, # type: Optional[Dict[str, str]]
+ ):
+ # type: (...) -> None
+ self._trace_id = trace_id
+ """The trace id of the Sentry trace."""
+
+ self._span_id = span_id
+ """The span id of the currently executing span."""
+
+ self.parent_span_id = parent_span_id
+ """The id of the parent span that started this span.
+ The parent span could also be a span in an upstream service."""
+
+ self.parent_sampled = parent_sampled
+ """Boolean indicator if the parent span was sampled.
+ Important when the parent span originated in an upstream service,
+        because we want to sample the whole trace, or nothing from the trace."""
+
+ self.dynamic_sampling_context = dynamic_sampling_context
+ """Data that is used for dynamic sampling decisions."""
+
+ @classmethod
+ def from_incoming_data(cls, incoming_data):
+ # type: (Dict[str, Any]) -> Optional[PropagationContext]
+ propagation_context = None
+
+ normalized_data = normalize_incoming_data(incoming_data)
+ baggage_header = normalized_data.get(BAGGAGE_HEADER_NAME)
+ if baggage_header:
+ propagation_context = PropagationContext()
+ propagation_context.dynamic_sampling_context = Baggage.from_incoming_header(
+ baggage_header
+ ).dynamic_sampling_context()
+
+ sentry_trace_header = normalized_data.get(SENTRY_TRACE_HEADER_NAME)
+ if sentry_trace_header:
+ sentrytrace_data = extract_sentrytrace_data(sentry_trace_header)
+ if sentrytrace_data is not None:
+ if propagation_context is None:
+ propagation_context = PropagationContext()
+ propagation_context.update(sentrytrace_data)
+
+ return propagation_context
+
+ @property
+ def trace_id(self):
+ # type: () -> str
+ """The trace id of the Sentry trace."""
+ if not self._trace_id:
+ self._trace_id = uuid.uuid4().hex
+
+ return self._trace_id
+
+ @trace_id.setter
+ def trace_id(self, value):
+ # type: (str) -> None
+ self._trace_id = value
+
+ @property
+ def span_id(self):
+ # type: () -> str
+        """The span id of the currently executing span."""
+ if not self._span_id:
+ self._span_id = uuid.uuid4().hex[16:]
+
+ return self._span_id
+
+ @span_id.setter
+ def span_id(self, value):
+ # type: (str) -> None
+ self._span_id = value
+
+ def update(self, other_dict):
+ # type: (Dict[str, Any]) -> None
+ """
+ Updates the PropagationContext with data from the given dictionary.
+ """
+ for key, value in other_dict.items():
+ try:
+ setattr(self, key, value)
+ except AttributeError:
+ pass
+
+
+class Baggage:
"""
The W3C Baggage header information (see https://www.w3.org/TR/baggage/).
"""
@@ -388,8 +485,8 @@ def from_options(cls, scope):
options = client.options
propagation_context = scope._propagation_context
- if propagation_context is not None and "trace_id" in propagation_context:
- sentry_items["trace_id"] = propagation_context["trace_id"]
+ if propagation_context is not None:
+ sentry_items["trace_id"] = propagation_context.trace_id
if options.get("environment"):
sentry_items["environment"] = options["environment"]
@@ -403,10 +500,6 @@ def from_options(cls, scope):
if options.get("traces_sample_rate"):
sentry_items["sample_rate"] = options["traces_sample_rate"]
- user = (scope and scope._user) or {}
- if user.get("segment"):
- sentry_items["user_segment"] = user["segment"]
-
return Baggage(sentry_items, third_party_items, mutable)
@classmethod
@@ -416,15 +509,13 @@ def populate_from_transaction(cls, transaction):
Populate fresh baggage entry with sentry_items and make it immutable
if this is the head SDK which originates traces.
"""
- hub = transaction.hub or sentry_sdk.Hub.current
- client = hub.client
+ client = sentry_sdk.Scope.get_client()
sentry_items = {} # type: Dict[str, str]
- if not client:
+ if not client.is_active():
return Baggage(sentry_items)
options = client.options or {}
- user = (hub.scope and hub.scope._user) or {}
sentry_items["trace_id"] = transaction.trace_id
@@ -443,9 +534,6 @@ def populate_from_transaction(cls, transaction):
):
sentry_items["transaction"] = transaction.name
- if user.get("segment"):
- sentry_items["user_segment"] = user["segment"]
-
if transaction.sample_rate is not None:
sentry_items["sample_rate"] = str(transaction.sample_rate)
@@ -468,7 +556,7 @@ def dynamic_sampling_context(self):
# type: () -> Dict[str, str]
header = {}
- for key, item in iteritems(self.sentry_items):
+ for key, item in self.sentry_items.items():
header[key] = item
return header
@@ -477,7 +565,7 @@ def serialize(self, include_third_party=False):
# type: (bool) -> str
items = []
- for key, val in iteritems(self.sentry_items):
+ for key, val in self.sentry_items.items():
with capture_internal_exceptions():
item = Baggage.SENTRY_PREFIX + quote(key) + "=" + quote(str(val))
items.append(item)
@@ -488,15 +576,14 @@ def serialize(self, include_third_party=False):
return ",".join(items)
-def should_propagate_trace(hub, url):
- # type: (sentry_sdk.Hub, str) -> bool
+def should_propagate_trace(client, url):
+ # type: (sentry_sdk.client.BaseClient, str) -> bool
"""
- Returns True if url matches trace_propagation_targets configured in the given hub. Otherwise, returns False.
+ Returns True if url matches trace_propagation_targets configured in the given client. Otherwise, returns False.
"""
- client = hub.client # type: Any
trace_propagation_targets = client.options["trace_propagation_targets"]
- if is_sentry_url(hub, url):
+ if is_sentry_url(client, url):
return False
return match_regex_list(url, trace_propagation_targets, substring_matching=True)
@@ -518,5 +605,78 @@ def normalize_incoming_data(incoming_data):
return data
+def start_child_span_decorator(func):
+ # type: (Any) -> Any
+ """
+ Decorator to add child spans for functions.
+
+ See also ``sentry_sdk.tracing.trace()``.
+ """
+ # Asynchronous case
+ if inspect.iscoroutinefunction(func):
+
+ @wraps(func)
+ async def func_with_tracing(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+
+ span = get_current_span()
+
+ if span is None:
+ logger.warning(
+ "Can not create a child span for %s. "
+ "Please start a Sentry transaction before calling this function.",
+ qualname_from_function(func),
+ )
+ return await func(*args, **kwargs)
+
+ with span.start_child(
+ op=OP.FUNCTION,
+ description=qualname_from_function(func),
+ ):
+ return await func(*args, **kwargs)
+
+ # Synchronous case
+ else:
+
+ @wraps(func)
+ def func_with_tracing(*args, **kwargs):
+ # type: (*Any, **Any) -> Any
+
+ span = get_current_span()
+
+ if span is None:
+ logger.warning(
+ "Can not create a child span for %s. "
+ "Please start a Sentry transaction before calling this function.",
+ qualname_from_function(func),
+ )
+ return func(*args, **kwargs)
+
+ with span.start_child(
+ op=OP.FUNCTION,
+ description=qualname_from_function(func),
+ ):
+ return func(*args, **kwargs)
+
+ return func_with_tracing
+
+
+def get_current_span(scope=None):
+ # type: (Optional[sentry_sdk.Scope]) -> Optional[Span]
+ """
+ Returns the currently active span if there is one running, otherwise `None`
+ """
+ scope = scope or sentry_sdk.Scope.get_current_scope()
+ current_span = scope.span
+ return current_span
+
+
# Circular imports
-from sentry_sdk.tracing import LOW_QUALITY_TRANSACTION_SOURCES
+from sentry_sdk.tracing import (
+ BAGGAGE_HEADER_NAME,
+ LOW_QUALITY_TRANSACTION_SOURCES,
+ SENTRY_TRACE_HEADER_NAME,
+)
+
+if TYPE_CHECKING:
+ from sentry_sdk.tracing import Span
diff --git a/sentry_sdk/tracing_utils_py2.py b/sentry_sdk/tracing_utils_py2.py
deleted file mode 100644
index a251ab41be..0000000000
--- a/sentry_sdk/tracing_utils_py2.py
+++ /dev/null
@@ -1,45 +0,0 @@
-from functools import wraps
-
-import sentry_sdk
-from sentry_sdk import get_current_span
-from sentry_sdk._types import TYPE_CHECKING
-from sentry_sdk.consts import OP
-from sentry_sdk.utils import logger, qualname_from_function
-
-
-if TYPE_CHECKING:
- from typing import Any
-
-
-def start_child_span_decorator(func):
- # type: (Any) -> Any
- """
- Decorator to add child spans for functions.
-
- This is the Python 2 compatible version of the decorator.
- Duplicated code from ``sentry_sdk.tracing_utils_python3.start_child_span_decorator``.
-
- See also ``sentry_sdk.tracing.trace()``.
- """
-
- @wraps(func)
- def func_with_tracing(*args, **kwargs):
- # type: (*Any, **Any) -> Any
-
- span = get_current_span(sentry_sdk.Hub.current)
-
- if span is None:
- logger.warning(
- "Can not create a child span for %s. "
- "Please start a Sentry transaction before calling this function.",
- qualname_from_function(func),
- )
- return func(*args, **kwargs)
-
- with span.start_child(
- op=OP.FUNCTION,
- description=qualname_from_function(func),
- ):
- return func(*args, **kwargs)
-
- return func_with_tracing
diff --git a/sentry_sdk/tracing_utils_py3.py b/sentry_sdk/tracing_utils_py3.py
deleted file mode 100644
index d58d5f7cb4..0000000000
--- a/sentry_sdk/tracing_utils_py3.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import inspect
-from functools import wraps
-
-import sentry_sdk
-from sentry_sdk import get_current_span
-from sentry_sdk._types import TYPE_CHECKING
-from sentry_sdk.consts import OP
-from sentry_sdk.utils import logger, qualname_from_function
-
-
-if TYPE_CHECKING:
- from typing import Any
-
-
-def start_child_span_decorator(func):
- # type: (Any) -> Any
- """
- Decorator to add child spans for functions.
-
- This is the Python 3 compatible version of the decorator.
- For Python 2 there is duplicated code here: ``sentry_sdk.tracing_utils_python2.start_child_span_decorator()``.
-
- See also ``sentry_sdk.tracing.trace()``.
- """
-
- # Asynchronous case
- if inspect.iscoroutinefunction(func):
-
- @wraps(func)
- async def func_with_tracing(*args, **kwargs):
- # type: (*Any, **Any) -> Any
-
- span = get_current_span(sentry_sdk.Hub.current)
-
- if span is None:
- logger.warning(
- "Can not create a child span for %s. "
- "Please start a Sentry transaction before calling this function.",
- qualname_from_function(func),
- )
- return await func(*args, **kwargs)
-
- with span.start_child(
- op=OP.FUNCTION,
- description=qualname_from_function(func),
- ):
- return await func(*args, **kwargs)
-
- # Synchronous case
- else:
-
- @wraps(func)
- def func_with_tracing(*args, **kwargs):
- # type: (*Any, **Any) -> Any
-
- span = get_current_span(sentry_sdk.Hub.current)
-
- if span is None:
- logger.warning(
- "Can not create a child span for %s. "
- "Please start a Sentry transaction before calling this function.",
- qualname_from_function(func),
- )
- return func(*args, **kwargs)
-
- with span.start_child(
- op=OP.FUNCTION,
- description=qualname_from_function(func),
- ):
- return func(*args, **kwargs)
-
- return func_with_tracing
diff --git a/sentry_sdk/transport.py b/sentry_sdk/transport.py
index d2fc734f7c..6a2aa76d68 100644
--- a/sentry_sdk/transport.py
+++ b/sentry_sdk/transport.py
@@ -1,23 +1,23 @@
-from __future__ import print_function
-
+from abc import ABC, abstractmethod
import io
import gzip
import socket
import time
-from datetime import timedelta
+import warnings
+from datetime import datetime, timedelta, timezone
from collections import defaultdict
+from urllib.request import getproxies
import urllib3
import certifi
-from sentry_sdk.utils import Dsn, logger, capture_internal_exceptions, json_dumps
+from sentry_sdk.consts import EndpointType
+from sentry_sdk.utils import Dsn, logger, capture_internal_exceptions
from sentry_sdk.worker import BackgroundWorker
from sentry_sdk.envelope import Envelope, Item, PayloadRef
-from sentry_sdk._compat import datetime_utcnow
from sentry_sdk._types import TYPE_CHECKING
if TYPE_CHECKING:
- from datetime import datetime
from typing import Any
from typing import Callable
from typing import Dict
@@ -32,15 +32,10 @@
from urllib3.poolmanager import PoolManager
from urllib3.poolmanager import ProxyManager
- from sentry_sdk._types import Event, EndpointType
+ from sentry_sdk._types import Event
DataCategory = Optional[str]
-try:
- from urllib.request import getproxies
-except ImportError:
- from urllib import getproxies # type: ignore
-
KEEP_ALIVE_SOCKET_OPTIONS = []
for option in [
@@ -57,7 +52,7 @@
pass
-class Transport(object):
+class Transport(ABC):
"""Baseclass for all transports.
A transport is used to send an event to sentry.
@@ -80,11 +75,23 @@ def capture_event(
):
# type: (...) -> None
"""
+ DEPRECATED: Please use capture_envelope instead.
+
This gets invoked with the event dictionary when an event should
be sent to sentry.
"""
- raise NotImplementedError()
+ warnings.warn(
+ "capture_event is deprecated, please use capture_envelope instead!",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+ envelope = Envelope()
+ envelope.add_event(event)
+ self.capture_envelope(envelope)
+
+ @abstractmethod
def capture_envelope(
self, envelope # type: Envelope
):
@@ -93,11 +100,10 @@ def capture_envelope(
Send an envelope to Sentry.
Envelopes are a data container format that can hold any type of data
- submitted to Sentry. We use it for transactions and sessions, but
- regular "error" events should go through `capture_event` for backwards
- compat.
+ submitted to Sentry. We use it to send all event data (including errors,
+ transactions, crons checkins, etc.) to Sentry.
"""
- raise NotImplementedError()
+ pass
def flush(
self,
@@ -105,13 +111,23 @@ def flush(
callback=None, # type: Optional[Any]
):
# type: (...) -> None
- """Wait `timeout` seconds for the current events to be sent out."""
- pass
+ """
+ Wait `timeout` seconds for the current events to be sent out.
+
+ The default implementation is a no-op, since this method may only be relevant to some transports.
+ Subclasses should override this method if necessary.
+ """
+ return None
def kill(self):
# type: () -> None
- """Forcefully kills the transport."""
- pass
+ """
+ Forcefully kills the transport.
+
+ The default implementation is a no-op, since this method may only be relevant to some transports.
+ Subclasses should override this method if necessary.
+ """
+ return None
def record_lost_event(
self,
@@ -140,7 +156,7 @@ def __del__(self):
def _parse_rate_limits(header, now=None):
# type: (Any, Optional[datetime]) -> Iterable[Tuple[DataCategory, datetime]]
if now is None:
- now = datetime_utcnow()
+ now = datetime.now(timezone.utc)
for limit in header.split(","):
try:
@@ -243,7 +259,7 @@ def _update_rate_limits(self, response):
# sentries if a proxy in front wants to globally slow things down.
elif response.status == 429:
logger.warning("Rate-limited via 429")
- self._disabled_until[None] = datetime_utcnow() + timedelta(
+ self._disabled_until[None] = datetime.now(timezone.utc) + timedelta(
seconds=self._retry.get_retry_after(response) or 60
)
@@ -251,7 +267,7 @@ def _send_request(
self,
body, # type: bytes
headers, # type: Dict[str, str]
- endpoint_type="store", # type: EndpointType
+ endpoint_type=EndpointType.ENVELOPE, # type: EndpointType
envelope=None, # type: Optional[Envelope]
):
# type: (...) -> None
@@ -356,14 +372,15 @@ def _disabled(bucket):
bucket = "metric_bucket"
ts = self._disabled_until.get(bucket)
-
- return ts is not None and ts > datetime_utcnow()
+ return ts is not None and ts > datetime.now(timezone.utc)
return _disabled(category) or _disabled(None)
def _is_rate_limited(self):
# type: () -> bool
- return any(ts > datetime_utcnow() for ts in self._disabled_until.values())
+ return any(
+ ts > datetime.now(timezone.utc) for ts in self._disabled_until.values()
+ )
def _is_worker_full(self):
# type: () -> bool
@@ -373,46 +390,6 @@ def is_healthy(self):
# type: () -> bool
return not (self._is_worker_full() or self._is_rate_limited())
- def _send_event(
- self, event # type: Event
- ):
- # type: (...) -> None
-
- if self._check_disabled("error"):
- self.on_dropped_event("self_rate_limits")
- self.record_lost_event("ratelimit_backoff", data_category="error")
- return None
-
- body = io.BytesIO()
- if self._compresslevel == 0:
- body.write(json_dumps(event))
- else:
- with gzip.GzipFile(
- fileobj=body, mode="w", compresslevel=self._compresslevel
- ) as f:
- f.write(json_dumps(event))
-
- assert self.parsed_dsn is not None
- logger.debug(
- "Sending event, type:%s level:%s event_id:%s project:%s host:%s"
- % (
- event.get("type") or "null",
- event.get("level") or "null",
- event.get("event_id") or "null",
- self.parsed_dsn.project_id,
- self.parsed_dsn.host,
- )
- )
-
- headers = {
- "Content-Type": "application/json",
- }
- if self._compresslevel > 0:
- headers["Content-Encoding"] = "gzip"
-
- self._send_request(body.getvalue(), headers=headers)
- return None
-
def _send_envelope(
self, envelope # type: Envelope
):
@@ -470,7 +447,7 @@ def _send_envelope(
self._send_request(
body.getvalue(),
headers=headers,
- endpoint_type="envelope",
+ endpoint_type=EndpointType.ENVELOPE,
envelope=envelope,
)
return None
@@ -560,23 +537,6 @@ def _make_pool(
else:
return urllib3.PoolManager(**opts)
- def capture_event(
- self, event # type: Event
- ):
- # type: (...) -> None
- hub = self.hub_cls.current
-
- def send_event_wrapper():
- # type: () -> None
- with hub:
- with capture_internal_exceptions():
- self._send_event(event)
- self._flush_client_reports()
-
- if not self._worker.submit(send_event_wrapper):
- self.on_dropped_event("full_queue")
- self.record_lost_event("queue_overflow", data_category="error")
-
def capture_envelope(
self, envelope # type: Envelope
):
@@ -614,6 +574,11 @@ def kill(self):
class _FunctionTransport(Transport):
+ """
+ DEPRECATED: Users wishing to provide a custom transport should subclass
+ the Transport class, rather than providing a function.
+ """
+
def __init__(
self, func # type: Callable[[Event], None]
):
@@ -628,19 +593,33 @@ def capture_event(
self._func(event)
return None
+ def capture_envelope(self, envelope: Envelope) -> None:
+ # Since function transports expect to be called with an event, we need
+ # to iterate over the envelope and call the function for each event, via
+ # the deprecated capture_event method.
+ event = envelope.get_event()
+ if event is not None:
+ self.capture_event(event)
+
def make_transport(options):
# type: (Dict[str, Any]) -> Optional[Transport]
ref_transport = options["transport"]
- # If no transport is given, we use the http transport class
- if ref_transport is None:
- transport_cls = HttpTransport # type: Type[Transport]
- elif isinstance(ref_transport, Transport):
+ # By default, we use the http transport class
+ transport_cls = HttpTransport # type: Type[Transport]
+
+ if isinstance(ref_transport, Transport):
return ref_transport
elif isinstance(ref_transport, type) and issubclass(ref_transport, Transport):
transport_cls = ref_transport
elif callable(ref_transport):
+ warnings.warn(
+ "Function transports are deprecated and will be removed in a future release."
+ "Please provide a Transport instance or subclass, instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
return _FunctionTransport(ref_transport)
# if a transport class is given only instantiate it if the dsn is not
diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py
index efacd6161b..a89a63bf5d 100644
--- a/sentry_sdk/utils.py
+++ b/sentry_sdk/utils.py
@@ -12,30 +12,11 @@
import time
from collections import namedtuple
from copy import copy
+from datetime import datetime
from decimal import Decimal
+from functools import partial, partialmethod, wraps
from numbers import Real
-
-try:
- # Python 3
- from urllib.parse import parse_qs
- from urllib.parse import unquote
- from urllib.parse import urlencode
- from urllib.parse import urlsplit
- from urllib.parse import urlunsplit
-except ImportError:
- # Python 2
- from cgi import parse_qs # type: ignore
- from urllib import unquote # type: ignore
- from urllib import urlencode # type: ignore
- from urlparse import urlsplit # type: ignore
- from urlparse import urlunsplit # type: ignore
-
-try:
- # Python 3
- FileNotFoundError
-except NameError:
- # Python 2
- FileNotFoundError = IOError
+from urllib.parse import parse_qs, unquote, urlencode, urlsplit, urlunsplit
try:
# Python 3.11
@@ -44,38 +25,40 @@
# Python 3.10 and below
BaseExceptionGroup = None # type: ignore
-from datetime import datetime
-from functools import partial
-
-try:
- from functools import partialmethod
-
- _PARTIALMETHOD_AVAILABLE = True
-except ImportError:
- _PARTIALMETHOD_AVAILABLE = False
-
import sentry_sdk
-from sentry_sdk._compat import PY2, PY33, PY37, implements_str, text_type, urlparse
+import sentry_sdk.hub
+from sentry_sdk._compat import PY37
from sentry_sdk._types import TYPE_CHECKING
-from sentry_sdk.consts import DEFAULT_MAX_VALUE_LENGTH
+from sentry_sdk.consts import DEFAULT_MAX_VALUE_LENGTH, EndpointType
if TYPE_CHECKING:
+ from collections.abc import Awaitable
+
from types import FrameType, TracebackType
from typing import (
Any,
Callable,
+ cast,
ContextManager,
Dict,
Iterator,
List,
+ NoReturn,
Optional,
+ overload,
+ ParamSpec,
Set,
Tuple,
Type,
+ TypeVar,
Union,
)
- from sentry_sdk._types import EndpointType, Event, ExcInfo
+ import sentry_sdk.integrations
+ from sentry_sdk._types import Event, ExcInfo
+
+ P = ParamSpec("P")
+ R = TypeVar("R")
epoch = datetime(1970, 1, 1)
@@ -188,7 +171,7 @@ def get_sdk_name(installed_integrations):
return "sentry.python"
-class CaptureInternalException(object):
+class CaptureInternalException:
__slots__ = ()
def __enter__(self):
@@ -244,8 +227,7 @@ class BadDsn(ValueError):
"""Raised on invalid DSNs."""
-@implements_str
-class Dsn(object):
+class Dsn:
"""Represents a DSN."""
def __init__(self, value):
@@ -253,7 +235,7 @@ def __init__(self, value):
if isinstance(value, Dsn):
self.__dict__ = dict(value.__dict__)
return
- parts = urlparse.urlsplit(text_type(value))
+ parts = urlsplit(str(value))
if parts.scheme not in ("http", "https"):
raise BadDsn("Unsupported scheme %r" % parts.scheme)
@@ -278,7 +260,7 @@ def __init__(self, value):
path = parts.path.rsplit("/", 1)
try:
- self.project_id = text_type(int(path.pop()))
+ self.project_id = str(int(path.pop()))
except (ValueError, TypeError):
raise BadDsn("Invalid project in DSN (%r)" % (parts.path or "")[1:])
@@ -318,7 +300,7 @@ def __str__(self):
)
-class Auth(object):
+class Auth:
"""Helper object that represents the auth info."""
def __init__(
@@ -342,17 +324,8 @@ def __init__(
self.version = version
self.client = client
- @property
- def store_api_url(self):
- # type: () -> str
- """Returns the API url for storing events.
-
- Deprecated: use get_api_url instead.
- """
- return self.get_api_url(type="store")
-
def get_api_url(
- self, type="store" # type: EndpointType
+ self, type=EndpointType.ENVELOPE # type: EndpointType
):
# type: (...) -> str
"""Returns the API url for storing events."""
@@ -361,7 +334,7 @@ def get_api_url(
self.host,
self.path,
self.project_id,
- type,
+ type.value,
)
def to_header(self):
@@ -375,7 +348,7 @@ def to_header(self):
return "Sentry " + ", ".join("%s=%s" % (key, value) for key, value in rv)
-class AnnotatedValue(object):
+class AnnotatedValue:
"""
Meta information for a data field in the event payload.
This is to tell Relay that we have tampered with the fields value.
@@ -569,46 +542,17 @@ def get_source_context(
def safe_str(value):
# type: (Any) -> str
try:
- return text_type(value)
+ return str(value)
except Exception:
return safe_repr(value)
-if PY2:
-
- def safe_repr(value):
- # type: (Any) -> str
- try:
- rv = repr(value).decode("utf-8", "replace")
-
- # At this point `rv` contains a bunch of literal escape codes, like
- # this (exaggerated example):
- #
- # u"\\x2f"
- #
- # But we want to show this string as:
- #
- # u"/"
- try:
- # unicode-escape does this job, but can only decode latin1. So we
- # attempt to encode in latin1.
- return rv.encode("latin1").decode("unicode-escape")
- except Exception:
- # Since usually strings aren't latin1 this can break. In those
- # cases we just give up.
- return rv
- except Exception:
- # If e.g. the call to `repr` already fails
- return ""
-
-else:
-
- def safe_repr(value):
- # type: (Any) -> str
- try:
- return repr(value)
- except Exception:
- return ""
+def safe_repr(value):
+ # type: (Any) -> str
+ try:
+ return repr(value)
+ except Exception:
+ return ""
def filename_for_module(module, abs_path):
@@ -976,7 +920,7 @@ def exceptions_from_error_tuple(
def to_string(value):
# type: (str) -> str
try:
- return text_type(value)
+ return str(value)
except UnicodeDecodeError:
return repr(value)[1:-1]
@@ -1138,28 +1082,13 @@ def _truncate_by_bytes(string, max_bytes):
"""
Truncate a UTF-8-encodable string to the last full codepoint so that it fits in max_bytes.
"""
- # This function technically supports bytes, but only for Python 2 compat.
- # XXX remove support for bytes when we drop Python 2
- if isinstance(string, bytes):
- truncated = string[: max_bytes - 3]
- else:
- truncated = string.encode("utf-8")[: max_bytes - 3].decode(
- "utf-8", errors="ignore"
- )
+ truncated = string.encode("utf-8")[: max_bytes - 3].decode("utf-8", errors="ignore")
return truncated + "..."
def _get_size_in_bytes(value):
# type: (str) -> Optional[int]
- # This function technically supports bytes, but only for Python 2 compat.
- # XXX remove support for bytes when we drop Python 2
- if not isinstance(value, (bytes, text_type)):
- return None
-
- if isinstance(value, bytes):
- return len(value)
-
try:
return len(value.encode("utf-8"))
except (UnicodeEncodeError, UnicodeDecodeError):
@@ -1175,9 +1104,7 @@ def strip_string(value, max_length=None):
max_length = DEFAULT_MAX_VALUE_LENGTH
byte_size = _get_size_in_bytes(value)
- text_size = None
- if isinstance(value, text_type):
- text_size = len(value)
+ text_size = len(value)
if byte_size is not None and byte_size > max_length:
# truncate to max_length bytes, preserving code points
@@ -1303,7 +1230,7 @@ def _is_contextvars_broken():
def _make_threadlocal_contextvars(local):
# type: (type) -> type
- class ContextVar(object):
+ class ContextVar:
# Super-limited impl of ContextVar
def __init__(self, name, default=None):
@@ -1328,27 +1255,14 @@ def set(self, value):
def reset(self, token):
# type: (Any) -> None
self._local.value = getattr(self._original_local, token)
- del self._original_local[token]
+ # delete the original value (this way it works in Python 3.6+)
+ del self._original_local.__dict__[token]
return ContextVar
-def _make_noop_copy_context():
- # type: () -> Callable[[], Any]
- class NoOpContext:
- def run(self, func, *args, **kwargs):
- # type: (Callable[..., Any], *Any, **Any) -> Any
- return func(*args, **kwargs)
-
- def copy_context():
- # type: () -> NoOpContext
- return NoOpContext()
-
- return copy_context
-
-
def _get_contextvars():
- # type: () -> Tuple[bool, type, Callable[[], Any]]
+ # type: () -> Tuple[bool, type]
"""
Figure out the "right" contextvars installation to use. Returns a
`contextvars.ContextVar`-like class with a limited API.
@@ -1364,17 +1278,17 @@ def _get_contextvars():
# `aiocontextvars` is absolutely required for functional
# contextvars on Python 3.6.
try:
- from aiocontextvars import ContextVar, copy_context
+ from aiocontextvars import ContextVar
- return True, ContextVar, copy_context
+ return True, ContextVar
except ImportError:
pass
else:
# On Python 3.7 contextvars are functional.
try:
- from contextvars import ContextVar, copy_context
+ from contextvars import ContextVar
- return True, ContextVar, copy_context
+ return True, ContextVar
except ImportError:
pass
@@ -1382,10 +1296,10 @@ def _get_contextvars():
from threading import local
- return False, _make_threadlocal_contextvars(local), _make_noop_copy_context()
+ return False, _make_threadlocal_contextvars(local)
-HAS_REAL_CONTEXTVARS, ContextVar, copy_context = _get_contextvars()
+HAS_REAL_CONTEXTVARS, ContextVar = _get_contextvars()
CONTEXTVARS_ERROR_MESSAGE = """
@@ -1414,10 +1328,8 @@ def qualname_from_function(func):
prefix, suffix = "", ""
- if (
- _PARTIALMETHOD_AVAILABLE
- and hasattr(func, "_partialmethod")
- and isinstance(func._partialmethod, partialmethod)
+ if hasattr(func, "_partialmethod") and isinstance(
+ func._partialmethod, partialmethod
):
prefix, suffix = "partialmethod()"
func = func._partialmethod.func
@@ -1655,16 +1567,16 @@ def match_regex_list(item, regex_list=None, substring_matching=False):
return False
-def is_sentry_url(hub, url):
- # type: (sentry_sdk.Hub, str) -> bool
+def is_sentry_url(client, url):
+ # type: (sentry_sdk.client.BaseClient, str) -> bool
"""
Determines whether the given URL matches the Sentry DSN.
"""
return (
- hub.client is not None
- and hub.client.transport is not None
- and hub.client.transport.parsed_dsn is not None
- and hub.client.transport.parsed_dsn.netloc in url
+ client is not None
+ and client.transport is not None
+ and client.transport.parsed_dsn is not None
+ and client.transport.parsed_dsn.netloc in url
)
@@ -1720,36 +1632,161 @@ def package_version(package):
return parse_version(version)
-if PY37:
+def reraise(tp, value, tb=None):
+ # type: (Optional[Type[BaseException]], Optional[BaseException], Optional[Any]) -> NoReturn
+ assert value is not None
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
- def nanosecond_time():
- # type: () -> int
- return time.perf_counter_ns()
-elif PY33:
+def _no_op(*_a, **_k):
+ # type: (*Any, **Any) -> None
+ """No-op function for ensure_integration_enabled."""
+ pass
+
+
+async def _no_op_async(*_a, **_k):
+ # type: (*Any, **Any) -> None
+ """No-op function for ensure_integration_enabled_async."""
+ pass
+
+
+if TYPE_CHECKING:
+
+ @overload
+ def ensure_integration_enabled(
+ integration, # type: type[sentry_sdk.integrations.Integration]
+ original_function, # type: Callable[P, R]
+ ):
+ # type: (...) -> Callable[[Callable[P, R]], Callable[P, R]]
+ ...
+
+ @overload
+ def ensure_integration_enabled(
+ integration, # type: type[sentry_sdk.integrations.Integration]
+ ):
+ # type: (...) -> Callable[[Callable[P, None]], Callable[P, None]]
+ ...
+
+
+def ensure_integration_enabled(
+ integration, # type: type[sentry_sdk.integrations.Integration]
+ original_function=_no_op, # type: Union[Callable[P, R], Callable[P, None]]
+):
+ # type: (...) -> Callable[[Callable[P, R]], Callable[P, R]]
+ """
+ Ensures a given integration is enabled prior to calling a Sentry-patched function.
+
+ The function takes as its parameters the integration that must be enabled and the original
+ function that the SDK is patching. The function returns a function that takes the
+ decorated (Sentry-patched) function as its parameter, and returns a function that, when
+ called, checks whether the given integration is enabled. If the integration is enabled, the
+ function calls the decorated, Sentry-patched function. If the integration is not enabled,
+ the original function is called.
+
+ The function also takes care of preserving the original function's signature and docstring.
+
+ Example usage:
+
+ ```python
+ @ensure_integration_enabled(MyIntegration, my_function)
+ def patch_my_function():
+ with sentry_sdk.start_transaction(...):
+ return my_function()
+ ```
+ """
+ if TYPE_CHECKING:
+ # Type hint to ensure the default function has the right typing. The overloads
+ # ensure the default _no_op function is only used when R is None.
+ original_function = cast(Callable[P, R], original_function)
+
+ def patcher(sentry_patched_function):
+ # type: (Callable[P, R]) -> Callable[P, R]
+ def runner(*args: "P.args", **kwargs: "P.kwargs"):
+ # type: (...) -> R
+ if sentry_sdk.get_client().get_integration(integration) is None:
+ return original_function(*args, **kwargs)
+
+ return sentry_patched_function(*args, **kwargs)
+
+ if original_function is _no_op:
+ return wraps(sentry_patched_function)(runner)
+
+ return wraps(original_function)(runner)
+
+ return patcher
+
+
+if TYPE_CHECKING:
+
+ # mypy has some trouble with the overloads, hence the ignore[no-overload-impl]
+ @overload # type: ignore[no-overload-impl]
+ def ensure_integration_enabled_async(
+ integration, # type: type[sentry_sdk.integrations.Integration]
+ original_function, # type: Callable[P, Awaitable[R]]
+ ):
+ # type: (...) -> Callable[[Callable[P, Awaitable[R]]], Callable[P, Awaitable[R]]]
+ ...
+
+ @overload
+ def ensure_integration_enabled_async(
+ integration, # type: type[sentry_sdk.integrations.Integration]
+ ):
+ # type: (...) -> Callable[[Callable[P, Awaitable[None]]], Callable[P, Awaitable[None]]]
+ ...
+
+
+# The ignore[no-redef] also needed because mypy is struggling with these overloads.
+def ensure_integration_enabled_async( # type: ignore[no-redef]
+ integration, # type: type[sentry_sdk.integrations.Integration]
+ original_function=_no_op_async, # type: Union[Callable[P, Awaitable[R]], Callable[P, Awaitable[None]]]
+):
+ # type: (...) -> Callable[[Callable[P, Awaitable[R]]], Callable[P, Awaitable[R]]]
+ """
+ Version of `ensure_integration_enabled` for decorating async functions.
+
+ Please refer to the `ensure_integration_enabled` documentation for more information.
+ """
+
+ if TYPE_CHECKING:
+ # Type hint to ensure the default function has the right typing. The overloads
+ # ensure the default _no_op function is only used when R is None.
+ original_function = cast(Callable[P, Awaitable[R]], original_function)
+
+ def patcher(sentry_patched_function):
+ # type: (Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[R]]
+ async def runner(*args: "P.args", **kwargs: "P.kwargs"):
+ # type: (...) -> R
+ if sentry_sdk.get_client().get_integration(integration) is None:
+ return await original_function(*args, **kwargs)
+
+ return await sentry_patched_function(*args, **kwargs)
+
+ if original_function is _no_op_async:
+ return wraps(sentry_patched_function)(runner)
+
+ return wraps(original_function)(runner)
+
+ return patcher
+
+
+if PY37:
def nanosecond_time():
# type: () -> int
- return int(time.perf_counter() * 1e9)
+ return time.perf_counter_ns()
else:
def nanosecond_time():
# type: () -> int
- return int(time.time() * 1e9)
-
-
-if PY2:
-
- def now():
- # type: () -> float
- return time.time()
+ return int(time.perf_counter() * 1e9)
-else:
- def now():
- # type: () -> float
- return time.perf_counter()
+def now():
+ # type: () -> float
+ return time.perf_counter()
try:
diff --git a/sentry_sdk/worker.py b/sentry_sdk/worker.py
index 27b2f2f69c..2e4c58f46a 100644
--- a/sentry_sdk/worker.py
+++ b/sentry_sdk/worker.py
@@ -17,7 +17,7 @@
_TERMINATOR = object()
-class BackgroundWorker(object):
+class BackgroundWorker:
def __init__(self, queue_size=DEFAULT_QUEUE_SIZE):
# type: (int) -> None
self._queue = Queue(queue_size) # type: Queue
@@ -62,7 +62,7 @@ def start(self):
with self._lock:
if not self.is_alive:
self._thread = threading.Thread(
- target=self._target, name="raven-sentry.BackgroundWorker"
+ target=self._target, name="sentry-sdk.BackgroundWorker"
)
self._thread.daemon = True
try:
diff --git a/setup.py b/setup.py
index 14da2fc74c..037a621ddf 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ def get_file_text(file_name):
setup(
name="sentry-sdk",
- version="1.45.0",
+ version="2.0.1",
author="Sentry Team and Contributors",
author_email="hello@sentry.io",
url="https://github.com/getsentry/sentry-python",
@@ -37,10 +37,9 @@ def get_file_text(file_name):
package_data={"sentry_sdk": ["py.typed"]},
zip_safe=False,
license="MIT",
+ python_requires=">=3.6",
install_requires=[
- 'urllib3>=1.25.7; python_version<="3.4"',
- 'urllib3>=1.26.9; python_version=="3.5"',
- 'urllib3>=1.26.11; python_version>="3.6"',
+ "urllib3>=1.26.11",
"certifi",
],
extras_require={
@@ -91,11 +90,7 @@ def get_file_text(file_name):
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
- "Programming Language :: Python :: 2",
- "Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.4",
- "Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
diff --git a/test-requirements.txt b/test-requirements.txt
index c9324e753b..15f150097d 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,15 +1,14 @@
-pip # always use newest pip
-mock ; python_version<'3.3'
+pip
pytest
-pytest-cov==2.8.1
-pytest-forked<=1.4.0
-pytest-localserver==0.5.1 # TODO(py3): 0.6.0 drops 2.7 support: https://github.com/pytest-dev/pytest-localserver/releases/tag/v0.6.0
-pytest-watch==4.2.0
-tox==3.7.0
-jsonschema==3.2.0
-pyrsistent==0.16.0 # TODO(py3): 0.17.0 requires python3, see https://github.com/tobgu/pyrsistent/issues/205
-executing<2.0.0 # TODO(py3): 2.0.0 requires python3
+pytest-cov
+pytest-forked
+pytest-localserver
+pytest-watch
+jsonschema
+pyrsistent
+executing
asttokens
responses
pysocks
ipdb
+setuptools
diff --git a/tests/conftest.py b/tests/conftest.py
index c87111cbf7..118408cfc3 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -3,6 +3,8 @@
import socket
from threading import Thread
from contextlib import contextmanager
+from http.server import BaseHTTPRequestHandler, HTTPServer
+from unittest import mock
import pytest
import jsonschema
@@ -17,29 +19,12 @@
except ImportError:
eventlet = None
-try:
- # Python 2
- import BaseHTTPServer
-
- HTTPServer = BaseHTTPServer.HTTPServer
- BaseHTTPRequestHandler = BaseHTTPServer.BaseHTTPRequestHandler
-except Exception:
- # Python 3
- from http.server import BaseHTTPRequestHandler, HTTPServer
-
-
-try:
- from unittest import mock
-except ImportError:
- import mock
-
import sentry_sdk
-from sentry_sdk._compat import iteritems, reraise, string_types, PY2
from sentry_sdk.envelope import Envelope
from sentry_sdk.integrations import _processed_integrations # noqa: F401
from sentry_sdk.profiler import teardown_profiler
from sentry_sdk.transport import Transport
-from sentry_sdk.utils import capture_internal_exceptions
+from sentry_sdk.utils import reraise
from tests import _warning_recorder, _warning_recorder_mgr
@@ -70,6 +55,19 @@ def benchmark():
del pytest_benchmark
+from sentry_sdk import scope
+
+
+@pytest.fixture(autouse=True)
+def clean_scopes():
+ """
+ Resets the scopes for every test to avoid leaking data between tests.
+ """
+ scope._global_scope = None
+ scope._isolation_scope.set(None)
+ scope._current_scope.set(None)
+
+
@pytest.fixture(autouse=True)
def internal_exceptions(request, monkeypatch):
errors = []
@@ -154,35 +152,6 @@ def _capture_internal_warnings():
raise AssertionError(warning)
-@pytest.fixture
-def monkeypatch_test_transport(monkeypatch, validate_event_schema):
- def check_event(event):
- def check_string_keys(map):
- for key, value in iteritems(map):
- assert isinstance(key, string_types)
- if isinstance(value, dict):
- check_string_keys(value)
-
- with capture_internal_exceptions():
- check_string_keys(event)
- validate_event_schema(event)
-
- def check_envelope(envelope):
- with capture_internal_exceptions():
- # There used to be a check here for errors are not sent in envelopes.
- # We changed the behaviour to send errors in envelopes when tracing is enabled.
- # This is checked in test_client.py::test_sending_events_with_tracing
- # and test_client.py::test_sending_events_with_no_tracing
- pass
-
- def inner(client):
- monkeypatch.setattr(
- client, "transport", TestTransport(check_event, check_envelope)
- )
-
- return inner
-
-
@pytest.fixture
def validate_event_schema(tmpdir):
def inner(event):
@@ -204,13 +173,12 @@ def reset_integrations():
@pytest.fixture
-def sentry_init(monkeypatch_test_transport, request):
+def sentry_init(request):
def inner(*a, **kw):
hub = sentry_sdk.Hub.current
+ kw.setdefault("transport", TestTransport())
client = sentry_sdk.Client(*a, **kw)
hub.bind_client(client)
- if "transport" not in kw:
- monkeypatch_test_transport(sentry_sdk.Hub.current.client)
if request.node.get_closest_marker("forked"):
# Do not run isolation if the test is already running in
@@ -223,11 +191,12 @@ def inner(*a, **kw):
class TestTransport(Transport):
- def __init__(self, capture_event_callback, capture_envelope_callback):
+ def __init__(self):
Transport.__init__(self)
- self.capture_event = capture_event_callback
- self.capture_envelope = capture_envelope_callback
- self._queue = None
+
+ def capture_envelope(self, _: Envelope) -> None:
+ """No-op capture_envelope for tests"""
+ pass
@pytest.fixture
@@ -235,21 +204,16 @@ def capture_events(monkeypatch):
def inner():
events = []
test_client = sentry_sdk.Hub.current.client
- old_capture_event = test_client.transport.capture_event
old_capture_envelope = test_client.transport.capture_envelope
- def append_event(event):
- events.append(event)
- return old_capture_event(event)
-
- def append_envelope(envelope):
+ def append_event(envelope):
for item in envelope:
if item.headers.get("type") in ("event", "transaction"):
- test_client.transport.capture_event(item.payload.json)
+ events.append(item.payload.json)
return old_capture_envelope(envelope)
- monkeypatch.setattr(test_client.transport, "capture_event", append_event)
- monkeypatch.setattr(test_client.transport, "capture_envelope", append_envelope)
+ monkeypatch.setattr(test_client.transport, "capture_envelope", append_event)
+
return events
return inner
@@ -260,21 +224,14 @@ def capture_envelopes(monkeypatch):
def inner():
envelopes = []
test_client = sentry_sdk.Hub.current.client
- old_capture_event = test_client.transport.capture_event
old_capture_envelope = test_client.transport.capture_envelope
- def append_event(event):
- envelope = Envelope()
- envelope.add_event(event)
- envelopes.append(envelope)
- return old_capture_event(event)
-
def append_envelope(envelope):
envelopes.append(envelope)
return old_capture_envelope(envelope)
- monkeypatch.setattr(test_client.transport, "capture_event", append_event)
monkeypatch.setattr(test_client.transport, "capture_envelope", append_envelope)
+
return envelopes
return inner
@@ -310,17 +267,19 @@ def inner():
test_client = sentry_sdk.Hub.current.client
- old_capture_event = test_client.transport.capture_event
+ old_capture_envelope = test_client.transport.capture_envelope
- def append(event):
- events_w.write(json.dumps(event).encode("utf-8"))
- events_w.write(b"\n")
- return old_capture_event(event)
+ def append(envelope):
+ event = envelope.get_event() or envelope.get_transaction_event()
+ if event is not None:
+ events_w.write(json.dumps(event).encode("utf-8"))
+ events_w.write(b"\n")
+ return old_capture_envelope(envelope)
def flush(timeout=None, callback=None):
events_w.write(b"flush\n")
- monkeypatch.setattr(test_client.transport, "capture_event", append)
+ monkeypatch.setattr(test_client.transport, "capture_envelope", append)
monkeypatch.setattr(test_client, "flush", flush)
return EventStreamReader(events_r, events_w)
@@ -328,7 +287,7 @@ def flush(timeout=None, callback=None):
return inner
-class EventStreamReader(object):
+class EventStreamReader:
def __init__(self, read_file, write_file):
self.read_file = read_file
self.write_file = write_file
@@ -420,16 +379,10 @@ def string_containing_matcher():
"""
- class StringContaining(object):
+ class StringContaining:
def __init__(self, substring):
self.substring = substring
-
- try:
- # the `unicode` type only exists in python 2, so if this blows up,
- # we must be in py3 and have the `bytes` type
- self.valid_types = (str, unicode)
- except NameError:
- self.valid_types = (str, bytes)
+ self.valid_types = (str, bytes)
def __eq__(self, test_string):
if not isinstance(test_string, self.valid_types):
@@ -503,7 +456,7 @@ def dictionary_containing_matcher():
>>> f.assert_any_call(DictionaryContaining({"dogs": "yes"})) # no AssertionError
"""
- class DictionaryContaining(object):
+ class DictionaryContaining:
def __init__(self, subdict):
self.subdict = subdict
@@ -543,7 +496,7 @@ def object_described_by_matcher():
Used like this:
- >>> class Dog(object):
+ >>> class Dog:
... pass
...
>>> maisey = Dog()
@@ -555,7 +508,7 @@ def object_described_by_matcher():
>>> f.assert_any_call(ObjectDescribedBy(attrs={"name": "Maisey"})) # no AssertionError
"""
- class ObjectDescribedBy(object):
+ class ObjectDescribedBy:
def __init__(self, type=None, attrs=None):
self.type = type
self.attrs = attrs
@@ -645,11 +598,8 @@ def patch_start_tracing_child(fake_transaction_is_none=False):
fake_transaction = None
fake_start_child = None
- version = "2" if PY2 else "3"
-
with mock.patch(
- "sentry_sdk.tracing_utils_py%s.get_current_span" % version,
- return_value=fake_transaction,
+ "sentry_sdk.tracing_utils.get_current_span", return_value=fake_transaction
):
yield fake_start_child
diff --git a/tests/crons/__init__.py b/tests/crons/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/tests/crons/test_crons_async_py3.py b/tests/crons/test_crons_async_py3.py
deleted file mode 100644
index 53ec96d713..0000000000
--- a/tests/crons/test_crons_async_py3.py
+++ /dev/null
@@ -1,144 +0,0 @@
-import pytest
-
-import sentry_sdk
-
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
-
-
-@sentry_sdk.monitor(monitor_slug="abc123")
-async def _hello_world(name):
- return "Hello, {}".format(name)
-
-
-@sentry_sdk.monitor(monitor_slug="def456")
-async def _break_world(name):
- 1 / 0
- return "Hello, {}".format(name)
-
-
-async def my_coroutine():
- return
-
-
-async def _hello_world_contextmanager(name):
- with sentry_sdk.monitor(monitor_slug="abc123"):
- await my_coroutine()
- return "Hello, {}".format(name)
-
-
-async def _break_world_contextmanager(name):
- with sentry_sdk.monitor(monitor_slug="def456"):
- await my_coroutine()
- 1 / 0
- return "Hello, {}".format(name)
-
-
-@pytest.mark.asyncio
-async def test_decorator(sentry_init):
- sentry_init()
-
- with mock.patch(
- "sentry_sdk.crons.decorator.capture_checkin"
- ) as fake_capture_checkin:
- result = await _hello_world("Grace")
- assert result == "Hello, Grace"
-
- # Check for initial checkin
- fake_capture_checkin.assert_has_calls(
- [
- mock.call(
- monitor_slug="abc123", status="in_progress", monitor_config=None
- ),
- ]
- )
-
- # Check for final checkin
- assert fake_capture_checkin.call_args[1]["monitor_slug"] == "abc123"
- assert fake_capture_checkin.call_args[1]["status"] == "ok"
- assert fake_capture_checkin.call_args[1]["duration"]
- assert fake_capture_checkin.call_args[1]["check_in_id"]
-
-
-@pytest.mark.asyncio
-async def test_decorator_error(sentry_init):
- sentry_init()
-
- with mock.patch(
- "sentry_sdk.crons.decorator.capture_checkin"
- ) as fake_capture_checkin:
- with pytest.raises(ZeroDivisionError):
- result = await _break_world("Grace")
-
- assert "result" not in locals()
-
- # Check for initial checkin
- fake_capture_checkin.assert_has_calls(
- [
- mock.call(
- monitor_slug="def456", status="in_progress", monitor_config=None
- ),
- ]
- )
-
- # Check for final checkin
- assert fake_capture_checkin.call_args[1]["monitor_slug"] == "def456"
- assert fake_capture_checkin.call_args[1]["status"] == "error"
- assert fake_capture_checkin.call_args[1]["duration"]
- assert fake_capture_checkin.call_args[1]["check_in_id"]
-
-
-@pytest.mark.asyncio
-async def test_contextmanager(sentry_init):
- sentry_init()
-
- with mock.patch(
- "sentry_sdk.crons.decorator.capture_checkin"
- ) as fake_capture_checkin:
- result = await _hello_world_contextmanager("Grace")
- assert result == "Hello, Grace"
-
- # Check for initial checkin
- fake_capture_checkin.assert_has_calls(
- [
- mock.call(
- monitor_slug="abc123", status="in_progress", monitor_config=None
- ),
- ]
- )
-
- # Check for final checkin
- assert fake_capture_checkin.call_args[1]["monitor_slug"] == "abc123"
- assert fake_capture_checkin.call_args[1]["status"] == "ok"
- assert fake_capture_checkin.call_args[1]["duration"]
- assert fake_capture_checkin.call_args[1]["check_in_id"]
-
-
-@pytest.mark.asyncio
-async def test_contextmanager_error(sentry_init):
- sentry_init()
-
- with mock.patch(
- "sentry_sdk.crons.decorator.capture_checkin"
- ) as fake_capture_checkin:
- with pytest.raises(ZeroDivisionError):
- result = await _break_world_contextmanager("Grace")
-
- assert "result" not in locals()
-
- # Check for initial checkin
- fake_capture_checkin.assert_has_calls(
- [
- mock.call(
- monitor_slug="def456", status="in_progress", monitor_config=None
- ),
- ]
- )
-
- # Check for final checkin
- assert fake_capture_checkin.call_args[1]["monitor_slug"] == "def456"
- assert fake_capture_checkin.call_args[1]["status"] == "error"
- assert fake_capture_checkin.call_args[1]["duration"]
- assert fake_capture_checkin.call_args[1]["check_in_id"]
diff --git a/tests/integrations/aiohttp/test_aiohttp.py b/tests/integrations/aiohttp/test_aiohttp.py
index 90ca466175..a07fe010fa 100644
--- a/tests/integrations/aiohttp/test_aiohttp.py
+++ b/tests/integrations/aiohttp/test_aiohttp.py
@@ -1,6 +1,7 @@
import asyncio
import json
from contextlib import suppress
+from unittest import mock
import pytest
from aiohttp import web
@@ -11,11 +12,6 @@
from sentry_sdk.integrations.aiohttp import AioHttpIntegration
from tests.conftest import ApproxDict
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
-
@pytest.mark.asyncio
async def test_basic(sentry_init, aiohttp_client, capture_events):
diff --git a/tests/integrations/arq/test_arq.py b/tests/integrations/arq/test_arq.py
index 4c4bc95163..1f597b5fec 100644
--- a/tests/integrations/arq/test_arq.py
+++ b/tests/integrations/arq/test_arq.py
@@ -1,7 +1,7 @@
import asyncio
import pytest
-from sentry_sdk import start_transaction, Hub
+from sentry_sdk import get_client, start_transaction
from sentry_sdk.integrations.arq import ArqIntegration
import arq.worker
@@ -60,7 +60,6 @@ def inner(
integrations=[ArqIntegration()],
traces_sample_rate=1.0,
send_default_pii=True,
- debug=True,
)
server = FakeRedis()
@@ -245,7 +244,7 @@ async def dummy_job(_ctx):
pool, worker = init_arq([dummy_job])
# remove the integration to trigger the edge case
- Hub.current.client.integrations.pop("arq")
+ get_client().integrations.pop("arq")
job = await pool.enqueue_job("dummy_job")
diff --git a/tests/integrations/asgi/test_asgi.py b/tests/integrations/asgi/test_asgi.py
index d60991e99e..d5368ddfe1 100644
--- a/tests/integrations/asgi/test_asgi.py
+++ b/tests/integrations/asgi/test_asgi.py
@@ -1,5 +1,3 @@
-import sys
-
from collections import Counter
import pytest
@@ -11,11 +9,6 @@
from async_asgi_testclient import TestClient
-minimum_python_36 = pytest.mark.skipif(
- sys.version_info < (3, 6), reason="ASGI is only supported in Python >= 3.6"
-)
-
-
@pytest.fixture
def asgi3_app():
async def app(scope, receive, send):
@@ -133,7 +126,6 @@ async def app(scope, receive, send):
return app
-@minimum_python_36
def test_invalid_transaction_style(asgi3_app):
with pytest.raises(ValueError) as exp:
SentryAsgiMiddleware(asgi3_app, transaction_style="URL")
@@ -144,7 +136,6 @@ def test_invalid_transaction_style(asgi3_app):
)
-@minimum_python_36
@pytest.mark.asyncio
async def test_capture_transaction(
sentry_init,
@@ -176,7 +167,6 @@ async def test_capture_transaction(
}
-@minimum_python_36
@pytest.mark.asyncio
async def test_capture_transaction_with_error(
sentry_init,
@@ -214,7 +204,6 @@ async def test_capture_transaction_with_error(
assert transaction_event["request"] == error_event["request"]
-@minimum_python_36
@pytest.mark.asyncio
async def test_has_trace_if_performance_enabled(
sentry_init,
@@ -247,7 +236,6 @@ async def test_has_trace_if_performance_enabled(
)
-@minimum_python_36
@pytest.mark.asyncio
async def test_has_trace_if_performance_disabled(
sentry_init,
@@ -271,7 +259,6 @@ async def test_has_trace_if_performance_disabled(
assert "trace_id" in error_event["contexts"]["trace"]
-@minimum_python_36
@pytest.mark.asyncio
async def test_trace_from_headers_if_performance_enabled(
sentry_init,
@@ -305,7 +292,6 @@ async def test_trace_from_headers_if_performance_enabled(
assert transaction_event["contexts"]["trace"]["trace_id"] == trace_id
-@minimum_python_36
@pytest.mark.asyncio
async def test_trace_from_headers_if_performance_disabled(
sentry_init,
@@ -334,10 +320,9 @@ async def test_trace_from_headers_if_performance_disabled(
assert error_event["contexts"]["trace"]["trace_id"] == trace_id
-@minimum_python_36
@pytest.mark.asyncio
async def test_websocket(sentry_init, asgi3_ws_app, capture_events, request):
- sentry_init(debug=True, send_default_pii=True)
+ sentry_init(send_default_pii=True)
events = capture_events()
@@ -367,7 +352,6 @@ async def test_websocket(sentry_init, asgi3_ws_app, capture_events, request):
assert exc["value"] == "Oh no"
-@minimum_python_36
@pytest.mark.asyncio
async def test_auto_session_tracking_with_aggregates(
sentry_init, asgi3_app, capture_envelopes
@@ -406,7 +390,6 @@ async def test_auto_session_tracking_with_aggregates(
assert len(session_aggregates) == 1
-@minimum_python_36
@pytest.mark.parametrize(
"url,transaction_style,expected_transaction,expected_source",
[
@@ -470,7 +453,6 @@ async def __call__():
pass
-@minimum_python_36
def test_looks_like_asgi3(asgi3_app):
# branch: inspect.isclass(app)
assert _looks_like_asgi3(MockAsgi3App)
@@ -487,7 +469,6 @@ def test_looks_like_asgi3(asgi3_app):
assert not _looks_like_asgi3(asgi2)
-@minimum_python_36
def test_get_ip_x_forwarded_for():
headers = [
(b"x-forwarded-for", b"8.8.8.8"),
@@ -525,7 +506,6 @@ def test_get_ip_x_forwarded_for():
assert ip == "5.5.5.5"
-@minimum_python_36
def test_get_ip_x_real_ip():
headers = [
(b"x-real-ip", b"10.10.10.10"),
@@ -550,7 +530,6 @@ def test_get_ip_x_real_ip():
assert ip == "8.8.8.8"
-@minimum_python_36
def test_get_ip():
# if now headers are provided the ip is taken from the client.
headers = []
@@ -584,7 +563,6 @@ def test_get_ip():
assert ip == "10.10.10.10"
-@minimum_python_36
def test_get_headers():
headers = [
(b"x-real-ip", b"10.10.10.10"),
@@ -602,7 +580,6 @@ def test_get_headers():
}
-@minimum_python_36
@pytest.mark.asyncio
@pytest.mark.parametrize(
"request_url,transaction_style,expected_transaction_name,expected_transaction_source",
@@ -635,7 +612,6 @@ async def test_transaction_name(
"""
sentry_init(
traces_sample_rate=1.0,
- debug=True,
)
envelopes = capture_envelopes()
@@ -654,7 +630,6 @@ async def test_transaction_name(
)
-@minimum_python_36
@pytest.mark.asyncio
@pytest.mark.parametrize(
"request_url, transaction_style,expected_transaction_name,expected_transaction_source",
@@ -698,7 +673,6 @@ def dummy_traces_sampler(sampling_context):
sentry_init(
traces_sampler=dummy_traces_sampler,
traces_sample_rate=1.0,
- debug=True,
)
app = SentryAsgiMiddleware(asgi3_app, transaction_style=transaction_style)
diff --git a/tests/integrations/asyncio/test_asyncio_py3.py b/tests/integrations/asyncio/test_asyncio.py
similarity index 98%
rename from tests/integrations/asyncio/test_asyncio_py3.py
rename to tests/integrations/asyncio/test_asyncio.py
index c563f37b7d..0d7addad44 100644
--- a/tests/integrations/asyncio/test_asyncio_py3.py
+++ b/tests/integrations/asyncio/test_asyncio.py
@@ -1,6 +1,7 @@
import asyncio
import inspect
import sys
+from unittest.mock import MagicMock, patch
import pytest
@@ -8,11 +9,6 @@
from sentry_sdk.consts import OP
from sentry_sdk.integrations.asyncio import AsyncioIntegration, patch_asyncio
-try:
- from unittest.mock import MagicMock, patch
-except ImportError:
- from mock import MagicMock, patch
-
try:
from contextvars import Context, ContextVar
except ImportError:
@@ -71,7 +67,6 @@ async def test_create_task(
sentry_init(
traces_sample_rate=1.0,
send_default_pii=True,
- debug=True,
integrations=[
AsyncioIntegration(),
],
@@ -115,7 +110,6 @@ async def test_gather(
sentry_init(
traces_sample_rate=1.0,
send_default_pii=True,
- debug=True,
integrations=[
AsyncioIntegration(),
],
@@ -159,7 +153,6 @@ async def test_exception(
sentry_init(
traces_sample_rate=1.0,
send_default_pii=True,
- debug=True,
integrations=[
AsyncioIntegration(),
],
@@ -250,7 +243,6 @@ def test_patch_asyncio(mock_get_running_loop):
@minimum_python_37
-@pytest.mark.forked
@patch("asyncio.get_running_loop")
@patch("sentry_sdk.integrations.asyncio.Task")
def test_sentry_task_factory_no_factory(MockTask, mock_get_running_loop): # noqa: N803
@@ -280,7 +272,6 @@ def test_sentry_task_factory_no_factory(MockTask, mock_get_running_loop): # noq
@minimum_python_37
-@pytest.mark.forked
@patch("asyncio.get_running_loop")
def test_sentry_task_factory_with_factory(mock_get_running_loop):
mock_loop = mock_get_running_loop.return_value
diff --git a/tests/integrations/asyncpg/test_asyncpg.py b/tests/integrations/asyncpg/test_asyncpg.py
index 611d8ea9d9..9140216996 100644
--- a/tests/integrations/asyncpg/test_asyncpg.py
+++ b/tests/integrations/asyncpg/test_asyncpg.py
@@ -12,35 +12,27 @@
import os
-PG_NAME = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_NAME", "postgres")
-PG_USER = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_USER", "foo")
-PG_PASSWORD = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PASSWORD", "bar")
PG_HOST = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost")
PG_PORT = 5432
+PG_USER = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_USER", "postgres")
+PG_PASSWORD = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PASSWORD", "sentry")
+PG_NAME = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_NAME", "postgres")
-
-from sentry_sdk._compat import PY2
import datetime
+from contextlib import contextmanager
+from unittest import mock
import asyncpg
import pytest
-
import pytest_asyncio
-
from asyncpg import connect, Connection
from sentry_sdk import capture_message, start_transaction
from sentry_sdk.integrations.asyncpg import AsyncPGIntegration
from sentry_sdk.consts import SPANDATA
from sentry_sdk.tracing_utils import record_sql_queries
-from sentry_sdk._compat import contextmanager
from tests.conftest import ApproxDict
-try:
- from unittest import mock
-except ImportError:
- import mock
-
PG_CONNECTION_URI = "postgresql://{}:{}@{}/{}".format(
PG_USER, PG_PASSWORD, PG_HOST, PG_NAME
@@ -636,9 +628,8 @@ async def test_query_source_with_module_in_search_path(sentry_init, capture_even
assert type(data.get(SPANDATA.CODE_LINENO)) == int
assert data.get(SPANDATA.CODE_LINENO) > 0
- if not PY2:
- assert data.get(SPANDATA.CODE_NAMESPACE) == "asyncpg_helpers.helpers"
- assert data.get(SPANDATA.CODE_FILEPATH) == "asyncpg_helpers/helpers.py"
+ assert data.get(SPANDATA.CODE_NAMESPACE) == "asyncpg_helpers.helpers"
+ assert data.get(SPANDATA.CODE_FILEPATH) == "asyncpg_helpers/helpers.py"
is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
assert is_relative_path
diff --git a/tests/integrations/aws_lambda/test_aws.py b/tests/integrations/aws_lambda/test_aws.py
index 5f2dba132d..cca49f2a35 100644
--- a/tests/integrations/aws_lambda/test_aws.py
+++ b/tests/integrations/aws_lambda/test_aws.py
@@ -43,7 +43,7 @@
import json
import time
-from sentry_sdk.transport import HttpTransport
+from sentry_sdk.transport import Transport
def truncate_data(data):
# AWS Lambda truncates the log output to 4kb, which is small enough to miss
@@ -114,14 +114,10 @@ def envelope_processor(envelope):
return truncate_data(item_json)
-class TestTransport(HttpTransport):
- def _send_event(self, event):
- event = event_processor(event)
- print("\\nEVENT: {}\\n".format(json.dumps(event)))
-
- def _send_envelope(self, envelope):
- envelope = envelope_processor(envelope)
- print("\\nENVELOPE: {}\\n".format(json.dumps(envelope)))
+class TestTransport(Transport):
+ def capture_envelope(self, envelope):
+ envelope_items = envelope_processor(envelope)
+ print("\\nENVELOPE: {}\\n".format(json.dumps(envelope_items)))
def init_sdk(timeout_warning=False, **extra_init_args):
sentry_sdk.init(
@@ -183,27 +179,23 @@ def inner(
response["Payload"] = json.loads(response["Payload"].read().decode("utf-8"))
del response["ResponseMetadata"]
- events = []
- envelopes = []
+ envelope_items = []
for line in response["LogResult"]:
print("AWS:", line)
- if line.startswith(b"EVENT: "):
- line = line[len(b"EVENT: ") :]
- events.append(json.loads(line.decode("utf-8")))
- elif line.startswith(b"ENVELOPE: "):
+ if line.startswith(b"ENVELOPE: "):
line = line[len(b"ENVELOPE: ") :]
- envelopes.append(json.loads(line.decode("utf-8")))
+ envelope_items.append(json.loads(line.decode("utf-8")))
else:
continue
- return envelopes, events, response
+ return envelope_items, response
return inner
def test_basic(run_lambda_function):
- _, events, response = run_lambda_function(
+ envelope_items, response = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
@@ -218,7 +210,7 @@ def test_handler(event, context):
assert response["FunctionError"] == "Unhandled"
- (event,) = events
+ (event,) = envelope_items
assert event["level"] == "error"
(exception,) = event["exception"]["values"]
assert exception["type"] == "Exception"
@@ -254,7 +246,7 @@ def test_initialization_order(run_lambda_function):
as seen by AWS already runs. At this point at least draining the queue
should work."""
- _, events, _ = run_lambda_function(
+ envelope_items, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
@@ -266,7 +258,7 @@ def test_handler(event, context):
b'{"foo": "bar"}',
)
- (event,) = events
+ (event,) = envelope_items
assert event["level"] == "error"
(exception,) = event["exception"]["values"]
@@ -275,7 +267,7 @@ def test_handler(event, context):
def test_request_data(run_lambda_function):
- _, events, _ = run_lambda_function(
+ envelope_items, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
@@ -312,7 +304,7 @@ def test_handler(event, context):
""",
)
- (event,) = events
+ (event,) = envelope_items
assert event["request"] == {
"headers": {
@@ -327,7 +319,7 @@ def test_handler(event, context):
def test_init_error(run_lambda_function, lambda_runtime):
- _, events, _ = run_lambda_function(
+ envelope_items, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
@@ -339,12 +331,12 @@ def test_init_error(run_lambda_function, lambda_runtime):
syntax_check=False,
)
- (event,) = events
+ (event,) = envelope_items
assert event["exception"]["values"][0]["value"] == "name 'func' is not defined"
def test_timeout_error(run_lambda_function):
- _, events, _ = run_lambda_function(
+ envelope_items, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
@@ -359,7 +351,7 @@ def test_handler(event, context):
timeout=2,
)
- (event,) = events
+ (event,) = envelope_items
assert event["level"] == "error"
(exception,) = event["exception"]["values"]
assert exception["type"] == "ServerlessTimeoutWarning"
@@ -387,7 +379,7 @@ def test_handler(event, context):
def test_performance_no_error(run_lambda_function):
- envelopes, _, _ = run_lambda_function(
+ envelope_items, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
@@ -400,7 +392,7 @@ def test_handler(event, context):
b'{"foo": "bar"}',
)
- (envelope,) = envelopes
+ (envelope,) = envelope_items
assert envelope["type"] == "transaction"
assert envelope["contexts"]["trace"]["op"] == "function.aws"
@@ -409,7 +401,7 @@ def test_handler(event, context):
def test_performance_error(run_lambda_function):
- envelopes, _, _ = run_lambda_function(
+ envelope_items, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
@@ -425,7 +417,7 @@ def test_handler(event, context):
(
error_event,
transaction_event,
- ) = envelopes
+ ) = envelope_items
assert error_event["level"] == "error"
(exception,) = error_event["exception"]["values"]
@@ -499,7 +491,7 @@ def test_non_dict_event(
batch_size,
DictionaryContaining, # noqa:N803
):
- envelopes, _, response = run_lambda_function(
+ envelope_items, response = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
@@ -517,7 +509,7 @@ def test_handler(event, context):
(
error_event,
transaction_event,
- ) = envelopes
+ ) = envelope_items
assert error_event["level"] == "error"
assert error_event["contexts"]["trace"]["op"] == "function.aws"
@@ -594,17 +586,14 @@ def test_traces_sampler_gets_correct_values_in_sampling_context(
import inspect
- _, _, response = run_lambda_function(
+ _, response = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(inspect.getsource(StringContaining))
+ dedent(inspect.getsource(DictionaryContaining))
+ dedent(inspect.getsource(ObjectDescribedBy))
+ dedent(
"""
- try:
- from unittest import mock # python 3.3 and above
- except ImportError:
- import mock # python < 3.3
+ from unittest import mock
def _safe_is_equal(x, y):
# copied from conftest.py - see docstring and comments there
@@ -677,15 +666,15 @@ def test_serverless_no_code_instrumentation(run_lambda_function):
"test_dir.test_lambda.test_handler",
]:
print("Testing Initial Handler ", initial_handler)
- _, _, response = run_lambda_function(
+ _, response = run_lambda_function(
dedent(
"""
import sentry_sdk
def test_handler(event, context):
- current_client = sentry_sdk.Hub.current.client
+ current_client = sentry_sdk.get_client()
- assert current_client is not None
+ assert current_client.is_active()
assert len(current_client.options['integrations']) == 1
assert isinstance(current_client.options['integrations'][0],
@@ -713,7 +702,7 @@ def test_handler(event, context):
reason="The limited log output we depend on is being clogged by a new warning"
)
def test_error_has_new_trace_context_performance_enabled(run_lambda_function):
- envelopes, _, _ = run_lambda_function(
+ envelope_items, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
@@ -727,7 +716,7 @@ def test_handler(event, context):
payload=b'{"foo": "bar"}',
)
- (msg_event, error_event, transaction_event) = envelopes
+ (msg_event, error_event, transaction_event) = envelope_items
assert "trace" in msg_event["contexts"]
assert "trace_id" in msg_event["contexts"]["trace"]
@@ -746,7 +735,7 @@ def test_handler(event, context):
def test_error_has_new_trace_context_performance_disabled(run_lambda_function):
- _, events, _ = run_lambda_function(
+ envelope_items, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
@@ -760,7 +749,7 @@ def test_handler(event, context):
payload=b'{"foo": "bar"}',
)
- (msg_event, error_event) = events
+ (msg_event, error_event) = envelope_items
assert "trace" in msg_event["contexts"]
assert "trace_id" in msg_event["contexts"]["trace"]
@@ -791,7 +780,7 @@ def test_error_has_existing_trace_context_performance_enabled(run_lambda_functio
}
}
- envelopes, _, _ = run_lambda_function(
+ envelope_items, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
@@ -805,7 +794,7 @@ def test_handler(event, context):
payload=json.dumps(payload).encode(),
)
- (msg_event, error_event, transaction_event) = envelopes
+ (msg_event, error_event, transaction_event) = envelope_items
assert "trace" in msg_event["contexts"]
assert "trace_id" in msg_event["contexts"]["trace"]
@@ -838,7 +827,7 @@ def test_error_has_existing_trace_context_performance_disabled(run_lambda_functi
}
}
- _, events, _ = run_lambda_function(
+ envelope_items, _ = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
@@ -852,7 +841,7 @@ def test_handler(event, context):
payload=json.dumps(payload).encode(),
)
- (msg_event, error_event) = events
+ (msg_event, error_event) = envelope_items
assert "trace" in msg_event["contexts"]
assert "trace_id" in msg_event["contexts"]["trace"]
@@ -868,7 +857,7 @@ def test_handler(event, context):
def test_basic_with_eventbridge_source(run_lambda_function):
- _, events, response = run_lambda_function(
+ envelope_items, response = run_lambda_function(
LAMBDA_PRELUDE
+ dedent(
"""
@@ -883,7 +872,7 @@ def test_handler(event, context):
assert response["FunctionError"] == "Unhandled"
- (event,) = events
+ (event,) = envelope_items
assert event["level"] == "error"
(exception,) = event["exception"]["values"]
assert exception["type"] == "Exception"
diff --git a/tests/integrations/beam/test_beam.py b/tests/integrations/beam/test_beam.py
index 7926521ca6..5235b93031 100644
--- a/tests/integrations/beam/test_beam.py
+++ b/tests/integrations/beam/test_beam.py
@@ -55,7 +55,7 @@ def fa(self, x, element=False, another_element=False):
def __init__(self):
self.r = "We are in B"
- super(B, self).__init__(self.fa)
+ super().__init__(self.fa)
class SimpleFunc(DoFn):
@@ -182,7 +182,11 @@ def inner(fn):
signature = pardo._signature
output_processor = _OutputHandler()
return DoFnInvoker.create_invoker(
- signature, output_processor, DoFnContext("test")
+ signature,
+ output_processor,
+ DoFnContext("test"),
+ input_args=[],
+ input_kwargs={},
)
return inner
diff --git a/tests/integrations/boto3/aws_mock.py b/tests/integrations/boto3/aws_mock.py
index 84ff23f466..da97570e4c 100644
--- a/tests/integrations/boto3/aws_mock.py
+++ b/tests/integrations/boto3/aws_mock.py
@@ -10,7 +10,7 @@ def stream(self, **kwargs):
contents = self.read()
-class MockResponse(object):
+class MockResponse:
def __init__(self, client, status_code, headers, body):
self._client = client
self._status_code = status_code
diff --git a/tests/integrations/boto3/test_s3.py b/tests/integrations/boto3/test_s3.py
index 8c05b72a3e..6fb0434182 100644
--- a/tests/integrations/boto3/test_s3.py
+++ b/tests/integrations/boto3/test_s3.py
@@ -1,17 +1,13 @@
-import pytest
+from unittest import mock
import boto3
+import pytest
-from sentry_sdk import Hub
+import sentry_sdk
from sentry_sdk.integrations.boto3 import Boto3Integration
from tests.conftest import ApproxDict
-from tests.integrations.boto3.aws_mock import MockResponse
from tests.integrations.boto3 import read_fixture
-
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
+from tests.integrations.boto3.aws_mock import MockResponse
session = boto3.Session(
@@ -25,7 +21,7 @@ def test_basic(sentry_init, capture_events):
events = capture_events()
s3 = session.resource("s3")
- with Hub.current.start_transaction() as transaction, MockResponse(
+ with sentry_sdk.start_transaction() as transaction, MockResponse(
s3.meta.client, 200, {}, read_fixture("s3_list.xml")
):
bucket = s3.Bucket("bucket")
@@ -48,7 +44,7 @@ def test_streaming(sentry_init, capture_events):
events = capture_events()
s3 = session.resource("s3")
- with Hub.current.start_transaction() as transaction, MockResponse(
+ with sentry_sdk.start_transaction() as transaction, MockResponse(
s3.meta.client, 200, {}, b"hello"
):
obj = s3.Bucket("bucket").Object("foo.pdf")
@@ -86,7 +82,7 @@ def test_streaming_close(sentry_init, capture_events):
events = capture_events()
s3 = session.resource("s3")
- with Hub.current.start_transaction() as transaction, MockResponse(
+ with sentry_sdk.start_transaction() as transaction, MockResponse(
s3.meta.client, 200, {}, b"hello"
):
obj = s3.Bucket("bucket").Object("foo.pdf")
@@ -115,7 +111,7 @@ def test_omit_url_data_if_parsing_fails(sentry_init, capture_events):
"sentry_sdk.integrations.boto3.parse_url",
side_effect=ValueError,
):
- with Hub.current.start_transaction() as transaction, MockResponse(
+ with sentry_sdk.start_transaction() as transaction, MockResponse(
s3.meta.client, 200, {}, read_fixture("s3_list.xml")
):
bucket = s3.Bucket("bucket")
diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py
index c6eb55536c..708294cf7e 100644
--- a/tests/integrations/celery/test_celery.py
+++ b/tests/integrations/celery/test_celery.py
@@ -1,25 +1,18 @@
import threading
+from unittest import mock
import pytest
+from celery import Celery, VERSION
+from celery.bin import worker
from sentry_sdk import Hub, configure_scope, start_transaction, get_current_span
from sentry_sdk.integrations.celery import (
CeleryIntegration,
- _get_headers,
_wrap_apply_async,
)
-
-from sentry_sdk._compat import text_type
+from sentry_sdk.integrations.celery.beat import _get_headers
from tests.conftest import ApproxDict
-from celery import Celery, VERSION
-from celery.bin import worker
-
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
-
@pytest.fixture
def connect_signal(request):
@@ -161,11 +154,11 @@ def dummy_task(x, y):
assert (
error_event["contexts"]["trace"]["trace_id"]
- == scope._propagation_context["trace_id"]
+ == scope._propagation_context.trace_id
)
assert (
error_event["contexts"]["trace"]["span_id"]
- != scope._propagation_context["span_id"]
+ != scope._propagation_context.span_id
)
assert error_event["transaction"] == "dummy_task"
assert "celery_task_id" in error_event["tags"]
@@ -227,7 +220,7 @@ def dummy_task(x, y):
"span_id": submission_event["spans"][0]["span_id"],
"start_timestamp": submission_event["spans"][0]["start_timestamp"],
"timestamp": submission_event["spans"][0]["timestamp"],
- "trace_id": text_type(transaction.trace_id),
+ "trace_id": str(transaction.trace_id),
}
]
@@ -287,6 +280,9 @@ def dummy_task(x, y):
assert not events
+@pytest.mark.skip(
+ reason="This tests for a broken rerun in Celery 3. We don't support Celery 3 anymore."
+)
def test_broken_prerun(init_celery, connect_signal):
from celery.signals import task_prerun
@@ -360,11 +356,12 @@ def dummy_task(self):
assert e["type"] == "ZeroDivisionError"
-# TODO: This test is hanging when running test with `tox --parallel auto`. Find out why and fix it!
-@pytest.mark.skip
+@pytest.mark.skip(
+ reason="This test is hanging when running test with `tox --parallel auto`. TODO: Figure out why and fix it!"
+)
@pytest.mark.forked
def test_redis_backend_trace_propagation(init_celery, capture_events_forksafe):
- celery = init_celery(traces_sample_rate=1.0, backend="redis", debug=True)
+ celery = init_celery(traces_sample_rate=1.0, backend="redis")
events = capture_events_forksafe()
@@ -418,11 +415,24 @@ def dummy_task(self):
@pytest.mark.parametrize("newrelic_order", ["sentry_first", "sentry_last"])
def test_newrelic_interference(init_celery, newrelic_order, celery_invocation):
def instrument_newrelic():
- import celery.app.trace as celery_mod
- from newrelic.hooks.application_celery import instrument_celery_execute_trace
+ try:
+ # older newrelic versions
+ from newrelic.hooks.application_celery import (
+ instrument_celery_execute_trace,
+ )
+ import celery.app.trace as celery_trace_module
- assert hasattr(celery_mod, "build_tracer")
- instrument_celery_execute_trace(celery_mod)
+ assert hasattr(celery_trace_module, "build_tracer")
+ instrument_celery_execute_trace(celery_trace_module)
+
+ except ImportError:
+ # newer newrelic versions
+ from newrelic.hooks.application_celery import instrument_celery_app_base
+ import celery.app as celery_app_module
+
+ assert hasattr(celery_app_module, "Celery")
+ assert hasattr(celery_app_module.Celery, "send_task")
+ instrument_celery_app_base(celery_app_module)
if newrelic_order == "sentry_first":
celery = init_celery()
@@ -504,7 +514,13 @@ def dummy_task(self, x, y):
# in the monkey patched version of `apply_async`
# in `sentry_sdk/integrations/celery.py::_wrap_apply_async()`
result = dummy_task.apply_async(args=(1, 0), headers=sentry_crons_setup)
- assert result.get() == sentry_crons_setup
+
+ expected_headers = sentry_crons_setup.copy()
+ # Newly added headers
+ expected_headers["sentry-trace"] = mock.ANY
+ expected_headers["baggage"] = mock.ANY
+
+ assert result.get() == expected_headers
def test_baggage_propagation(init_celery):
@@ -577,26 +593,6 @@ def dummy_function(*args, **kwargs):
wrapped(mock.MagicMock(), (), headers={})
-def test_apply_async_from_beat_no_span(sentry_init):
- sentry_init(
- integrations=[CeleryIntegration()],
- )
-
- def dummy_function(*args, **kwargs):
- headers = kwargs.get("headers")
- assert "sentry-trace" not in headers
- assert "baggage" not in headers
-
- wrapped = _wrap_apply_async(dummy_function)
- wrapped(
- mock.MagicMock(),
- [
- "BEAT",
- ],
- headers={},
- )
-
-
def test_apply_async_no_args(init_celery):
celery = init_celery()
diff --git a/tests/integrations/celery/test_celery_beat_crons.py b/tests/integrations/celery/test_celery_beat_crons.py
index 9ffa59b00d..58c4c6208d 100644
--- a/tests/integrations/celery/test_celery_beat_crons.py
+++ b/tests/integrations/celery/test_celery_beat_crons.py
@@ -1,27 +1,21 @@
import datetime
-import sys
+from unittest import mock
+from unittest.mock import MagicMock
import pytest
+from celery.schedules import crontab, schedule
-from sentry_sdk.integrations.celery import (
+from sentry_sdk.crons import MonitorStatus
+from sentry_sdk.integrations.celery.beat import (
_get_headers,
- _get_humanized_interval,
_get_monitor_config,
_patch_beat_apply_entry,
_patch_redbeat_maybe_due,
- crons_task_success,
crons_task_failure,
crons_task_retry,
+ crons_task_success,
)
-from sentry_sdk.crons import MonitorStatus
-from celery.schedules import crontab, schedule
-
-try:
- from unittest import mock # python 3.3 and above
- from unittest.mock import MagicMock
-except ImportError:
- import mock # python < 3.3
- from mock import MagicMock
+from sentry_sdk.integrations.celery.utils import _get_humanized_interval
def test_get_headers():
@@ -97,10 +91,10 @@ def test_crons_task_success():
}
with mock.patch(
- "sentry_sdk.integrations.celery.capture_checkin"
+ "sentry_sdk.integrations.celery.beat.capture_checkin"
) as mock_capture_checkin:
with mock.patch(
- "sentry_sdk.integrations.celery._now_seconds_since_epoch",
+ "sentry_sdk.integrations.celery.beat._now_seconds_since_epoch",
return_value=500.5,
):
crons_task_success(fake_task)
@@ -141,10 +135,10 @@ def test_crons_task_failure():
}
with mock.patch(
- "sentry_sdk.integrations.celery.capture_checkin"
+ "sentry_sdk.integrations.celery.beat.capture_checkin"
) as mock_capture_checkin:
with mock.patch(
- "sentry_sdk.integrations.celery._now_seconds_since_epoch",
+ "sentry_sdk.integrations.celery.beat._now_seconds_since_epoch",
return_value=500.5,
):
crons_task_failure(fake_task)
@@ -185,10 +179,10 @@ def test_crons_task_retry():
}
with mock.patch(
- "sentry_sdk.integrations.celery.capture_checkin"
+ "sentry_sdk.integrations.celery.beat.capture_checkin"
) as mock_capture_checkin:
with mock.patch(
- "sentry_sdk.integrations.celery._now_seconds_since_epoch",
+ "sentry_sdk.integrations.celery.beat._now_seconds_since_epoch",
return_value=500.5,
):
crons_task_retry(fake_task)
@@ -273,9 +267,7 @@ def test_get_monitor_config_seconds():
celery_schedule = schedule(run_every=3) # seconds
- with mock.patch(
- "sentry_sdk.integrations.celery.logger.warning"
- ) as mock_logger_warning:
+ with mock.patch("sentry_sdk.integrations.logger.warning") as mock_logger_warning:
monitor_config = _get_monitor_config(celery_schedule, app, "foo")
mock_logger_warning.assert_called_with(
"Intervals shorter than one minute are not supported by Sentry Crons. Monitor '%s' has an interval of %s seconds. Use the `exclude_beat_tasks` option in the celery integration to exclude it.",
@@ -379,10 +371,6 @@ def test_get_monitor_config_timezone_in_app_conf():
assert monitor_config["timezone"] == "Asia/Karachi"
-@pytest.mark.skipif(
- sys.version_info < (3, 0),
- reason="no datetime.timezone for Python 2, so skipping this test.",
-)
def test_get_monitor_config_timezone_in_celery_schedule():
app = MagicMock()
app.timezone = "Asia/Karachi"
@@ -418,20 +406,23 @@ def test_exclude_beat_tasks_option(
fake_integration = MagicMock()
fake_integration.exclude_beat_tasks = exclude_beat_tasks
+ fake_client = MagicMock()
+ fake_client.get_integration.return_value = fake_integration
+
fake_schedule_entry = MagicMock()
fake_schedule_entry.name = task_name
fake_get_monitor_config = MagicMock()
with mock.patch(
- "sentry_sdk.integrations.celery.Scheduler", fake_scheduler
+ "sentry_sdk.integrations.celery.beat.Scheduler", fake_scheduler
) as Scheduler: # noqa: N806
with mock.patch(
- "sentry_sdk.integrations.celery.Hub.current.get_integration",
- return_value=fake_integration,
+ "sentry_sdk.integrations.celery.sentry_sdk.get_client",
+ return_value=fake_client,
):
with mock.patch(
- "sentry_sdk.integrations.celery._get_monitor_config",
+ "sentry_sdk.integrations.celery.beat._get_monitor_config",
fake_get_monitor_config,
) as _get_monitor_config:
# Mimic CeleryIntegration patching of Scheduler.apply_entry()
@@ -471,20 +462,23 @@ def test_exclude_redbeat_tasks_option(
fake_integration = MagicMock()
fake_integration.exclude_beat_tasks = exclude_beat_tasks
+ fake_client = MagicMock()
+ fake_client.get_integration.return_value = fake_integration
+
fake_schedule_entry = MagicMock()
fake_schedule_entry.name = task_name
fake_get_monitor_config = MagicMock()
with mock.patch(
- "sentry_sdk.integrations.celery.RedBeatScheduler", fake_redbeat_scheduler
+ "sentry_sdk.integrations.celery.beat.RedBeatScheduler", fake_redbeat_scheduler
) as RedBeatScheduler: # noqa: N806
with mock.patch(
- "sentry_sdk.integrations.celery.Hub.current.get_integration",
- return_value=fake_integration,
+ "sentry_sdk.integrations.celery.sentry_sdk.get_client",
+ return_value=fake_client,
):
with mock.patch(
- "sentry_sdk.integrations.celery._get_monitor_config",
+ "sentry_sdk.integrations.celery.beat._get_monitor_config",
fake_get_monitor_config,
) as _get_monitor_config:
# Mimic CeleryIntegration patching of RedBeatScheduler.maybe_due()
diff --git a/tests/integrations/celery/test_update_celery_task_headers.py b/tests/integrations/celery/test_update_celery_task_headers.py
new file mode 100644
index 0000000000..b1588e86b8
--- /dev/null
+++ b/tests/integrations/celery/test_update_celery_task_headers.py
@@ -0,0 +1,172 @@
+from copy import copy
+import pytest
+
+from unittest import mock
+
+from sentry_sdk.integrations.celery import _update_celery_task_headers
+import sentry_sdk
+from sentry_sdk.tracing_utils import Baggage
+
+
+BAGGAGE_VALUE = (
+ "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
+ "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
+ "sentry-sample_rate=0.1337,"
+ "custom=value"
+)
+
+SENTRY_TRACE_VALUE = "771a43a4192642f0b136d5159a501700-1234567890abcdef-1"
+
+
+@pytest.mark.parametrize("monitor_beat_tasks", [True, False, None, "", "bla", 1, 0])
+def test_monitor_beat_tasks(monitor_beat_tasks):
+ headers = {}
+ span = None
+
+ updated_headers = _update_celery_task_headers(headers, span, monitor_beat_tasks)
+
+ assert headers == {} # left unchanged
+
+ if monitor_beat_tasks:
+ assert updated_headers == {
+ "headers": {"sentry-monitor-start-timestamp-s": mock.ANY},
+ "sentry-monitor-start-timestamp-s": mock.ANY,
+ }
+ else:
+ assert updated_headers == headers
+
+
+@pytest.mark.parametrize("monitor_beat_tasks", [True, False, None, "", "bla", 1, 0])
+def test_monitor_beat_tasks_with_headers(monitor_beat_tasks):
+ headers = {
+ "blub": "foo",
+ "sentry-something": "bar",
+ }
+ span = None
+
+ updated_headers = _update_celery_task_headers(headers, span, monitor_beat_tasks)
+
+ if monitor_beat_tasks:
+ assert updated_headers == {
+ "blub": "foo",
+ "sentry-something": "bar",
+ "headers": {
+ "sentry-monitor-start-timestamp-s": mock.ANY,
+ "sentry-something": "bar",
+ },
+ "sentry-monitor-start-timestamp-s": mock.ANY,
+ }
+ else:
+ assert updated_headers == headers
+
+
+def test_span_with_transaction(sentry_init):
+ sentry_init(enable_tracing=True)
+ headers = {}
+
+ with sentry_sdk.start_transaction(name="test_transaction") as transaction:
+ with sentry_sdk.start_span(op="test_span") as span:
+ updated_headers = _update_celery_task_headers(headers, span, False)
+
+ assert updated_headers["sentry-trace"] == span.to_traceparent()
+ assert updated_headers["headers"]["sentry-trace"] == span.to_traceparent()
+ assert updated_headers["baggage"] == transaction.get_baggage().serialize()
+ assert (
+ updated_headers["headers"]["baggage"]
+ == transaction.get_baggage().serialize()
+ )
+
+
+def test_span_with_no_transaction(sentry_init):
+ sentry_init(enable_tracing=True)
+ headers = {}
+
+ with sentry_sdk.start_span(op="test_span") as span:
+ updated_headers = _update_celery_task_headers(headers, span, False)
+
+ assert updated_headers["sentry-trace"] == span.to_traceparent()
+ assert updated_headers["headers"]["sentry-trace"] == span.to_traceparent()
+ assert "baggage" not in updated_headers.keys()
+ assert "baggage" not in updated_headers["headers"].keys()
+
+
+def test_custom_span(sentry_init):
+ sentry_init(enable_tracing=True)
+ span = sentry_sdk.tracing.Span()
+ headers = {}
+
+ with sentry_sdk.start_transaction(name="test_transaction"):
+ updated_headers = _update_celery_task_headers(headers, span, False)
+
+ assert updated_headers["sentry-trace"] == span.to_traceparent()
+ assert updated_headers["headers"]["sentry-trace"] == span.to_traceparent()
+ assert "baggage" not in updated_headers.keys()
+ assert "baggage" not in updated_headers["headers"].keys()
+
+
+def test_span_with_transaction_custom_headers(sentry_init):
+ sentry_init(enable_tracing=True)
+ headers = {
+ "baggage": BAGGAGE_VALUE,
+ "sentry-trace": SENTRY_TRACE_VALUE,
+ }
+
+ with sentry_sdk.start_transaction(name="test_transaction") as transaction:
+ with sentry_sdk.start_span(op="test_span") as span:
+ updated_headers = _update_celery_task_headers(headers, span, False)
+
+ assert updated_headers["sentry-trace"] == span.to_traceparent()
+ assert updated_headers["headers"]["sentry-trace"] == span.to_traceparent()
+
+ incoming_baggage = Baggage.from_incoming_header(headers["baggage"])
+ combined_baggage = copy(transaction.get_baggage())
+ combined_baggage.sentry_items.update(incoming_baggage.sentry_items)
+ combined_baggage.third_party_items = ",".join(
+ [
+ x
+ for x in [
+ combined_baggage.third_party_items,
+ incoming_baggage.third_party_items,
+ ]
+ if x is not None and x != ""
+ ]
+ )
+ assert updated_headers["baggage"] == combined_baggage.serialize(
+ include_third_party=True
+ )
+ assert updated_headers["headers"]["baggage"] == combined_baggage.serialize(
+ include_third_party=True
+ )
+
+
+def test_span_with_no_transaction_custom_headers(sentry_init):
+ sentry_init(enable_tracing=True)
+ headers = {
+ "baggage": BAGGAGE_VALUE,
+ "sentry-trace": SENTRY_TRACE_VALUE,
+ }
+
+ with sentry_sdk.start_span(op="test_span") as span:
+ updated_headers = _update_celery_task_headers(headers, span, False)
+
+ assert updated_headers["sentry-trace"] == span.to_traceparent()
+ assert updated_headers["headers"]["sentry-trace"] == span.to_traceparent()
+ assert updated_headers["baggage"] == headers["baggage"]
+ assert updated_headers["headers"]["baggage"] == headers["baggage"]
+
+
+def test_custom_span_custom_headers(sentry_init):
+ sentry_init(enable_tracing=True)
+ span = sentry_sdk.tracing.Span()
+ headers = {
+ "baggage": BAGGAGE_VALUE,
+ "sentry-trace": SENTRY_TRACE_VALUE,
+ }
+
+ with sentry_sdk.start_transaction(name="test_transaction"):
+ updated_headers = _update_celery_task_headers(headers, span, False)
+
+ assert updated_headers["sentry-trace"] == span.to_traceparent()
+ assert updated_headers["headers"]["sentry-trace"] == span.to_traceparent()
+ assert updated_headers["baggage"] == headers["baggage"]
+ assert updated_headers["headers"]["baggage"] == headers["baggage"]
diff --git a/tests/integrations/cloud_resource_context/test_cloud_resource_context.py b/tests/integrations/cloud_resource_context/test_cloud_resource_context.py
index b36f795a2b..90c78b28ec 100644
--- a/tests/integrations/cloud_resource_context/test_cloud_resource_context.py
+++ b/tests/integrations/cloud_resource_context/test_cloud_resource_context.py
@@ -1,14 +1,9 @@
import json
+from unittest import mock
+from unittest.mock import MagicMock
import pytest
-try:
- from unittest import mock # python 3.3 and above
- from unittest.mock import MagicMock
-except ImportError:
- import mock # python < 3.3
- from mock import MagicMock
-
from sentry_sdk.integrations.cloud_resource_context import (
CLOUD_PLATFORM,
CLOUD_PROVIDER,
@@ -32,16 +27,11 @@
"version": "2017-09-30",
}
-try:
- # Python 3
- AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD_BYTES = bytes(
- json.dumps(AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD), "utf-8"
- )
-except TypeError:
- # Python 2
- AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD_BYTES = bytes(
- json.dumps(AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD)
- ).encode("utf-8")
+
+AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD_BYTES = bytes(
+ json.dumps(AWS_EC2_EXAMPLE_IMDSv2_PAYLOAD), "utf-8"
+)
+
GCP_GCE_EXAMPLE_METADATA_PLAYLOAD = {
"instance": {
@@ -405,6 +395,12 @@ def test_setup_once(
fake_set_context.assert_not_called()
if warning_called:
- assert fake_warning.call_count == 1
+ correct_warning_found = False
+ for call in fake_warning.call_args_list:
+ if call[0][0].startswith("Invalid value for cloud_provider:"):
+ correct_warning_found = True
+ break
+
+ assert correct_warning_found
else:
fake_warning.assert_not_called()
diff --git a/tests/integrations/conftest.py b/tests/integrations/conftest.py
index cffb278d70..9f30ccf076 100644
--- a/tests/integrations/conftest.py
+++ b/tests/integrations/conftest.py
@@ -6,16 +6,26 @@
def capture_exceptions(monkeypatch):
def inner():
errors = set()
- old_capture_event = sentry_sdk.Hub.capture_event
+ old_capture_event_hub = sentry_sdk.Hub.capture_event
+ old_capture_event_scope = sentry_sdk.Scope.capture_event
- def capture_event(self, event, hint=None):
+ def capture_event_hub(self, event, hint=None, scope=None):
if hint:
if "exc_info" in hint:
error = hint["exc_info"][1]
errors.add(error)
- return old_capture_event(self, event, hint=hint)
+ return old_capture_event_hub(self, event, hint=hint, scope=scope)
+
+ def capture_event_scope(self, event, hint=None, scope=None):
+ if hint:
+ if "exc_info" in hint:
+ error = hint["exc_info"][1]
+ errors.add(error)
+ return old_capture_event_scope(self, event, hint=hint, scope=scope)
+
+ monkeypatch.setattr(sentry_sdk.Hub, "capture_event", capture_event_hub)
+ monkeypatch.setattr(sentry_sdk.Scope, "capture_event", capture_event_scope)
- monkeypatch.setattr(sentry_sdk.Hub, "capture_event", capture_event)
return errors
return inner
diff --git a/tests/integrations/django/asgi/test_asgi.py b/tests/integrations/django/asgi/test_asgi.py
index 21a72e4a32..fd266c4fae 100644
--- a/tests/integrations/django/asgi/test_asgi.py
+++ b/tests/integrations/django/asgi/test_asgi.py
@@ -1,6 +1,7 @@
import base64
import json
import os
+from unittest import mock
import django
import pytest
@@ -14,10 +15,6 @@
except ImportError:
from django.core.urlresolvers import reverse
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
APPS = [channels_application]
if django.VERSION >= (3, 0):
@@ -110,7 +107,6 @@ async def test_active_thread_id(sentry_init, capture_envelopes, endpoint, applic
await comm.wait()
data = json.loads(response["body"])
-
envelopes = [envelope for envelope in envelopes]
assert len(envelopes) == 1
@@ -137,8 +133,12 @@ async def test_async_views_concurrent_execution(sentry_init, settings):
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
- comm = HttpCommunicator(asgi_application, "GET", "/my_async_view")
- comm2 = HttpCommunicator(asgi_application, "GET", "/my_async_view")
+ comm = HttpCommunicator(
+ asgi_application, "GET", "/my_async_view"
+ ) # sleeps for 1 second
+ comm2 = HttpCommunicator(
+ asgi_application, "GET", "/my_async_view"
+ ) # sleeps for 1 second
loop = asyncio.get_event_loop()
@@ -154,7 +154,9 @@ async def test_async_views_concurrent_execution(sentry_init, settings):
assert resp1.result()["status"] == 200
assert resp2.result()["status"] == 200
- assert end - start < 1.5
+ assert (
+ end - start < 2
+ ) # it takes less than 2 seconds so it was executing concurrently
@pytest.mark.asyncio
@@ -175,8 +177,12 @@ async def test_async_middleware_that_is_function_concurrent_execution(
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
- comm = HttpCommunicator(asgi_application, "GET", "/my_async_view")
- comm2 = HttpCommunicator(asgi_application, "GET", "/my_async_view")
+ comm = HttpCommunicator(
+ asgi_application, "GET", "/my_async_view"
+ ) # sleeps for 1 second
+ comm2 = HttpCommunicator(
+ asgi_application, "GET", "/my_async_view"
+ ) # sleeps for 1 second
loop = asyncio.get_event_loop()
@@ -192,7 +198,9 @@ async def test_async_middleware_that_is_function_concurrent_execution(
assert resp1.result()["status"] == 200
assert resp2.result()["status"] == 200
- assert end - start < 1.5
+ assert (
+ end - start < 2
+ ) # it takes less than 2 seconds so it was executing concurrently
@pytest.mark.asyncio
diff --git a/tests/integrations/django/myapp/custom_urls.py b/tests/integrations/django/myapp/custom_urls.py
index bc703e0afe..5b2a1e428b 100644
--- a/tests/integrations/django/myapp/custom_urls.py
+++ b/tests/integrations/django/myapp/custom_urls.py
@@ -14,8 +14,6 @@
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
-from __future__ import absolute_import
-
try:
from django.urls import path
except ImportError:
diff --git a/tests/integrations/django/myapp/settings.py b/tests/integrations/django/myapp/settings.py
index ac06d9204e..8956357a51 100644
--- a/tests/integrations/django/myapp/settings.py
+++ b/tests/integrations/django/myapp/settings.py
@@ -17,16 +17,9 @@
sentry_sdk.init(integrations=[DjangoIntegration()])
-
import os
-try:
- # Django >= 1.10
- from django.utils.deprecation import MiddlewareMixin
-except ImportError:
- # Not required for Django <= 1.9, see:
- # https://docs.djangoproject.com/en/1.10/topics/http/middleware/#upgrading-pre-django-1-10-style-middleware
- MiddlewareMixin = object
+from django.utils.deprecation import MiddlewareMixin
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@@ -128,11 +121,13 @@ def middleware(request):
DATABASES["postgres"] = {
"ENGINE": db_engine,
- "NAME": os.environ["SENTRY_PYTHON_TEST_POSTGRES_NAME"],
- "USER": os.environ["SENTRY_PYTHON_TEST_POSTGRES_USER"],
- "PASSWORD": os.environ["SENTRY_PYTHON_TEST_POSTGRES_PASSWORD"],
"HOST": os.environ.get("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost"),
"PORT": 5432,
+ "USER": os.environ.get("SENTRY_PYTHON_TEST_POSTGRES_USER", "postgres"),
+ "PASSWORD": os.environ.get("SENTRY_PYTHON_TEST_POSTGRES_PASSWORD", "sentry"),
+ "NAME": os.environ.get(
+ "SENTRY_PYTHON_TEST_POSTGRES_NAME", f"myapp_db_{os.getpid()}"
+ ),
}
except (ImportError, KeyError):
from sentry_sdk.utils import logger
diff --git a/tests/integrations/django/myapp/urls.py b/tests/integrations/django/myapp/urls.py
index 672a9b15ae..b6565c3cdd 100644
--- a/tests/integrations/django/myapp/urls.py
+++ b/tests/integrations/django/myapp/urls.py
@@ -14,8 +14,6 @@
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
-from __future__ import absolute_import
-
try:
from django.urls import path
except ImportError:
diff --git a/tests/integrations/django/myapp/views.py b/tests/integrations/django/myapp/views.py
index 294895430b..4e6b4ee27f 100644
--- a/tests/integrations/django/myapp/views.py
+++ b/tests/integrations/django/myapp/views.py
@@ -1,7 +1,7 @@
+import asyncio
import json
import threading
-from django import VERSION
from django.contrib.auth import login
from django.contrib.auth.models import User
from django.core.exceptions import PermissionDenied
@@ -89,14 +89,14 @@ def view_with_cached_template_fragment(request):
# interesting property of this one is that csrf_exempt, as a class attribute,
# is not in __dict__, so regular use of functools.wraps will not forward the
# attribute.
-class SentryClassBasedView(object):
+class SentryClassBasedView:
csrf_exempt = True
def __call__(self, request):
return HttpResponse("ok")
-class SentryClassBasedViewWithCsrf(object):
+class SentryClassBasedViewWithCsrf:
def __call__(self, request):
return HttpResponse("ok")
@@ -123,7 +123,7 @@ def mylogin(request):
@csrf_exempt
def handler500(request):
- return HttpResponseServerError("Sentry error: %s" % sentry_sdk.last_event_id())
+ return HttpResponseServerError("Sentry error.")
class ClassBasedView(ListView):
@@ -131,7 +131,7 @@ class ClassBasedView(ListView):
@method_decorator(csrf_exempt)
def dispatch(self, request, *args, **kwargs):
- return super(ClassBasedView, self).dispatch(request, *args, **kwargs)
+ return super().dispatch(request, *args, **kwargs)
def head(self, *args, **kwargs):
sentry_sdk.capture_message("hi")
@@ -182,10 +182,17 @@ def template_test2(request, *args, **kwargs):
@csrf_exempt
def template_test3(request, *args, **kwargs):
- from sentry_sdk import Hub
+ from sentry_sdk import Scope
- hub = Hub.current
- capture_message(hub.get_traceparent() + "\n" + hub.get_baggage())
+ traceparent = Scope.get_current_scope().get_traceparent()
+ if traceparent is None:
+ traceparent = Scope.get_isolation_scope().get_traceparent()
+
+ baggage = Scope.get_current_scope().get_baggage()
+ if baggage is None:
+ baggage = Scope.get_isolation_scope().get_baggage()
+
+ capture_message(traceparent + "\n" + baggage.serialize())
return render(request, "trace_meta.html", {})
@@ -223,41 +230,32 @@ def thread_ids_sync(*args, **kwargs):
return HttpResponse(response)
-if VERSION >= (3, 1):
- # Use exec to produce valid Python 2
- exec(
- """async def async_message(request):
+async def async_message(request):
sentry_sdk.capture_message("hi")
- return HttpResponse("ok")"""
- )
+ return HttpResponse("ok")
+
- exec(
- """async def my_async_view(request):
- import asyncio
+async def my_async_view(request):
await asyncio.sleep(1)
- return HttpResponse('Hello World')"""
- )
+ return HttpResponse("Hello World")
+
- exec(
- """async def thread_ids_async(request):
- response = json.dumps({
- "main": threading.main_thread().ident,
- "active": threading.current_thread().ident,
- })
- return HttpResponse(response)"""
+async def thread_ids_async(request):
+ response = json.dumps(
+ {
+ "main": threading.main_thread().ident,
+ "active": threading.current_thread().ident,
+ }
)
+ return HttpResponse(response)
- exec(
- """async def post_echo_async(request):
+
+async def post_echo_async(request):
sentry_sdk.capture_message("hi")
return HttpResponse(request.body)
-post_echo_async.csrf_exempt = True"""
- )
-else:
- async_message = None
- my_async_view = None
- thread_ids_async = None
- post_echo_async = None
+
+
+post_echo_async.csrf_exempt = True
@csrf_exempt
diff --git a/tests/integrations/django/test_basic.py b/tests/integrations/django/test_basic.py
index 1efe4be278..88cf413f47 100644
--- a/tests/integrations/django/test_basic.py
+++ b/tests/integrations/django/test_basic.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import json
import os
import random
@@ -19,13 +17,14 @@
except ImportError:
from django.core.urlresolvers import reverse
-from sentry_sdk._compat import PY2, PY310
-from sentry_sdk import capture_message, capture_exception, configure_scope
+from sentry_sdk._compat import PY310
+from sentry_sdk import capture_message, capture_exception
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.django import DjangoIntegration, _set_db_data
from sentry_sdk.integrations.django.signals_handlers import _get_receiver_name
from sentry_sdk.integrations.django.caching import _get_span_description
from sentry_sdk.integrations.executing import ExecutingIntegration
+from sentry_sdk.scope import Scope
from sentry_sdk.tracing import Span
from tests.conftest import ApproxDict, unpack_werkzeug_response
from tests.integrations.django.myapp.wsgi import application
@@ -277,7 +276,7 @@ def test_trace_from_headers_if_performance_disabled(
@pytest.mark.forked
-@pytest.mark.django_db
+@pytest_mark_django_db_decorator()
def test_user_captured(sentry_init, client, capture_events):
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
events = capture_events()
@@ -299,7 +298,7 @@ def test_user_captured(sentry_init, client, capture_events):
@pytest.mark.forked
-@pytest.mark.django_db
+@pytest_mark_django_db_decorator()
def test_queryset_repr(sentry_init, capture_events):
sentry_init(integrations=[DjangoIntegration()])
events = capture_events()
@@ -340,17 +339,14 @@ def test_custom_error_handler_request_context(sentry_init, client, capture_event
}
-def test_500(sentry_init, client, capture_events):
+def test_500(sentry_init, client):
sentry_init(integrations=[DjangoIntegration()], send_default_pii=True)
- events = capture_events()
content, status, headers = unpack_werkzeug_response(client.get("/view-exc"))
assert status.lower() == "500 internal server error"
content = content.decode("utf-8")
- (event,) = events
- event_id = event["event_id"]
- assert content == "Sentry error: %s" % event_id
+ assert content == "Sentry error."
@pytest.mark.forked
@@ -363,7 +359,7 @@ def test_management_command_raises():
@pytest.mark.forked
-@pytest.mark.django_db
+@pytest_mark_django_db_decorator()
@pytest.mark.parametrize("with_integration", [True, False])
def test_sql_queries(sentry_init, capture_events, with_integration):
sentry_init(
@@ -378,8 +374,7 @@ def test_sql_queries(sentry_init, capture_events, with_integration):
sql = connection.cursor()
- with configure_scope() as scope:
- scope.clear_breadcrumbs()
+ Scope.get_isolation_scope().clear_breadcrumbs()
with pytest.raises(OperationalError):
# table doesn't even exist
@@ -413,8 +408,7 @@ def test_sql_dict_query_params(sentry_init, capture_events):
sql = connections["postgres"].cursor()
events = capture_events()
- with configure_scope() as scope:
- scope.clear_breadcrumbs()
+ Scope.get_isolation_scope().clear_breadcrumbs()
with pytest.raises(ProgrammingError):
sql.execute(
@@ -479,8 +473,7 @@ def test_sql_psycopg2_string_composition(sentry_init, capture_events, query):
sql = connections["postgres"].cursor()
- with configure_scope() as scope:
- scope.clear_breadcrumbs()
+ Scope.get_isolation_scope().clear_breadcrumbs()
events = capture_events()
@@ -513,8 +506,7 @@ def test_sql_psycopg2_placeholders(sentry_init, capture_events):
sql = connections["postgres"].cursor()
events = capture_events()
- with configure_scope() as scope:
- scope.clear_breadcrumbs()
+ Scope.get_isolation_scope().clear_breadcrumbs()
with pytest.raises(DataError):
names = ["foo", "bar"]
@@ -670,7 +662,7 @@ def test_db_connection_span_data(sentry_init, client, capture_events):
def test_set_db_data_custom_backend():
- class DummyBackend(object):
+ class DummyBackend:
# https://github.com/mongodb/mongo-python-driver/blob/6ffae5522c960252b8c9adfe2a19b29ff28187cb/pymongo/collection.py#L126
def __getattr__(self, attr):
return self
@@ -796,9 +788,8 @@ def test_template_tracing_meta(sentry_init, client, capture_events):
assert match is not None
assert match.group(1) == traceparent
- # Python 2 does not preserve sort order
rendered_baggage = match.group(2)
- assert sorted(rendered_baggage.split(",")) == sorted(baggage.split(","))
+ assert rendered_baggage == baggage
@pytest.mark.parametrize("with_executing_integration", [[], [ExecutingIntegration()]])
@@ -1156,13 +1147,10 @@ def dummy(a, b):
name = _get_receiver_name(dummy)
- if PY2:
- assert name == "tests.integrations.django.test_basic.dummy"
- else:
- assert (
- name
- == "tests.integrations.django.test_basic.test_get_receiver_name..dummy"
- )
+ assert (
+ name
+ == "tests.integrations.django.test_basic.test_get_receiver_name..dummy"
+ )
a_partial = partial(dummy)
name = _get_receiver_name(a_partial)
diff --git a/tests/integrations/django/test_db_query_data.py b/tests/integrations/django/test_db_query_data.py
index 92b1415f78..878babf507 100644
--- a/tests/integrations/django/test_db_query_data.py
+++ b/tests/integrations/django/test_db_query_data.py
@@ -1,10 +1,9 @@
-from __future__ import absolute_import
-
import os
+
import pytest
from datetime import datetime
+from unittest import mock
-from sentry_sdk._compat import PY2
from django import VERSION as DJANGO_VERSION
from django.db import connections
@@ -23,11 +22,6 @@
from tests.integrations.django.utils import pytest_mark_django_db_decorator
from tests.integrations.django.myapp.wsgi import application
-try:
- from unittest import mock
-except ImportError:
- import mock
-
@pytest.fixture
def client():
@@ -210,10 +204,8 @@ def test_query_source_with_module_in_search_path(sentry_init, client, capture_ev
assert type(data.get(SPANDATA.CODE_LINENO)) == int
assert data.get(SPANDATA.CODE_LINENO) > 0
-
- if not PY2:
- assert data.get(SPANDATA.CODE_NAMESPACE) == "django_helpers.views"
- assert data.get(SPANDATA.CODE_FILEPATH) == "django_helpers/views.py"
+ assert data.get(SPANDATA.CODE_NAMESPACE) == "django_helpers.views"
+ assert data.get(SPANDATA.CODE_FILEPATH) == "django_helpers/views.py"
is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
assert is_relative_path
diff --git a/tests/integrations/django/test_transactions.py b/tests/integrations/django/test_transactions.py
index c9914c8ec5..75323f11e5 100644
--- a/tests/integrations/django/test_transactions.py
+++ b/tests/integrations/django/test_transactions.py
@@ -1,13 +1,8 @@
-from __future__ import absolute_import
+from unittest import mock
import pytest
import django
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
-
# django<2.0 has only `url` with regex based patterns.
# django>=2.0 renames `url` to `re_path`, and additionally introduces `path`
diff --git a/tests/integrations/excepthook/test_excepthook.py b/tests/integrations/excepthook/test_excepthook.py
index 18deccd76e..7cb4e8b765 100644
--- a/tests/integrations/excepthook/test_excepthook.py
+++ b/tests/integrations/excepthook/test_excepthook.py
@@ -12,11 +12,13 @@ def test_excepthook(tmpdir):
"""
from sentry_sdk import init, transport
- def send_event(self, event):
- print("capture event was called")
- print(event)
+ def capture_envelope(self, envelope):
+ print("capture_envelope was called")
+ event = envelope.get_event()
+ if event is not None:
+ print(event)
- transport.HttpTransport._send_event = send_event
+ transport.HttpTransport.capture_envelope = capture_envelope
init("http://foobar@localhost/123")
@@ -35,7 +37,7 @@ def send_event(self, event):
assert b"ZeroDivisionError" in output
assert b"LOL" in output
- assert b"capture event was called" in output
+ assert b"capture_envelope was called" in output
def test_always_value_excepthook(tmpdir):
@@ -47,11 +49,13 @@ def test_always_value_excepthook(tmpdir):
from sentry_sdk import init, transport
from sentry_sdk.integrations.excepthook import ExcepthookIntegration
- def send_event(self, event):
- print("capture event was called")
- print(event)
+ def capture_envelope(self, envelope):
+ print("capture_envelope was called")
+ event = envelope.get_event()
+ if event is not None:
+ print(event)
- transport.HttpTransport._send_event = send_event
+ transport.HttpTransport.capture_envelope = capture_envelope
sys.ps1 = "always_value_test"
init("http://foobar@localhost/123",
@@ -73,4 +77,4 @@ def send_event(self, event):
assert b"ZeroDivisionError" in output
assert b"LOL" in output
- assert b"capture event was called" in output
+ assert b"capture_envelope was called" in output
diff --git a/tests/integrations/falcon/test_falcon.py b/tests/integrations/falcon/test_falcon.py
index 65140a9fd7..0a202c0081 100644
--- a/tests/integrations/falcon/test_falcon.py
+++ b/tests/integrations/falcon/test_falcon.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import logging
import pytest
@@ -9,6 +7,7 @@
import sentry_sdk
from sentry_sdk.integrations.falcon import FalconIntegration
from sentry_sdk.integrations.logging import LoggingIntegration
+from sentry_sdk.scope import Scope
from sentry_sdk.utils import parse_version
@@ -113,7 +112,7 @@ def test_transaction_style(
def test_unhandled_errors(sentry_init, capture_exceptions, capture_events):
- sentry_init(integrations=[FalconIntegration()], debug=True)
+ sentry_init(integrations=[FalconIntegration()])
class Resource:
def on_get(self, req, resp):
@@ -141,7 +140,7 @@ def on_get(self, req, resp):
def test_raised_5xx_errors(sentry_init, capture_exceptions, capture_events):
- sentry_init(integrations=[FalconIntegration()], debug=True)
+ sentry_init(integrations=[FalconIntegration()])
class Resource:
def on_get(self, req, resp):
@@ -165,7 +164,7 @@ def on_get(self, req, resp):
def test_raised_4xx_errors(sentry_init, capture_exceptions, capture_events):
- sentry_init(integrations=[FalconIntegration()], debug=True)
+ sentry_init(integrations=[FalconIntegration()])
class Resource:
def on_get(self, req, resp):
@@ -189,7 +188,7 @@ def test_http_status(sentry_init, capture_exceptions, capture_events):
This just demonstrates, that if Falcon raises a HTTPStatus with code 500
(instead of a HTTPError with code 500) Sentry will not capture it.
"""
- sentry_init(integrations=[FalconIntegration()], debug=True)
+ sentry_init(integrations=[FalconIntegration()])
class Resource:
def on_get(self, req, resp):
@@ -305,7 +304,7 @@ def on_get(self, req, resp):
assert event["level"] == "error"
-def test_500(sentry_init, capture_events):
+def test_500(sentry_init):
sentry_init(integrations=[FalconIntegration()])
app = falcon.API()
@@ -318,17 +317,14 @@ def on_get(self, req, resp):
def http500_handler(ex, req, resp, params):
sentry_sdk.capture_exception(ex)
- resp.media = {"message": "Sentry error: %s" % sentry_sdk.last_event_id()}
+ resp.media = {"message": "Sentry error."}
app.add_error_handler(Exception, http500_handler)
- events = capture_events()
-
client = falcon.testing.TestClient(app)
response = client.simulate_get("/")
- (event,) = events
- assert response.json == {"message": "Sentry error: %s" % event["event_id"]}
+ assert response.json == {"message": "Sentry error."}
def test_error_in_errorhandler(sentry_init, capture_events):
@@ -384,20 +380,17 @@ def test_does_not_leak_scope(sentry_init, capture_events):
sentry_init(integrations=[FalconIntegration()])
events = capture_events()
- with sentry_sdk.configure_scope() as scope:
- scope.set_tag("request_data", False)
+ Scope.get_isolation_scope().set_tag("request_data", False)
app = falcon.API()
class Resource:
def on_get(self, req, resp):
- with sentry_sdk.configure_scope() as scope:
- scope.set_tag("request_data", True)
+ Scope.get_isolation_scope().set_tag("request_data", True)
def generator():
for row in range(1000):
- with sentry_sdk.configure_scope() as scope:
- assert scope._tags["request_data"]
+ assert Scope.get_isolation_scope()._tags["request_data"]
yield (str(row) + "\n").encode()
@@ -411,9 +404,7 @@ def generator():
expected_response = "".join(str(row) + "\n" for row in range(1000))
assert response.text == expected_response
assert not events
-
- with sentry_sdk.configure_scope() as scope:
- assert not scope._tags["request_data"]
+ assert not Scope.get_isolation_scope()._tags["request_data"]
@pytest.mark.skipif(
diff --git a/tests/integrations/fastapi/test_fastapi.py b/tests/integrations/fastapi/test_fastapi.py
index 56d52be474..00f693fd8c 100644
--- a/tests/integrations/fastapi/test_fastapi.py
+++ b/tests/integrations/fastapi/test_fastapi.py
@@ -1,21 +1,17 @@
import json
import logging
import threading
+from unittest import mock
import pytest
-from sentry_sdk.integrations.fastapi import FastApiIntegration
-
from fastapi import FastAPI, Request
from fastapi.testclient import TestClient
from fastapi.middleware.trustedhost import TrustedHostMiddleware
+
from sentry_sdk import capture_message
-from sentry_sdk.integrations.starlette import StarletteIntegration
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
-
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
+from sentry_sdk.integrations.fastapi import FastApiIntegration
+from sentry_sdk.integrations.starlette import StarletteIntegration
def fastapi_app_factory():
@@ -63,7 +59,6 @@ async def test_response(sentry_init, capture_events):
integrations=[StarletteIntegration(), FastApiIntegration()],
traces_sample_rate=1.0,
send_default_pii=True,
- debug=True,
)
app = fastapi_app_factory()
@@ -200,7 +195,6 @@ async def test_original_request_not_scrubbed(sentry_init, capture_events):
sentry_init(
integrations=[StarletteIntegration(), FastApiIntegration()],
traces_sample_rate=1.0,
- debug=True,
)
app = FastAPI()
@@ -358,7 +352,6 @@ def test_transaction_name(
FastApiIntegration(transaction_style=transaction_style),
],
traces_sample_rate=1.0,
- debug=True,
)
envelopes = capture_envelopes()
@@ -388,7 +381,6 @@ def test_route_endpoint_equal_dependant_call(sentry_init):
FastApiIntegration(),
],
traces_sample_rate=1.0,
- debug=True,
)
app = fastapi_app_factory()
@@ -442,7 +434,6 @@ def dummy_traces_sampler(sampling_context):
integrations=[StarletteIntegration(transaction_style=transaction_style)],
traces_sampler=dummy_traces_sampler,
traces_sample_rate=1.0,
- debug=True,
)
app = fastapi_app_factory()
@@ -486,7 +477,6 @@ def test_transaction_name_in_middleware(
FastApiIntegration(transaction_style=transaction_style),
],
traces_sample_rate=1.0,
- debug=True,
)
envelopes = capture_envelopes()
diff --git a/tests/integrations/flask/test_flask.py b/tests/integrations/flask/test_flask.py
index 3d3572e2d3..bfd8ed9938 100644
--- a/tests/integrations/flask/test_flask.py
+++ b/tests/integrations/flask/test_flask.py
@@ -20,16 +20,15 @@
except ImportError:
UnsupportedMediaType = None
+import sentry_sdk
import sentry_sdk.integrations.flask as flask_sentry
from sentry_sdk import (
set_tag,
- configure_scope,
capture_message,
capture_exception,
- last_event_id,
- Hub,
)
from sentry_sdk.integrations.logging import LoggingIntegration
+from sentry_sdk.scope import Scope
from sentry_sdk.serializer import MAX_DATABAG_BREADTH
@@ -126,7 +125,7 @@ def test_errors(
testing,
integration_enabled_params,
):
- sentry_init(debug=True, **integration_enabled_params)
+ sentry_init(**integration_enabled_params)
app.debug = debug
app.testing = testing
@@ -212,7 +211,7 @@ def test_flask_login_configured(
):
sentry_init(send_default_pii=send_default_pii, **integration_enabled_params)
- class User(object):
+ class User:
is_authenticated = is_active = True
is_anonymous = user_id is not None
@@ -279,8 +278,7 @@ def test_flask_session_tracking(sentry_init, capture_envelopes, app):
@app.route("/")
def index():
- with configure_scope() as scope:
- scope.set_user({"ip_address": "1.2.3.4", "id": "42"})
+ Scope.get_isolation_scope().set_user({"ip_address": "1.2.3.4", "id": "42"})
try:
raise ValueError("stuff")
except Exception:
@@ -295,7 +293,7 @@ def index():
except ZeroDivisionError:
pass
- Hub.current.client.flush()
+ sentry_sdk.get_client().flush()
(first_event, error_event, session) = envelopes
first_event = first_event.get_event()
@@ -599,7 +597,7 @@ def wsgi_app(environ, start_response):
assert event["exception"]["values"][0]["mechanism"]["type"] == "wsgi"
-def test_500(sentry_init, capture_events, app):
+def test_500(sentry_init, app):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
app.debug = False
@@ -611,15 +609,12 @@ def index():
@app.errorhandler(500)
def error_handler(err):
- return "Sentry error: %s" % last_event_id()
-
- events = capture_events()
+ return "Sentry error."
client = app.test_client()
response = client.get("/")
- (event,) = events
- assert response.data.decode("utf-8") == "Sentry error: %s" % event["event_id"]
+ assert response.data.decode("utf-8") == "Sentry error."
def test_error_in_errorhandler(sentry_init, capture_events, app):
@@ -671,18 +666,15 @@ def test_does_not_leak_scope(sentry_init, capture_events, app):
sentry_init(integrations=[flask_sentry.FlaskIntegration()])
events = capture_events()
- with configure_scope() as scope:
- scope.set_tag("request_data", False)
+ Scope.get_isolation_scope().set_tag("request_data", False)
@app.route("/")
def index():
- with configure_scope() as scope:
- scope.set_tag("request_data", True)
+ Scope.get_isolation_scope().set_tag("request_data", True)
def generate():
for row in range(1000):
- with configure_scope() as scope:
- assert scope._tags["request_data"]
+ assert Scope.get_isolation_scope()._tags["request_data"]
yield str(row) + "\n"
@@ -693,8 +685,7 @@ def generate():
assert response.data.decode() == "".join(str(row) + "\n" for row in range(1000))
assert not events
- with configure_scope() as scope:
- assert not scope._tags["request_data"]
+ assert not Scope.get_isolation_scope()._tags["request_data"]
def test_scoped_test_client(sentry_init, app):
@@ -842,8 +833,7 @@ def test_template_tracing_meta(sentry_init, app, capture_events, template_string
@app.route("/")
def index():
- hub = Hub.current
- capture_message(hub.get_traceparent() + "\n" + hub.get_baggage())
+ capture_message(sentry_sdk.get_traceparent() + "\n" + sentry_sdk.get_baggage())
return render_template_string(template_string)
with app.test_client() as client:
@@ -862,9 +852,8 @@ def index():
assert match is not None
assert match.group(1) == traceparent
- # Python 2 does not preserve sort order
rendered_baggage = match.group(2)
- assert sorted(rendered_baggage.split(",")) == sorted(baggage.split(","))
+ assert rendered_baggage == baggage
def test_dont_override_sentry_trace_context(sentry_init, app):
@@ -903,37 +892,6 @@ def index():
assert event["request"]["headers"]["Authorization"] == "[Filtered]"
-@pytest.mark.parametrize("traces_sample_rate", [None, 1.0])
-def test_replay_event_context(sentry_init, capture_events, app, traces_sample_rate):
- """
- Tests that the replay context is added to the event context.
- This is not strictly a Flask integration test, but it's the easiest way to test this.
- """
- sentry_init(traces_sample_rate=traces_sample_rate)
-
- @app.route("/error")
- def error():
- return 1 / 0
-
- events = capture_events()
-
- client = app.test_client()
- headers = {
- "baggage": "other-vendor-value-1=foo;bar;baz,sentry-trace_id=771a43a4192642f0b136d5159a501700,sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie,other-vendor-value-2=foo;bar,sentry-replay_id=12312012123120121231201212312012",
- "sentry-trace": "771a43a4192642f0b136d5159a501700-1234567890abcdef-1",
- }
- with pytest.raises(ZeroDivisionError):
- client.get("/error", headers=headers)
-
- event = events[0]
-
- assert event["contexts"]
- assert event["contexts"]["replay"]
- assert (
- event["contexts"]["replay"]["replay_id"] == "12312012123120121231201212312012"
- )
-
-
def test_response_status_code_ok_in_transaction_context(
sentry_init, capture_envelopes, app
):
@@ -952,7 +910,7 @@ def test_response_status_code_ok_in_transaction_context(
client = app.test_client()
client.get("/message")
- Hub.current.client.flush()
+ sentry_sdk.get_client().flush()
(_, transaction_envelope, _) = envelopes
transaction = transaction_envelope.get_transaction_event()
@@ -979,7 +937,7 @@ def test_response_status_code_not_found_in_transaction_context(
client = app.test_client()
client.get("/not-existing-route")
- Hub.current.client.flush()
+ sentry_sdk.get_client().flush()
(transaction_envelope, _) = envelopes
transaction = transaction_envelope.get_transaction_event()
diff --git a/tests/integrations/gcp/test_gcp.py b/tests/integrations/gcp/test_gcp.py
index 9c4e11e8d5..20ae6e56b0 100644
--- a/tests/integrations/gcp/test_gcp.py
+++ b/tests/integrations/gcp/test_gcp.py
@@ -13,10 +13,6 @@
import os.path
import os
-pytestmark = pytest.mark.skipif(
- not hasattr(tempfile, "TemporaryDirectory"), reason="need Python 3.2+"
-)
-
FUNCTIONS_PRELUDE = """
from unittest.mock import Mock
@@ -63,17 +59,9 @@ def envelope_processor(envelope):
return item.get_bytes()
class TestTransport(HttpTransport):
- def _send_event(self, event):
- event = event_processor(event)
- # Writing a single string to stdout holds the GIL (seems like) and
- # therefore cannot be interleaved with other threads. This is why we
- # explicitly add a newline at the end even though `print` would provide
- # us one.
- print("\\nEVENT: {}\\n".format(json.dumps(event)))
-
- def _send_envelope(self, envelope):
- envelope = envelope_processor(envelope)
- print("\\nENVELOPE: {}\\n".format(envelope.decode(\"utf-8\")))
+ def capture_envelope(self, envelope):
+ envelope_item = envelope_processor(envelope)
+ print("\\nENVELOPE: {}\\n".format(envelope_item.decode(\"utf-8\")))
def init_sdk(timeout_warning=False, **extra_init_args):
@@ -94,8 +82,7 @@ def init_sdk(timeout_warning=False, **extra_init_args):
@pytest.fixture
def run_cloud_function():
def inner(code, subprocess_kwargs=()):
- events = []
- envelopes = []
+ envelope_items = []
return_value = None
# STEP : Create a zip of cloud function
@@ -131,12 +118,9 @@ def inner(code, subprocess_kwargs=()):
for line in stream_data.splitlines():
print("GCP:", line)
- if line.startswith("EVENT: "):
- line = line[len("EVENT: ") :]
- events.append(json.loads(line))
- elif line.startswith("ENVELOPE: "):
+ if line.startswith("ENVELOPE: "):
line = line[len("ENVELOPE: ") :]
- envelopes.append(json.loads(line))
+ envelope_items.append(json.loads(line))
elif line.startswith("RETURN VALUE: "):
line = line[len("RETURN VALUE: ") :]
return_value = json.loads(line)
@@ -145,13 +129,13 @@ def inner(code, subprocess_kwargs=()):
stream.close()
- return envelopes, events, return_value
+ return envelope_items, return_value
return inner
def test_handled_exception(run_cloud_function):
- _, events, return_value = run_cloud_function(
+ envelope_items, return_value = run_cloud_function(
dedent(
"""
functionhandler = None
@@ -168,8 +152,8 @@ def cloud_function(functionhandler, event):
"""
)
)
- assert events[0]["level"] == "error"
- (exception,) = events[0]["exception"]["values"]
+ assert envelope_items[0]["level"] == "error"
+ (exception,) = envelope_items[0]["exception"]["values"]
assert exception["type"] == "Exception"
assert exception["value"] == "something went wrong"
@@ -178,7 +162,7 @@ def cloud_function(functionhandler, event):
def test_unhandled_exception(run_cloud_function):
- _, events, _ = run_cloud_function(
+ envelope_items, _ = run_cloud_function(
dedent(
"""
functionhandler = None
@@ -196,8 +180,8 @@ def cloud_function(functionhandler, event):
"""
)
)
- assert events[0]["level"] == "error"
- (exception,) = events[0]["exception"]["values"]
+ assert envelope_items[0]["level"] == "error"
+ (exception,) = envelope_items[0]["exception"]["values"]
assert exception["type"] == "ZeroDivisionError"
assert exception["value"] == "division by zero"
@@ -206,7 +190,7 @@ def cloud_function(functionhandler, event):
def test_timeout_error(run_cloud_function):
- _, events, _ = run_cloud_function(
+ envelope_items, _ = run_cloud_function(
dedent(
"""
functionhandler = None
@@ -224,8 +208,8 @@ def cloud_function(functionhandler, event):
"""
)
)
- assert events[0]["level"] == "error"
- (exception,) = events[0]["exception"]["values"]
+ assert envelope_items[0]["level"] == "error"
+ (exception,) = envelope_items[0]["exception"]["values"]
assert exception["type"] == "ServerlessTimeoutWarning"
assert (
@@ -237,7 +221,7 @@ def cloud_function(functionhandler, event):
def test_performance_no_error(run_cloud_function):
- envelopes, _, _ = run_cloud_function(
+ envelope_items, _ = run_cloud_function(
dedent(
"""
functionhandler = None
@@ -255,15 +239,15 @@ def cloud_function(functionhandler, event):
)
)
- assert envelopes[0]["type"] == "transaction"
- assert envelopes[0]["contexts"]["trace"]["op"] == "function.gcp"
- assert envelopes[0]["transaction"].startswith("Google Cloud function")
- assert envelopes[0]["transaction_info"] == {"source": "component"}
- assert envelopes[0]["transaction"] in envelopes[0]["request"]["url"]
+ assert envelope_items[0]["type"] == "transaction"
+ assert envelope_items[0]["contexts"]["trace"]["op"] == "function.gcp"
+ assert envelope_items[0]["transaction"].startswith("Google Cloud function")
+ assert envelope_items[0]["transaction_info"] == {"source": "component"}
+ assert envelope_items[0]["transaction"] in envelope_items[0]["request"]["url"]
def test_performance_error(run_cloud_function):
- envelopes, events, _ = run_cloud_function(
+ envelope_items, _ = run_cloud_function(
dedent(
"""
functionhandler = None
@@ -281,18 +265,18 @@ def cloud_function(functionhandler, event):
)
)
- assert envelopes[0]["level"] == "error"
- (exception,) = envelopes[0]["exception"]["values"]
+ assert envelope_items[0]["level"] == "error"
+ (exception,) = envelope_items[0]["exception"]["values"]
assert exception["type"] == "Exception"
assert exception["value"] == "something went wrong"
assert exception["mechanism"]["type"] == "gcp"
assert not exception["mechanism"]["handled"]
- assert envelopes[1]["type"] == "transaction"
- assert envelopes[1]["contexts"]["trace"]["op"] == "function.gcp"
- assert envelopes[1]["transaction"].startswith("Google Cloud function")
- assert envelopes[1]["transaction"] in envelopes[0]["request"]["url"]
+ assert envelope_items[1]["type"] == "transaction"
+ assert envelope_items[1]["contexts"]["trace"]["op"] == "function.gcp"
+ assert envelope_items[1]["transaction"].startswith("Google Cloud function")
+ assert envelope_items[1]["transaction"] in envelope_items[0]["request"]["url"]
def test_traces_sampler_gets_correct_values_in_sampling_context(
@@ -305,7 +289,7 @@ def test_traces_sampler_gets_correct_values_in_sampling_context(
import inspect
- envelopes, events, return_value = run_cloud_function(
+ _, return_value = run_cloud_function(
dedent(
"""
functionhandler = None
@@ -378,7 +362,7 @@ def test_error_has_new_trace_context_performance_enabled(run_cloud_function):
"""
Check if an 'trace' context is added to errros and transactions when performance monitoring is enabled.
"""
- envelopes, _, _ = run_cloud_function(
+ envelope_items, _ = run_cloud_function(
dedent(
"""
functionhandler = None
@@ -397,7 +381,7 @@ def cloud_function(functionhandler, event):
"""
)
)
- (msg_event, error_event, transaction_event) = envelopes
+ (msg_event, error_event, transaction_event) = envelope_items
assert "trace" in msg_event["contexts"]
assert "trace_id" in msg_event["contexts"]["trace"]
@@ -419,7 +403,7 @@ def test_error_has_new_trace_context_performance_disabled(run_cloud_function):
"""
Check if an 'trace' context is added to errros and transactions when performance monitoring is disabled.
"""
- _, events, _ = run_cloud_function(
+ envelope_items, _ = run_cloud_function(
dedent(
"""
functionhandler = None
@@ -439,7 +423,7 @@ def cloud_function(functionhandler, event):
)
)
- (msg_event, error_event) = events
+ (msg_event, error_event) = envelope_items
assert "trace" in msg_event["contexts"]
assert "trace_id" in msg_event["contexts"]["trace"]
@@ -463,7 +447,7 @@ def test_error_has_existing_trace_context_performance_enabled(run_cloud_function
parent_sampled = 1
sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
- envelopes, _, _ = run_cloud_function(
+ envelope_items, _ = run_cloud_function(
dedent(
"""
functionhandler = None
@@ -487,7 +471,7 @@ def cloud_function(functionhandler, event):
"""
)
)
- (msg_event, error_event, transaction_event) = envelopes
+ (msg_event, error_event, transaction_event) = envelope_items
assert "trace" in msg_event["contexts"]
assert "trace_id" in msg_event["contexts"]["trace"]
@@ -516,7 +500,7 @@ def test_error_has_existing_trace_context_performance_disabled(run_cloud_functio
parent_sampled = 1
sentry_trace_header = "{}-{}-{}".format(trace_id, parent_span_id, parent_sampled)
- _, events, _ = run_cloud_function(
+ envelope_items, _ = run_cloud_function(
dedent(
"""
functionhandler = None
@@ -540,7 +524,7 @@ def cloud_function(functionhandler, event):
"""
)
)
- (msg_event, error_event) = events
+ (msg_event, error_event) = envelope_items
assert "trace" in msg_event["contexts"]
assert "trace_id" in msg_event["contexts"]["trace"]
diff --git a/tests/integrations/gql/test_gql.py b/tests/integrations/gql/test_gql.py
index 7ae3cfe77d..f87fb974d0 100644
--- a/tests/integrations/gql/test_gql.py
+++ b/tests/integrations/gql/test_gql.py
@@ -5,21 +5,7 @@
from gql import Client
from gql.transport.exceptions import TransportQueryError
from gql.transport.requests import RequestsHTTPTransport
-from graphql import DocumentNode
from sentry_sdk.integrations.gql import GQLIntegration
-from unittest.mock import MagicMock, patch
-
-
-class _MockClientBase(MagicMock):
- """
- Mocked version of GQL Client class, following same spec as GQL Client.
- """
-
- def __init__(self, *args, **kwargs):
- kwargs["spec"] = Client
- super().__init__(*args, **kwargs)
-
- transport = MagicMock()
@responses.activate
@@ -81,95 +67,6 @@ def test_gql_init(sentry_init):
sentry_init(integrations=[GQLIntegration()])
-@patch("sentry_sdk.integrations.gql.Hub")
-def test_setup_once_patches_execute_and_patched_function_calls_original(_):
- """
- Unit test which ensures the following:
- 1. The GQLIntegration setup_once function patches the gql.Client.execute method
- 2. The patched gql.Client.execute method still calls the original method, and it
- forwards its arguments to the original method.
- 3. The patched gql.Client.execute method returns the same value that the original
- method returns.
- """
- original_method_return_value = MagicMock()
-
- class OriginalMockClient(_MockClientBase):
- """
- This mock client always returns the mock original_method_return_value when a query
- is executed. This can be used to simulate successful GraphQL queries.
- """
-
- execute = MagicMock(
- spec=Client.execute, return_value=original_method_return_value
- )
-
- original_execute_method = OriginalMockClient.execute
-
- with patch(
- "sentry_sdk.integrations.gql.gql.Client", new=OriginalMockClient
- ) as PatchedMockClient: # noqa: N806
- # Below line should patch the PatchedMockClient with Sentry SDK magic
- GQLIntegration.setup_once()
-
- # We expect GQLIntegration.setup_once to patch the execute method.
- assert (
- PatchedMockClient.execute is not original_execute_method
- ), "execute method not patched"
-
- # Now, let's instantiate a client and send it a query. Original execute still should get called.
- mock_query = MagicMock(spec=DocumentNode)
- client_instance = PatchedMockClient()
- patched_method_return_value = client_instance.execute(mock_query)
-
- # Here, we check that the original execute was called
- original_execute_method.assert_called_once_with(client_instance, mock_query)
-
- # Also, let's verify that the patched execute returns the expected value.
- assert (
- patched_method_return_value is original_method_return_value
- ), "pathced execute method returns a different value than the original execute method"
-
-
-@patch("sentry_sdk.integrations.gql.event_from_exception")
-@patch("sentry_sdk.integrations.gql.Hub")
-def test_patched_gql_execute_captures_and_reraises_graphql_exception(
- mock_hub, mock_event_from_exception
-):
- """
- Unit test which ensures that in the case that calling the execute method results in a
- TransportQueryError (which gql raises when a GraphQL error occurs), the patched method
- captures the event on the current Hub and it reraises the error.
- """
- mock_event_from_exception.return_value = (dict(), MagicMock())
-
- class OriginalMockClient(_MockClientBase):
- """
- This mock client always raises a TransportQueryError when a GraphQL query is attempted.
- This simulates a GraphQL query which results in errors.
- """
-
- execute = MagicMock(
- spec=Client.execute, side_effect=TransportQueryError("query failed")
- )
-
- with patch(
- "sentry_sdk.integrations.gql.gql.Client", new=OriginalMockClient
- ) as PatchedMockClient: # noqa: N806
- # Below line should patch the PatchedMockClient with Sentry SDK magic
- GQLIntegration.setup_once()
-
- mock_query = MagicMock(spec=DocumentNode)
- client_instance = PatchedMockClient()
-
- # The error should still get raised even though we have instrumented the execute method.
- with pytest.raises(TransportQueryError):
- client_instance.execute(mock_query)
-
- # However, we should have also captured the error on the hub.
- mock_capture_event = mock_hub.current.capture_event
- mock_capture_event.assert_called_once()
-
-
def test_real_gql_request_no_error(sentry_init, capture_events):
"""
Integration test verifying that the GQLIntegration works as expected with successful query.
diff --git a/tests/integrations/graphene/test_graphene_py3.py b/tests/integrations/graphene/test_graphene.py
similarity index 100%
rename from tests/integrations/graphene/test_graphene_py3.py
rename to tests/integrations/graphene/test_graphene.py
diff --git a/tests/integrations/grpc/test_grpc.py b/tests/integrations/grpc/test_grpc.py
index 3f49c0a0f4..50cf70cf44 100644
--- a/tests/integrations/grpc/test_grpc.py
+++ b/tests/integrations/grpc/test_grpc.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import os
from typing import List, Optional
from concurrent import futures
@@ -8,7 +6,7 @@
import grpc
import pytest
-from sentry_sdk import Hub, start_transaction
+from sentry_sdk import start_span, start_transaction
from sentry_sdk.consts import OP
from sentry_sdk.integrations.grpc import GRPCIntegration
from tests.conftest import ApproxDict
@@ -319,8 +317,7 @@ class TestService(gRPCTestServiceServicer):
@staticmethod
def TestServe(request, context): # noqa: N802
- hub = Hub.current
- with hub.start_span(op="test", description="test"):
+ with start_span(op="test", description="test"):
pass
return gRPCTestMessage(text=request.text)
diff --git a/tests/integrations/grpc/test_grpc_aio.py b/tests/integrations/grpc/test_grpc_aio.py
index 3e21188ec8..0b02a59f71 100644
--- a/tests/integrations/grpc/test_grpc_aio.py
+++ b/tests/integrations/grpc/test_grpc_aio.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
import asyncio
import os
@@ -226,6 +224,8 @@ async def test_stream_unary(grpc_server):
class TestService(gRPCTestServiceServicer):
class TestException(Exception):
+ __test__ = False
+
def __init__(self):
super().__init__("test")
diff --git a/tests/integrations/httpx/test_httpx.py b/tests/integrations/httpx/test_httpx.py
index c4ca97321c..fa22c44452 100644
--- a/tests/integrations/httpx/test_httpx.py
+++ b/tests/integrations/httpx/test_httpx.py
@@ -1,7 +1,8 @@
import asyncio
+from unittest import mock
-import pytest
import httpx
+import pytest
import responses
from sentry_sdk import capture_message, start_transaction
@@ -9,11 +10,6 @@
from sentry_sdk.integrations.httpx import HttpxIntegration
from tests.conftest import ApproxDict
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
-
@pytest.mark.parametrize(
"httpx_client",
diff --git a/tests/integrations/huey/test_huey.py b/tests/integrations/huey/test_huey.py
index 48a3da97f4..f887080533 100644
--- a/tests/integrations/huey/test_huey.py
+++ b/tests/integrations/huey/test_huey.py
@@ -20,7 +20,6 @@ def inner():
integrations=[HueyIntegration()],
traces_sample_rate=1.0,
send_default_pii=True,
- debug=True,
)
return MemoryHuey(name="sentry_sdk")
diff --git a/tests/integrations/logging/test_logging.py b/tests/integrations/logging/test_logging.py
index 92d0674c09..02eb26a04d 100644
--- a/tests/integrations/logging/test_logging.py
+++ b/tests/integrations/logging/test_logging.py
@@ -1,10 +1,8 @@
-# coding: utf-8
-import sys
-
-import pytest
import logging
import warnings
+import pytest
+
from sentry_sdk.integrations.logging import LoggingIntegration, ignore_logger
other_logger = logging.getLogger("testfoo")
@@ -79,7 +77,6 @@ def test_logging_extra_data_integer_keys(sentry_init, capture_events):
assert event["extra"] == {"1": 1}
-@pytest.mark.xfail(sys.version_info[:2] == (3, 4), reason="buggy logging module")
def test_logging_stack(sentry_init, capture_events):
sentry_init(integrations=[LoggingIntegration()], default_integrations=False)
events = capture_events()
@@ -128,9 +125,7 @@ def test_custom_log_level_names(sentry_init, capture_events):
}
# set custom log level names
- # fmt: off
- logging.addLevelName(logging.DEBUG, u"custom level debüg: ")
- # fmt: on
+ logging.addLevelName(logging.DEBUG, "custom level debüg: ")
logging.addLevelName(logging.INFO, "")
logging.addLevelName(logging.WARN, "custom level warn: ")
logging.addLevelName(logging.WARNING, "custom level warning: ")
diff --git a/tests/integrations/loguru/test_loguru.py b/tests/integrations/loguru/test_loguru.py
index 48133aab85..98b8cb4dee 100644
--- a/tests/integrations/loguru/test_loguru.py
+++ b/tests/integrations/loguru/test_loguru.py
@@ -54,7 +54,7 @@ def test_just_log(
if not created_event:
assert not events
- breadcrumbs = sentry_sdk.Hub.current.scope._breadcrumbs
+ breadcrumbs = sentry_sdk.Scope.get_isolation_scope()._breadcrumbs
if (
not disable_breadcrumbs and created_event is not None
): # not None == not TRACE or DEBUG level
@@ -92,7 +92,7 @@ def test_breadcrumb_format(sentry_init, capture_events):
logger.info("test")
formatted_message = "test"
- breadcrumbs = sentry_sdk.Hub.current.scope._breadcrumbs
+ breadcrumbs = sentry_sdk.Scope.get_isolation_scope()._breadcrumbs
(breadcrumb,) = breadcrumbs
assert breadcrumb["message"] == formatted_message
diff --git a/tests/integrations/opentelemetry/test_experimental.py b/tests/integrations/opentelemetry/test_experimental.py
index 77286330a5..06672a8657 100644
--- a/tests/integrations/opentelemetry/test_experimental.py
+++ b/tests/integrations/opentelemetry/test_experimental.py
@@ -1,13 +1,11 @@
-try:
- # python 3.3 and above
- from unittest.mock import MagicMock
-except ImportError:
- # python < 3.3
- from mock import MagicMock
+import pytest
+
+from unittest.mock import MagicMock
from sentry_sdk.integrations.opentelemetry.integration import OpenTelemetryIntegration
+@pytest.mark.forked
def test_integration_enabled_if_option_is_on(sentry_init):
OpenTelemetryIntegration.setup_once = MagicMock()
sentry_init(
@@ -18,6 +16,7 @@ def test_integration_enabled_if_option_is_on(sentry_init):
OpenTelemetryIntegration.setup_once.assert_called_once()
+@pytest.mark.forked
def test_integration_not_enabled_if_option_is_off(sentry_init):
OpenTelemetryIntegration.setup_once = MagicMock()
sentry_init(
@@ -28,6 +27,7 @@ def test_integration_not_enabled_if_option_is_off(sentry_init):
OpenTelemetryIntegration.setup_once.assert_not_called()
+@pytest.mark.forked
def test_integration_not_enabled_if_option_is_missing(sentry_init):
OpenTelemetryIntegration.setup_once = MagicMock()
sentry_init()
diff --git a/tests/integrations/opentelemetry/test_propagator.py b/tests/integrations/opentelemetry/test_propagator.py
index 510118f67f..1b3249e87c 100644
--- a/tests/integrations/opentelemetry/test_propagator.py
+++ b/tests/integrations/opentelemetry/test_propagator.py
@@ -1,9 +1,7 @@
-try:
- from unittest import mock # python 3.3 and above
- from unittest.mock import MagicMock
-except ImportError:
- import mock # python < 3.3
- from mock import MagicMock
+import pytest
+
+from unittest import mock
+from unittest.mock import MagicMock
from opentelemetry.context import get_current
from opentelemetry.trace.propagation import get_current_span
@@ -16,12 +14,12 @@
SENTRY_BAGGAGE_KEY,
SENTRY_TRACE_KEY,
)
-
from sentry_sdk.integrations.opentelemetry.propagator import SentryPropagator
from sentry_sdk.integrations.opentelemetry.span_processor import SentrySpanProcessor
from sentry_sdk.tracing_utils import Baggage
+@pytest.mark.forked
def test_extract_no_context_no_sentry_trace_header():
"""
No context and NO Sentry trace data in getter.
@@ -37,6 +35,7 @@ def test_extract_no_context_no_sentry_trace_header():
assert modified_context == {}
+@pytest.mark.forked
def test_extract_context_no_sentry_trace_header():
"""
Context but NO Sentry trace data in getter.
@@ -52,6 +51,7 @@ def test_extract_context_no_sentry_trace_header():
assert modified_context == context
+@pytest.mark.forked
def test_extract_empty_context_sentry_trace_header_no_baggage():
"""
Empty context but Sentry trace data but NO Baggage in getter.
@@ -81,6 +81,7 @@ def test_extract_empty_context_sentry_trace_header_no_baggage():
assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16)
+@pytest.mark.forked
def test_extract_context_sentry_trace_header_baggage():
"""
Empty context but Sentry trace data and Baggage in getter.
@@ -121,6 +122,7 @@ def test_extract_context_sentry_trace_header_baggage():
assert span_context.trace_id == int("1234567890abcdef1234567890abcdef", 16)
+@pytest.mark.forked
def test_inject_empty_otel_span_map():
"""
Empty otel_span_map.
@@ -151,6 +153,7 @@ def test_inject_empty_otel_span_map():
setter.set.assert_not_called()
+@pytest.mark.forked
def test_inject_sentry_span_no_baggage():
"""
Inject a sentry span with no baggage.
@@ -195,6 +198,7 @@ def test_inject_sentry_span_no_baggage():
)
+@pytest.mark.forked
def test_inject_sentry_span_baggage():
"""
Inject a sentry span with baggage.
diff --git a/tests/integrations/opentelemetry/test_span_processor.py b/tests/integrations/opentelemetry/test_span_processor.py
index 02e3059ca8..418d08b739 100644
--- a/tests/integrations/opentelemetry/test_span_processor.py
+++ b/tests/integrations/opentelemetry/test_span_processor.py
@@ -1,50 +1,42 @@
-from datetime import datetime
-from datetime import timezone
import time
-import pytest
+from datetime import datetime, timezone
+from unittest import mock
+from unittest.mock import MagicMock
-try:
- from unittest import mock # python 3.3 and above
- from unittest.mock import MagicMock
-except ImportError:
- import mock
- from mock import MagicMock # python < 3.3
+import pytest
+from opentelemetry.trace import SpanKind, SpanContext, Status, StatusCode
from sentry_sdk.integrations.opentelemetry.span_processor import (
SentrySpanProcessor,
link_trace_context_to_error_event,
)
+from sentry_sdk.scope import Scope
from sentry_sdk.tracing import Span, Transaction
-
-from opentelemetry.trace import SpanKind, SpanContext, Status, StatusCode
from sentry_sdk.tracing_utils import extract_sentrytrace_data
def test_is_sentry_span():
otel_span = MagicMock()
- hub = MagicMock()
- hub.client = None
-
span_processor = SentrySpanProcessor()
- assert not span_processor._is_sentry_span(hub, otel_span)
+ assert not span_processor._is_sentry_span(otel_span)
client = MagicMock()
client.options = {"instrumenter": "otel"}
client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
+ Scope.get_global_scope().set_client(client)
- hub.client = client
- assert not span_processor._is_sentry_span(hub, otel_span)
+ assert not span_processor._is_sentry_span(otel_span)
otel_span.attributes = {
"http.url": "https://example.com",
}
- assert not span_processor._is_sentry_span(hub, otel_span)
+ assert not span_processor._is_sentry_span(otel_span)
otel_span.attributes = {
"http.url": "https://o123456.ingest.sentry.io/api/123/envelope",
}
- assert span_processor._is_sentry_span(hub, otel_span)
+ assert span_processor._is_sentry_span(otel_span)
def test_get_otel_context():
@@ -310,23 +302,21 @@ def test_on_start_transaction():
parent_context = {}
+ fake_start_transaction = MagicMock()
+
fake_client = MagicMock()
fake_client.options = {"instrumenter": "otel"}
fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
-
- current_hub = MagicMock()
- current_hub.client = fake_client
-
- fake_hub = MagicMock()
- fake_hub.current = current_hub
+ Scope.get_global_scope().set_client(fake_client)
with mock.patch(
- "sentry_sdk.integrations.opentelemetry.span_processor.Hub", fake_hub
+ "sentry_sdk.integrations.opentelemetry.span_processor.start_transaction",
+ fake_start_transaction,
):
span_processor = SentrySpanProcessor()
span_processor.on_start(otel_span, parent_context)
- fake_hub.current.start_transaction.assert_called_once_with(
+ fake_start_transaction.assert_called_once_with(
name="Sample OTel Span",
span_id="1234567890abcdef",
parent_span_id="abcdef1234567890",
@@ -360,34 +350,26 @@ def test_on_start_child():
fake_client = MagicMock()
fake_client.options = {"instrumenter": "otel"}
fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
+ Scope.get_global_scope().set_client(fake_client)
- current_hub = MagicMock()
- current_hub.client = fake_client
+ fake_span = MagicMock()
- fake_hub = MagicMock()
- fake_hub.current = current_hub
-
- with mock.patch(
- "sentry_sdk.integrations.opentelemetry.span_processor.Hub", fake_hub
- ):
- fake_span = MagicMock()
-
- span_processor = SentrySpanProcessor()
- span_processor.otel_span_map["abcdef1234567890"] = fake_span
- span_processor.on_start(otel_span, parent_context)
-
- fake_span.start_child.assert_called_once_with(
- span_id="1234567890abcdef",
- description="Sample OTel Span",
- start_timestamp=datetime.fromtimestamp(
- otel_span.start_time / 1e9, timezone.utc
- ),
- instrumenter="otel",
- )
+ span_processor = SentrySpanProcessor()
+ span_processor.otel_span_map["abcdef1234567890"] = fake_span
+ span_processor.on_start(otel_span, parent_context)
+
+ fake_span.start_child.assert_called_once_with(
+ span_id="1234567890abcdef",
+ description="Sample OTel Span",
+ start_timestamp=datetime.fromtimestamp(
+ otel_span.start_time / 1e9, timezone.utc
+ ),
+ instrumenter="otel",
+ )
- assert len(span_processor.otel_span_map.keys()) == 2
- assert "abcdef1234567890" in span_processor.otel_span_map.keys()
- assert "1234567890abcdef" in span_processor.otel_span_map.keys()
+ assert len(span_processor.otel_span_map.keys()) == 2
+ assert "abcdef1234567890" in span_processor.otel_span_map.keys()
+ assert "1234567890abcdef" in span_processor.otel_span_map.keys()
def test_on_end_no_sentry_span():
@@ -430,6 +412,10 @@ def test_on_end_sentry_transaction():
)
otel_span.get_span_context.return_value = span_context
+ fake_client = MagicMock()
+ fake_client.options = {"instrumenter": "otel"}
+ Scope.get_global_scope().set_client(fake_client)
+
fake_sentry_span = MagicMock(spec=Transaction)
fake_sentry_span.set_context = MagicMock()
fake_sentry_span.finish = MagicMock()
@@ -462,6 +448,10 @@ def test_on_end_sentry_span():
)
otel_span.get_span_context.return_value = span_context
+ fake_client = MagicMock()
+ fake_client.options = {"instrumenter": "otel"}
+ Scope.get_global_scope().set_client(fake_client)
+
fake_sentry_span = MagicMock(spec=Span)
fake_sentry_span.set_context = MagicMock()
fake_sentry_span.finish = MagicMock()
@@ -487,12 +477,7 @@ def test_link_trace_context_to_error_event():
"""
fake_client = MagicMock()
fake_client.options = {"instrumenter": "otel"}
-
- current_hub = MagicMock()
- current_hub.client = fake_client
-
- fake_hub = MagicMock()
- fake_hub.current = current_hub
+ Scope.get_global_scope().set_client(fake_client)
span_id = "1234567890abcdef"
trace_id = "1234567890abcdef1234567890abcdef"
@@ -548,41 +533,33 @@ def test_pruning_old_spans_on_start():
parent_context = {}
fake_client = MagicMock()
- fake_client.options = {"instrumenter": "otel"}
+ fake_client.options = {"instrumenter": "otel", "debug": False}
fake_client.dsn = "https://1234567890abcdef@o123456.ingest.sentry.io/123456"
+ Scope.get_global_scope().set_client(fake_client)
- current_hub = MagicMock()
- current_hub.client = fake_client
-
- fake_hub = MagicMock()
- fake_hub.current = current_hub
-
- with mock.patch(
- "sentry_sdk.integrations.opentelemetry.span_processor.Hub", fake_hub
- ):
- span_processor = SentrySpanProcessor()
+ span_processor = SentrySpanProcessor()
- span_processor.otel_span_map = {
- "111111111abcdef": MagicMock(), # should stay
- "2222222222abcdef": MagicMock(), # should go
- "3333333333abcdef": MagicMock(), # should go
- }
- current_time_minutes = int(time.time() / 60)
- span_processor.open_spans = {
- current_time_minutes - 3: {"111111111abcdef"}, # should stay
- current_time_minutes
- - 11: {"2222222222abcdef", "3333333333abcdef"}, # should go
- }
+ span_processor.otel_span_map = {
+ "111111111abcdef": MagicMock(), # should stay
+ "2222222222abcdef": MagicMock(), # should go
+ "3333333333abcdef": MagicMock(), # should go
+ }
+ current_time_minutes = int(time.time() / 60)
+ span_processor.open_spans = {
+ current_time_minutes - 3: {"111111111abcdef"}, # should stay
+ current_time_minutes
+ - 11: {"2222222222abcdef", "3333333333abcdef"}, # should go
+ }
- span_processor.on_start(otel_span, parent_context)
- assert sorted(list(span_processor.otel_span_map.keys())) == [
- "111111111abcdef",
- "1234567890abcdef",
- ]
- assert sorted(list(span_processor.open_spans.values())) == [
- {"111111111abcdef"},
- {"1234567890abcdef"},
- ]
+ span_processor.on_start(otel_span, parent_context)
+ assert sorted(list(span_processor.otel_span_map.keys())) == [
+ "111111111abcdef",
+ "1234567890abcdef",
+ ]
+ assert sorted(list(span_processor.open_spans.values())) == [
+ {"111111111abcdef"},
+ {"1234567890abcdef"},
+ ]
def test_pruning_old_spans_on_end():
@@ -598,6 +575,10 @@ def test_pruning_old_spans_on_end():
otel_span.parent = MagicMock()
otel_span.parent.span_id = int("abcdef1234567890", 16)
+ fake_client = MagicMock()
+ fake_client.options = {"instrumenter": "otel"}
+ Scope.get_global_scope().set_client(fake_client)
+
fake_sentry_span = MagicMock(spec=Span)
fake_sentry_span.set_context = MagicMock()
fake_sentry_span.finish = MagicMock()
diff --git a/tests/integrations/pure_eval/test_pure_eval.py b/tests/integrations/pure_eval/test_pure_eval.py
index 2d1a92026e..497a8768d0 100644
--- a/tests/integrations/pure_eval/test_pure_eval.py
+++ b/tests/integrations/pure_eval/test_pure_eval.py
@@ -1,4 +1,3 @@
-import sys
from types import SimpleNamespace
import pytest
@@ -64,10 +63,7 @@ def foo():
"u",
"y",
]
- if sys.version_info[:2] == (3, 5):
- assert frame_vars.keys() == set(expected_keys)
- else:
- assert list(frame_vars.keys()) == expected_keys
+ assert list(frame_vars.keys()) == expected_keys
assert frame_vars["namespace.d"] == {"1": "2"}
assert frame_vars["namespace.d[1]"] == "2"
else:
diff --git a/tests/integrations/pyramid/test_pyramid.py b/tests/integrations/pyramid/test_pyramid.py
index 6237174604..a25dbef2fc 100644
--- a/tests/integrations/pyramid/test_pyramid.py
+++ b/tests/integrations/pyramid/test_pyramid.py
@@ -366,9 +366,9 @@ def test_error_in_authenticated_userid(
)
logger = logging.getLogger("test_pyramid")
- class AuthenticationPolicy(object):
+ class AuthenticationPolicy:
def authenticated_userid(self, request):
- logger.error("failed to identify user")
+ logger.warning("failed to identify user")
pyramid_config.set_authorization_policy(ACLAuthorizationPolicy())
pyramid_config.set_authentication_policy(AuthenticationPolicy())
@@ -380,6 +380,16 @@ def authenticated_userid(self, request):
assert len(events) == 1
+ # In `authenticated_userid` there used to be a call to `logging.error`. This would print this error in the
+ # event processor of the Pyramid integration and the logging integration would capture this and send it to Sentry.
+ # This is not possible anymore, because capturing that error in the logging integration would again run all the
+ # event processors (from the global, isolation and current scope) and thus would again run the same pyramid
+ # event processor that raised the error in the first place, leading on an infinite loop.
+ # This test here is now deactivated and always passes, but it is kept here to document the problem.
+ # This change in behavior is also mentioned in the migration documentation for Python SDK 2.0
+
+ # assert "message" not in events[0].keys()
+
def tween_factory(handler, registry):
def tween(request):
diff --git a/tests/integrations/quart/test_quart.py b/tests/integrations/quart/test_quart.py
index 0f693088c9..32948f6e1d 100644
--- a/tests/integrations/quart/test_quart.py
+++ b/tests/integrations/quart/test_quart.py
@@ -6,13 +6,12 @@
from sentry_sdk import (
set_tag,
- configure_scope,
capture_message,
capture_exception,
- last_event_id,
)
from sentry_sdk.integrations.logging import LoggingIntegration
import sentry_sdk.integrations.quart as quart_sentry
+from sentry_sdk.scope import Scope
from quart import Quart, Response, abort, stream_with_context
from quart.views import View
@@ -130,7 +129,7 @@ async def test_errors(
app,
integration_enabled_params,
):
- sentry_init(debug=True, **integration_enabled_params)
+ sentry_init(**integration_enabled_params)
@app.route("/")
async def index():
@@ -313,7 +312,7 @@ def foo():
@pytest.mark.asyncio
-async def test_500(sentry_init, capture_events, app):
+async def test_500(sentry_init, app):
sentry_init(integrations=[quart_sentry.QuartIntegration()])
@app.route("/")
@@ -322,17 +321,12 @@ async def index():
@app.errorhandler(500)
async def error_handler(err):
- return "Sentry error: %s" % last_event_id()
-
- events = capture_events()
+ return "Sentry error."
client = app.test_client()
response = await client.get("/")
- (event,) = events
- assert (await response.get_data(as_text=True)) == "Sentry error: %s" % event[
- "event_id"
- ]
+ assert (await response.get_data(as_text=True)) == "Sentry error."
@pytest.mark.asyncio
@@ -384,18 +378,15 @@ async def test_does_not_leak_scope(sentry_init, capture_events, app):
sentry_init(integrations=[quart_sentry.QuartIntegration()])
events = capture_events()
- with configure_scope() as scope:
- scope.set_tag("request_data", False)
+ Scope.get_isolation_scope().set_tag("request_data", False)
@app.route("/")
async def index():
- with configure_scope() as scope:
- scope.set_tag("request_data", True)
+ Scope.get_isolation_scope().set_tag("request_data", True)
async def generate():
for row in range(1000):
- with configure_scope() as scope:
- assert scope._tags["request_data"]
+ assert Scope.get_isolation_scope()._tags["request_data"]
yield str(row) + "\n"
@@ -407,9 +398,7 @@ async def generate():
str(row) + "\n" for row in range(1000)
)
assert not events
-
- with configure_scope() as scope:
- assert not scope._tags["request_data"]
+ assert not Scope.get_isolation_scope()._tags["request_data"]
@pytest.mark.asyncio
diff --git a/tests/integrations/redis/test_redis.py b/tests/integrations/redis/test_redis.py
index d25e630f6a..57ac1c9ab1 100644
--- a/tests/integrations/redis/test_redis.py
+++ b/tests/integrations/redis/test_redis.py
@@ -1,16 +1,12 @@
+from unittest import mock
+
import pytest
+from fakeredis import FakeStrictRedis
from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.redis import RedisIntegration
-from fakeredis import FakeStrictRedis
-
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
-
MOCK_CONNECTION_POOL = mock.MagicMock()
MOCK_CONNECTION_POOL.connection_kwargs = {
diff --git a/tests/integrations/rediscluster/test_rediscluster.py b/tests/integrations/rediscluster/test_rediscluster.py
index 88f987758b..36a27d569d 100644
--- a/tests/integrations/rediscluster/test_rediscluster.py
+++ b/tests/integrations/rediscluster/test_rediscluster.py
@@ -1,4 +1,7 @@
+from unittest import mock
+
import pytest
+import rediscluster
from sentry_sdk import capture_message
from sentry_sdk.api import start_transaction
@@ -6,13 +9,6 @@
from sentry_sdk.integrations.redis import RedisIntegration
from tests.conftest import ApproxDict
-try:
- from unittest import mock
-except ImportError:
- import mock
-
-import rediscluster
-
MOCK_CONNECTION_POOL = mock.MagicMock()
MOCK_CONNECTION_POOL.connection_kwargs = {
diff --git a/tests/integrations/requests/test_requests.py b/tests/integrations/requests/test_requests.py
index 1f4dd412d7..42efbb5acc 100644
--- a/tests/integrations/requests/test_requests.py
+++ b/tests/integrations/requests/test_requests.py
@@ -1,18 +1,14 @@
-import requests
-import responses
+from unittest import mock
import pytest
+import requests
+import responses
from sentry_sdk import capture_message
from sentry_sdk.consts import SPANDATA
from sentry_sdk.integrations.stdlib import StdlibIntegration
from tests.conftest import ApproxDict
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
-
def test_crumb_capture(sentry_init, capture_events):
sentry_init(integrations=[StdlibIntegration()])
diff --git a/tests/integrations/rq/test_rq.py b/tests/integrations/rq/test_rq.py
index b0d71e8f7d..094a458063 100644
--- a/tests/integrations/rq/test_rq.py
+++ b/tests/integrations/rq/test_rq.py
@@ -1,16 +1,14 @@
+from unittest import mock
+
import pytest
+import rq
from fakeredis import FakeStrictRedis
-from sentry_sdk import configure_scope, start_transaction
+
+from sentry_sdk import start_transaction
from sentry_sdk.integrations.rq import RqIntegration
+from sentry_sdk.scope import Scope
from sentry_sdk.utils import parse_version
-import rq
-
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
-
@pytest.fixture(autouse=True)
def _patch_rq_get_server_version(monkeypatch):
@@ -19,8 +17,10 @@ def _patch_rq_get_server_version(monkeypatch):
https://github.com/jamesls/fakeredis/issues/273
"""
-
- from distutils.version import StrictVersion
+ try:
+ from distutils.version import StrictVersion
+ except ImportError:
+ return
if parse_version(rq.VERSION) <= (1, 5, 1):
for k in (
@@ -181,19 +181,17 @@ def test_tracing_disabled(
queue = rq.Queue(connection=FakeStrictRedis())
worker = rq.SimpleWorker([queue], connection=queue.connection)
- with configure_scope() as scope:
- queue.enqueue(crashing_job, foo=None)
- worker.work(burst=True)
+ scope = Scope.get_isolation_scope()
+ queue.enqueue(crashing_job, foo=None)
+ worker.work(burst=True)
- (error_event,) = events
+ (error_event,) = events
- assert (
- error_event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
- )
- assert (
- error_event["contexts"]["trace"]["trace_id"]
- == scope._propagation_context["trace_id"]
- )
+ assert error_event["transaction"] == "tests.integrations.rq.test_rq.crashing_job"
+ assert (
+ error_event["contexts"]["trace"]["trace_id"]
+ == scope._propagation_context.trace_id
+ )
def test_transaction_no_error(
diff --git a/tests/integrations/sanic/test_sanic.py b/tests/integrations/sanic/test_sanic.py
index b338a5e6fb..d714690936 100644
--- a/tests/integrations/sanic/test_sanic.py
+++ b/tests/integrations/sanic/test_sanic.py
@@ -7,8 +7,9 @@
import pytest
-from sentry_sdk import capture_message, configure_scope
+from sentry_sdk import capture_message
from sentry_sdk.integrations.sanic import SanicIntegration
+from sentry_sdk.scope import Scope
from sentry_sdk.tracing import TRANSACTION_SOURCE_COMPONENT, TRANSACTION_SOURCE_URL
from sanic import Sanic, request, response, __version__ as SANIC_VERSION_RAW
@@ -233,13 +234,13 @@ def test_concurrency(sentry_init, app):
@app.route("/context-check/")
async def context_check(request, i):
- with configure_scope() as scope:
- scope.set_tag("i", i)
+ scope = Scope.get_isolation_scope()
+ scope.set_tag("i", i)
await asyncio.sleep(random.random())
- with configure_scope() as scope:
- assert scope._tags["i"] == i
+ scope = Scope.get_isolation_scope()
+ assert scope._tags["i"] == i
return response.text("ok")
@@ -328,8 +329,8 @@ async def runner():
else:
asyncio.run(runner())
- with configure_scope() as scope:
- assert not scope._tags
+ scope = Scope.get_isolation_scope()
+ assert not scope._tags
class TransactionTestConfig:
diff --git a/tests/integrations/serverless/test_serverless.py b/tests/integrations/serverless/test_serverless.py
index cc578ff4c4..a0a33e31ec 100644
--- a/tests/integrations/serverless/test_serverless.py
+++ b/tests/integrations/serverless/test_serverless.py
@@ -11,9 +11,7 @@ def test_basic(sentry_init, capture_exceptions, monkeypatch):
@serverless_function
def foo():
- monkeypatch.setattr(
- "sentry_sdk.Hub.current.flush", lambda: flush_calls.append(1)
- )
+ monkeypatch.setattr("sentry_sdk.flush", lambda: flush_calls.append(1))
1 / 0
with pytest.raises(ZeroDivisionError):
@@ -31,7 +29,7 @@ def test_flush_disabled(sentry_init, capture_exceptions, monkeypatch):
flush_calls = []
- monkeypatch.setattr("sentry_sdk.Hub.current.flush", lambda: flush_calls.append(1))
+ monkeypatch.setattr("sentry_sdk.flush", lambda: flush_calls.append(1))
@serverless_function(flush=False)
def foo():
diff --git a/tests/integrations/sqlalchemy/test_sqlalchemy.py b/tests/integrations/sqlalchemy/test_sqlalchemy.py
index 08c8e29ec4..99d6a5c5fc 100644
--- a/tests/integrations/sqlalchemy/test_sqlalchemy.py
+++ b/tests/integrations/sqlalchemy/test_sqlalchemy.py
@@ -1,27 +1,22 @@
import os
-import pytest
-import sys
from datetime import datetime
+from unittest import mock
-from sentry_sdk._compat import PY2
+import pytest
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker
from sqlalchemy import text
-from sentry_sdk import capture_message, start_transaction, configure_scope
+from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import DEFAULT_MAX_VALUE_LENGTH, SPANDATA
from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
+from sentry_sdk.scope import Scope
from sentry_sdk.serializer import MAX_EVENT_BYTES
from sentry_sdk.tracing_utils import record_sql_queries
from sentry_sdk.utils import json_dumps
-try:
- from unittest import mock
-except ImportError:
- import mock
-
def test_orm_queries(sentry_init, capture_events):
sentry_init(
@@ -45,7 +40,9 @@ class Address(Base):
person_id = Column(Integer, ForeignKey("person.id"))
person = relationship(Person)
- engine = create_engine("sqlite:///:memory:")
+ engine = create_engine(
+ "sqlite:///:memory:", connect_args={"check_same_thread": False}
+ )
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine) # noqa: N806
@@ -81,9 +78,6 @@ class Address(Base):
]
-@pytest.mark.skipif(
- sys.version_info < (3,), reason="This sqla usage seems to be broken on Py2"
-)
def test_transactions(sentry_init, capture_events, render_span_tree):
sentry_init(
integrations=[SqlalchemyIntegration()],
@@ -108,7 +102,9 @@ class Address(Base):
person_id = Column(Integer, ForeignKey("person.id"))
person = relationship(Person)
- engine = create_engine("sqlite:///:memory:")
+ engine = create_engine(
+ "sqlite:///:memory:", connect_args={"check_same_thread": False}
+ )
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine) # noqa: N806
@@ -155,9 +151,6 @@ class Address(Base):
)
-@pytest.mark.skipif(
- sys.version_info < (3,), reason="This sqla usage seems to be broken on Py2"
-)
def test_transactions_no_engine_url(sentry_init, capture_events):
sentry_init(
integrations=[SqlalchemyIntegration()],
@@ -182,7 +175,9 @@ class Address(Base):
person_id = Column(Integer, ForeignKey("person.id"))
person = relationship(Person)
- engine = create_engine("sqlite:///:memory:")
+ engine = create_engine(
+ "sqlite:///:memory:", connect_args={"check_same_thread": False}
+ )
engine.url = None
Base.metadata.create_all(engine)
@@ -218,7 +213,9 @@ def test_long_sql_query_preserved(sentry_init, capture_events):
)
events = capture_events()
- engine = create_engine("sqlite:///:memory:")
+ engine = create_engine(
+ "sqlite:///:memory:", connect_args={"check_same_thread": False}
+ )
with start_transaction(name="test"):
with engine.connect() as con:
con.execute(text(" UNION ".join("SELECT {}".format(i) for i in range(100))))
@@ -238,14 +235,16 @@ def test_large_event_not_truncated(sentry_init, capture_events):
long_str = "x" * (DEFAULT_MAX_VALUE_LENGTH + 10)
- with configure_scope() as scope:
+ scope = Scope.get_isolation_scope()
- @scope.add_event_processor
- def processor(event, hint):
- event["message"] = long_str
- return event
+ @scope.add_event_processor
+ def processor(event, hint):
+ event["message"] = long_str
+ return event
- engine = create_engine("sqlite:///:memory:")
+ engine = create_engine(
+ "sqlite:///:memory:", connect_args={"check_same_thread": False}
+ )
with start_transaction(name="test"):
with engine.connect() as con:
for _ in range(1500):
@@ -285,7 +284,9 @@ def test_engine_name_not_string(sentry_init):
integrations=[SqlalchemyIntegration()],
)
- engine = create_engine("sqlite:///:memory:")
+ engine = create_engine(
+ "sqlite:///:memory:", connect_args={"check_same_thread": False}
+ )
engine.dialect.name = b"sqlite"
with engine.connect() as con:
@@ -312,7 +313,9 @@ class Person(Base):
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
- engine = create_engine("sqlite:///:memory:")
+ engine = create_engine(
+ "sqlite:///:memory:", connect_args={"check_same_thread": False}
+ )
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine) # noqa: N806
@@ -362,7 +365,9 @@ class Person(Base):
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
- engine = create_engine("sqlite:///:memory:")
+ engine = create_engine(
+ "sqlite:///:memory:", connect_args={"check_same_thread": False}
+ )
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine) # noqa: N806
@@ -407,7 +412,9 @@ class Person(Base):
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
- engine = create_engine("sqlite:///:memory:")
+ engine = create_engine(
+ "sqlite:///:memory:", connect_args={"check_same_thread": False}
+ )
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine) # noqa: N806
@@ -475,7 +482,9 @@ class Person(Base):
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
- engine = create_engine("sqlite:///:memory:")
+ engine = create_engine(
+ "sqlite:///:memory:", connect_args={"check_same_thread": False}
+ )
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine) # noqa: N806
@@ -502,11 +511,8 @@ class Person(Base):
assert type(data.get(SPANDATA.CODE_LINENO)) == int
assert data.get(SPANDATA.CODE_LINENO) > 0
- if not PY2:
- assert data.get(SPANDATA.CODE_NAMESPACE) == "sqlalchemy_helpers.helpers"
- assert (
- data.get(SPANDATA.CODE_FILEPATH) == "sqlalchemy_helpers/helpers.py"
- )
+ assert data.get(SPANDATA.CODE_NAMESPACE) == "sqlalchemy_helpers.helpers"
+ assert data.get(SPANDATA.CODE_FILEPATH) == "sqlalchemy_helpers/helpers.py"
is_relative_path = data.get(SPANDATA.CODE_FILEPATH)[0] != os.sep
assert is_relative_path
@@ -534,7 +540,9 @@ class Person(Base):
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
- engine = create_engine("sqlite:///:memory:")
+ engine = create_engine(
+ "sqlite:///:memory:", connect_args={"check_same_thread": False}
+ )
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine) # noqa: N806
@@ -598,7 +606,9 @@ class Person(Base):
id = Column(Integer, primary_key=True)
name = Column(String(250), nullable=False)
- engine = create_engine("sqlite:///:memory:")
+ engine = create_engine(
+ "sqlite:///:memory:", connect_args={"check_same_thread": False}
+ )
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine) # noqa: N806
diff --git a/tests/integrations/starlette/test_starlette.py b/tests/integrations/starlette/test_starlette.py
index 202f8b53de..e1f3c1a482 100644
--- a/tests/integrations/starlette/test_starlette.py
+++ b/tests/integrations/starlette/test_starlette.py
@@ -6,23 +6,17 @@
import os
import re
import threading
+from unittest import mock
import pytest
-from sentry_sdk import last_event_id, capture_exception
+from sentry_sdk import capture_message, get_baggage, get_traceparent
from sentry_sdk.integrations.asgi import SentryAsgiMiddleware
-from sentry_sdk.utils import parse_version
-
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
-
-from sentry_sdk import capture_message
from sentry_sdk.integrations.starlette import (
StarletteIntegration,
StarletteRequestExtractor,
)
+from sentry_sdk.utils import parse_version
import starlette
from starlette.authentication import (
@@ -97,7 +91,6 @@ async def _mock_receive(msg):
return msg
-from sentry_sdk import Hub
from starlette.templating import Jinja2Templates
@@ -139,8 +132,7 @@ async def _thread_ids_async(request):
)
async def _render_template(request):
- hub = Hub.current
- capture_message(hub.get_traceparent() + "\n" + hub.get_baggage())
+ capture_message(get_traceparent() + "\n" + get_baggage())
template_context = {
"request": request,
@@ -821,30 +813,6 @@ def test_middleware_partial_receive_send(sentry_init, capture_events):
idx += 1
-def test_last_event_id(sentry_init, capture_events):
- sentry_init(
- integrations=[StarletteIntegration()],
- )
- events = capture_events()
-
- def handler(request, exc):
- capture_exception(exc)
- return starlette.responses.PlainTextResponse(last_event_id(), status_code=500)
-
- app = starlette_app_factory(debug=False)
- app.add_exception_handler(500, handler)
-
- client = TestClient(SentryAsgiMiddleware(app), raise_server_exceptions=False)
- response = client.get("/custom_error")
- assert response.status_code == 500
-
- event = events[0]
- assert response.content.strip().decode("ascii") == event["event_id"]
- (exception,) = event["exception"]["values"]
- assert exception["type"] == "Exception"
- assert exception["value"] == "Too Hot"
-
-
def test_legacy_setup(
sentry_init,
capture_events,
@@ -950,9 +918,8 @@ def test_template_tracing_meta(sentry_init, capture_events):
assert match is not None
assert match.group(1) == traceparent
- # Python 2 does not preserve sort order
rendered_baggage = match.group(2)
- assert sorted(rendered_baggage.split(",")) == sorted(baggage.split(","))
+ assert rendered_baggage == baggage
@pytest.mark.parametrize(
@@ -987,7 +954,6 @@ def test_transaction_name(
auto_enabling_integrations=False, # Make sure that httpx integration is not added, because it adds tracing information to the starlette test clients request.
integrations=[StarletteIntegration(transaction_style=transaction_style)],
traces_sample_rate=1.0,
- debug=True,
)
envelopes = capture_envelopes()
@@ -1048,7 +1014,6 @@ def dummy_traces_sampler(sampling_context):
integrations=[StarletteIntegration(transaction_style=transaction_style)],
traces_sampler=dummy_traces_sampler,
traces_sample_rate=1.0,
- debug=True,
)
app = starlette_app_factory()
@@ -1090,7 +1055,6 @@ def test_transaction_name_in_middleware(
StarletteIntegration(transaction_style=transaction_style),
],
traces_sample_rate=1.0,
- debug=True,
)
envelopes = capture_envelopes()
diff --git a/tests/integrations/starlite/test_starlite.py b/tests/integrations/starlite/test_starlite.py
index 0412133f5e..5f1b199be6 100644
--- a/tests/integrations/starlite/test_starlite.py
+++ b/tests/integrations/starlite/test_starlite.py
@@ -2,16 +2,14 @@
import pytest
-from sentry_sdk import capture_exception, capture_message, last_event_id
+from sentry_sdk import capture_message
from sentry_sdk.integrations.starlite import StarliteIntegration
from typing import Any, Dict
-import starlite
from starlite import AbstractMiddleware, LoggingConfig, Starlite, get, Controller
from starlite.middleware import LoggingMiddlewareConfig, RateLimitConfig
from starlite.middleware.session.memory_backend import MemoryBackendConfig
-from starlite.status_codes import HTTP_500_INTERNAL_SERVER_ERROR
from starlite.testing import TestClient
@@ -291,27 +289,3 @@ def test_middleware_partial_receive_send(sentry_init, capture_events):
assert span["op"] == expected[idx]["op"]
assert span["description"].startswith(expected[idx]["description"])
assert span["tags"] == expected[idx]["tags"]
-
-
-def test_last_event_id(sentry_init, capture_events):
- sentry_init(
- integrations=[StarliteIntegration()],
- )
- events = capture_events()
-
- def handler(request, exc):
- capture_exception(exc)
- return starlite.response.Response(last_event_id(), status_code=500)
-
- app = starlite_app_factory(
- debug=False, exception_handlers={HTTP_500_INTERNAL_SERVER_ERROR: handler}
- )
-
- client = TestClient(app, raise_server_exceptions=False)
- response = client.get("/custom_error")
- assert response.status_code == 500
- event = events[-1]
- assert response.content.strip().decode("ascii").strip('"') == event["event_id"]
- (exception,) = event["exception"]["values"]
- assert exception["type"] == "Exception"
- assert exception["value"] == "Too Hot"
diff --git a/tests/integrations/stdlib/test_httplib.py b/tests/integrations/stdlib/test_httplib.py
index 6055b86ab8..3dc7c6c50f 100644
--- a/tests/integrations/stdlib/test_httplib.py
+++ b/tests/integrations/stdlib/test_httplib.py
@@ -1,27 +1,10 @@
import random
+from http.client import HTTPConnection, HTTPSConnection
+from urllib.request import urlopen
+from unittest import mock
import pytest
-try:
- # py3
- from urllib.request import urlopen
-except ImportError:
- # py2
- from urllib import urlopen
-
-try:
- # py2
- from httplib import HTTPConnection, HTTPSConnection
-except ImportError:
- # py3
- from http.client import HTTPConnection, HTTPSConnection
-
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
-
-
from sentry_sdk import capture_message, start_transaction
from sentry_sdk.consts import MATCH_ALL, SPANDATA
from sentry_sdk.tracing import Transaction
@@ -188,17 +171,15 @@ def test_outgoing_trace_headers(sentry_init, monkeypatch):
)
assert request_headers["sentry-trace"] == expected_sentry_trace
- expected_outgoing_baggage_items = [
- "sentry-trace_id=771a43a4192642f0b136d5159a501700",
- "sentry-public_key=49d0f7386ad645858ae85020e393bef3",
- "sentry-sample_rate=0.01337",
- "sentry-user_id=Am%C3%A9lie",
- ]
-
- assert sorted(request_headers["baggage"].split(",")) == sorted(
- expected_outgoing_baggage_items
+ expected_outgoing_baggage = (
+ "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
+ "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
+ "sentry-sample_rate=0.01337,"
+ "sentry-user_id=Am%C3%A9lie"
)
+ assert request_headers["baggage"] == expected_outgoing_baggage
+
def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch):
# HTTPSConnection.send is passed a string containing (among other things)
@@ -231,17 +212,15 @@ def test_outgoing_trace_headers_head_sdk(sentry_init, monkeypatch):
)
assert request_headers["sentry-trace"] == expected_sentry_trace
- expected_outgoing_baggage_items = [
- "sentry-trace_id=%s" % transaction.trace_id,
- "sentry-sample_rate=0.5",
- "sentry-sampled=%s" % "true" if transaction.sampled else "false",
- "sentry-release=foo",
- "sentry-environment=production",
- ]
+ expected_outgoing_baggage = (
+ "sentry-trace_id=%s,"
+ "sentry-environment=production,"
+ "sentry-release=foo,"
+ "sentry-sample_rate=0.5,"
+ "sentry-sampled=%s"
+ ) % (transaction.trace_id, "true" if transaction.sampled else "false")
- assert sorted(request_headers["baggage"].split(",")) == sorted(
- expected_outgoing_baggage_items
- )
+ assert request_headers["baggage"] == expected_outgoing_baggage
@pytest.mark.parametrize(
diff --git a/tests/integrations/stdlib/test_subprocess.py b/tests/integrations/stdlib/test_subprocess.py
index d61be35fd2..c931db09c4 100644
--- a/tests/integrations/stdlib/test_subprocess.py
+++ b/tests/integrations/stdlib/test_subprocess.py
@@ -2,21 +2,15 @@
import platform
import subprocess
import sys
+from collections.abc import Mapping
import pytest
from sentry_sdk import capture_message, start_transaction
-from sentry_sdk._compat import PY2
from sentry_sdk.integrations.stdlib import StdlibIntegration
from tests.conftest import ApproxDict
-if PY2:
- from collections import Mapping
-else:
- from collections.abc import Mapping
-
-
class ImmutableDict(Mapping):
def __init__(self, inner):
self.inner = inner
diff --git a/tests/integrations/strawberry/test_strawberry_py3.py b/tests/integrations/strawberry/test_strawberry.py
similarity index 100%
rename from tests/integrations/strawberry/test_strawberry_py3.py
rename to tests/integrations/strawberry/test_strawberry.py
diff --git a/tests/integrations/threading/test_threading.py b/tests/integrations/threading/test_threading.py
index 97f480f155..328d0708c4 100644
--- a/tests/integrations/threading/test_threading.py
+++ b/tests/integrations/threading/test_threading.py
@@ -1,23 +1,18 @@
import gc
-import sys
+from concurrent import futures
from threading import Thread
-try:
- from concurrent import futures
-except ImportError:
- futures = None
-
import pytest
import sentry_sdk
-from sentry_sdk import configure_scope, capture_message
+from sentry_sdk import capture_message
from sentry_sdk.integrations.threading import ThreadingIntegration
+from sentry_sdk.scope import Scope
original_start = Thread.start
original_run = Thread.run
-@pytest.mark.forked
@pytest.mark.parametrize("integrations", [[ThreadingIntegration()], []])
def test_handles_exceptions(sentry_init, capture_events, integrations):
sentry_init(default_integrations=False, integrations=integrations)
@@ -41,7 +36,6 @@ def crash():
assert not events
-@pytest.mark.forked
@pytest.mark.parametrize("propagate_hub", (True, False))
def test_propagates_hub(sentry_init, capture_events, propagate_hub):
sentry_init(
@@ -51,8 +45,7 @@ def test_propagates_hub(sentry_init, capture_events, propagate_hub):
events = capture_events()
def stage1():
- with configure_scope() as scope:
- scope.set_tag("stage1", "true")
+ Scope.get_isolation_scope().set_tag("stage1", "true")
t = Thread(target=stage2)
t.start()
@@ -79,10 +72,6 @@ def stage2():
assert "stage1" not in event.get("tags", {})
-@pytest.mark.skipif(
- futures is None,
- reason="ThreadPool was added in 3.2",
-)
@pytest.mark.parametrize("propagate_hub", (True, False))
def test_propagates_threadpool_hub(sentry_init, capture_events, propagate_hub):
sentry_init(
@@ -115,6 +104,7 @@ def double(number):
assert len(event["spans"]) == 0
+@pytest.mark.skip(reason="Temporarily disable to release SDK 2.0a1.")
def test_circular_references(sentry_init, request):
sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])
@@ -135,7 +125,6 @@ def run(self):
assert unreachable_objects == 0
-@pytest.mark.forked
def test_double_patching(sentry_init, capture_events):
sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])
events = capture_events()
@@ -164,7 +153,6 @@ def run(self):
assert exception["type"] == "ZeroDivisionError"
-@pytest.mark.skipif(sys.version_info < (3, 2), reason="no __qualname__ in older python")
def test_wrapper_attributes(sentry_init):
sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])
@@ -185,24 +173,3 @@ def target():
assert Thread.run.__qualname__ == original_run.__qualname__
assert t.run.__name__ == "run"
assert t.run.__qualname__ == original_run.__qualname__
-
-
-@pytest.mark.skipif(
- sys.version_info > (2, 7),
- reason="simpler test for py2.7 without py3 only __qualname__",
-)
-def test_wrapper_attributes_no_qualname(sentry_init):
- sentry_init(default_integrations=False, integrations=[ThreadingIntegration()])
-
- def target():
- assert t.run.__name__ == "run"
-
- t = Thread(target=target)
- t.start()
- t.join()
-
- assert Thread.start.__name__ == "start"
- assert t.start.__name__ == "start"
-
- assert Thread.run.__name__ == "run"
- assert t.run.__name__ == "run"
diff --git a/tests/integrations/tornado/test_tornado.py b/tests/integrations/tornado/test_tornado.py
index 2160154933..181c17cd49 100644
--- a/tests/integrations/tornado/test_tornado.py
+++ b/tests/integrations/tornado/test_tornado.py
@@ -2,8 +2,9 @@
import pytest
-from sentry_sdk import configure_scope, start_transaction, capture_message
+from sentry_sdk import start_transaction, capture_message
from sentry_sdk.integrations.tornado import TornadoIntegration
+from sentry_sdk.scope import Scope
from tornado.web import RequestHandler, Application, HTTPError
from tornado.testing import AsyncHTTPTestCase
@@ -36,13 +37,11 @@ def bogustest(self):
class CrashingHandler(RequestHandler):
def get(self):
- with configure_scope() as scope:
- scope.set_tag("foo", "42")
+ Scope.get_isolation_scope().set_tag("foo", "42")
1 / 0
def post(self):
- with configure_scope() as scope:
- scope.set_tag("foo", "43")
+ Scope.get_isolation_scope().set_tag("foo", "43")
1 / 0
@@ -54,14 +53,12 @@ def get(self):
class HelloHandler(RequestHandler):
async def get(self):
- with configure_scope() as scope:
- scope.set_tag("foo", "42")
+ Scope.get_isolation_scope().set_tag("foo", "42")
return b"hello"
async def post(self):
- with configure_scope() as scope:
- scope.set_tag("foo", "43")
+ Scope.get_isolation_scope().set_tag("foo", "43")
return b"hello"
@@ -104,8 +101,7 @@ def test_basic(tornado_testcase, sentry_init, capture_events):
)
assert event["transaction_info"] == {"source": "component"}
- with configure_scope() as scope:
- assert not scope._tags
+ assert not Scope.get_isolation_scope()._tags
@pytest.mark.parametrize(
@@ -116,7 +112,7 @@ def test_basic(tornado_testcase, sentry_init, capture_events):
],
)
def test_transactions(tornado_testcase, sentry_init, capture_events, handler, code):
- sentry_init(integrations=[TornadoIntegration()], traces_sample_rate=1.0, debug=True)
+ sentry_init(integrations=[TornadoIntegration()], traces_sample_rate=1.0)
events = capture_events()
client = tornado_testcase(Application([(r"/hi", handler)]))
diff --git a/tests/integrations/trytond/test_trytond.py b/tests/integrations/trytond/test_trytond.py
index c4593c3060..870b6ccf96 100644
--- a/tests/integrations/trytond/test_trytond.py
+++ b/tests/integrations/trytond/test_trytond.py
@@ -11,7 +11,6 @@
from trytond.wsgi import app as trytond_app
from werkzeug.test import Client
-from sentry_sdk import last_event_id
from sentry_sdk.integrations.trytond import TrytondWSGIIntegration
@@ -79,13 +78,12 @@ def _(request):
@pytest.mark.skipif(
trytond.__version__.split(".") < ["5", "4"], reason="At least Trytond-5.4 required"
)
-def test_rpc_error_page(sentry_init, app, capture_events, get_client):
+def test_rpc_error_page(sentry_init, app, get_client):
"""Test that, after initializing the Trytond-SentrySDK integration
a custom error handler can be registered to the Trytond WSGI app so as to
inform the event identifiers to the Tryton RPC client"""
sentry_init(integrations=[TrytondWSGIIntegration()])
- events = capture_events()
@app.route("/rpcerror", methods=["POST"])
def _(request):
@@ -96,8 +94,7 @@ def _(app, request, e):
if isinstance(e, TrytondBaseException):
return
else:
- event_id = last_event_id()
- data = TrytondUserError(str(event_id), str(e))
+ data = TrytondUserError("Sentry error.", str(e))
return app.make_response(request, data)
client = get_client()
@@ -121,9 +118,8 @@ def _(app, request, e):
"/rpcerror", content_type="application/json", data=json.dumps(_data)
)
- (event,) = events
(content, status, headers) = response
data = json.loads(next(content))
assert status == "200 OK"
assert headers.get("Content-Type") == "application/json"
- assert data == dict(id=42, error=["UserError", [event["event_id"], "foo", None]])
+ assert data == dict(id=42, error=["UserError", ["Sentry error.", "foo", None]])
diff --git a/tests/integrations/wsgi/test_wsgi.py b/tests/integrations/wsgi/test_wsgi.py
index 0b76bf6887..03ebdb5107 100644
--- a/tests/integrations/wsgi/test_wsgi.py
+++ b/tests/integrations/wsgi/test_wsgi.py
@@ -1,18 +1,12 @@
-import sys
-
-from werkzeug.test import Client
+from collections import Counter
+from unittest import mock
import pytest
+from werkzeug.test import Client
import sentry_sdk
from sentry_sdk import capture_message
from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
-from collections import Counter
-
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
@pytest.fixture
@@ -23,7 +17,7 @@ def app(environ, start_response):
return app
-class IterableApp(object):
+class IterableApp:
def __init__(self, iterable):
self.iterable = iterable
@@ -31,7 +25,7 @@ def __call__(self, environ, start_response):
return self.iterable
-class ExitingIterable(object):
+class ExitingIterable:
def __init__(self, exc_func):
self._exc_func = exc_func
@@ -418,9 +412,6 @@ def sample_app(environ, start_response):
assert len(session_aggregates) == 1
-@pytest.mark.skipif(
- sys.version_info < (3, 3), reason="Profiling is only supported in Python >= 3.3"
-)
@mock.patch("sentry_sdk.profiler.PROFILE_MINIMUM_SAMPLES", 0)
def test_profile_sent(
sentry_init,
diff --git a/tests/test_api.py b/tests/test_api.py
index 1adb9095f0..738882f965 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -1,42 +1,44 @@
+import pytest
+from unittest import mock
+
from sentry_sdk import (
- configure_scope,
continue_trace,
get_baggage,
+ get_client,
get_current_span,
get_traceparent,
+ is_initialized,
start_transaction,
)
-from sentry_sdk.hub import Hub
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
+from sentry_sdk.client import Client, NonRecordingClient
+from sentry_sdk.scope import Scope
+@pytest.mark.forked
def test_get_current_span():
- fake_hub = mock.MagicMock()
- fake_hub.scope = mock.MagicMock()
-
- fake_hub.scope.span = mock.MagicMock()
- assert get_current_span(fake_hub) == fake_hub.scope.span
+ fake_scope = mock.MagicMock()
+ fake_scope.span = mock.MagicMock()
+ assert get_current_span(fake_scope) == fake_scope.span
- fake_hub.scope.span = None
- assert get_current_span(fake_hub) is None
+ fake_scope.span = None
+ assert get_current_span(fake_scope) is None
+@pytest.mark.forked
def test_get_current_span_default_hub(sentry_init):
sentry_init()
assert get_current_span() is None
- with configure_scope() as scope:
- fake_span = mock.MagicMock()
- scope.span = fake_span
+ scope = Scope.get_current_scope()
+ fake_span = mock.MagicMock()
+ scope.span = fake_span
- assert get_current_span() == fake_span
+ assert get_current_span() == fake_span
+@pytest.mark.forked
def test_get_current_span_default_hub_with_transaction(sentry_init):
sentry_init()
@@ -46,6 +48,7 @@ def test_get_current_span_default_hub_with_transaction(sentry_init):
assert get_current_span() == new_transaction
+@pytest.mark.forked
def test_traceparent_with_tracing_enabled(sentry_init):
sentry_init(traces_sample_rate=1.0)
@@ -57,39 +60,41 @@ def test_traceparent_with_tracing_enabled(sentry_init):
assert get_traceparent() == expected_traceparent
+@pytest.mark.forked
def test_traceparent_with_tracing_disabled(sentry_init):
sentry_init()
- propagation_context = Hub.current.scope._propagation_context
+ propagation_context = Scope.get_isolation_scope()._propagation_context
expected_traceparent = "%s-%s" % (
- propagation_context["trace_id"],
- propagation_context["span_id"],
+ propagation_context.trace_id,
+ propagation_context.span_id,
)
assert get_traceparent() == expected_traceparent
+@pytest.mark.forked
def test_baggage_with_tracing_disabled(sentry_init):
sentry_init(release="1.0.0", environment="dev")
- propagation_context = Hub.current.scope._propagation_context
+ propagation_context = Scope.get_isolation_scope()._propagation_context
expected_baggage = (
"sentry-trace_id={},sentry-environment=dev,sentry-release=1.0.0".format(
- propagation_context["trace_id"]
+ propagation_context.trace_id
)
)
- # order not guaranteed in older python versions
- assert sorted(get_baggage().split(",")) == sorted(expected_baggage.split(","))
+ assert get_baggage() == expected_baggage
+@pytest.mark.forked
def test_baggage_with_tracing_enabled(sentry_init):
sentry_init(traces_sample_rate=1.0, release="1.0.0", environment="dev")
with start_transaction() as transaction:
expected_baggage = "sentry-trace_id={},sentry-environment=dev,sentry-release=1.0.0,sentry-sample_rate=1.0,sentry-sampled={}".format(
transaction.trace_id, "true" if transaction.sampled else "false"
)
- # order not guaranteed in older python versions
- assert sorted(get_baggage().split(",")) == sorted(expected_baggage.split(","))
+ assert get_baggage() == expected_baggage
+@pytest.mark.forked
def test_continue_trace(sentry_init):
sentry_init()
@@ -106,10 +111,27 @@ def test_continue_trace(sentry_init):
with start_transaction(transaction):
assert transaction.name == "some name"
- propagation_context = Hub.current.scope._propagation_context
- assert propagation_context["trace_id"] == transaction.trace_id == trace_id
- assert propagation_context["parent_span_id"] == parent_span_id
- assert propagation_context["parent_sampled"] == parent_sampled
- assert propagation_context["dynamic_sampling_context"] == {
+ propagation_context = Scope.get_isolation_scope()._propagation_context
+ assert propagation_context.trace_id == transaction.trace_id == trace_id
+ assert propagation_context.parent_span_id == parent_span_id
+ assert propagation_context.parent_sampled == parent_sampled
+ assert propagation_context.dynamic_sampling_context == {
"trace_id": "566e3688a61d4bc888951642d6f14a19"
}
+
+
+@pytest.mark.forked
+def test_is_initialized():
+ assert not is_initialized()
+
+ scope = Scope.get_global_scope()
+ scope.set_client(Client())
+ assert is_initialized()
+
+
+@pytest.mark.forked
+def test_get_client():
+ client = get_client()
+ assert client is not None
+ assert client.__class__ == NonRecordingClient
+ assert not client.is_active()
diff --git a/tests/test_basics.py b/tests/test_basics.py
index 26dad73274..bf42634710 100644
--- a/tests/test_basics.py
+++ b/tests/test_basics.py
@@ -4,11 +4,11 @@
import time
import pytest
+from sentry_sdk.client import Client
from tests.conftest import patch_start_tracing_child
from sentry_sdk import (
- Client,
push_scope,
configure_scope,
capture_event,
@@ -16,10 +16,9 @@
capture_message,
start_transaction,
add_breadcrumb,
- last_event_id,
Hub,
+ Scope,
)
-from sentry_sdk._compat import reraise, PY2
from sentry_sdk.integrations import (
_AUTO_ENABLING_INTEGRATIONS,
Integration,
@@ -31,7 +30,7 @@
add_global_event_processor,
global_event_processors,
)
-from sentry_sdk.utils import get_sdk_name
+from sentry_sdk.utils import get_sdk_name, reraise
from sentry_sdk.tracing_utils import has_tracing_enabled
@@ -109,28 +108,6 @@ def test_auto_enabling_integrations_catches_import_error(sentry_init, caplog):
), "Problem with checking auto enabling {}".format(import_string)
-def test_event_id(sentry_init, capture_events):
- sentry_init()
- events = capture_events()
-
- try:
- raise ValueError("aha!")
- except Exception:
- event_id = capture_exception()
- int(event_id, 16)
- assert len(event_id) == 32
-
- (event,) = events
- assert event["event_id"] == event_id
- assert last_event_id() == event_id
- assert Hub.current.last_event_id() == event_id
-
- new_event_id = Hub.current.capture_event({"type": "transaction"})
- assert new_event_id is not None
- assert new_event_id != event_id
- assert Hub.current.last_event_id() == event_id
-
-
def test_generic_mechanism(sentry_init, capture_events):
sentry_init()
events = capture_events()
@@ -347,6 +324,9 @@ def test_push_scope_null_client(sentry_init, capture_events):
assert len(events) == 0
+@pytest.mark.skip(
+ reason="This test is not valid anymore, because push_scope just returns the isolation scope. This test should be removed once the Hub is removed"
+)
@pytest.mark.parametrize("null_client", (True, False))
def test_push_scope_callback(sentry_init, null_client, capture_events):
sentry_init()
@@ -396,8 +376,7 @@ def test_breadcrumbs(sentry_init, capture_events):
category="auth", message="Authenticated user %s" % i, level="info"
)
- with configure_scope() as scope:
- scope.clear()
+ Scope.get_isolation_scope().clear()
capture_exception(ValueError())
(event,) = events
@@ -454,10 +433,13 @@ def test_integration_scoping(sentry_init, capture_events):
assert not events
+@pytest.mark.skip(
+ reason="This test is not valid anymore, because with the new Scopes calling bind_client on the Hub sets the client on the global scope. This test should be removed once the Hub is removed"
+)
def test_client_initialized_within_scope(sentry_init, caplog):
caplog.set_level(logging.WARNING)
- sentry_init(debug=True)
+ sentry_init()
with push_scope():
Hub.current.bind_client(Client())
@@ -467,10 +449,13 @@ def test_client_initialized_within_scope(sentry_init, caplog):
assert record.msg.startswith("init() called inside of pushed scope.")
+@pytest.mark.skip(
+ reason="This test is not valid anymore, because with the new Scopes the push_scope just returns the isolation scope. This test should be removed once the Hub is removed"
+)
def test_scope_leaks_cleaned_up(sentry_init, caplog):
caplog.set_level(logging.WARNING)
- sentry_init(debug=True)
+ sentry_init()
old_stack = list(Hub.current._stack)
@@ -484,10 +469,13 @@ def test_scope_leaks_cleaned_up(sentry_init, caplog):
assert record.message.startswith("Leaked 1 scopes:")
+@pytest.mark.skip(
+ reason="This test is not valid anymore, because with the new Scopes there is not pushing and popping of scopes. This test should be removed once the Hub is removed"
+)
def test_scope_popped_too_soon(sentry_init, caplog):
caplog.set_level(logging.ERROR)
- sentry_init(debug=True)
+ sentry_init()
old_stack = list(Hub.current._stack)
@@ -531,7 +519,7 @@ def bar(event, hint):
def test_capture_event_with_scope_kwargs(sentry_init, capture_events):
- sentry_init(debug=True)
+ sentry_init()
events = capture_events()
capture_event({}, level="info", extras={"foo": "bar"})
(event,) = events
@@ -752,18 +740,16 @@ def class_(cls, arg):
def test_staticmethod_tracing(sentry_init):
test_staticmethod_name = "tests.test_basics.TracingTestClass.static"
- if not PY2:
- # Skip this check on Python 2 since __qualname__ is available in Python 3 only. Skipping is okay,
- # since the assertion would be expected to fail in Python 3 if there is any problem.
- assert (
- ".".join(
- [
- TracingTestClass.static.__module__,
- TracingTestClass.static.__qualname__,
- ]
- )
- == test_staticmethod_name
- ), "The test static method was moved or renamed. Please update the name accordingly"
+
+ assert (
+ ".".join(
+ [
+ TracingTestClass.static.__module__,
+ TracingTestClass.static.__qualname__,
+ ]
+ )
+ == test_staticmethod_name
+ ), "The test static method was moved or renamed. Please update the name accordingly"
sentry_init(functions_to_trace=[{"qualified_name": test_staticmethod_name}])
@@ -775,18 +761,16 @@ def test_staticmethod_tracing(sentry_init):
def test_classmethod_tracing(sentry_init):
test_classmethod_name = "tests.test_basics.TracingTestClass.class_"
- if not PY2:
- # Skip this check on Python 2 since __qualname__ is available in Python 3 only. Skipping is okay,
- # since the assertion would be expected to fail in Python 3 if there is any problem.
- assert (
- ".".join(
- [
- TracingTestClass.class_.__module__,
- TracingTestClass.class_.__qualname__,
- ]
- )
- == test_classmethod_name
- ), "The test class method was moved or renamed. Please update the name accordingly"
+
+ assert (
+ ".".join(
+ [
+ TracingTestClass.class_.__module__,
+ TracingTestClass.class_.__qualname__,
+ ]
+ )
+ == test_classmethod_name
+ ), "The test class method was moved or renamed. Please update the name accordingly"
sentry_init(functions_to_trace=[{"qualified_name": test_classmethod_name}])
diff --git a/tests/test_client.py b/tests/test_client.py
index 0954a8c5e8..0464f32b5e 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -1,11 +1,13 @@
-# coding: utf-8
import os
import json
-import pytest
import subprocess
import sys
import time
+from collections.abc import Mapping
from textwrap import dedent
+from unittest import mock
+
+import pytest
from sentry_sdk import (
Hub,
@@ -15,14 +17,10 @@
capture_message,
capture_exception,
capture_event,
- start_transaction,
set_tag,
)
from sentry_sdk.integrations.executing import ExecutingIntegration
from sentry_sdk.transport import Transport
-from sentry_sdk._compat import text_type, PY2
-from sentry_sdk.utils import HAS_CHAINED_EXCEPTIONS
-from sentry_sdk.utils import logger
from sentry_sdk.serializer import MAX_DATABAG_BREADTH
from sentry_sdk.consts import DEFAULT_MAX_BREADCRUMBS, DEFAULT_MAX_VALUE_LENGTH
from sentry_sdk._types import TYPE_CHECKING
@@ -32,28 +30,14 @@
from typing import Any, Optional, Union
from sentry_sdk._types import Event
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
-
-if PY2:
- # Importing ABCs from collections is deprecated, and will stop working in 3.8
- # https://github.com/python/cpython/blob/master/Lib/collections/__init__.py#L49
- from collections import Mapping
-else:
- # New in 3.3
- # https://docs.python.org/3/library/collections.abc.html
- from collections.abc import Mapping
-
-class EventCapturedError(Exception):
+class EnvelopeCapturedError(Exception):
pass
class _TestTransport(Transport):
- def capture_event(self, event):
- raise EventCapturedError(event)
+ def capture_envelope(self, envelope):
+ raise EnvelopeCapturedError(envelope)
def test_transport_option(monkeypatch):
@@ -66,8 +50,8 @@ def test_transport_option(monkeypatch):
assert Client().dsn is None
monkeypatch.setenv("SENTRY_DSN", dsn)
- transport = Transport({"dsn": dsn2})
- assert text_type(transport.parsed_dsn) == dsn2
+ transport = _TestTransport({"dsn": dsn2})
+ assert str(transport.parsed_dsn) == dsn2
assert str(Client(transport=transport).dsn) == dsn
@@ -378,60 +362,8 @@ def e(exc):
e(ValueError())
assert mock_capture_internal_exception.call_count == 1
- assert mock_capture_internal_exception.call_args[0][0][0] == EventCapturedError
-
-
-def test_with_locals_deprecation_enabled(sentry_init):
- with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
- sentry_init(with_locals=True)
-
- client = Hub.current.client
- assert "with_locals" not in client.options
- assert "include_local_variables" in client.options
- assert client.options["include_local_variables"]
-
- fake_warning.assert_called_once_with(
- "Deprecated: The option 'with_locals' was renamed to 'include_local_variables'. Please use 'include_local_variables'. The option 'with_locals' will be removed in the future."
- )
-
-
-def test_with_locals_deprecation_disabled(sentry_init):
- with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
- sentry_init(with_locals=False)
-
- client = Hub.current.client
- assert "with_locals" not in client.options
- assert "include_local_variables" in client.options
- assert not client.options["include_local_variables"]
-
- fake_warning.assert_called_once_with(
- "Deprecated: The option 'with_locals' was renamed to 'include_local_variables'. Please use 'include_local_variables'. The option 'with_locals' will be removed in the future."
- )
-
-
-def test_include_local_variables_deprecation(sentry_init):
- with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
- sentry_init(include_local_variables=False)
-
- client = Hub.current.client
- assert "with_locals" not in client.options
- assert "include_local_variables" in client.options
- assert not client.options["include_local_variables"]
-
- fake_warning.assert_not_called()
-
-
-def test_request_bodies_deprecation(sentry_init):
- with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
- sentry_init(request_bodies="small")
-
- client = Hub.current.client
- assert "request_bodies" not in client.options
- assert "max_request_body_size" in client.options
- assert client.options["max_request_body_size"] == "small"
-
- fake_warning.assert_called_once_with(
- "Deprecated: The option 'request_bodies' was renamed to 'max_request_body_size'. Please use 'max_request_body_size'. The option 'request_bodies' will be removed in the future."
+ assert (
+ mock_capture_internal_exception.call_args[0][0][0] == EnvelopeCapturedError
)
@@ -590,8 +522,8 @@ def test_attach_stacktrace_disabled(sentry_init, capture_events):
def test_capture_event_works(sentry_init):
sentry_init(transport=_TestTransport())
- pytest.raises(EventCapturedError, lambda: capture_event({}))
- pytest.raises(EventCapturedError, lambda: capture_event({}))
+ pytest.raises(EnvelopeCapturedError, lambda: capture_event({}))
+ pytest.raises(EnvelopeCapturedError, lambda: capture_event({}))
@pytest.mark.parametrize("num_messages", [10, 20])
@@ -603,11 +535,13 @@ def test_atexit(tmpdir, monkeypatch, num_messages):
import time
from sentry_sdk import init, transport, capture_message
- def send_event(self, event):
+ def capture_envelope(self, envelope):
time.sleep(0.1)
- print(event["message"])
+ event = envelope.get_event() or dict()
+ message = event.get("message", "")
+ print(message)
- transport.HttpTransport._send_event = send_event
+ transport.HttpTransport.capture_envelope = capture_envelope
init("http://foobar@localhost/123", shutdown_timeout={num_messages})
for _ in range({num_messages}):
@@ -665,6 +599,9 @@ def test_client_debug_option_disabled(with_client, sentry_init, caplog):
assert "OK" not in caplog.text
+@pytest.mark.skip(
+ reason="New behavior in SDK 2.0: You have a scope before init and add data to it."
+)
def test_scope_initialized_before_client(sentry_init, capture_events):
"""
This is a consequence of how configure_scope() works. We must
@@ -686,9 +623,7 @@ def test_scope_initialized_before_client(sentry_init, capture_events):
def test_weird_chars(sentry_init, capture_events):
sentry_init()
events = capture_events()
- # fmt: off
- capture_message(u"föö".encode("latin1"))
- # fmt: on
+ capture_message("föö".encode("latin1"))
(event,) = events
assert json.loads(json.dumps(event)) == event
@@ -813,7 +748,6 @@ def inner():
assert len(json.dumps(event)) < 10000
-@pytest.mark.skipif(not HAS_CHAINED_EXCEPTIONS, reason="Only works on 3.3+")
def test_chained_exceptions(sentry_init, capture_events):
sentry_init()
events = capture_events()
@@ -908,7 +842,7 @@ def test_object_sends_exception(sentry_init, capture_events):
sentry_init()
events = capture_events()
- class C(object):
+ class C:
def __repr__(self):
try:
1 / 0
@@ -976,7 +910,7 @@ def test_dict_changed_during_iteration(sentry_init, capture_events):
sentry_init(send_default_pii=True)
events = capture_events()
- class TooSmartClass(object):
+ class TooSmartClass:
def __init__(self, environ):
self.environ = environ
@@ -1020,91 +954,6 @@ def test_init_string_types(dsn, sentry_init):
)
-def test_sending_events_with_tracing():
- """
- Tests for calling the right transport method (capture_event vs
- capture_envelope) from the SDK client for different data types.
- """
-
- envelopes = []
- events = []
-
- class CustomTransport(Transport):
- def capture_envelope(self, envelope):
- envelopes.append(envelope)
-
- def capture_event(self, event):
- events.append(event)
-
- with Hub(Client(enable_tracing=True, transport=CustomTransport())):
- try:
- 1 / 0
- except Exception:
- event_id = capture_exception()
-
- # Assert error events get passed in via capture_envelope
- assert not events
- envelope = envelopes.pop()
- (item,) = envelope.items
- assert item.data_category == "error"
- assert item.headers.get("type") == "event"
- assert item.get_event()["event_id"] == event_id
-
- with start_transaction(name="foo"):
- pass
-
- # Assert transactions get passed in via capture_envelope
- assert not events
- envelope = envelopes.pop()
-
- (item,) = envelope.items
- assert item.data_category == "transaction"
- assert item.headers.get("type") == "transaction"
-
- assert not envelopes
- assert not events
-
-
-def test_sending_events_with_no_tracing():
- """
- Tests for calling the right transport method (capture_event vs
- capture_envelope) from the SDK client for different data types.
- """
-
- envelopes = []
- events = []
-
- class CustomTransport(Transport):
- def capture_envelope(self, envelope):
- envelopes.append(envelope)
-
- def capture_event(self, event):
- events.append(event)
-
- with Hub(Client(enable_tracing=False, transport=CustomTransport())):
- try:
- 1 / 0
- except Exception:
- event_id = capture_exception()
-
- # Assert error events get passed in via capture_event
- assert not envelopes
- event = events.pop()
-
- assert event["event_id"] == event_id
- assert "type" not in event
-
- with start_transaction(name="foo"):
- pass
-
- # Assert transactions get passed in via capture_envelope
- assert not events
- assert not envelopes
-
- assert not envelopes
- assert not events
-
-
@pytest.mark.parametrize(
"sdk_options, expected_breadcrumbs",
[({}, DEFAULT_MAX_BREADCRUMBS), ({"max_breadcrumbs": 50}, 50)],
diff --git a/tests/test_conftest.py b/tests/test_conftest.py
index 1b006ed12e..3b8cd098f5 100644
--- a/tests/test_conftest.py
+++ b/tests/test_conftest.py
@@ -53,7 +53,7 @@ def test_dictionary_containing(
) is expected_result
-class Animal(object): # noqa: B903
+class Animal: # noqa: B903
def __init__(self, name=None, age=None, description=None):
self.name = name
self.age = age
diff --git a/tests/crons/test_crons.py b/tests/test_crons.py
similarity index 69%
rename from tests/crons/test_crons.py
rename to tests/test_crons.py
index 1f50a33751..2b4ed3cab2 100644
--- a/tests/crons/test_crons.py
+++ b/tests/test_crons.py
@@ -1,15 +1,12 @@
-import pytest
import uuid
+from unittest import mock
+
+import pytest
import sentry_sdk
from sentry_sdk import Hub, configure_scope, set_level
from sentry_sdk.crons import capture_checkin
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
-
@sentry_sdk.monitor(monitor_slug="abc123")
def _hello_world(name):
@@ -33,6 +30,34 @@ def _break_world_contextmanager(name):
return "Hello, {}".format(name)
+@sentry_sdk.monitor(monitor_slug="abc123")
+async def _hello_world_async(name):
+ return "Hello, {}".format(name)
+
+
+@sentry_sdk.monitor(monitor_slug="def456")
+async def _break_world_async(name):
+ 1 / 0
+ return "Hello, {}".format(name)
+
+
+async def my_coroutine():
+ return
+
+
+async def _hello_world_contextmanager_async(name):
+ with sentry_sdk.monitor(monitor_slug="abc123"):
+ await my_coroutine()
+ return "Hello, {}".format(name)
+
+
+async def _break_world_contextmanager_async(name):
+ with sentry_sdk.monitor(monitor_slug="def456"):
+ await my_coroutine()
+ 1 / 0
+ return "Hello, {}".format(name)
+
+
@sentry_sdk.monitor(monitor_slug="ghi789", monitor_config=None)
def _no_monitor_config():
return
@@ -341,3 +366,111 @@ def test_scope_data_in_checkin(sentry_init, capture_envelopes):
assert len(invalid_keys) == 0, "Unexpected keys found in checkin: {}".format(
invalid_keys
)
+
+
+@pytest.mark.asyncio
+async def test_decorator_async(sentry_init):
+ sentry_init()
+
+ with mock.patch(
+ "sentry_sdk.crons.decorator.capture_checkin"
+ ) as fake_capture_checkin:
+ result = await _hello_world_async("Grace")
+ assert result == "Hello, Grace"
+
+ # Check for initial checkin
+ fake_capture_checkin.assert_has_calls(
+ [
+ mock.call(
+ monitor_slug="abc123", status="in_progress", monitor_config=None
+ ),
+ ]
+ )
+
+ # Check for final checkin
+ assert fake_capture_checkin.call_args[1]["monitor_slug"] == "abc123"
+ assert fake_capture_checkin.call_args[1]["status"] == "ok"
+ assert fake_capture_checkin.call_args[1]["duration"]
+ assert fake_capture_checkin.call_args[1]["check_in_id"]
+
+
+@pytest.mark.asyncio
+async def test_decorator_error_async(sentry_init):
+ sentry_init()
+
+ with mock.patch(
+ "sentry_sdk.crons.decorator.capture_checkin"
+ ) as fake_capture_checkin:
+ with pytest.raises(ZeroDivisionError):
+ result = await _break_world_async("Grace")
+
+ assert "result" not in locals()
+
+ # Check for initial checkin
+ fake_capture_checkin.assert_has_calls(
+ [
+ mock.call(
+ monitor_slug="def456", status="in_progress", monitor_config=None
+ ),
+ ]
+ )
+
+ # Check for final checkin
+ assert fake_capture_checkin.call_args[1]["monitor_slug"] == "def456"
+ assert fake_capture_checkin.call_args[1]["status"] == "error"
+ assert fake_capture_checkin.call_args[1]["duration"]
+ assert fake_capture_checkin.call_args[1]["check_in_id"]
+
+
+@pytest.mark.asyncio
+async def test_contextmanager_async(sentry_init):
+ sentry_init()
+
+ with mock.patch(
+ "sentry_sdk.crons.decorator.capture_checkin"
+ ) as fake_capture_checkin:
+ result = await _hello_world_contextmanager_async("Grace")
+ assert result == "Hello, Grace"
+
+ # Check for initial checkin
+ fake_capture_checkin.assert_has_calls(
+ [
+ mock.call(
+ monitor_slug="abc123", status="in_progress", monitor_config=None
+ ),
+ ]
+ )
+
+ # Check for final checkin
+ assert fake_capture_checkin.call_args[1]["monitor_slug"] == "abc123"
+ assert fake_capture_checkin.call_args[1]["status"] == "ok"
+ assert fake_capture_checkin.call_args[1]["duration"]
+ assert fake_capture_checkin.call_args[1]["check_in_id"]
+
+
+@pytest.mark.asyncio
+async def test_contextmanager_error_async(sentry_init):
+ sentry_init()
+
+ with mock.patch(
+ "sentry_sdk.crons.decorator.capture_checkin"
+ ) as fake_capture_checkin:
+ with pytest.raises(ZeroDivisionError):
+ result = await _break_world_contextmanager_async("Grace")
+
+ assert "result" not in locals()
+
+ # Check for initial checkin
+ fake_capture_checkin.assert_has_calls(
+ [
+ mock.call(
+ monitor_slug="def456", status="in_progress", monitor_config=None
+ ),
+ ]
+ )
+
+ # Check for final checkin
+ assert fake_capture_checkin.call_args[1]["monitor_slug"] == "def456"
+ assert fake_capture_checkin.call_args[1]["status"] == "error"
+ assert fake_capture_checkin.call_args[1]["duration"]
+ assert fake_capture_checkin.call_args[1]["check_in_id"]
diff --git a/tests/test_envelope.py b/tests/test_envelope.py
index a8b3ac11f4..d1bc668f05 100644
--- a/tests/test_envelope.py
+++ b/tests/test_envelope.py
@@ -24,7 +24,6 @@ def generate_transaction_item():
"environment": "dogpark",
"release": "off.leash.park",
"public_key": "dogsarebadatkeepingsecrets",
- "user_segment": "bigs",
"transaction": "/interactions/other-dogs/new-dog",
},
}
@@ -105,7 +104,6 @@ def test_envelope_headers(sentry_init, capture_envelopes, monkeypatch):
"environment": "dogpark",
"release": "off.leash.park",
"public_key": "dogsarebadatkeepingsecrets",
- "user_segment": "bigs",
"transaction": "/interactions/other-dogs/new-dog",
},
}
diff --git a/tests/test_metrics.py b/tests/test_metrics.py
index 741935615d..c0793e8015 100644
--- a/tests/test_metrics.py
+++ b/tests/test_metrics.py
@@ -1,18 +1,14 @@
-# coding: utf-8
-import pytest
import sys
import time
import linecache
+from unittest import mock
+
+import pytest
-from sentry_sdk import Hub, metrics, push_scope, start_transaction
+from sentry_sdk import Hub, Scope, metrics, start_transaction
from sentry_sdk.tracing import TRANSACTION_SOURCE_ROUTE
from sentry_sdk.envelope import parse_json
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
-
try:
import gevent
except ImportError:
@@ -541,12 +537,12 @@ def test_transaction_name(
ts = time.time()
envelopes = capture_envelopes()
- with push_scope() as scope:
- scope.set_transaction_name("/user/{user_id}", source="route")
- metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts)
- metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts)
- metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts)
- metrics.distribution("dist", 3.0, tags={"a": "b"}, timestamp=ts)
+ scope = Scope.get_current_scope()
+ scope.set_transaction_name("/user/{user_id}", source="route")
+ metrics.distribution("dist", 1.0, tags={"a": "b"}, timestamp=ts)
+ metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts)
+ metrics.distribution("dist", 2.0, tags={"a": "b"}, timestamp=ts)
+ metrics.distribution("dist", 3.0, tags={"a": "b"}, timestamp=ts)
Hub.current.flush()
@@ -692,9 +688,7 @@ def test_metric_summaries(
"nano|\nsecond",
"my.amaze.metric_I_guess@nanosecond",
),
- # fmt: off
- (u"métríc", u"nanöseconď", u"m_tr_c@nansecon"),
- # fmt: on
+ ("métríc", "nanöseconď", "m_tr_c@nansecon"),
],
)
def test_metric_name_normalization(
@@ -733,9 +727,10 @@ def test_metric_name_normalization(
[
({"f-oo|bar": "%$foo/"}, {"f-oobar": "%$foo/"}),
({"foo$.$.$bar": "blah{}"}, {"foo..bar": "blah{}"}),
- # fmt: off
- ({u"foö-bar": u"snöwmän"}, {u"fo-bar": u"snöwmän"},),
- # fmt: on
+ (
+ {"foö-bar": "snöwmän"},
+ {"fo-bar": "snöwmän"},
+ ),
({"route": "GET /foo"}, {"route": "GET /foo"}),
({"__bar__": "this | or , that"}, {"__bar__": "this \\u{7c} or \\u{2c} that"}),
({"foo/": "hello!\n\r\t\\"}, {"foo/": "hello!\\n\\r\\t\\\\"}),
diff --git a/tests/test_monitor.py b/tests/test_monitor.py
index 42d600ebbb..3822437df3 100644
--- a/tests/test_monitor.py
+++ b/tests/test_monitor.py
@@ -1,19 +1,12 @@
import random
+from unittest import mock
from sentry_sdk import Hub, start_transaction
from sentry_sdk.transport import Transport
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
-
class HealthyTestTransport(Transport):
- def _send_event(self, event):
- pass
-
- def _send_envelope(self, envelope):
+ def capture_envelope(self, _):
pass
def is_healthy(self):
diff --git a/tests/test_new_scopes_compat.py b/tests/test_new_scopes_compat.py
new file mode 100644
index 0000000000..21e2ac27d3
--- /dev/null
+++ b/tests/test_new_scopes_compat.py
@@ -0,0 +1,275 @@
+import sentry_sdk
+from sentry_sdk.hub import Hub
+
+"""
+Those tests are meant to check the compatibility of the new scopes in SDK 2.0 with the old Hub/Scope system in SDK 1.x.
+
+Those tests have been run with the latest SDK 1.x version and the data used in the `assert` statements represents
+the behavior of the SDK 1.x.
+
+This makes sure that we are backwards compatible. (on a best effort basis, there will probably be some edge cases that are not covered here)
+"""
+
+
+def test_configure_scope_sdk1(sentry_init, capture_events):
+ """
+ Mutate data in a `with configure_scope` block.
+
+ Checks the results of SDK 2.x against the results the same code returned in SDK 1.x.
+ """
+ sentry_init()
+
+ events = capture_events()
+
+ sentry_sdk.set_tag("A", 1)
+ sentry_sdk.capture_message("Event A")
+
+ with sentry_sdk.configure_scope() as scope: # configure scope
+ sentry_sdk.set_tag("B1", 1)
+ scope.set_tag("B2", 1)
+ sentry_sdk.capture_message("Event B")
+
+ sentry_sdk.set_tag("Z", 1)
+ sentry_sdk.capture_message("Event Z")
+
+ (event_a, event_b, event_z) = events
+
+ # Check against the results the same code returned in SDK 1.x
+ assert event_a["tags"] == {"A": 1}
+ assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1}
+ assert event_z["tags"] == {"A": 1, "B1": 1, "B2": 1, "Z": 1}
+
+
+def test_push_scope_sdk1(sentry_init, capture_events):
+ """
+ Mutate data in a `with push_scope` block
+
+ Checks the results of SDK 2.x against the results the same code returned in SDK 1.x.
+ """
+ sentry_init()
+
+ events = capture_events()
+
+ sentry_sdk.set_tag("A", 1)
+ sentry_sdk.capture_message("Event A")
+
+ with sentry_sdk.push_scope() as scope: # push scope
+ sentry_sdk.set_tag("B1", 1)
+ scope.set_tag("B2", 1)
+ sentry_sdk.capture_message("Event B")
+
+ sentry_sdk.set_tag("Z", 1)
+ sentry_sdk.capture_message("Event Z")
+
+ (event_a, event_b, event_z) = events
+
+ # Check against the results the same code returned in SDK 1.x
+ assert event_a["tags"] == {"A": 1}
+ assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1}
+ assert event_z["tags"] == {"A": 1, "Z": 1}
+
+
+def test_with_hub_sdk1(sentry_init, capture_events):
+ """
+ Mutate data in a `with Hub:` block
+
+ Checks the results of SDK 2.x against the results the same code returned in SDK 1.x.
+ """
+ sentry_init()
+
+ events = capture_events()
+
+ sentry_sdk.set_tag("A", 1)
+ sentry_sdk.capture_message("Event A")
+
+ with Hub.current as hub: # with hub
+ sentry_sdk.set_tag("B1", 1)
+ hub.scope.set_tag("B2", 1)
+ sentry_sdk.capture_message("Event B")
+
+ sentry_sdk.set_tag("Z", 1)
+ sentry_sdk.capture_message("Event Z")
+
+ (event_a, event_b, event_z) = events
+
+ # Check against the results the same code returned in SDK 1.x
+ assert event_a["tags"] == {"A": 1}
+ assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1}
+ assert event_z["tags"] == {"A": 1, "B1": 1, "B2": 1, "Z": 1}
+
+
+def test_with_hub_configure_scope_sdk1(sentry_init, capture_events):
+ """
+ Mutate data in a `with Hub:` containing a `with configure_scope` block
+
+ Checks the results of SDK 2.x against the results the same code returned in SDK 1.x.
+ """
+ sentry_init()
+
+ events = capture_events()
+
+ sentry_sdk.set_tag("A", 1)
+ sentry_sdk.capture_message("Event A")
+
+ with Hub.current as hub: # with hub
+ sentry_sdk.set_tag("B1", 1)
+ with hub.configure_scope() as scope: # configure scope
+ sentry_sdk.set_tag("B2", 1)
+ hub.scope.set_tag("B3", 1)
+ scope.set_tag("B4", 1)
+ sentry_sdk.capture_message("Event B")
+ sentry_sdk.set_tag("B5", 1)
+ sentry_sdk.capture_message("Event C")
+
+ sentry_sdk.set_tag("Z", 1)
+ sentry_sdk.capture_message("Event Z")
+
+ (event_a, event_b, event_c, event_z) = events
+
+ # Check against the results the same code returned in SDK 1.x
+ assert event_a["tags"] == {"A": 1}
+ assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1}
+ assert event_c["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1, "B5": 1}
+ assert event_z["tags"] == {
+ "A": 1,
+ "B1": 1,
+ "B2": 1,
+ "B3": 1,
+ "B4": 1,
+ "B5": 1,
+ "Z": 1,
+ }
+
+
+def test_with_hub_push_scope_sdk1(sentry_init, capture_events):
+ """
+ Mutate data in a `with Hub:` containing a `with push_scope` block
+
+ Checks the results of SDK 2.x against the results the same code returned in SDK 1.x.
+ """
+ sentry_init()
+
+ events = capture_events()
+
+ sentry_sdk.set_tag("A", 1)
+ sentry_sdk.capture_message("Event A")
+
+ with Hub.current as hub: # with hub
+ sentry_sdk.set_tag("B1", 1)
+ with hub.push_scope() as scope: # push scope
+ sentry_sdk.set_tag("B2", 1)
+ hub.scope.set_tag("B3", 1)
+ scope.set_tag("B4", 1)
+ sentry_sdk.capture_message("Event B")
+ sentry_sdk.set_tag("B5", 1)
+ sentry_sdk.capture_message("Event C")
+
+ sentry_sdk.set_tag("Z", 1)
+ sentry_sdk.capture_message("Event Z")
+
+ (event_a, event_b, event_c, event_z) = events
+
+ # Check against the results the same code returned in SDK 1.x
+ assert event_a["tags"] == {"A": 1}
+ assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1}
+ assert event_c["tags"] == {"A": 1, "B1": 1, "B5": 1}
+ assert event_z["tags"] == {"A": 1, "B1": 1, "B5": 1, "Z": 1}
+
+
+def test_with_cloned_hub_sdk1(sentry_init, capture_events):
+ """
+ Mutate data in a `with cloned Hub:` block
+
+ Checks the results of SDK 2.x against the results the same code returned in SDK 1.x.
+ """
+ sentry_init()
+
+ events = capture_events()
+
+ sentry_sdk.set_tag("A", 1)
+ sentry_sdk.capture_message("Event A")
+
+ with Hub(Hub.current) as hub: # clone hub
+ sentry_sdk.set_tag("B1", 1)
+ hub.scope.set_tag("B2", 1)
+ sentry_sdk.capture_message("Event B")
+
+ sentry_sdk.set_tag("Z", 1)
+ sentry_sdk.capture_message("Event Z")
+
+ (event_a, event_b, event_z) = events
+
+ # Check against the results the same code returned in SDK 1.x
+ assert event_a["tags"] == {"A": 1}
+ assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1}
+ assert event_z["tags"] == {"A": 1, "Z": 1}
+
+
+def test_with_cloned_hub_configure_scope_sdk1(sentry_init, capture_events):
+ """
+ Mutate data in a `with cloned Hub:` containing a `with configure_scope` block
+
+ Checks the results of SDK 2.x against the results the same code returned in SDK 1.x.
+ """
+ sentry_init()
+
+ events = capture_events()
+
+ sentry_sdk.set_tag("A", 1)
+ sentry_sdk.capture_message("Event A")
+
+ with Hub(Hub.current) as hub: # clone hub
+ sentry_sdk.set_tag("B1", 1)
+ with hub.configure_scope() as scope: # configure scope
+ sentry_sdk.set_tag("B2", 1)
+ hub.scope.set_tag("B3", 1)
+ scope.set_tag("B4", 1)
+ sentry_sdk.capture_message("Event B")
+ sentry_sdk.set_tag("B5", 1)
+ sentry_sdk.capture_message("Event C")
+
+ sentry_sdk.set_tag("Z", 1)
+ sentry_sdk.capture_message("Event Z")
+
+ (event_a, event_b, event_c, event_z) = events
+
+ # Check against the results the same code returned in SDK 1.x
+ assert event_a["tags"] == {"A": 1}
+ assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1}
+ assert event_c["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1, "B5": 1}
+ assert event_z["tags"] == {"A": 1, "Z": 1}
+
+
+def test_with_cloned_hub_push_scope_sdk1(sentry_init, capture_events):
+ """
+ Mutate data in a `with cloned Hub:` containing a `with push_scope` block
+
+ Checks the results of SDK 2.x against the results the same code returned in SDK 1.x.
+ """
+ sentry_init()
+
+ events = capture_events()
+
+ sentry_sdk.set_tag("A", 1)
+ sentry_sdk.capture_message("Event A")
+
+ with Hub(Hub.current) as hub: # clone hub
+ sentry_sdk.set_tag("B1", 1)
+ with hub.push_scope() as scope: # push scope
+ sentry_sdk.set_tag("B2", 1)
+ hub.scope.set_tag("B3", 1)
+ scope.set_tag("B4", 1)
+ sentry_sdk.capture_message("Event B")
+ sentry_sdk.set_tag("B5", 1)
+ sentry_sdk.capture_message("Event C")
+
+ sentry_sdk.set_tag("Z", 1)
+ sentry_sdk.capture_message("Event Z")
+
+ (event_a, event_b, event_c, event_z) = events
+
+ # Check against the results the same code returned in SDK 1.x
+ assert event_a["tags"] == {"A": 1}
+ assert event_b["tags"] == {"A": 1, "B1": 1, "B2": 1, "B3": 1, "B4": 1}
+ assert event_c["tags"] == {"A": 1, "B1": 1, "B5": 1}
+ assert event_z["tags"] == {"A": 1, "Z": 1}
diff --git a/tests/test_new_scopes_compat_event.py b/tests/test_new_scopes_compat_event.py
new file mode 100644
index 0000000000..034beb50b8
--- /dev/null
+++ b/tests/test_new_scopes_compat_event.py
@@ -0,0 +1,498 @@
+import pytest
+
+from unittest import mock
+
+import sentry_sdk
+from sentry_sdk.hub import Hub
+from sentry_sdk.scrubber import EventScrubber, DEFAULT_DENYLIST
+
+
+"""
+Those tests are meant to check the compatibility of the new scopes in SDK 2.0 with the old Hub/Scope system in SDK 1.x.
+
+Those tests have been run with the latest SDK 1.x version and the data used in the `assert` statements represents
+the behavior of the SDK 1.x.
+
+This makes sure that we are backwards compatible. (on a best effort basis, there will probably be some edge cases that are not covered here)
+"""
+
+
+@pytest.fixture
+def expected_error():
+ def create_expected_error_event(trx, span):
+ return {
+ "level": "warning-X",
+ "exception": {
+ "values": [
+ {
+ "mechanism": {"type": "generic", "handled": True},
+ "module": None,
+ "type": "ValueError",
+ "value": "This is a test exception",
+ "stacktrace": {
+ "frames": [
+ {
+ "filename": "tests/test_new_scopes_compat_event.py",
+ "abs_path": mock.ANY,
+ "function": "_faulty_function",
+ "module": "tests.test_new_scopes_compat_event",
+ "lineno": 240,
+ "pre_context": [
+ " return create_expected_transaction_event",
+ "",
+ "",
+ "def _faulty_function():",
+ " try:",
+ ],
+ "context_line": ' raise ValueError("This is a test exception")',
+ "post_context": [
+ " except ValueError as ex:",
+ " sentry_sdk.capture_exception(ex)",
+ "",
+ "",
+ "def _test_before_send(event, hint):",
+ ],
+ "vars": {
+ "ex": mock.ANY,
+ },
+ "in_app": True,
+ }
+ ]
+ },
+ }
+ ]
+ },
+ "event_id": mock.ANY,
+ "timestamp": mock.ANY,
+ "contexts": {
+ "character": {
+ "name": "Mighty Fighter changed by before_send",
+ "age": 19,
+ "attack_type": "melee",
+ },
+ "trace": {
+ "trace_id": trx.trace_id,
+ "span_id": span.span_id,
+ "parent_span_id": span.parent_span_id,
+ "op": "test_span",
+ "description": None,
+ },
+ "runtime": {
+ "name": "CPython",
+ "version": mock.ANY,
+ "build": mock.ANY,
+ },
+ },
+ "user": {
+ "id": "123",
+ "email": "jane.doe@example.com",
+ "ip_address": "[Filtered]",
+ },
+ "transaction": "test_transaction",
+ "transaction_info": {"source": "custom"},
+ "tags": {"tag1": "tag1_value", "tag2": "tag2_value"},
+ "extra": {
+ "extra1": "extra1_value",
+ "extra2": "extra2_value",
+ "should_be_removed_by_event_scrubber": "[Filtered]",
+ "sys.argv": "[Filtered]",
+ },
+ "breadcrumbs": {
+ "values": [
+ {
+ "category": "error-level",
+ "message": "Authenticated user %s",
+ "level": "error",
+ "data": {"breadcrumb2": "somedata"},
+ "timestamp": mock.ANY,
+ "type": "default",
+ }
+ ]
+ },
+ "modules": mock.ANY,
+ "release": "0.1.2rc3",
+ "environment": "checking-compatibility-with-sdk1",
+ "server_name": mock.ANY,
+ "sdk": {
+ "name": "sentry.python",
+ "version": mock.ANY,
+ "packages": [{"name": "pypi:sentry-sdk", "version": mock.ANY}],
+ "integrations": [
+ "argv",
+ "atexit",
+ "dedupe",
+ "excepthook",
+ "logging",
+ "modules",
+ "stdlib",
+ "threading",
+ ],
+ },
+ "platform": "python",
+ "_meta": {
+ "user": {"ip_address": {"": {"rem": [["!config", "s"]]}}},
+ "extra": {
+ "should_be_removed_by_event_scrubber": {
+ "": {"rem": [["!config", "s"]]}
+ },
+ "sys.argv": {"": {"rem": [["!config", "s"]]}},
+ },
+ },
+ }
+
+ return create_expected_error_event
+
+
+@pytest.fixture
+def expected_transaction():
+ def create_expected_transaction_event(trx, span):
+ return {
+ "type": "transaction",
+ "transaction": "test_transaction changed by before_send_transaction",
+ "transaction_info": {"source": "custom"},
+ "contexts": {
+ "trace": {
+ "trace_id": trx.trace_id,
+ "span_id": trx.span_id,
+ "parent_span_id": None,
+ "op": "test_transaction_op",
+ "description": None,
+ },
+ "character": {
+ "name": "Mighty Fighter changed by before_send_transaction",
+ "age": 19,
+ "attack_type": "melee",
+ },
+ "runtime": {
+ "name": "CPython",
+ "version": mock.ANY,
+ "build": mock.ANY,
+ },
+ },
+ "tags": {"tag1": "tag1_value", "tag2": "tag2_value"},
+ "timestamp": mock.ANY,
+ "start_timestamp": mock.ANY,
+ "spans": [
+ {
+ "data": {
+ "thread.id": mock.ANY,
+ "thread.name": "MainThread",
+ },
+ "trace_id": trx.trace_id,
+ "span_id": span.span_id,
+ "parent_span_id": span.parent_span_id,
+ "same_process_as_parent": True,
+ "op": "test_span",
+ "description": None,
+ "start_timestamp": mock.ANY,
+ "timestamp": mock.ANY,
+ }
+ ],
+ "measurements": {"memory_used": {"value": 456, "unit": "byte"}},
+ "event_id": mock.ANY,
+ "level": "warning-X",
+ "user": {
+ "id": "123",
+ "email": "jane.doe@example.com",
+ "ip_address": "[Filtered]",
+ },
+ "extra": {
+ "extra1": "extra1_value",
+ "extra2": "extra2_value",
+ "should_be_removed_by_event_scrubber": "[Filtered]",
+ "sys.argv": "[Filtered]",
+ },
+ "release": "0.1.2rc3",
+ "environment": "checking-compatibility-with-sdk1",
+ "server_name": mock.ANY,
+ "sdk": {
+ "name": "sentry.python",
+ "version": mock.ANY,
+ "packages": [{"name": "pypi:sentry-sdk", "version": mock.ANY}],
+ "integrations": [
+ "argv",
+ "atexit",
+ "dedupe",
+ "excepthook",
+ "logging",
+ "modules",
+ "stdlib",
+ "threading",
+ ],
+ },
+ "platform": "python",
+ "_meta": {
+ "user": {"ip_address": {"": {"rem": [["!config", "s"]]}}},
+ "extra": {
+ "should_be_removed_by_event_scrubber": {
+ "": {"rem": [["!config", "s"]]}
+ },
+ "sys.argv": {"": {"rem": [["!config", "s"]]}},
+ },
+ },
+ }
+
+ return create_expected_transaction_event
+
+
+def _faulty_function():
+ try:
+ raise ValueError("This is a test exception")
+ except ValueError as ex:
+ sentry_sdk.capture_exception(ex)
+
+
+def _test_before_send(event, hint):
+ event["contexts"]["character"]["name"] += " changed by before_send"
+ return event
+
+
+def _test_before_send_transaction(event, hint):
+ event["transaction"] += " changed by before_send_transaction"
+ event["contexts"]["character"]["name"] += " changed by before_send_transaction"
+ return event
+
+
+def _test_before_breadcrumb(breadcrumb, hint):
+ if breadcrumb["category"] == "info-level":
+ return None
+ return breadcrumb
+
+
+def _generate_event_data(scope=None):
+ """
+ Generates some data to be used in the events sent by the tests.
+ """
+ sentry_sdk.set_level("warning-X")
+
+ sentry_sdk.add_breadcrumb(
+ category="info-level",
+ message="Authenticated user %s",
+ level="info",
+ data={"breadcrumb1": "somedata"},
+ )
+ sentry_sdk.add_breadcrumb(
+ category="error-level",
+ message="Authenticated user %s",
+ level="error",
+ data={"breadcrumb2": "somedata"},
+ )
+
+ sentry_sdk.set_context(
+ "character",
+ {
+ "name": "Mighty Fighter",
+ "age": 19,
+ "attack_type": "melee",
+ },
+ )
+
+ sentry_sdk.set_extra("extra1", "extra1_value")
+ sentry_sdk.set_extra("extra2", "extra2_value")
+ sentry_sdk.set_extra("should_be_removed_by_event_scrubber", "XXX")
+
+ sentry_sdk.set_tag("tag1", "tag1_value")
+ sentry_sdk.set_tag("tag2", "tag2_value")
+
+ sentry_sdk.set_user(
+ {"id": "123", "email": "jane.doe@example.com", "ip_address": "211.161.1.124"}
+ )
+
+ sentry_sdk.set_measurement("memory_used", 456, "byte")
+
+ if scope is not None:
+ scope.add_attachment(bytes=b"Hello World", filename="hello.txt")
+
+
+def _init_sentry_sdk(sentry_init):
+ sentry_init(
+ environment="checking-compatibility-with-sdk1",
+ release="0.1.2rc3",
+ before_send=_test_before_send,
+ before_send_transaction=_test_before_send_transaction,
+ before_breadcrumb=_test_before_breadcrumb,
+ event_scrubber=EventScrubber(
+ denylist=DEFAULT_DENYLIST
+ + ["should_be_removed_by_event_scrubber", "sys.argv"]
+ ),
+ send_default_pii=False,
+ traces_sample_rate=1.0,
+ )
+
+
+#
+# The actual Tests start here!
+#
+
+
+def test_event(sentry_init, capture_envelopes, expected_error, expected_transaction):
+ _init_sentry_sdk(sentry_init)
+
+ envelopes = capture_envelopes()
+
+ with sentry_sdk.start_transaction(
+ name="test_transaction", op="test_transaction_op"
+ ) as trx:
+ with sentry_sdk.start_span(op="test_span") as span:
+ with sentry_sdk.configure_scope() as scope: # configure scope
+ _generate_event_data(scope)
+ _faulty_function()
+
+ (error_envelope, transaction_envelope) = envelopes
+
+ error = error_envelope.get_event()
+ transaction = transaction_envelope.get_transaction_event()
+ attachment = error_envelope.items[-1]
+
+ assert error == expected_error(trx, span)
+ assert transaction == expected_transaction(trx, span)
+ assert attachment.headers == {
+ "filename": "hello.txt",
+ "type": "attachment",
+ "content_type": "text/plain",
+ }
+ assert attachment.payload.bytes == b"Hello World"
+
+
+def test_event2(sentry_init, capture_envelopes, expected_error, expected_transaction):
+ _init_sentry_sdk(sentry_init)
+
+ envelopes = capture_envelopes()
+
+ with Hub(Hub.current):
+ sentry_sdk.set_tag("A", 1) # will not be added
+
+ with Hub.current: # with hub
+ with sentry_sdk.push_scope() as scope:
+ scope.set_tag("B", 1) # will not be added
+
+ with sentry_sdk.start_transaction(
+ name="test_transaction", op="test_transaction_op"
+ ) as trx:
+ with sentry_sdk.start_span(op="test_span") as span:
+ with sentry_sdk.configure_scope() as scope: # configure scope
+ _generate_event_data(scope)
+ _faulty_function()
+
+ (error_envelope, transaction_envelope) = envelopes
+
+ error = error_envelope.get_event()
+ transaction = transaction_envelope.get_transaction_event()
+ attachment = error_envelope.items[-1]
+
+ assert error == expected_error(trx, span)
+ assert transaction == expected_transaction(trx, span)
+ assert attachment.headers == {
+ "filename": "hello.txt",
+ "type": "attachment",
+ "content_type": "text/plain",
+ }
+ assert attachment.payload.bytes == b"Hello World"
+
+
+def test_event3(sentry_init, capture_envelopes, expected_error, expected_transaction):
+ _init_sentry_sdk(sentry_init)
+
+ envelopes = capture_envelopes()
+
+ with Hub(Hub.current):
+ sentry_sdk.set_tag("A", 1) # will not be added
+
+ with Hub.current: # with hub
+ with sentry_sdk.push_scope() as scope:
+ scope.set_tag("B", 1) # will not be added
+
+ with sentry_sdk.push_scope() as scope: # push scope
+ with sentry_sdk.start_transaction(
+ name="test_transaction", op="test_transaction_op"
+ ) as trx:
+ with sentry_sdk.start_span(op="test_span") as span:
+ _generate_event_data(scope)
+ _faulty_function()
+
+ (error_envelope, transaction_envelope) = envelopes
+
+ error = error_envelope.get_event()
+ transaction = transaction_envelope.get_transaction_event()
+ attachment = error_envelope.items[-1]
+
+ assert error == expected_error(trx, span)
+ assert transaction == expected_transaction(trx, span)
+ assert attachment.headers == {
+ "filename": "hello.txt",
+ "type": "attachment",
+ "content_type": "text/plain",
+ }
+ assert attachment.payload.bytes == b"Hello World"
+
+
+def test_event4(sentry_init, capture_envelopes, expected_error, expected_transaction):
+ _init_sentry_sdk(sentry_init)
+
+ envelopes = capture_envelopes()
+
+ with Hub(Hub.current):
+ sentry_sdk.set_tag("A", 1) # will not be added
+
+ with Hub(Hub.current): # with hub clone
+ with sentry_sdk.push_scope() as scope:
+ scope.set_tag("B", 1) # will not be added
+
+ with sentry_sdk.start_transaction(
+ name="test_transaction", op="test_transaction_op"
+ ) as trx:
+ with sentry_sdk.start_span(op="test_span") as span:
+ with sentry_sdk.configure_scope() as scope: # configure scope
+ _generate_event_data(scope)
+ _faulty_function()
+
+ (error_envelope, transaction_envelope) = envelopes
+
+ error = error_envelope.get_event()
+ transaction = transaction_envelope.get_transaction_event()
+ attachment = error_envelope.items[-1]
+
+ assert error == expected_error(trx, span)
+ assert transaction == expected_transaction(trx, span)
+ assert attachment.headers == {
+ "filename": "hello.txt",
+ "type": "attachment",
+ "content_type": "text/plain",
+ }
+ assert attachment.payload.bytes == b"Hello World"
+
+
+def test_event5(sentry_init, capture_envelopes, expected_error, expected_transaction):
+ _init_sentry_sdk(sentry_init)
+
+ envelopes = capture_envelopes()
+
+ with Hub(Hub.current):
+ sentry_sdk.set_tag("A", 1) # will not be added
+
+ with Hub(Hub.current): # with hub clone
+ with sentry_sdk.push_scope() as scope:
+ scope.set_tag("B", 1) # will not be added
+
+ with sentry_sdk.push_scope() as scope: # push scope
+ with sentry_sdk.start_transaction(
+ name="test_transaction", op="test_transaction_op"
+ ) as trx:
+ with sentry_sdk.start_span(op="test_span") as span:
+ _generate_event_data(scope)
+ _faulty_function()
+
+ (error_envelope, transaction_envelope) = envelopes
+
+ error = error_envelope.get_event()
+ transaction = transaction_envelope.get_transaction_event()
+ attachment = error_envelope.items[-1]
+
+ assert error == expected_error(trx, span)
+ assert transaction == expected_transaction(trx, span)
+ assert attachment.headers == {
+ "filename": "hello.txt",
+ "type": "attachment",
+ "content_type": "text/plain",
+ }
+ assert attachment.payload.bytes == b"Hello World"
diff --git a/tests/test_profiler.py b/tests/test_profiler.py
index 495dd3f300..433d311b43 100644
--- a/tests/test_profiler.py
+++ b/tests/test_profiler.py
@@ -3,10 +3,11 @@
import sys
import threading
import time
+from collections import defaultdict
+from unittest import mock
import pytest
-from collections import defaultdict
from sentry_sdk import start_transaction
from sentry_sdk.profiler import (
GeventScheduler,
@@ -22,23 +23,12 @@
from sentry_sdk.tracing import Transaction
from sentry_sdk._lru_cache import LRUCache
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
-
try:
import gevent
except ImportError:
gevent = None
-def requires_python_version(major, minor, reason=None):
- if reason is None:
- reason = "Requires Python {}.{}".format(major, minor)
- return pytest.mark.skipif(sys.version_info < (major, minor), reason=reason)
-
-
requires_gevent = pytest.mark.skipif(gevent is None, reason="gevent not enabled")
@@ -57,7 +47,6 @@ def experimental_options(mode=None, sample_rate=None):
}
-@requires_python_version(3, 3)
@pytest.mark.parametrize(
"mode",
[
@@ -80,7 +69,6 @@ def test_profiler_invalid_mode(mode, make_options, teardown_profiling):
setup_profiler(make_options(mode))
-@requires_python_version(3, 3)
@pytest.mark.parametrize(
"mode",
[
@@ -101,7 +89,6 @@ def test_profiler_valid_mode(mode, make_options, teardown_profiling):
setup_profiler(make_options(mode))
-@requires_python_version(3, 3)
@pytest.mark.parametrize(
"make_options",
[
@@ -116,7 +103,6 @@ def test_profiler_setup_twice(make_options, teardown_profiling):
assert not setup_profiler(make_options())
-@requires_python_version(3, 3)
@pytest.mark.parametrize(
"mode",
[
@@ -182,7 +168,6 @@ def test_profiles_sample_rate(
assert reports == [("sample_rate", "profile")]
-@requires_python_version(3, 3)
@pytest.mark.parametrize(
"mode",
[
@@ -250,7 +235,6 @@ def test_profiles_sampler(
assert reports == [("sample_rate", "profile")]
-@requires_python_version(3, 3)
def test_minimum_unique_samples_required(
sentry_init,
capture_envelopes,
@@ -281,7 +265,6 @@ def test_minimum_unique_samples_required(
@pytest.mark.forked
-@requires_python_version(3, 3)
def test_profile_captured(
sentry_init,
capture_envelopes,
@@ -371,7 +354,6 @@ def static_method():
return inspect.currentframe()
-@requires_python_version(3, 3)
@pytest.mark.parametrize(
("frame", "frame_name"),
[
@@ -461,7 +443,6 @@ def test_get_frame_name(frame, frame_name):
assert get_frame_name(frame) == frame_name
-@requires_python_version(3, 3)
@pytest.mark.parametrize(
("get_frame", "function"),
[
@@ -489,7 +470,6 @@ def test_extract_frame(get_frame, function):
assert isinstance(extracted_frame["lineno"], int)
-@requires_python_version(3, 3)
@pytest.mark.parametrize(
("depth", "max_stack_depth", "actual_depth"),
[
@@ -531,7 +511,6 @@ def test_extract_stack_with_max_depth(depth, max_stack_depth, actual_depth):
assert frames[actual_depth]["function"] == "", actual_depth
-@requires_python_version(3, 3)
@pytest.mark.parametrize(
("frame", "depth"),
[(get_frame(depth=1), len(inspect.stack()))],
@@ -558,7 +537,6 @@ def get_scheduler_threads(scheduler):
return [thread for thread in threading.enumerate() if thread.name == scheduler.name]
-@requires_python_version(3, 3)
@pytest.mark.parametrize(
("scheduler_class",),
[
@@ -602,7 +580,6 @@ def test_thread_scheduler_single_background_thread(scheduler_class):
assert len(get_scheduler_threads(scheduler)) == 0
-@requires_python_version(3, 3)
@pytest.mark.parametrize(
("scheduler_class",),
[
@@ -647,7 +624,6 @@ def test_thread_scheduler_no_thread_on_shutdown(scheduler_class):
assert len(get_scheduler_threads(scheduler)) == 0
-@requires_python_version(3, 3)
@pytest.mark.parametrize(
("scheduler_class",),
[
@@ -725,7 +701,6 @@ def ensure_running(self):
]
-@requires_python_version(3, 3)
@pytest.mark.parametrize(
("samples", "expected"),
[
diff --git a/tests/test_propagationcontext.py b/tests/test_propagationcontext.py
new file mode 100644
index 0000000000..c650071511
--- /dev/null
+++ b/tests/test_propagationcontext.py
@@ -0,0 +1,83 @@
+from sentry_sdk.tracing_utils import PropagationContext
+
+
+def test_empty_context():
+ ctx = PropagationContext()
+
+ assert ctx.trace_id is not None
+ assert len(ctx.trace_id) == 32
+
+ assert ctx.span_id is not None
+ assert len(ctx.span_id) == 16
+
+ assert ctx.parent_span_id is None
+ assert ctx.parent_sampled is None
+ assert ctx.dynamic_sampling_context is None
+
+
+def test_context_with_values():
+ ctx = PropagationContext(
+ trace_id="1234567890abcdef1234567890abcdef",
+ span_id="1234567890abcdef",
+ parent_span_id="abcdef1234567890",
+ parent_sampled=True,
+ dynamic_sampling_context={
+ "foo": "bar",
+ },
+ )
+
+ assert ctx.trace_id == "1234567890abcdef1234567890abcdef"
+ assert ctx.span_id == "1234567890abcdef"
+ assert ctx.parent_span_id == "abcdef1234567890"
+ assert ctx.parent_sampled
+ assert ctx.dynamic_sampling_context == {
+ "foo": "bar",
+ }
+
+
+def test_lacy_uuids():
+ ctx = PropagationContext()
+ assert ctx._trace_id is None
+ assert ctx._span_id is None
+
+ assert ctx.trace_id is not None # this sets _trace_id
+ assert ctx._trace_id is not None
+ assert ctx._span_id is None
+
+ assert ctx.span_id is not None # this sets _span_id
+ assert ctx._trace_id is not None
+ assert ctx._span_id is not None
+
+
+def test_property_setters():
+ ctx = PropagationContext()
+ ctx.trace_id = "X234567890abcdef1234567890abcdef"
+ ctx.span_id = "X234567890abcdef"
+
+ assert ctx._trace_id == "X234567890abcdef1234567890abcdef"
+ assert ctx.trace_id == "X234567890abcdef1234567890abcdef"
+ assert ctx._span_id == "X234567890abcdef"
+ assert ctx.span_id == "X234567890abcdef"
+
+
+def test_update():
+ ctx = PropagationContext()
+
+ other_data = {
+ "trace_id": "Z234567890abcdef1234567890abcdef",
+ "parent_span_id": "Z234567890abcdef",
+ "parent_sampled": False,
+ "foo": "bar",
+ }
+ ctx.update(other_data)
+
+ assert ctx._trace_id == "Z234567890abcdef1234567890abcdef"
+ assert ctx.trace_id == "Z234567890abcdef1234567890abcdef"
+ assert ctx._span_id is None # this will be set lazily
+ assert ctx.span_id is not None # this sets _span_id
+ assert ctx._span_id is not None
+ assert ctx.parent_span_id == "Z234567890abcdef"
+ assert not ctx.parent_sampled
+ assert ctx.dynamic_sampling_context is None
+
+ assert not hasattr(ctx, "foo")
diff --git a/tests/test_scope.py b/tests/test_scope.py
index 8bdd46e02f..d5910a8c1d 100644
--- a/tests/test_scope.py
+++ b/tests/test_scope.py
@@ -1,13 +1,22 @@
import copy
import os
import pytest
-from sentry_sdk import capture_exception
-from sentry_sdk.scope import Scope
+from unittest import mock
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
+import sentry_sdk
+from sentry_sdk import (
+ capture_exception,
+ isolation_scope,
+ new_scope,
+)
+from sentry_sdk.client import Client, NonRecordingClient
+from sentry_sdk.scope import (
+ Scope,
+ ScopeType,
+ use_isolation_scope,
+ use_scope,
+ should_send_default_pii,
+)
def test_copying():
@@ -157,3 +166,633 @@ def test_load_trace_data_from_env(env, excepted_value):
s = Scope()
incoming_trace_data = s._load_trace_data_from_env()
assert incoming_trace_data == excepted_value
+
+
+def test_scope_client():
+ scope = Scope(ty="test_something")
+ assert scope._type == "test_something"
+ assert scope.client is not None
+ assert scope.client.__class__ == NonRecordingClient
+
+ custom_client = Client()
+ scope = Scope(ty="test_more", client=custom_client)
+ assert scope._type == "test_more"
+ assert scope.client is not None
+ assert scope.client.__class__ == Client
+ assert scope.client == custom_client
+
+
+def test_get_current_scope():
+ scope = Scope.get_current_scope()
+ assert scope is not None
+ assert scope.__class__ == Scope
+ assert scope._type == ScopeType.CURRENT
+
+
+def test_get_isolation_scope():
+ scope = Scope.get_isolation_scope()
+ assert scope is not None
+ assert scope.__class__ == Scope
+ assert scope._type == ScopeType.ISOLATION
+
+
+def test_get_global_scope():
+ scope = Scope.get_global_scope()
+ assert scope is not None
+ assert scope.__class__ == Scope
+ assert scope._type == ScopeType.GLOBAL
+
+
+def test_get_client():
+ client = Scope.get_client()
+ assert client is not None
+ assert client.__class__ == NonRecordingClient
+ assert not client.is_active()
+
+
+def test_set_client():
+ client1 = Client()
+ client2 = Client()
+ client3 = Client()
+
+ current_scope = Scope.get_current_scope()
+ isolation_scope = Scope.get_isolation_scope()
+ global_scope = Scope.get_global_scope()
+
+ current_scope.set_client(client1)
+ isolation_scope.set_client(client2)
+ global_scope.set_client(client3)
+
+ client = Scope.get_client()
+ assert client == client1
+
+ current_scope.set_client(None)
+ isolation_scope.set_client(client2)
+ global_scope.set_client(client3)
+
+ client = Scope.get_client()
+ assert client == client2
+
+ current_scope.set_client(None)
+ isolation_scope.set_client(None)
+ global_scope.set_client(client3)
+
+ client = Scope.get_client()
+ assert client == client3
+
+
+def test_fork():
+ scope = Scope()
+ forked_scope = scope.fork()
+
+ assert scope != forked_scope
+
+
+def test_get_global_scope_tags():
+ global_scope1 = Scope.get_global_scope()
+ global_scope2 = Scope.get_global_scope()
+ assert global_scope1 == global_scope2
+ assert global_scope1.client.__class__ == NonRecordingClient
+ assert not global_scope1.client.is_active()
+ assert global_scope2.client.__class__ == NonRecordingClient
+ assert not global_scope2.client.is_active()
+
+ global_scope1.set_tag("tag1", "value")
+ tags_scope1 = global_scope1._tags
+ tags_scope2 = global_scope2._tags
+ assert tags_scope1 == tags_scope2 == {"tag1": "value"}
+ assert global_scope1.client.__class__ == NonRecordingClient
+ assert not global_scope1.client.is_active()
+ assert global_scope2.client.__class__ == NonRecordingClient
+ assert not global_scope2.client.is_active()
+
+
+def test_get_global_with_scope():
+ original_global_scope = Scope.get_global_scope()
+
+ with new_scope() as scope:
+ in_with_global_scope = Scope.get_global_scope()
+
+ assert scope is not in_with_global_scope
+ assert in_with_global_scope is original_global_scope
+
+ after_with_global_scope = Scope.get_global_scope()
+ assert after_with_global_scope is original_global_scope
+
+
+def test_get_global_with_isolation_scope():
+ original_global_scope = Scope.get_global_scope()
+
+ with isolation_scope() as scope:
+ in_with_global_scope = Scope.get_global_scope()
+
+ assert scope is not in_with_global_scope
+ assert in_with_global_scope is original_global_scope
+
+ after_with_global_scope = Scope.get_global_scope()
+ assert after_with_global_scope is original_global_scope
+
+
+def test_get_isolation_scope_tags():
+ isolation_scope1 = Scope.get_isolation_scope()
+ isolation_scope2 = Scope.get_isolation_scope()
+ assert isolation_scope1 == isolation_scope2
+ assert isolation_scope1.client.__class__ == NonRecordingClient
+ assert not isolation_scope1.client.is_active()
+ assert isolation_scope2.client.__class__ == NonRecordingClient
+ assert not isolation_scope2.client.is_active()
+
+ isolation_scope1.set_tag("tag1", "value")
+ tags_scope1 = isolation_scope1._tags
+ tags_scope2 = isolation_scope2._tags
+ assert tags_scope1 == tags_scope2 == {"tag1": "value"}
+ assert isolation_scope1.client.__class__ == NonRecordingClient
+ assert not isolation_scope1.client.is_active()
+ assert isolation_scope2.client.__class__ == NonRecordingClient
+ assert not isolation_scope2.client.is_active()
+
+
+def test_get_current_scope_tags():
+ scope1 = Scope.get_current_scope()
+ scope2 = Scope.get_current_scope()
+ assert id(scope1) == id(scope2)
+ assert scope1.client.__class__ == NonRecordingClient
+ assert not scope1.client.is_active()
+ assert scope2.client.__class__ == NonRecordingClient
+ assert not scope2.client.is_active()
+
+ scope1.set_tag("tag1", "value")
+ tags_scope1 = scope1._tags
+ tags_scope2 = scope2._tags
+ assert tags_scope1 == tags_scope2 == {"tag1": "value"}
+ assert scope1.client.__class__ == NonRecordingClient
+ assert not scope1.client.is_active()
+ assert scope2.client.__class__ == NonRecordingClient
+ assert not scope2.client.is_active()
+
+
+def test_with_isolation_scope():
+ original_current_scope = Scope.get_current_scope()
+ original_isolation_scope = Scope.get_isolation_scope()
+
+ with isolation_scope() as scope:
+ assert scope._type == ScopeType.ISOLATION
+
+ in_with_current_scope = Scope.get_current_scope()
+ in_with_isolation_scope = Scope.get_isolation_scope()
+
+ assert scope is in_with_isolation_scope
+ assert in_with_current_scope is not original_current_scope
+ assert in_with_isolation_scope is not original_isolation_scope
+
+ after_with_current_scope = Scope.get_current_scope()
+ after_with_isolation_scope = Scope.get_isolation_scope()
+ assert after_with_current_scope is original_current_scope
+ assert after_with_isolation_scope is original_isolation_scope
+
+
+def test_with_isolation_scope_data():
+ """
+ When doing `with isolation_scope()` both the isolation *and* the current scope
+ are forked. This prevents tags set on the current scope inside the context
+ manager from bleeding into the outer current scope.
+ """
+ isolation_scope_before = Scope.get_isolation_scope()
+ current_scope_before = Scope.get_current_scope()
+
+ isolation_scope_before.set_tag("before_isolation_scope", 1)
+ current_scope_before.set_tag("before_current_scope", 1)
+
+ with isolation_scope() as scope:
+ assert scope._type == ScopeType.ISOLATION
+
+ isolation_scope_in = Scope.get_isolation_scope()
+ current_scope_in = Scope.get_current_scope()
+
+ assert isolation_scope_in._tags == {"before_isolation_scope": 1}
+ assert current_scope_in._tags == {"before_current_scope": 1}
+ assert scope._tags == {"before_isolation_scope": 1}
+
+ scope.set_tag("in_with_scope", 1)
+
+ assert isolation_scope_in._tags == {
+ "before_isolation_scope": 1,
+ "in_with_scope": 1,
+ }
+ assert current_scope_in._tags == {"before_current_scope": 1}
+ assert scope._tags == {"before_isolation_scope": 1, "in_with_scope": 1}
+
+ isolation_scope_in.set_tag("in_with_isolation_scope", 1)
+
+ assert isolation_scope_in._tags == {
+ "before_isolation_scope": 1,
+ "in_with_scope": 1,
+ "in_with_isolation_scope": 1,
+ }
+ assert current_scope_in._tags == {"before_current_scope": 1}
+ assert scope._tags == {
+ "before_isolation_scope": 1,
+ "in_with_scope": 1,
+ "in_with_isolation_scope": 1,
+ }
+
+ current_scope_in.set_tag("in_with_current_scope", 1)
+
+ assert isolation_scope_in._tags == {
+ "before_isolation_scope": 1,
+ "in_with_scope": 1,
+ "in_with_isolation_scope": 1,
+ }
+ assert current_scope_in._tags == {
+ "before_current_scope": 1,
+ "in_with_current_scope": 1,
+ }
+ assert scope._tags == {
+ "before_isolation_scope": 1,
+ "in_with_scope": 1,
+ "in_with_isolation_scope": 1,
+ }
+
+ isolation_scope_after = Scope.get_isolation_scope()
+ current_scope_after = Scope.get_current_scope()
+
+ isolation_scope_after.set_tag("after_isolation_scope", 1)
+
+ assert isolation_scope_after._tags == {
+ "before_isolation_scope": 1,
+ "after_isolation_scope": 1,
+ }
+ assert current_scope_after._tags == {"before_current_scope": 1}
+
+ current_scope_after.set_tag("after_current_scope", 1)
+
+ assert isolation_scope_after._tags == {
+ "before_isolation_scope": 1,
+ "after_isolation_scope": 1,
+ }
+ assert current_scope_after._tags == {
+ "before_current_scope": 1,
+ "after_current_scope": 1,
+ }
+
+
+def test_with_use_isolation_scope():
+ original_isolation_scope = Scope.get_isolation_scope()
+ original_current_scope = Scope.get_current_scope()
+ custom_isolation_scope = Scope()
+
+ with use_isolation_scope(custom_isolation_scope) as scope:
+ assert scope._type is None # our custom scope has no type set
+
+ in_with_isolation_scope = Scope.get_isolation_scope()
+ in_with_current_scope = Scope.get_current_scope()
+
+ assert scope is custom_isolation_scope
+ assert scope is in_with_isolation_scope
+ assert scope is not in_with_current_scope
+ assert scope is not original_isolation_scope
+ assert scope is not original_current_scope
+ assert in_with_isolation_scope is not original_isolation_scope
+ assert in_with_current_scope is not original_current_scope
+
+ after_with_current_scope = Scope.get_current_scope()
+ after_with_isolation_scope = Scope.get_isolation_scope()
+
+ assert after_with_isolation_scope is original_isolation_scope
+ assert after_with_current_scope is original_current_scope
+ assert after_with_isolation_scope is not custom_isolation_scope
+ assert after_with_current_scope is not custom_isolation_scope
+
+
+def test_with_use_isolation_scope_data():
+ isolation_scope_before = Scope.get_isolation_scope()
+ current_scope_before = Scope.get_current_scope()
+ custom_isolation_scope = Scope()
+
+ isolation_scope_before.set_tag("before_isolation_scope", 1)
+ current_scope_before.set_tag("before_current_scope", 1)
+ custom_isolation_scope.set_tag("before_custom_isolation_scope", 1)
+
+ with use_isolation_scope(custom_isolation_scope) as scope:
+ assert scope._type is None # our custom scope has no type set
+
+ isolation_scope_in = Scope.get_isolation_scope()
+ current_scope_in = Scope.get_current_scope()
+
+ assert isolation_scope_in._tags == {"before_custom_isolation_scope": 1}
+ assert current_scope_in._tags == {"before_current_scope": 1}
+ assert scope._tags == {"before_custom_isolation_scope": 1}
+
+ scope.set_tag("in_with_scope", 1)
+
+ assert isolation_scope_in._tags == {
+ "before_custom_isolation_scope": 1,
+ "in_with_scope": 1,
+ }
+ assert current_scope_in._tags == {"before_current_scope": 1}
+ assert scope._tags == {"before_custom_isolation_scope": 1, "in_with_scope": 1}
+
+ isolation_scope_in.set_tag("in_with_isolation_scope", 1)
+
+ assert isolation_scope_in._tags == {
+ "before_custom_isolation_scope": 1,
+ "in_with_scope": 1,
+ "in_with_isolation_scope": 1,
+ }
+ assert current_scope_in._tags == {"before_current_scope": 1}
+ assert scope._tags == {
+ "before_custom_isolation_scope": 1,
+ "in_with_scope": 1,
+ "in_with_isolation_scope": 1,
+ }
+
+ current_scope_in.set_tag("in_with_current_scope", 1)
+
+ assert isolation_scope_in._tags == {
+ "before_custom_isolation_scope": 1,
+ "in_with_scope": 1,
+ "in_with_isolation_scope": 1,
+ }
+ assert current_scope_in._tags == {
+ "before_current_scope": 1,
+ "in_with_current_scope": 1,
+ }
+ assert scope._tags == {
+ "before_custom_isolation_scope": 1,
+ "in_with_scope": 1,
+ "in_with_isolation_scope": 1,
+ }
+
+ assert custom_isolation_scope._tags == {
+ "before_custom_isolation_scope": 1,
+ "in_with_scope": 1,
+ "in_with_isolation_scope": 1,
+ }
+ isolation_scope_after = Scope.get_isolation_scope()
+ current_scope_after = Scope.get_current_scope()
+
+ isolation_scope_after.set_tag("after_isolation_scope", 1)
+
+ assert isolation_scope_after._tags == {
+ "before_isolation_scope": 1,
+ "after_isolation_scope": 1,
+ }
+ assert current_scope_after._tags == {"before_current_scope": 1}
+ assert custom_isolation_scope._tags == {
+ "before_custom_isolation_scope": 1,
+ "in_with_scope": 1,
+ "in_with_isolation_scope": 1,
+ }
+
+ current_scope_after.set_tag("after_current_scope", 1)
+
+ assert isolation_scope_after._tags == {
+ "before_isolation_scope": 1,
+ "after_isolation_scope": 1,
+ }
+ assert current_scope_after._tags == {
+ "before_current_scope": 1,
+ "after_current_scope": 1,
+ }
+ assert custom_isolation_scope._tags == {
+ "before_custom_isolation_scope": 1,
+ "in_with_scope": 1,
+ "in_with_isolation_scope": 1,
+ }
+
+
+def test_with_new_scope():
+ original_current_scope = Scope.get_current_scope()
+ original_isolation_scope = Scope.get_isolation_scope()
+
+ with new_scope() as scope:
+ assert scope._type == ScopeType.CURRENT
+
+ in_with_current_scope = Scope.get_current_scope()
+ in_with_isolation_scope = Scope.get_isolation_scope()
+
+ assert scope is in_with_current_scope
+ assert in_with_current_scope is not original_current_scope
+ assert in_with_isolation_scope is original_isolation_scope
+
+ after_with_current_scope = Scope.get_current_scope()
+ after_with_isolation_scope = Scope.get_isolation_scope()
+ assert after_with_current_scope is original_current_scope
+ assert after_with_isolation_scope is original_isolation_scope
+
+
+def test_with_new_scope_data():
+ """
+ When doing `with new_scope()` the current scope is forked but the isolation
+ scope stays untouched.
+ """
+ isolation_scope_before = Scope.get_isolation_scope()
+ current_scope_before = Scope.get_current_scope()
+
+ isolation_scope_before.set_tag("before_isolation_scope", 1)
+ current_scope_before.set_tag("before_current_scope", 1)
+
+ with new_scope() as scope:
+ assert scope._type == ScopeType.CURRENT
+
+ isolation_scope_in = Scope.get_isolation_scope()
+ current_scope_in = Scope.get_current_scope()
+
+ assert isolation_scope_in._tags == {"before_isolation_scope": 1}
+ assert current_scope_in._tags == {"before_current_scope": 1}
+ assert scope._tags == {"before_current_scope": 1}
+
+ scope.set_tag("in_with_scope", 1)
+
+ assert isolation_scope_in._tags == {"before_isolation_scope": 1}
+ assert current_scope_in._tags == {"before_current_scope": 1, "in_with_scope": 1}
+ assert scope._tags == {"before_current_scope": 1, "in_with_scope": 1}
+
+ isolation_scope_in.set_tag("in_with_isolation_scope", 1)
+
+ assert isolation_scope_in._tags == {
+ "before_isolation_scope": 1,
+ "in_with_isolation_scope": 1,
+ }
+ assert current_scope_in._tags == {"before_current_scope": 1, "in_with_scope": 1}
+ assert scope._tags == {"before_current_scope": 1, "in_with_scope": 1}
+
+ current_scope_in.set_tag("in_with_current_scope", 1)
+
+ assert isolation_scope_in._tags == {
+ "before_isolation_scope": 1,
+ "in_with_isolation_scope": 1,
+ }
+ assert current_scope_in._tags == {
+ "before_current_scope": 1,
+ "in_with_scope": 1,
+ "in_with_current_scope": 1,
+ }
+ assert scope._tags == {
+ "before_current_scope": 1,
+ "in_with_scope": 1,
+ "in_with_current_scope": 1,
+ }
+
+ isolation_scope_after = Scope.get_isolation_scope()
+ current_scope_after = Scope.get_current_scope()
+
+ isolation_scope_after.set_tag("after_isolation_scope", 1)
+
+ assert isolation_scope_after._tags == {
+ "before_isolation_scope": 1,
+ "in_with_isolation_scope": 1,
+ "after_isolation_scope": 1,
+ }
+ assert current_scope_after._tags == {"before_current_scope": 1}
+
+ current_scope_after.set_tag("after_current_scope", 1)
+
+ assert isolation_scope_after._tags == {
+ "before_isolation_scope": 1,
+ "in_with_isolation_scope": 1,
+ "after_isolation_scope": 1,
+ }
+ assert current_scope_after._tags == {
+ "before_current_scope": 1,
+ "after_current_scope": 1,
+ }
+
+
+def test_with_use_scope_data():
+ isolation_scope_before = Scope.get_isolation_scope()
+ current_scope_before = Scope.get_current_scope()
+ custom_current_scope = Scope()
+
+ isolation_scope_before.set_tag("before_isolation_scope", 1)
+ current_scope_before.set_tag("before_current_scope", 1)
+ custom_current_scope.set_tag("before_custom_current_scope", 1)
+
+ with use_scope(custom_current_scope) as scope:
+ assert scope._type is None # our custom scope has no type set
+
+ isolation_scope_in = Scope.get_isolation_scope()
+ current_scope_in = Scope.get_current_scope()
+
+ assert isolation_scope_in._tags == {"before_isolation_scope": 1}
+ assert current_scope_in._tags == {"before_custom_current_scope": 1}
+ assert scope._tags == {"before_custom_current_scope": 1}
+
+ scope.set_tag("in_with_scope", 1)
+
+ assert isolation_scope_in._tags == {"before_isolation_scope": 1}
+ assert current_scope_in._tags == {
+ "before_custom_current_scope": 1,
+ "in_with_scope": 1,
+ }
+ assert scope._tags == {"before_custom_current_scope": 1, "in_with_scope": 1}
+
+ isolation_scope_in.set_tag("in_with_isolation_scope", 1)
+
+ assert isolation_scope_in._tags == {
+ "before_isolation_scope": 1,
+ "in_with_isolation_scope": 1,
+ }
+ assert current_scope_in._tags == {
+ "before_custom_current_scope": 1,
+ "in_with_scope": 1,
+ }
+ assert scope._tags == {"before_custom_current_scope": 1, "in_with_scope": 1}
+
+ current_scope_in.set_tag("in_with_current_scope", 1)
+
+ assert isolation_scope_in._tags == {
+ "before_isolation_scope": 1,
+ "in_with_isolation_scope": 1,
+ }
+ assert current_scope_in._tags == {
+ "before_custom_current_scope": 1,
+ "in_with_scope": 1,
+ "in_with_current_scope": 1,
+ }
+ assert scope._tags == {
+ "before_custom_current_scope": 1,
+ "in_with_scope": 1,
+ "in_with_current_scope": 1,
+ }
+
+ assert custom_current_scope._tags == {
+ "before_custom_current_scope": 1,
+ "in_with_scope": 1,
+ "in_with_current_scope": 1,
+ }
+ isolation_scope_after = Scope.get_isolation_scope()
+ current_scope_after = Scope.get_current_scope()
+
+ isolation_scope_after.set_tag("after_isolation_scope", 1)
+
+ assert isolation_scope_after._tags == {
+ "before_isolation_scope": 1,
+ "after_isolation_scope": 1,
+ "in_with_isolation_scope": 1,
+ }
+ assert current_scope_after._tags == {"before_current_scope": 1}
+ assert custom_current_scope._tags == {
+ "before_custom_current_scope": 1,
+ "in_with_scope": 1,
+ "in_with_current_scope": 1,
+ }
+
+ current_scope_after.set_tag("after_current_scope", 1)
+
+ assert isolation_scope_after._tags == {
+ "before_isolation_scope": 1,
+ "in_with_isolation_scope": 1,
+ "after_isolation_scope": 1,
+ }
+ assert current_scope_after._tags == {
+ "before_current_scope": 1,
+ "after_current_scope": 1,
+ }
+ assert custom_current_scope._tags == {
+ "before_custom_current_scope": 1,
+ "in_with_scope": 1,
+ "in_with_current_scope": 1,
+ }
+
+
+def test_nested_scopes_with_tags(sentry_init, capture_envelopes):
+ sentry_init(traces_sample_rate=1.0)
+ envelopes = capture_envelopes()
+
+ with sentry_sdk.isolation_scope() as scope1:
+ scope1.set_tag("isolation_scope1", 1)
+
+ with sentry_sdk.new_scope() as scope2:
+ scope2.set_tag("current_scope2", 1)
+
+ with sentry_sdk.start_transaction(name="trx") as trx:
+ trx.set_tag("trx", 1)
+
+ with sentry_sdk.start_span(op="span1") as span1:
+ span1.set_tag("a", 1)
+
+ with new_scope() as scope3:
+ scope3.set_tag("current_scope3", 1)
+
+ with sentry_sdk.start_span(op="span2") as span2:
+ span2.set_tag("b", 1)
+
+ (envelope,) = envelopes
+ transaction = envelope.items[0].get_transaction_event()
+
+ assert transaction["tags"] == {"isolation_scope1": 1, "current_scope2": 1, "trx": 1}
+ assert transaction["spans"][0]["tags"] == {"a": 1}
+ assert transaction["spans"][1]["tags"] == {"b": 1}
+
+
+def test_should_send_default_pii_true(sentry_init):
+ sentry_init(send_default_pii=True)
+
+ assert should_send_default_pii() is True
+
+
+def test_should_send_default_pii_false(sentry_init):
+ sentry_init(send_default_pii=False)
+
+ assert should_send_default_pii() is False
diff --git a/tests/test_serializer.py b/tests/test_serializer.py
index ddc65c9b3e..a3ead112a7 100644
--- a/tests/test_serializer.py
+++ b/tests/test_serializer.py
@@ -1,5 +1,5 @@
import re
-import sys
+
import pytest
from sentry_sdk.serializer import MAX_DATABAG_BREADTH, MAX_DATABAG_DEPTH, serialize
@@ -61,12 +61,9 @@ def inner(body, **kwargs):
def test_bytes_serialization_decode(message_normalizer):
binary = b"abc123\x80\xf0\x9f\x8d\x95"
result = message_normalizer(binary, should_repr_strings=False)
- # fmt: off
- assert result == u"abc123\ufffd\U0001f355"
- # fmt: on
+ assert result == "abc123\ufffd\U0001f355"
-@pytest.mark.xfail(sys.version_info < (3,), reason="Known safe_repr bugs in Py2.7")
def test_bytes_serialization_repr(message_normalizer):
binary = b"abc123\x80\xf0\x9f\x8d\x95"
result = message_normalizer(binary, should_repr_strings=True)
@@ -76,12 +73,9 @@ def test_bytes_serialization_repr(message_normalizer):
def test_bytearray_serialization_decode(message_normalizer):
binary = bytearray(b"abc123\x80\xf0\x9f\x8d\x95")
result = message_normalizer(binary, should_repr_strings=False)
- # fmt: off
- assert result == u"abc123\ufffd\U0001f355"
- # fmt: on
+ assert result == "abc123\ufffd\U0001f355"
-@pytest.mark.xfail(sys.version_info < (3,), reason="Known safe_repr bugs in Py2.7")
def test_bytearray_serialization_repr(message_normalizer):
binary = bytearray(b"abc123\x80\xf0\x9f\x8d\x95")
result = message_normalizer(binary, should_repr_strings=True)
diff --git a/tests/test_sessions.py b/tests/test_sessions.py
index 311aa53966..91ce9cc58b 100644
--- a/tests/test_sessions.py
+++ b/tests/test_sessions.py
@@ -1,13 +1,9 @@
-import sentry_sdk
+from unittest import mock
+import sentry_sdk
from sentry_sdk import Hub
from sentry_sdk.sessions import auto_session_tracking
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
-
def sorted_aggregates(item):
aggregates = item["aggregates"]
diff --git a/tests/test_transport.py b/tests/test_transport.py
index 8848ad471e..73eee6d353 100644
--- a/tests/test_transport.py
+++ b/tests/test_transport.py
@@ -1,26 +1,21 @@
-# coding: utf-8
import logging
import pickle
import gzip
import io
import socket
from collections import namedtuple
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
+from unittest import mock
import pytest
from pytest_localserver.http import WSGIServer
from werkzeug.wrappers import Request, Response
from sentry_sdk import Hub, Client, add_breadcrumb, capture_message, Scope
-from sentry_sdk._compat import datetime_utcnow
-from sentry_sdk.transport import KEEP_ALIVE_SOCKET_OPTIONS, _parse_rate_limits
from sentry_sdk.envelope import Envelope, Item, parse_json
-from sentry_sdk.integrations.logging import LoggingIntegration
+from sentry_sdk.transport import KEEP_ALIVE_SOCKET_OPTIONS, _parse_rate_limits
+from sentry_sdk.integrations.logging import LoggingIntegration, ignore_logger
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
CapturedData = namedtuple("CapturedData", ["path", "event", "envelope", "compressed"])
@@ -122,7 +117,9 @@ def test_transport_works(
Hub.current.bind_client(client)
request.addfinalizer(lambda: Hub.current.bind_client(None))
- add_breadcrumb(level="info", message="i like bread", timestamp=datetime_utcnow())
+ add_breadcrumb(
+ level="info", message="i like bread", timestamp=datetime.now(timezone.utc)
+ )
capture_message("löl")
getattr(client, client_flush_method)()
@@ -132,7 +129,7 @@ def test_transport_works(
assert capturing_server.captured
assert capturing_server.captured[0].compressed == (compressionlevel > 0)
- assert any("Sending event" in record.msg for record in caplog.records) == debug
+ assert any("Sending envelope" in record.msg for record in caplog.records) == debug
@pytest.mark.parametrize(
@@ -234,6 +231,13 @@ def test_transport_infinite_loop(capturing_server, request, make_client):
integrations=[LoggingIntegration(event_level=logging.DEBUG)],
)
+ # I am not sure why, but the "werkzeug" logger makes an INFO log on sending
+ # the message "hi", which creates an infinite loop.
+ # We ignore this logger to break the infinite loop, while still testing
+ # that our own log messages (sent from `_IGNORED_LOGGERS`) do not lead
+ # to an infinite loop.
+ ignore_logger("werkzeug")
+
with Hub(client):
capture_message("hi")
client.flush()
@@ -350,7 +354,7 @@ def record_lost_event(reason, data_category=None, item=None):
client.flush()
assert len(capturing_server.captured) == 1
- assert capturing_server.captured[0].path == "/api/132/store/"
+ assert capturing_server.captured[0].path == "/api/132/envelope/"
assert captured_outcomes == [
("ratelimit_backoff", "transaction"),
@@ -429,7 +433,8 @@ def intercepting_fetch(*args, **kwargs):
assert len(capturing_server.captured) == 2
- event = capturing_server.captured[0].event
+ assert len(capturing_server.captured[0].envelope.items) == 1
+ event = capturing_server.captured[0].envelope.items[0].get_event()
assert event["type"] == "error"
assert event["release"] == "foo"
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 4b8e9087cc..dd3aa3817a 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -1,10 +1,13 @@
-import pytest
+import threading
import re
import sys
-import threading
from datetime import timedelta
+from unittest import mock
+
+import pytest
-from sentry_sdk._compat import duration_in_milliseconds
+import sentry_sdk
+from sentry_sdk.integrations import Integration
from sentry_sdk._queue import Queue
from sentry_sdk.utils import (
Components,
@@ -23,27 +26,26 @@
serialize_frame,
is_sentry_url,
_get_installed_modules,
+ ensure_integration_enabled,
+ ensure_integration_enabled_async,
)
-import sentry_sdk
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
+class TestIntegration(Integration):
+ """
+ Test integration for testing ensure_integration_enabled and
+ ensure_integration_enabled_async decorators.
+ """
+
+ identifier = "test"
+ setup_once = mock.MagicMock()
+
try:
import gevent
except ImportError:
gevent = None
-try:
- # Python 3
- FileNotFoundError
-except NameError:
- # Python 2
- FileNotFoundError = IOError
-
def _normalize_distribution_name(name):
# type: (str) -> str
@@ -99,12 +101,7 @@ def _normalize_distribution_name(name):
],
)
def test_sanitize_url(url, expected_result):
- # sort parts because old Python versions (<3.6) don't preserve order
- sanitized_url = sanitize_url(url)
- parts = sorted(re.split(r"\&|\?|\#", sanitized_url))
- expected_parts = sorted(re.split(r"\&|\?|\#", expected_result))
-
- assert parts == expected_parts
+ assert sanitize_url(url) == expected_result
@pytest.mark.parametrize(
@@ -218,13 +215,10 @@ def test_sanitize_url(url, expected_result):
)
def test_sanitize_url_and_split(url, expected_result):
sanitized_url = sanitize_url(url, split=True)
- # sort query because old Python versions (<3.6) don't preserve order
- query = sorted(sanitized_url.query.split("&"))
- expected_query = sorted(expected_result.query.split("&"))
assert sanitized_url.scheme == expected_result.scheme
assert sanitized_url.netloc == expected_result.netloc
- assert query == expected_query
+ assert sanitized_url.query == expected_result.query
assert sanitized_url.path == expected_result.path
assert sanitized_url.fragment == expected_result.fragment
@@ -351,13 +345,7 @@ def test_sanitize_url_and_split(url, expected_result):
def test_parse_url(url, sanitize, expected_url, expected_query, expected_fragment):
assert parse_url(url, sanitize=sanitize).url == expected_url
assert parse_url(url, sanitize=sanitize).fragment == expected_fragment
-
- # sort parts because old Python versions (<3.6) don't preserve order
- sanitized_query = parse_url(url, sanitize=sanitize).query
- query_parts = sorted(re.split(r"\&|\?|\#", sanitized_query))
- expected_query_parts = sorted(re.split(r"\&|\?|\#", expected_query))
-
- assert query_parts == expected_query_parts
+ assert parse_url(url, sanitize=sanitize).query == expected_query
@pytest.mark.parametrize(
@@ -463,19 +451,17 @@ def test_parse_version(version, expected_result):
@pytest.fixture
-def mock_hub_with_dsn_netloc():
+def mock_client_with_dsn_netloc():
"""
Returns a mocked hub with a DSN netloc of "abcd1234.ingest.sentry.io".
"""
+ mock_client = mock.Mock(spec=sentry_sdk.Client)
+ mock_client.transport = mock.Mock(spec=sentry_sdk.Transport)
+ mock_client.transport.parsed_dsn = mock.Mock(spec=Dsn)
- mock_hub = mock.Mock(spec=sentry_sdk.Hub)
- mock_hub.client = mock.Mock(spec=sentry_sdk.Client)
- mock_hub.client.transport = mock.Mock(spec=sentry_sdk.Transport)
- mock_hub.client.transport.parsed_dsn = mock.Mock(spec=Dsn)
+ mock_client.transport.parsed_dsn.netloc = "abcd1234.ingest.sentry.io"
- mock_hub.client.transport.parsed_dsn.netloc = "abcd1234.ingest.sentry.io"
-
- return mock_hub
+ return mock_client
@pytest.mark.parametrize(
@@ -485,19 +471,18 @@ def mock_hub_with_dsn_netloc():
["https://asdf@abcd1234.ingest.notsentry.io/123456789", False],
],
)
-def test_is_sentry_url_true(test_url, is_sentry_url_expected, mock_hub_with_dsn_netloc):
- ret_val = is_sentry_url(mock_hub_with_dsn_netloc, test_url)
+def test_is_sentry_url_true(
+ test_url, is_sentry_url_expected, mock_client_with_dsn_netloc
+):
+ ret_val = is_sentry_url(mock_client_with_dsn_netloc, test_url)
assert ret_val == is_sentry_url_expected
def test_is_sentry_url_no_client():
- hub = mock.Mock()
- hub.client = None
-
test_url = "https://asdf@abcd1234.ingest.sentry.io/123456789"
- ret_val = is_sentry_url(hub, test_url)
+ ret_val = is_sentry_url(None, test_url)
assert not ret_val
@@ -604,8 +589,162 @@ def test_default_release_empty_string():
assert release is None
+def test_ensure_integration_enabled_integration_enabled(sentry_init):
+ def original_function():
+ return "original"
+
+ def function_to_patch():
+ return "patched"
+
+ sentry_init(integrations=[TestIntegration()])
+
+ # Test the decorator by applying to function_to_patch
+ patched_function = ensure_integration_enabled(TestIntegration, original_function)(
+ function_to_patch
+ )
+
+ assert patched_function() == "patched"
+ assert patched_function.__name__ == "original_function"
+
+
+def test_ensure_integration_enabled_integration_disabled(sentry_init):
+ def original_function():
+ return "original"
+
+ def function_to_patch():
+ return "patched"
+
+ sentry_init(integrations=[]) # TestIntegration is disabled
+
+ # Test the decorator by applying to function_to_patch
+ patched_function = ensure_integration_enabled(TestIntegration, original_function)(
+ function_to_patch
+ )
+
+ assert patched_function() == "original"
+ assert patched_function.__name__ == "original_function"
+
+
+def test_ensure_integration_enabled_no_original_function_enabled(sentry_init):
+ shared_variable = "original"
+
+ def function_to_patch():
+ nonlocal shared_variable
+ shared_variable = "patched"
+
+ sentry_init(integrations=[TestIntegration])
+
+ # Test the decorator by applying to function_to_patch
+ patched_function = ensure_integration_enabled(TestIntegration)(function_to_patch)
+ patched_function()
+
+ assert shared_variable == "patched"
+ assert patched_function.__name__ == "function_to_patch"
+
+
+def test_ensure_integration_enabled_no_original_function_disabled(sentry_init):
+ shared_variable = "original"
+
+ def function_to_patch():
+ nonlocal shared_variable
+ shared_variable = "patched"
+
+ sentry_init(integrations=[])
+
+ # Test the decorator by applying to function_to_patch
+ patched_function = ensure_integration_enabled(TestIntegration)(function_to_patch)
+ patched_function()
+
+ assert shared_variable == "original"
+ assert patched_function.__name__ == "function_to_patch"
+
+
+@pytest.mark.asyncio
+async def test_ensure_integration_enabled_async_integration_enabled(sentry_init):
+ # Setup variables and functions for the test
+ async def original_function():
+ return "original"
+
+ async def function_to_patch():
+ return "patched"
+
+ sentry_init(integrations=[TestIntegration()])
+
+ # Test the decorator by applying to function_to_patch
+ patched_function = ensure_integration_enabled_async(
+ TestIntegration, original_function
+ )(function_to_patch)
+
+ assert await patched_function() == "patched"
+ assert patched_function.__name__ == "original_function"
+
+
+@pytest.mark.asyncio
+async def test_ensure_integration_enabled_async_integration_disabled(sentry_init):
+ # Setup variables and functions for the test
+ async def original_function():
+ return "original"
+
+ async def function_to_patch():
+ return "patched"
+
+ sentry_init(integrations=[]) # TestIntegration is disabled
+
+ # Test the decorator by applying to function_to_patch
+ patched_function = ensure_integration_enabled_async(
+ TestIntegration, original_function
+ )(function_to_patch)
+
+ assert await patched_function() == "original"
+ assert patched_function.__name__ == "original_function"
+
+
+@pytest.mark.asyncio
+async def test_ensure_integration_enabled_async_no_original_function_enabled(
+ sentry_init,
+):
+ shared_variable = "original"
+
+ async def function_to_patch():
+ nonlocal shared_variable
+ shared_variable = "patched"
+
+ sentry_init(integrations=[TestIntegration])
+
+ # Test the decorator by applying to function_to_patch
+ patched_function = ensure_integration_enabled_async(TestIntegration)(
+ function_to_patch
+ )
+ await patched_function()
+
+ assert shared_variable == "patched"
+ assert patched_function.__name__ == "function_to_patch"
+
+
+@pytest.mark.asyncio
+async def test_ensure_integration_enabled_async_no_original_function_disabled(
+ sentry_init,
+):
+ shared_variable = "original"
+
+ async def function_to_patch():
+ nonlocal shared_variable
+ shared_variable = "patched"
+
+ sentry_init(integrations=[])
+
+ # Test the decorator by applying to function_to_patch
+ patched_function = ensure_integration_enabled_async(TestIntegration)(
+ function_to_patch
+ )
+ await patched_function()
+
+ assert shared_variable == "original"
+ assert patched_function.__name__ == "function_to_patch"
+
+
@pytest.mark.parametrize(
- "timedelta,expected_milliseconds",
+ "delta,expected_milliseconds",
[
[timedelta(milliseconds=132), 132.0],
[timedelta(hours=1, milliseconds=132), float(60 * 60 * 1000 + 132)],
@@ -613,8 +752,8 @@ def test_default_release_empty_string():
[timedelta(microseconds=100), 0.1],
],
)
-def test_duration_in_milliseconds(timedelta, expected_milliseconds):
- assert duration_in_milliseconds(timedelta) == expected_milliseconds
+def test_duration_in_milliseconds(delta, expected_milliseconds):
+ assert delta / timedelta(milliseconds=1) == expected_milliseconds
def test_get_current_thread_meta_explicit_thread():
@@ -638,9 +777,6 @@ def target2():
assert (thread1.ident, thread1.name) == results.get(timeout=1)
-@pytest.mark.skipif(
- sys.version_info < (3, 4), reason="threading.main_thread() Not available"
-)
def test_get_current_thread_meta_bad_explicit_thread():
thread = "fake thread"
@@ -696,9 +832,6 @@ def target():
assert (thread.ident, thread.name) == results.get(timeout=1)
-@pytest.mark.skipif(
- sys.version_info < (3, 4), reason="threading.main_thread() Not available"
-)
def test_get_current_thread_meta_bad_running_thread():
results = Queue(maxsize=1)
@@ -714,9 +847,6 @@ def target():
assert (main_thread.ident, main_thread.name) == results.get(timeout=1)
-@pytest.mark.skipif(
- sys.version_info < (3, 4), reason="threading.main_thread() Not available"
-)
def test_get_current_thread_meta_main_thread():
results = Queue(maxsize=1)
@@ -733,9 +863,6 @@ def target():
assert (main_thread.ident, main_thread.name) == results.get(timeout=1)
-@pytest.mark.skipif(
- sys.version_info < (3, 4), reason="threading.main_thread() Not available"
-)
def test_get_current_thread_meta_failed_to_get_main_thread():
results = Queue(maxsize=1)
diff --git a/tests/tracing/test_baggage.py b/tests/tracing/test_baggage.py
index fa856e0af4..1e0075feaa 100644
--- a/tests/tracing/test_baggage.py
+++ b/tests/tracing/test_baggage.py
@@ -1,4 +1,3 @@
-# coding: utf-8
from sentry_sdk.tracing_utils import Baggage
@@ -8,14 +7,16 @@ def test_third_party_baggage():
assert baggage.mutable
assert baggage.sentry_items == {}
- assert sorted(baggage.third_party_items.split(",")) == sorted(
- "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;".split(",")
+ assert (
+ baggage.third_party_items
+ == "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;"
)
assert baggage.dynamic_sampling_context() == {}
assert baggage.serialize() == ""
- assert sorted(baggage.serialize(include_third_party=True).split(",")) == sorted(
- "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;".split(",")
+ assert (
+ baggage.serialize(include_third_party=True)
+ == "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;"
)
@@ -51,22 +52,18 @@ def test_mixed_baggage():
"foo": "bar",
}
- assert sorted(baggage.serialize().split(",")) == sorted(
- (
- "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
- "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
- "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie,"
- "sentry-foo=bar"
- ).split(",")
+ assert baggage.serialize() == (
+ "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
+ "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
+ "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie,"
+ "sentry-foo=bar"
)
- assert sorted(baggage.serialize(include_third_party=True).split(",")) == sorted(
- (
- "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
- "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
- "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie,sentry-foo=bar,"
- "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;"
- ).split(",")
+ assert baggage.serialize(include_third_party=True) == (
+ "sentry-trace_id=771a43a4192642f0b136d5159a501700,"
+ "sentry-public_key=49d0f7386ad645858ae85020e393bef3,"
+ "sentry-sample_rate=0.01337,sentry-user_id=Am%C3%A9lie,sentry-foo=bar,"
+ "other-vendor-value-1=foo;bar;baz,other-vendor-value-2=foo;bar;"
)
diff --git a/tests/tracing/test_decorator.py b/tests/tracing/test_decorator.py
new file mode 100644
index 0000000000..0f9ebf23b5
--- /dev/null
+++ b/tests/tracing/test_decorator.py
@@ -0,0 +1,78 @@
+from unittest import mock
+
+import pytest
+
+from sentry_sdk.tracing_utils import start_child_span_decorator
+from sentry_sdk.utils import logger
+from tests.conftest import patch_start_tracing_child
+
+
+def my_example_function():
+ return "return_of_sync_function"
+
+
+async def my_async_example_function():
+ return "return_of_async_function"
+
+
+@pytest.mark.forked
+def test_trace_decorator():
+ with patch_start_tracing_child() as fake_start_child:
+ result = my_example_function()
+ fake_start_child.assert_not_called()
+ assert result == "return_of_sync_function"
+
+ result2 = start_child_span_decorator(my_example_function)()
+ fake_start_child.assert_called_once_with(
+ op="function", description="test_decorator.my_example_function"
+ )
+ assert result2 == "return_of_sync_function"
+
+
+def test_trace_decorator_no_trx():
+ with patch_start_tracing_child(fake_transaction_is_none=True):
+ with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
+ result = my_example_function()
+ fake_warning.assert_not_called()
+ assert result == "return_of_sync_function"
+
+ result2 = start_child_span_decorator(my_example_function)()
+ fake_warning.assert_called_once_with(
+ "Can not create a child span for %s. "
+ "Please start a Sentry transaction before calling this function.",
+ "test_decorator.my_example_function",
+ )
+ assert result2 == "return_of_sync_function"
+
+
+@pytest.mark.forked
+@pytest.mark.asyncio
+async def test_trace_decorator_async():
+ with patch_start_tracing_child() as fake_start_child:
+ result = await my_async_example_function()
+ fake_start_child.assert_not_called()
+ assert result == "return_of_async_function"
+
+ result2 = await start_child_span_decorator(my_async_example_function)()
+ fake_start_child.assert_called_once_with(
+ op="function",
+ description="test_decorator.my_async_example_function",
+ )
+ assert result2 == "return_of_async_function"
+
+
+@pytest.mark.asyncio
+async def test_trace_decorator_async_no_trx():
+ with patch_start_tracing_child(fake_transaction_is_none=True):
+ with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
+ result = await my_async_example_function()
+ fake_warning.assert_not_called()
+ assert result == "return_of_async_function"
+
+ result2 = await start_child_span_decorator(my_async_example_function)()
+ fake_warning.assert_called_once_with(
+ "Can not create a child span for %s. "
+ "Please start a Sentry transaction before calling this function.",
+ "test_decorator.my_async_example_function",
+ )
+ assert result2 == "return_of_async_function"
diff --git a/tests/tracing/test_decorator_async_py3.py b/tests/tracing/test_decorator_async_py3.py
deleted file mode 100644
index 401180ad39..0000000000
--- a/tests/tracing/test_decorator_async_py3.py
+++ /dev/null
@@ -1,49 +0,0 @@
-from unittest import mock
-import pytest
-import sys
-
-from tests.conftest import patch_start_tracing_child
-
-from sentry_sdk.tracing_utils_py3 import (
- start_child_span_decorator as start_child_span_decorator_py3,
-)
-from sentry_sdk.utils import logger
-
-if sys.version_info < (3, 6):
- pytest.skip("Async decorator only works on Python 3.6+", allow_module_level=True)
-
-
-async def my_async_example_function():
- return "return_of_async_function"
-
-
-@pytest.mark.asyncio
-async def test_trace_decorator_async_py3():
- with patch_start_tracing_child() as fake_start_child:
- result = await my_async_example_function()
- fake_start_child.assert_not_called()
- assert result == "return_of_async_function"
-
- result2 = await start_child_span_decorator_py3(my_async_example_function)()
- fake_start_child.assert_called_once_with(
- op="function",
- description="test_decorator_async_py3.my_async_example_function",
- )
- assert result2 == "return_of_async_function"
-
-
-@pytest.mark.asyncio
-async def test_trace_decorator_async_py3_no_trx():
- with patch_start_tracing_child(fake_transaction_is_none=True):
- with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
- result = await my_async_example_function()
- fake_warning.assert_not_called()
- assert result == "return_of_async_function"
-
- result2 = await start_child_span_decorator_py3(my_async_example_function)()
- fake_warning.assert_called_once_with(
- "Can not create a child span for %s. "
- "Please start a Sentry transaction before calling this function.",
- "test_decorator_async_py3.my_async_example_function",
- )
- assert result2 == "return_of_async_function"
diff --git a/tests/tracing/test_decorator_sync.py b/tests/tracing/test_decorator_sync.py
deleted file mode 100644
index 6d7be8b8f9..0000000000
--- a/tests/tracing/test_decorator_sync.py
+++ /dev/null
@@ -1,48 +0,0 @@
-from sentry_sdk._compat import PY2
-
-if PY2:
- from sentry_sdk.tracing_utils_py2 import start_child_span_decorator
-else:
- from sentry_sdk.tracing_utils_py3 import start_child_span_decorator
-
-from sentry_sdk.utils import logger
-
-from tests.conftest import patch_start_tracing_child
-
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
-
-
-def my_example_function():
- return "return_of_sync_function"
-
-
-def test_trace_decorator():
- with patch_start_tracing_child() as fake_start_child:
- result = my_example_function()
- fake_start_child.assert_not_called()
- assert result == "return_of_sync_function"
-
- result2 = start_child_span_decorator(my_example_function)()
- fake_start_child.assert_called_once_with(
- op="function", description="test_decorator_sync.my_example_function"
- )
- assert result2 == "return_of_sync_function"
-
-
-def test_trace_decorator_no_trx():
- with patch_start_tracing_child(fake_transaction_is_none=True):
- with mock.patch.object(logger, "warning", mock.Mock()) as fake_warning:
- result = my_example_function()
- fake_warning.assert_not_called()
- assert result == "return_of_sync_function"
-
- result2 = start_child_span_decorator(my_example_function)()
- fake_warning.assert_called_once_with(
- "Can not create a child span for %s. "
- "Please start a Sentry transaction before calling this function.",
- "test_decorator_sync.my_example_function",
- )
- assert result2 == "return_of_sync_function"
diff --git a/tests/tracing/test_deprecated.py b/tests/tracing/test_deprecated.py
index 0ce9096b6e..ba296350ec 100644
--- a/tests/tracing/test_deprecated.py
+++ b/tests/tracing/test_deprecated.py
@@ -1,8 +1,10 @@
+import pytest
from sentry_sdk import start_span
from sentry_sdk.tracing import Span
+@pytest.mark.skip(reason="This deprecated feature has been removed in SDK 2.0.")
def test_start_span_to_start_transaction(sentry_init, capture_events):
# XXX: this only exists for backwards compatibility with code before
# Transaction / start_transaction were introduced.
diff --git a/tests/tracing/test_http_headers.py b/tests/tracing/test_http_headers.py
index 443bb163e8..6a8467101e 100644
--- a/tests/tracing/test_http_headers.py
+++ b/tests/tracing/test_http_headers.py
@@ -1,15 +1,11 @@
+from unittest import mock
+
import pytest
from sentry_sdk.tracing import Transaction
from sentry_sdk.tracing_utils import extract_sentrytrace_data
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
-
-
@pytest.mark.parametrize("sampled", [True, False, None])
def test_to_traceparent(sampled):
transaction = Transaction(
diff --git a/tests/tracing/test_integration_tests.py b/tests/tracing/test_integration_tests.py
index 0fe8117c8e..9543014cac 100644
--- a/tests/tracing/test_integration_tests.py
+++ b/tests/tracing/test_integration_tests.py
@@ -1,4 +1,3 @@
-# coding: utf-8
import weakref
import gc
import re
@@ -7,8 +6,8 @@
from sentry_sdk import (
capture_message,
- configure_scope,
Hub,
+ Scope,
start_span,
start_transaction,
)
@@ -98,10 +97,9 @@ def test_continue_from_headers(sentry_init, capture_envelopes, sampled, sample_r
# be tagged with the trace id (since it happens while the transaction is
# open)
with start_transaction(child_transaction):
- with configure_scope() as scope:
- # change the transaction name from "WRONG" to make sure the change
- # is reflected in the final data
- scope.transaction = "ho"
+ # change the transaction name from "WRONG" to make sure the change
+ # is reflected in the final data
+ Scope.get_current_scope().transaction = "ho"
capture_message("hello")
# in this case the child transaction won't be captured
@@ -178,10 +176,15 @@ def test_dynamic_sampling_head_sdk_creates_dsc(
}
expected_baggage = (
- "sentry-environment=production,sentry-release=foo,sentry-sample_rate=%s,sentry-transaction=Head%%20SDK%%20tx,sentry-trace_id=%s,sentry-sampled=%s"
- % (sample_rate, trace_id, "true" if transaction.sampled else "false")
+ "sentry-trace_id=%s,"
+ "sentry-environment=production,"
+ "sentry-release=foo,"
+ "sentry-transaction=Head%%20SDK%%20tx,"
+ "sentry-sample_rate=%s,"
+ "sentry-sampled=%s"
+ % (trace_id, sample_rate, "true" if transaction.sampled else "false")
)
- assert sorted(baggage.serialize().split(",")) == sorted(expected_baggage.split(","))
+ assert baggage.serialize() == expected_baggage
(envelope,) = envelopes
assert envelope.headers["trace"] == baggage.dynamic_sampling_context()
diff --git a/tests/tracing/test_misc.py b/tests/tracing/test_misc.py
index 3668f1b3a8..af1837f12c 100644
--- a/tests/tracing/test_misc.py
+++ b/tests/tracing/test_misc.py
@@ -2,21 +2,16 @@
import gc
import uuid
import os
+from unittest import mock
+from unittest.mock import MagicMock
import sentry_sdk
-from sentry_sdk import Hub, start_span, start_transaction, set_measurement, push_scope
+from sentry_sdk import Hub, Scope, start_span, start_transaction, set_measurement
from sentry_sdk.consts import MATCH_ALL
from sentry_sdk.tracing import Span, Transaction
from sentry_sdk.tracing_utils import should_propagate_trace
from sentry_sdk.utils import Dsn
-try:
- from unittest import mock # python 3.3 and above
- from unittest.mock import MagicMock
-except ImportError:
- import mock # python < 3.3
- from mock import MagicMock
-
def test_span_trimming(sentry_init, capture_events):
sentry_init(traces_sample_rate=1.0, _experiments={"max_spans": 3})
@@ -303,17 +298,16 @@ def test_set_meaurement_public_api(sentry_init, capture_events):
def test_should_propagate_trace(
trace_propagation_targets, url, expected_propagation_decision
):
- hub = MagicMock()
- hub.client = MagicMock()
+ client = MagicMock()
# This test assumes the urls are not Sentry URLs. Use test_should_propagate_trace_to_sentry for sentry URLs.
- hub.is_sentry_url = lambda _: False
+ client.is_sentry_url = lambda _: False
- hub.client.options = {"trace_propagation_targets": trace_propagation_targets}
- hub.client.transport = MagicMock()
- hub.client.transport.parsed_dsn = Dsn("https://bla@xxx.sentry.io/12312012")
+ client.options = {"trace_propagation_targets": trace_propagation_targets}
+ client.transport = MagicMock()
+ client.transport.parsed_dsn = Dsn("https://bla@xxx.sentry.io/12312012")
- assert should_propagate_trace(hub, url) == expected_propagation_decision
+ assert should_propagate_trace(client, url) == expected_propagation_decision
@pytest.mark.parametrize(
@@ -354,15 +348,56 @@ def test_should_propagate_trace_to_sentry(
traces_sample_rate=1.0,
)
- Hub.current.client.transport.parsed_dsn = Dsn(dsn)
+ client = sentry_sdk.get_client()
+ client.transport.parsed_dsn = Dsn(dsn)
- assert should_propagate_trace(Hub.current, url) == expected_propagation_decision
+ assert should_propagate_trace(client, url) == expected_propagation_decision
def test_start_transaction_updates_scope_name_source(sentry_init):
sentry_init(traces_sample_rate=1.0)
- with push_scope() as scope:
- with start_transaction(name="foobar", source="route"):
- assert scope._transaction == "foobar"
- assert scope._transaction_info == {"source": "route"}
+ scope = Scope.get_current_scope()
+
+ with start_transaction(name="foobar", source="route"):
+ assert scope._transaction == "foobar"
+ assert scope._transaction_info == {"source": "route"}
+
+
+@pytest.mark.parametrize("sampled", (True, None))
+def test_transaction_dropped_debug_not_started(sentry_init, sampled):
+ sentry_init(enable_tracing=True)
+
+ tx = Transaction(sampled=sampled)
+
+ with mock.patch("sentry_sdk.tracing.logger") as mock_logger:
+ with tx:
+ pass
+
+ mock_logger.debug.assert_any_call(
+ "Discarding transaction because it was not started with sentry_sdk.start_transaction"
+ )
+
+ with pytest.raises(AssertionError):
+ # We should NOT see the "sampled = False" message here
+ mock_logger.debug.assert_any_call(
+ "Discarding transaction because sampled = False"
+ )
+
+
+def test_transaction_dropped_sampled_false(sentry_init):
+ sentry_init(enable_tracing=True)
+
+ tx = Transaction(sampled=False)
+
+ with mock.patch("sentry_sdk.tracing.logger") as mock_logger:
+ with sentry_sdk.start_transaction(tx):
+ pass
+
+ mock_logger.debug.assert_any_call("Discarding transaction because sampled = False")
+
+ with pytest.raises(AssertionError):
+ # We should not see the "not started" message here
+ mock_logger.debug.assert_any_call(
+ "Discarding transaction because it was not started with sentry_sdk.start_transaction"
+ )
diff --git a/tests/tracing/test_noop_span.py b/tests/tracing/test_noop_span.py
index 9896afb007..59f8cae489 100644
--- a/tests/tracing/test_noop_span.py
+++ b/tests/tracing/test_noop_span.py
@@ -9,44 +9,44 @@
def test_noop_start_transaction(sentry_init):
- sentry_init(instrumenter="otel", debug=True)
+ sentry_init(instrumenter="otel")
with sentry_sdk.start_transaction(
op="task", name="test_transaction_name"
) as transaction:
assert isinstance(transaction, NoOpSpan)
- assert sentry_sdk.Hub.current.scope.span is transaction
+ assert sentry_sdk.Scope.get_current_scope().span is transaction
transaction.name = "new name"
def test_noop_start_span(sentry_init):
- sentry_init(instrumenter="otel", debug=True)
+ sentry_init(instrumenter="otel")
with sentry_sdk.start_span(op="http", description="GET /") as span:
assert isinstance(span, NoOpSpan)
- assert sentry_sdk.Hub.current.scope.span is span
+ assert sentry_sdk.Scope.get_current_scope().span is span
span.set_tag("http.response.status_code", 418)
span.set_data("http.entity_type", "teapot")
def test_noop_transaction_start_child(sentry_init):
- sentry_init(instrumenter="otel", debug=True)
+ sentry_init(instrumenter="otel")
transaction = sentry_sdk.start_transaction(name="task")
assert isinstance(transaction, NoOpSpan)
with transaction.start_child(op="child_task") as child:
assert isinstance(child, NoOpSpan)
- assert sentry_sdk.Hub.current.scope.span is child
+ assert sentry_sdk.Scope.get_current_scope().span is child
def test_noop_span_start_child(sentry_init):
- sentry_init(instrumenter="otel", debug=True)
+ sentry_init(instrumenter="otel")
span = sentry_sdk.start_span(name="task")
assert isinstance(span, NoOpSpan)
with span.start_child(op="child_task") as child:
assert isinstance(child, NoOpSpan)
- assert sentry_sdk.Hub.current.scope.span is child
+ assert sentry_sdk.Scope.get_current_scope().span is child
diff --git a/tests/tracing/test_sampling.py b/tests/tracing/test_sampling.py
index 6101a948ef..1940656bdf 100644
--- a/tests/tracing/test_sampling.py
+++ b/tests/tracing/test_sampling.py
@@ -1,16 +1,12 @@
import random
+from unittest import mock
import pytest
-from sentry_sdk import Hub, start_span, start_transaction, capture_exception
+from sentry_sdk import Hub, Scope, start_span, start_transaction, capture_exception
from sentry_sdk.tracing import Transaction
from sentry_sdk.utils import logger
-try:
- from unittest import mock # python 3.3 and above
-except ImportError:
- import mock # python < 3.3
-
def test_sampling_decided_only_for_transactions(sentry_init, capture_events):
sentry_init(traces_sample_rate=0.5)
@@ -59,7 +55,7 @@ def test_get_transaction_and_span_from_scope_regardless_of_sampling_decision(
with start_transaction(name="/", sampled=sampling_decision):
with start_span(op="child-span"):
with start_span(op="child-child-span"):
- scope = Hub.current.scope
+ scope = Scope.get_current_scope()
assert scope.span.op == "child-child-span"
assert scope.transaction.name == "/"
diff --git a/tests/utils/test_contextvars.py b/tests/utils/test_contextvars.py
index faf33e8580..a6d296bb1f 100644
--- a/tests/utils/test_contextvars.py
+++ b/tests/utils/test_contextvars.py
@@ -12,7 +12,7 @@ def test_leaks(maybe_monkeypatched_threading):
from sentry_sdk import utils
- _, ContextVar, _ = utils._get_contextvars() # noqa: N806
+ _, ContextVar = utils._get_contextvars() # noqa: N806
ts = []
diff --git a/tests/utils/test_general.py b/tests/utils/test_general.py
index d4067bd5c6..1b689ec735 100644
--- a/tests/utils/test_general.py
+++ b/tests/utils/test_general.py
@@ -1,4 +1,3 @@
-# coding: utf-8
import sys
import os
@@ -18,7 +17,7 @@
strip_string,
AnnotatedValue,
)
-from sentry_sdk._compat import text_type, string_types
+from sentry_sdk.consts import EndpointType
try:
@@ -32,24 +31,16 @@
@given(x=any_string)
def test_safe_repr_never_broken_for_strings(x):
r = safe_repr(x)
- assert isinstance(r, text_type)
+ assert isinstance(r, str)
assert "broken repr" not in r
def test_safe_repr_regressions():
- # fmt: off
- assert u"лошадь" in safe_repr(u"лошадь")
- # fmt: on
+ assert "лошадь" in safe_repr("лошадь")
-@pytest.mark.xfail(
- sys.version_info < (3,),
- reason="Fixing this in Python 2 would break other behaviors",
-)
-# fmt: off
-@pytest.mark.parametrize("prefix", ("", "abcd", u"лошадь"))
-@pytest.mark.parametrize("character", u"\x00\x07\x1b\n")
-# fmt: on
+@pytest.mark.parametrize("prefix", ("", "abcd", "лошадь"))
+@pytest.mark.parametrize("character", "\x00\x07\x1b\n")
def test_safe_repr_non_printable(prefix, character):
"""Check that non-printable characters are escaped"""
string = prefix + character
@@ -91,31 +82,27 @@ def test_filename():
@pytest.mark.parametrize(
- "given,expected_store,expected_envelope",
+ "given,expected_envelope",
[
(
"https://foobar@sentry.io/123",
- "https://sentry.io/api/123/store/",
"https://sentry.io/api/123/envelope/",
),
(
"https://foobar@sentry.io/bam/123",
- "https://sentry.io/bam/api/123/store/",
"https://sentry.io/bam/api/123/envelope/",
),
(
"https://foobar@sentry.io/bam/baz/123",
- "https://sentry.io/bam/baz/api/123/store/",
"https://sentry.io/bam/baz/api/123/envelope/",
),
],
)
-def test_parse_dsn_paths(given, expected_store, expected_envelope):
+def test_parse_dsn_paths(given, expected_envelope):
dsn = Dsn(given)
auth = dsn.to_auth()
- assert auth.store_api_url == expected_store
- assert auth.get_api_url("store") == expected_store
- assert auth.get_api_url("envelope") == expected_envelope
+ assert auth.get_api_url() == expected_envelope
+ assert auth.get_api_url(EndpointType.ENVELOPE) == expected_envelope
@pytest.mark.parametrize(
@@ -517,27 +504,25 @@ def test_iter_stacktraces():
) == {1, 2, 3}
-# fmt: off
@pytest.mark.parametrize(
("original", "base64_encoded"),
[
# ascii only
("Dogs are great!", "RG9ncyBhcmUgZ3JlYXQh"),
# emoji
- (u"🐶", "8J+Qtg=="),
+ ("🐶", "8J+Qtg=="),
# non-ascii
(
- u"Καλό κορίτσι, Μάιζεϊ!",
+ "Καλό κορίτσι, Μάιζεϊ!",
"zprOsc67z4wgzrrOv8+Bzq/PhM+DzrksIM6czqzOuc62zrXPiiE=",
),
# mix of ascii and non-ascii
(
- u"Of margir hundar! Ég geri ráð fyrir að ég þurfi stærra rúm.",
+ "Of margir hundar! Ég geri ráð fyrir að ég þurfi stærra rúm.",
"T2YgbWFyZ2lyIGh1bmRhciEgw4lnIGdlcmkgcsOhw7AgZnlyaXIgYcOwIMOpZyDDvnVyZmkgc3TDpnJyYSByw7ptLg==",
),
],
)
-# fmt: on
def test_successful_base64_conversion(original, base64_encoded):
# all unicode characters should be handled correctly
assert to_base64(original) == base64_encoded
@@ -568,7 +553,7 @@ def test_failed_base64_conversion(input):
# any string can be converted to base64, so only type errors will cause
# failures
- if type(input) not in string_types:
+ if not isinstance(input, str):
assert to_base64(input) is None
@@ -585,10 +570,6 @@ def test_failed_base64_conversion(input):
metadata={"len": 257, "rem": [["!limit", "x", 253, 256]]},
),
],
- # fmt: off
- [u"éééé", None, u"éééé"],
- [u"éééé", 5, AnnotatedValue(value=u"é...", metadata={"len": 8, "rem": [["!limit", "x", 2, 5]]})],
- # fmt: on
["éééé", None, "éééé"],
[
"éééé",
diff --git a/tests/utils/test_transaction.py b/tests/utils/test_transaction.py
index bfb87f4c29..96145e092a 100644
--- a/tests/utils/test_transaction.py
+++ b/tests/utils/test_transaction.py
@@ -1,15 +1,7 @@
-import sys
-from functools import partial
-
-import pytest
+from functools import partial, partialmethod
from sentry_sdk.utils import transaction_from_function
-try:
- from functools import partialmethod
-except ImportError:
- pass
-
class MyClass:
def myfunc(self):
@@ -48,7 +40,6 @@ def test_transaction_from_function():
)
-@pytest.mark.skipif(sys.version_info < (3, 4), reason="Require python 3.4 or higher")
def test_transaction_from_function_partialmethod():
x = transaction_from_function
diff --git a/tox.ini b/tox.ini
index 1e7ba06a00..e193de52b1 100644
--- a/tox.ini
+++ b/tox.ini
@@ -6,10 +6,10 @@
[tox]
envlist =
# === Common ===
- {py2.7,py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-common
+ {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-common
# === Gevent ===
- {py2.7,py3.6,py3.8,py3.10,py3.11}-gevent
+ {py3.6,py3.8,py3.10,py3.11}-gevent
# === Integrations ===
# General format is {pythonversion}-{integrationname}-v{frameworkversion}
@@ -55,18 +55,17 @@ envlist =
{py3.8,py3.11}-beam-latest
# Boto3
- {py2.7,py3.6,py3.7}-boto3-v{1.12}
+ {py3.6,py3.7}-boto3-v{1.12}
{py3.7,py3.11,py3.12}-boto3-v{1.21}
{py3.7,py3.11,py3.12}-boto3-v{1.29}
{py3.7,py3.11,py3.12}-boto3-latest
# Bottle
- {py2.7,py3.5,py3.9}-bottle-v{0.12}
- {py3.5,py3.11,py3.12}-bottle-latest
+ {py3.6,py3.9}-bottle-v{0.12}
+ {py3.6,py3.11,py3.12}-bottle-latest
# Celery
- {py2.7}-celery-v{3}
- {py2.7,py3.5,py3.8}-celery-v{4}
+ {py3.6,py3.8}-celery-v{4}
{py3.6,py3.8}-celery-v{5.0}
{py3.7,py3.10}-celery-v{5.1,5.2}
{py3.8,py3.11}-celery-v{5.3}
@@ -85,11 +84,10 @@ envlist =
# Django
# - Django 1.x
- {py2.7,py3.5}-django-v{1.8}
- {py2.7,py3.5,py3.7}-django-v{1.11}
+ {py3.6,py3.7}-django-v{1.11}
# - Django 2.x
- {py3.5,py3.7}-django-v{2.0}
- {py3.5,py3.9}-django-v{2.2}
+ {py3.6,py3.7}-django-v{2.0}
+ {py3.6,py3.9}-django-v{2.2}
# - Django 3.x
{py3.6,py3.9}-django-v{3.0}
{py3.6,py3.11}-django-v{3.2}
@@ -100,8 +98,8 @@ envlist =
{py3.10,py3.11,py3.12}-django-latest
# Falcon
- {py2.7,py3.5,py3.7}-falcon-v{1,1.4,2}
- {py3.5,py3.6,py3.11,py3.12}-falcon-v{3}
+ {py3.6,py3.7}-falcon-v{1,1.4,2}
+ {py3.6,py3.11,py3.12}-falcon-v{3}
{py3.7,py3.11,py3.12}-falcon-latest
# FastAPI
@@ -109,8 +107,7 @@ envlist =
{py3.8,py3.11,py3.12}-fastapi-latest
# Flask
- {py2.7,py3.5}-flask-v{0,0.11}
- {py2.7,py3.5,py3.8}-flask-v{1}
+ {py3.6,py3.8}-flask-v{1}
{py3.8,py3.11,py3.12}-flask-v{2}
{py3.10,py3.11,py3.12}-flask-v{3}
{py3.10,py3.11,py3.12}-flask-latest
@@ -127,8 +124,9 @@ envlist =
{py3.7,py3.11,py3.12}-graphene-latest
# gRPC
- {py3.7,py3.10}-grpc-v{1.21,1.30,1.40}
- {py3.7,py3.11}-grpc-v{1.50}
+ {py3.7,py3.9}-grpc-v{1.39}
+ {py3.7,py3.10}-grpc-v{1.49}
+ {py3.7,py3.11}-grpc-v{1.59}
{py3.8,py3.11,py3.12}-grpc-latest
# HTTPX
@@ -139,12 +137,17 @@ envlist =
{py3.9,py3.11,py3.12}-httpx-latest
# Huey
- {py2.7,py3.5,py3.11,py3.12}-huey-v{2.0}
- {py3.5,py3.11,py3.12}-huey-latest
+ {py3.6,py3.11,py3.12}-huey-v{2.0}
+ {py3.6,py3.11,py3.12}-huey-latest
# Loguru
- {py3.5,py3.11,py3.12}-loguru-v{0.5}
- {py3.5,py3.11,py3.12}-loguru-latest
+ {py3.6,py3.11,py3.12}-loguru-v{0.5}
+ {py3.6,py3.11,py3.12}-loguru-latest
+
+ # OpenAI
+ {py3.9,py3.11,py3.12}-openai-v1
+ {py3.9,py3.11,py3.12}-openai-latest
+ {py3.9,py3.11,py3.12}-openai-notiktoken
# OpenAI
{py3.9,py3.11,py3.12}-openai-v1
@@ -155,18 +158,18 @@ envlist =
{py3.7,py3.9,py3.11,py3.12}-opentelemetry
# pure_eval
- {py3.5,py3.11,py3.12}-pure_eval
+ {py3.6,py3.11,py3.12}-pure_eval
# PyMongo (Mongo DB)
- {py2.7,py3.6}-pymongo-v{3.1}
- {py2.7,py3.6,py3.9}-pymongo-v{3.12}
+ {py3.6}-pymongo-v{3.1}
+ {py3.6,py3.9}-pymongo-v{3.12}
{py3.6,py3.11}-pymongo-v{4.0}
{py3.7,py3.11,py3.12}-pymongo-v{4.3,4.6}
{py3.7,py3.11,py3.12}-pymongo-latest
# Pyramid
- {py2.7,py3.5,py3.11}-pyramid-v{1.6}
- {py2.7,py3.5,py3.11,py3.12}-pyramid-v{1.10}
+ {py3.6,py3.11}-pyramid-v{1.6}
+ {py3.6,py3.11,py3.12}-pyramid-v{1.10}
{py3.6,py3.11,py3.12}-pyramid-v{2.0}
{py3.6,py3.11,py3.12}-pyramid-latest
@@ -176,27 +179,27 @@ envlist =
{py3.8,py3.11,py3.12}-quart-latest
# Redis
- {py2.7,py3.7,py3.8}-redis-v{3}
+ {py3.6,py3.8}-redis-v{3}
{py3.7,py3.8,py3.11}-redis-v{4}
{py3.7,py3.11,py3.12}-redis-v{5}
{py3.7,py3.11,py3.12}-redis-latest
# Redis Cluster
- {py2.7,py3.7,py3.8}-rediscluster-v{1,2}
+ {py3.6,py3.8}-rediscluster-v{1,2}
# no -latest, not developed anymore
# Requests
- {py2.7,py3.8,py3.11,py3.12}-requests
+ {py3.6,py3.8,py3.11,py3.12}-requests
# RQ (Redis Queue)
- {py2.7,py3.5,py3.6}-rq-v{0.6}
- {py2.7,py3.5,py3.9}-rq-v{0.13,1.0}
- {py3.5,py3.11}-rq-v{1.5,1.10}
+ {py3.6}-rq-v{0.6}
+ {py3.6,py3.9}-rq-v{0.13,1.0}
+ {py3.6,py3.11}-rq-v{1.5,1.10}
{py3.7,py3.11,py3.12}-rq-v{1.15}
{py3.7,py3.11,py3.12}-rq-latest
# Sanic
- {py3.5,py3.7}-sanic-v{0.8}
+ {py3.6,py3.7}-sanic-v{0.8}
{py3.6,py3.8}-sanic-v{20}
{py3.7,py3.11}-sanic-v{22}
{py3.7,py3.11}-sanic-v{23}
@@ -213,7 +216,7 @@ envlist =
# 1.51.14 is the last starlite version; the project continues as litestar
# SQL Alchemy
- {py2.7,py3.7,py3.9}-sqlalchemy-v{1.2,1.4}
+ {py3.6,py3.9}-sqlalchemy-v{1.2,1.4}
{py3.7,py3.11}-sqlalchemy-v{2.0}
{py3.7,py3.11,py3.12}-sqlalchemy-latest
@@ -222,12 +225,12 @@ envlist =
{py3.8,py3.11,py3.12}-strawberry-latest
# Tornado
- {py3.7,py3.9}-tornado-v{5}
+ {py3.8,py3.11,py3.12}-tornado-v{6.0}
{py3.8,py3.11,py3.12}-tornado-v{6}
{py3.8,py3.11,py3.12}-tornado-latest
# Trytond
- {py3.5,py3.6}-trytond-v{4}
+ {py3.6}-trytond-v{4}
{py3.6,py3.8}-trytond-v{5}
{py3.6,py3.11}-trytond-v{6}
{py3.8,py3.11,py3.12}-trytond-v{7}
@@ -252,7 +255,7 @@ deps =
{py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-common: pytest<7.0.0
# === Gevent ===
- {py2.7,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-gevent: gevent>=22.10.0, <22.11.0
+ {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}-gevent: gevent>=22.10.0, <22.11.0
# See https://github.com/pytest-dev/pytest/issues/9621
# and https://github.com/pytest-dev/pytest-forked/issues/67
# for justification of the upper bound on pytest
@@ -312,7 +315,6 @@ deps =
# Celery
celery: redis
- celery-v3: Celery~=3.0
celery-v4: Celery~=4.0
celery-v5.0: Celery~=5.0.0
celery-v5.1: Celery~=5.1.0
@@ -320,9 +322,8 @@ deps =
celery-v5.3: Celery~=5.3.0
celery-latest: Celery
- {py3.5}-celery: newrelic<6.0.0
{py3.7}-celery: importlib-metadata<5.0
- {py2.7,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-celery: newrelic
+ {py3.6,py3.7,py3.8,py3.9,py3.10,py3.11}-celery: newrelic
# Chalice
chalice-v1.16: chalice~=1.16.0
@@ -340,9 +341,9 @@ deps =
django: psycopg2-binary
django-v{1.11,2.0,2.1,2.2,3.0,3.1,3.2}: djangorestframework>=3.0.0,<4.0.0
django-v{2.0,2.2,3.0,3.2,4.0,4.1,4.2,5.0}: channels[daphne]
- django-v{1.8,1.11,2.0,2.2,3.0,3.2}: Werkzeug<2.1.0
- django-v{1.8,1.11,2.0}: pytest-django<4.0
- django-v{2.2,3.0,3.2,4.0,4.1,4.2,5.0}: pytest-django
+ django-v{1.11,2.0,2.2,3.0,3.2}: Werkzeug<2.1.0
+ django-v{1.11,2.0,2.2,3.0}: pytest-django<4.0
+ django-v{3.2,4.0,4.1,4.2,5.0}: pytest-django
django-v{4.0,4.1,4.2,5.0}: djangorestframework
django-v{4.0,4.1,4.2,5.0}: pytest-asyncio<=0.21.1
django-v{4.0,4.1,4.2,5.0}: Werkzeug
@@ -352,7 +353,6 @@ deps =
django-latest: Werkzeug
django-latest: channels[daphne]
- django-v1.8: Django~=1.8.0
django-v1.11: Django~=1.11.0
django-v2.0: Django~=2.0.0
django-v2.2: Django~=2.2.0
@@ -383,11 +383,9 @@ deps =
# Flask
flask: flask-login
- flask-v{0.11,0,1,2.0}: Werkzeug<2.1.0
- flask-v{0.11,0,1,2.0}: markupsafe<2.1.0
+ flask-v{1,2.0}: Werkzeug<2.1.0
+ flask-v{1,2.0}: markupsafe<2.1.0
flask-v{3}: Werkzeug
- flask-v0.11: Flask~=0.11.0
- flask-v0: Flask~=0.11
flask-v1: Flask~=1.0
flask-v2: Flask~=2.0
flask-v3: Flask~=3.0
@@ -410,11 +408,10 @@ deps =
grpc: mypy-protobuf
grpc: types-protobuf
grpc: pytest-asyncio<=0.21.1
- grpc-v1.21: grpcio-tools~=1.21.0
- grpc-v1.30: grpcio-tools~=1.30.0
- grpc-v1.40: grpcio-tools~=1.40.0
- grpc-v1.50: grpcio-tools~=1.50.0
- grpc-latest: grpcio-tools
+ grpc-v1.39: grpcio~=1.39.0
+ grpc-v1.49: grpcio~=1.49.1
+ grpc-v1.59: grpcio~=1.59.0
+ grpc-latest: grpcio
# HTTPX
httpx-v0.16: pytest-httpx==0.10.0
@@ -487,6 +484,7 @@ deps =
# Redis
redis: fakeredis!=1.7.4
+ redis: pytest<8.0.0
{py3.7,py3.8,py3.9,py3.10,py3.11}-redis: pytest-asyncio<=0.21.1
redis-v3: redis~=3.0
redis-v4: redis~=4.0
@@ -520,8 +518,7 @@ deps =
sanic: aiohttp
sanic-v{22,23}: sanic_testing
sanic-latest: sanic_testing
- {py3.5,py3.6}-sanic: aiocontextvars==0.2.1
- {py3.5}-sanic: ujson<4
+ {py3.6}-sanic: aiocontextvars==0.2.1
sanic-v0.8: sanic~=0.8.0
sanic-v20: sanic~=20.0
sanic-v22: sanic~=22.0
@@ -566,7 +563,7 @@ deps =
strawberry-latest: strawberry-graphql[fastapi,flask]
# Tornado
- tornado-v5: tornado~=5.0
+ tornado-v6.0: tornado~=6.0.0
tornado-v6: tornado~=6.0
tornado-latest: tornado
@@ -633,10 +630,10 @@ setenv =
passenv =
SENTRY_PYTHON_TEST_AWS_ACCESS_KEY_ID
SENTRY_PYTHON_TEST_AWS_SECRET_ACCESS_KEY
+ SENTRY_PYTHON_TEST_POSTGRES_HOST
SENTRY_PYTHON_TEST_POSTGRES_USER
SENTRY_PYTHON_TEST_POSTGRES_PASSWORD
SENTRY_PYTHON_TEST_POSTGRES_NAME
- SENTRY_PYTHON_TEST_POSTGRES_HOST
usedevelop = True
extras =
bottle: bottle
@@ -645,8 +642,6 @@ extras =
pymongo: pymongo
basepython =
- py2.7: python2.7
- py3.5: python3.5
py3.6: python3.6
py3.7: python3.7
py3.8: python3.8
@@ -665,22 +660,13 @@ basepython =
commands =
{py3.7,py3.8}-boto3: pip install urllib3<2.0.0
- ; https://github.com/pytest-dev/pytest/issues/5532
- {py3.5,py3.6,py3.7,py3.8,py3.9}-flask-v{0.11,0.12}: pip install pytest<5
- {py3.6,py3.7,py3.8,py3.9}-flask-v{0.11}: pip install Werkzeug<2
; https://github.com/pallets/flask/issues/4455
- {py3.7,py3.8,py3.9,py3.10,py3.11}-flask-v{0.11,0.12,1.0,1.1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1"
- ; https://github.com/more-itertools/more-itertools/issues/578
- py3.5-flask-v{0.11,0.12}: pip install more-itertools<8.11.0
-
- ; use old pytest for old Python versions:
- {py2.7,py3.5}: pip install pytest-forked==1.1.3
+ {py3.7,py3.8,py3.9,py3.10,py3.11}-flask-v{1}: pip install "itsdangerous>=0.24,<2.0" "markupsafe<2.0.0" "jinja2<3.1.1"
; Running `py.test` as an executable suffers from an import error
; when loading tests in scenarios. In particular, django fails to
; load the settings from the test module.
- {py2.7}: python -m pytest --ignore-glob='*py3.py' -rsx -s --durations=5 -vvv {env:TESTPATH} {posargs}
- {py3.5,py3.6,py3.7,py3.8,py3.9,py3.10,py3.11,py3.12}: python -m pytest -rsx -s --durations=5 -vvv {env:TESTPATH} {posargs}
+ python -m pytest -rfEs -s --durations=5 -vvv {env:TESTPATH} {posargs}
[testenv:linters]
commands =